//===- SchedGraph.cpp - Scheduling Graph Implementation -------------------===//
//
// Scheduling graph based on SSA graph plus extra dependence edges capturing
// dependences due to machine resources (machine registers, CC registers, and
// any others).
//
//===----------------------------------------------------------------------===//

#include "SchedGraph.h"
#include "llvm/Function.h"
#include "llvm/iOther.h"
#include "llvm/CodeGen/MachineCodeForInstruction.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegInfo.h"
#include "Support/STLExtras.h"

//*********************** Internal Data Structures *************************/

// The following types need to be classes, not typedefs, so we can use
// opaque declarations in SchedGraph.h
//
struct RefVec: public std::vector<std::pair<SchedGraphNode*, int> > {
  typedef std::vector<std::pair<SchedGraphNode*,int> >::iterator iterator;
  typedef
  std::vector<std::pair<SchedGraphNode*,int> >::const_iterator const_iterator;
};

struct RegToRefVecMap: public hash_map<int, RefVec> {
  typedef hash_map<int, RefVec>::iterator iterator;
  typedef hash_map<int, RefVec>::const_iterator const_iterator;
};

struct ValueToDefVecMap: public hash_map<const Value*, RefVec> {
  typedef hash_map<const Value*, RefVec>::iterator iterator;
  typedef hash_map<const Value*, RefVec>::const_iterator const_iterator;
};


//
// class SchedGraphNode
//

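// Construct a graph node for the machine instruction at position indexInBB
// in the given basic block (or a dummy node if mbb is null), and set its
// latency from the target's instruction timing information.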
SchedGraphNode::SchedGraphNode(unsigned NID, MachineBasicBlock *mbb,
                               int indexInBB, const TargetMachine& Target)
  : SchedGraphNodeCommon(NID, indexInBB), MBB(mbb),
    MI(mbb ? (*mbb)[indexInBB] : 0) {
  if (MI) {
    MachineOpCode mopCode = MI->getOpCode();
    latency = Target.getInstrInfo().hasResultInterlock(mopCode)
      ? Target.getInstrInfo().minLatency(mopCode)
      : Target.getInstrInfo().maxLatency(mopCode);
  }
}

//
// Method: SchedGraphNode Destructor
//
// Description:
//   Free memory allocated by the SchedGraphNode object.
//
// Notes:
//   Do not delete the edges here.  The base class will take care of that.
//   Only handle subclass-specific stuff here (where currently there is
//   none).
//
SchedGraphNode::~SchedGraphNode() {
}

//
// class SchedGraph
//

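// Construct the scheduling graph for a single machine basic block by
// building all of its nodes and dependence edges.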
SchedGraph::SchedGraph(MachineBasicBlock &mbb, const TargetMachine& target)
  : MBB(mbb) {
  buildGraph(target);
}

//
// Method: SchedGraph Destructor
//
// Description:
//   This method deletes memory allocated by the SchedGraph object.
//
// Notes:
//   Do not delete the graphRoot or graphLeaf here.  The base class handles
//   that bit of work.
//
SchedGraph::~SchedGraph() {
  for (const_iterator I = begin(); I != end(); ++I)
    delete I->second;
}

void SchedGraph::dump() const {
  std::cerr << " Sched Graph for Basic Block: ";
  std::cerr << MBB.getBasicBlock()->getName()
            << " (" << MBB.getBasicBlock() << ")";

  std::cerr << "\n\n Actual Root nodes : ";
  for (unsigned i = 0, N = graphRoot->outEdges.size(); i < N; i++)
    std::cerr << graphRoot->outEdges[i]->getSink()->getNodeId()
              << ((i == N-1) ? "" : ", ");

  std::cerr << "\n Graph Nodes:\n";
  for (const_iterator I = begin(); I != end(); ++I)
    std::cerr << "\n" << *I->second;

  std::cerr << "\n";
}

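// Add edges from the dummy graphRoot to every node with no incoming edges,
// and from every node with no outgoing edges to the dummy graphLeaf, so
// that the graph has a single entry and a single exit.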
void SchedGraph::addDummyEdges() {
  assert(graphRoot->outEdges.size() == 0);

  for (const_iterator I = begin(); I != end(); ++I) {
    SchedGraphNode* node = (*I).second;
    assert(node != graphRoot && node != graphLeaf);
    if (node->beginInEdges() == node->endInEdges())
      (void) new SchedGraphEdge(graphRoot, node, SchedGraphEdge::CtrlDep,
                                SchedGraphEdge::NonDataDep, 0);
    if (node->beginOutEdges() == node->endOutEdges())
      (void) new SchedGraphEdge(node, graphLeaf, SchedGraphEdge::CtrlDep,
                                SchedGraphEdge::NonDataDep, 0);
  }
}

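// Add control-dependence edges for the branch/return instructions generated
// for the terminator of this basic block, so that neither the instructions
// preceding the first branch nor those in the delay slots can be reordered
// with respect to the branches.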
void SchedGraph::addCDEdges(const TerminatorInst* term,
                            const TargetMachine& target) {
  const TargetInstrInfo& mii = target.getInstrInfo();
  MachineCodeForInstruction &termMvec = MachineCodeForInstruction::get(term);

  // Find the first branch instr in the sequence of machine instrs for term
  //
  unsigned first = 0;
  while (! mii.isBranch(termMvec[first]->getOpCode()) &&
         ! mii.isReturn(termMvec[first]->getOpCode()))
    ++first;
  assert(first < termMvec.size() &&
         "No branch instructions for terminator?  Ok, but weird!");
  if (first == termMvec.size())
    return;

  SchedGraphNode* firstBrNode = getGraphNodeForInstr(termMvec[first]);

  // Add CD edges to each instruction in the sequence from the
  // *last preceding* branch instr. in the sequence.
  // Use a latency of 0 because we only need to prevent out-of-order issue.
  //
  for (unsigned i = termMvec.size(); i > first+1; --i) {
    SchedGraphNode* toNode = getGraphNodeForInstr(termMvec[i-1]);
    assert(toNode && "No node for instr generated for branch/ret?");

    for (unsigned j = i-1; j != 0; --j)
      if (mii.isBranch(termMvec[j-1]->getOpCode()) ||
          mii.isReturn(termMvec[j-1]->getOpCode())) {
        SchedGraphNode* brNode = getGraphNodeForInstr(termMvec[j-1]);
        assert(brNode && "No node for instr generated for branch/ret?");
        (void) new SchedGraphEdge(brNode, toNode, SchedGraphEdge::CtrlDep,
                                  SchedGraphEdge::NonDataDep, 0);
        break;                      // only one incoming edge is enough
      }
  }

  // Add CD edges from each instruction preceding the first branch
  // to the first branch.  Use a latency of 0 as above.
  //
  for (unsigned i = first; i != 0; --i) {
    SchedGraphNode* fromNode = getGraphNodeForInstr(termMvec[i-1]);
    assert(fromNode && "No node for instr generated for branch?");
    (void) new SchedGraphEdge(fromNode, firstBrNode, SchedGraphEdge::CtrlDep,
                              SchedGraphEdge::NonDataDep, 0);
  }

  // Now add CD edges to the first branch instruction in the sequence from
  // all preceding instructions in the basic block.  Use 0 latency again.
  //
  for (unsigned i = 0, N = MBB.size(); i < N; i++) {
    if (MBB[i] == termMvec[first])     // reached the first branch
      break;

    SchedGraphNode* fromNode = this->getGraphNodeForInstr(MBB[i]);
    if (fromNode == NULL)
      continue;                        // dummy instruction, e.g., PHI

    (void) new SchedGraphEdge(fromNode, firstBrNode,
                              SchedGraphEdge::CtrlDep,
                              SchedGraphEdge::NonDataDep, 0);

    // If we find any other machine instructions (other than due to
    // the terminator) that also have delay slots, add an outgoing edge
    // from the instruction to the instructions in the delay slots.
    //
    unsigned d = mii.getNumDelaySlots(MBB[i]->getOpCode());
    assert(i+d < N && "Insufficient delay slots for instruction?");

    for (unsigned j = 1; j <= d; j++) {
      SchedGraphNode* toNode = this->getGraphNodeForInstr(MBB[i+j]);
      assert(toNode && "No node for machine instr in delay slot?");
      (void) new SchedGraphEdge(fromNode, toNode,
                                SchedGraphEdge::CtrlDep,
                                SchedGraphEdge::NonDataDep, 0);
    }
  }
}

static const int SG_LOAD_REF  = 0;
static const int SG_STORE_REF = 1;
static const int SG_CALL_REF  = 2;

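// SG_DepOrderArray[fromType][toType] gives the dependence kind(s) to use for
// an edge from an earlier memory reference of type fromType to a later one
// of type toType, where each type is SG_LOAD_REF, SG_STORE_REF, or
// SG_CALL_REF.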
| 211 | static const unsigned int SG_DepOrderArray[][3] = { |
| 212 | { SchedGraphEdge::NonDataDep, |
Tanya Lattner | c50ee55 | 2003-08-27 02:42:58 +0000 | [diff] [blame] | 213 | SchedGraphEdge::AntiDep, |
| 214 | SchedGraphEdge::AntiDep }, |
Vikram S. Adve | a93bbac | 2001-10-28 21:43:33 +0000 | [diff] [blame] | 215 | { SchedGraphEdge::TrueDep, |
Tanya Lattner | c50ee55 | 2003-08-27 02:42:58 +0000 | [diff] [blame] | 216 | SchedGraphEdge::OutputDep, |
| 217 | SchedGraphEdge::TrueDep | SchedGraphEdge::OutputDep }, |
Vikram S. Adve | a93bbac | 2001-10-28 21:43:33 +0000 | [diff] [blame] | 218 | { SchedGraphEdge::TrueDep, |
Tanya Lattner | c50ee55 | 2003-08-27 02:42:58 +0000 | [diff] [blame] | 219 | SchedGraphEdge::AntiDep | SchedGraphEdge::OutputDep, |
| 220 | SchedGraphEdge::TrueDep | SchedGraphEdge::AntiDep |
| 221 | | SchedGraphEdge::OutputDep } |
Vikram S. Adve | a93bbac | 2001-10-28 21:43:33 +0000 | [diff] [blame] | 222 | }; |
| 223 | |
Vikram S. Adve | 78ef139 | 2001-08-28 23:06:02 +0000 | [diff] [blame] | 224 | |
Vikram S. Adve | e64574c | 2001-11-08 05:20:23 +0000 | [diff] [blame] | 225 | // Add a dependence edge between every pair of machine load/store/call |
| 226 | // instructions, where at least one is a store or a call. |
| 227 | // Use latency 1 just to ensure that memory operations are ordered; |
| 228 | // latency does not otherwise matter (true dependences enforce that). |
| 229 | // |
Tanya Lattner | c50ee55 | 2003-08-27 02:42:58 +0000 | [diff] [blame] | 230 | void SchedGraph::addMemEdges(const std::vector<SchedGraphNode*>& memNodeVec, |
| 231 | const TargetMachine& target) { |
Chris Lattner | 3501fea | 2003-01-14 22:00:31 +0000 | [diff] [blame] | 232 | const TargetInstrInfo& mii = target.getInstrInfo(); |
Vikram S. Adve | 78ef139 | 2001-08-28 23:06:02 +0000 | [diff] [blame] | 233 | |
Vikram S. Adve | e64574c | 2001-11-08 05:20:23 +0000 | [diff] [blame] | 234 | // Instructions in memNodeVec are in execution order within the basic block, |
| 235 | // so simply look at all pairs <memNodeVec[i], memNodeVec[j: j > i]>. |
| 236 | // |
Tanya Lattner | c50ee55 | 2003-08-27 02:42:58 +0000 | [diff] [blame] | 237 | for (unsigned im=0, NM=memNodeVec.size(); im < NM; im++) { |
Misha Brukman | 6b77ec4 | 2003-05-22 21:49:18 +0000 | [diff] [blame] | 238 | MachineOpCode fromOpCode = memNodeVec[im]->getOpCode(); |
Vikram S. Adve | 7952d60 | 2003-05-31 07:37:05 +0000 | [diff] [blame] | 239 | int fromType = (mii.isCall(fromOpCode)? SG_CALL_REF |
| 240 | : (mii.isLoad(fromOpCode)? SG_LOAD_REF |
| 241 | : SG_STORE_REF)); |
Tanya Lattner | c50ee55 | 2003-08-27 02:42:58 +0000 | [diff] [blame] | 242 | for (unsigned jm=im+1; jm < NM; jm++) { |
Misha Brukman | 6b77ec4 | 2003-05-22 21:49:18 +0000 | [diff] [blame] | 243 | MachineOpCode toOpCode = memNodeVec[jm]->getOpCode(); |
Vikram S. Adve | 7952d60 | 2003-05-31 07:37:05 +0000 | [diff] [blame] | 244 | int toType = (mii.isCall(toOpCode)? SG_CALL_REF |
| 245 | : (mii.isLoad(toOpCode)? SG_LOAD_REF |
| 246 | : SG_STORE_REF)); |
Tanya Lattner | c50ee55 | 2003-08-27 02:42:58 +0000 | [diff] [blame] | 247 | |
Misha Brukman | 6b77ec4 | 2003-05-22 21:49:18 +0000 | [diff] [blame] | 248 | if (fromType != SG_LOAD_REF || toType != SG_LOAD_REF) |
| 249 | (void) new SchedGraphEdge(memNodeVec[im], memNodeVec[jm], |
| 250 | SchedGraphEdge::MemoryDep, |
| 251 | SG_DepOrderArray[fromType][toType], 1); |
Vikram S. Adve | 78ef139 | 2001-08-28 23:06:02 +0000 | [diff] [blame] | 252 | } |
Misha Brukman | 6b77ec4 | 2003-05-22 21:49:18 +0000 | [diff] [blame] | 253 | } |
Vikram S. Adve | e64574c | 2001-11-08 05:20:23 +0000 | [diff] [blame] | 254 | } |
Vikram S. Adve | 78ef139 | 2001-08-28 23:06:02 +0000 | [diff] [blame] | 255 | |
// Add edges from/to CC reg instrs to/from call instrs.
// Essentially this prevents anything that sets or uses a CC reg from being
// reordered w.r.t. a call.
// Use a latency of 0 because we only need to prevent out-of-order issue,
// like with control dependences.
//
void SchedGraph::addCallDepEdges(const std::vector<SchedGraphNode*>& callDepNodeVec,
                                 const TargetMachine& target) {
  const TargetInstrInfo& mii = target.getInstrInfo();

  // Instructions in callDepNodeVec are in execution order within the basic
  // block, so simply look at all pairs
  // <callDepNodeVec[i], callDepNodeVec[j: j > i]>.
  //
  for (unsigned ic = 0, NC = callDepNodeVec.size(); ic < NC; ic++)
    if (mii.isCall(callDepNodeVec[ic]->getOpCode())) {
      // Add edges from all preceding nodes in callDepNodeVec to this call.
      for (unsigned jc = 0; jc < ic; jc++)
        (void) new SchedGraphEdge(callDepNodeVec[jc], callDepNodeVec[ic],
                                  SchedGraphEdge::MachineRegister,
                                  MachineIntRegsRID, 0);

      // And do the same from this instruction to all successors.
      for (unsigned jc = ic+1; jc < NC; jc++)
        (void) new SchedGraphEdge(callDepNodeVec[ic], callDepNodeVec[jc],
                                  SchedGraphEdge::MachineRegister,
                                  MachineIntRegsRID, 0);
    }

#ifdef CALL_DEP_NODE_VEC_CANNOT_WORK
  // Find the call instruction nodes and put them in a vector.
  std::vector<SchedGraphNode*> callNodeVec;
  for (unsigned im = 0, NM = memNodeVec.size(); im < NM; im++)
    if (mii.isCall(memNodeVec[im]->getOpCode()))
      callNodeVec.push_back(memNodeVec[im]);

  // Now walk the entire basic block, looking for CC instructions *and*
  // call instructions, and keep track of the order of the instructions.
  // Use the call node vec to quickly find earlier and later call nodes
  // relative to the current CC instruction.
  //
  int lastCallNodeIdx = -1;
  for (unsigned i = 0, N = bbMvec.size(); i < N; i++)
    if (mii.isCall(bbMvec[i]->getOpCode())) {
      ++lastCallNodeIdx;
      for ( ; lastCallNodeIdx < (int)callNodeVec.size(); ++lastCallNodeIdx)
        if (callNodeVec[lastCallNodeIdx]->getMachineInstr() == bbMvec[i])
          break;
      assert(lastCallNodeIdx < (int)callNodeVec.size() && "Missed Call?");
    }
    else if (mii.isCCInstr(bbMvec[i]->getOpCode())) {
      // Add incoming/outgoing edges from/to preceding/later calls
      SchedGraphNode* ccNode = this->getGraphNodeForInstr(bbMvec[i]);
      int j = 0;
      for ( ; j <= lastCallNodeIdx; j++)
        (void) new SchedGraphEdge(callNodeVec[j], ccNode,
                                  MachineCCRegsRID, 0);
      for ( ; j < (int) callNodeVec.size(); j++)
        (void) new SchedGraphEdge(ccNode, callNodeVec[j],
                                  MachineCCRegsRID, 0);
    }
#endif
}

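// Add dependence edges (true, anti, and output) between references to the
// same machine register within the basic block, using the reference lists
// collected in regToRefVecMap.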
void SchedGraph::addMachineRegEdges(RegToRefVecMap& regToRefVecMap,
                                    const TargetMachine& target) {
  // This code assumes that two registers with different numbers are
  // not aliased!
  //
  for (RegToRefVecMap::iterator I = regToRefVecMap.begin();
       I != regToRefVecMap.end(); ++I) {
    int regNum = (*I).first;
    RefVec& regRefVec = (*I).second;

    // regRefVec is ordered by control flow order in the basic block
    for (unsigned i = 0; i < regRefVec.size(); ++i) {
      SchedGraphNode* node = regRefVec[i].first;
      unsigned int opNum = regRefVec[i].second;
      const MachineOperand& mop =
        node->getMachineInstr()->getExplOrImplOperand(opNum);
      bool isDef = mop.opIsDefOnly();
      bool isDefAndUse = mop.opIsDefAndUse();

      for (unsigned p = 0; p < i; ++p) {
        SchedGraphNode* prevNode = regRefVec[p].first;
        if (prevNode != node) {
          unsigned int prevOpNum = regRefVec[p].second;
          const MachineOperand& prevMop =
            prevNode->getMachineInstr()->getExplOrImplOperand(prevOpNum);
          bool prevIsDef = prevMop.opIsDefOnly();
          bool prevIsDefAndUse = prevMop.opIsDefAndUse();
          if (isDef) {
            if (prevIsDef)
              new SchedGraphEdge(prevNode, node, regNum,
                                 SchedGraphEdge::OutputDep);
            if (!prevIsDef || prevIsDefAndUse)
              new SchedGraphEdge(prevNode, node, regNum,
                                 SchedGraphEdge::AntiDep);
          }

          if (prevIsDef)
            if (!isDef || isDefAndUse)
              new SchedGraphEdge(prevNode, node, regNum,
                                 SchedGraphEdge::TrueDep);
        }
      }
    }
  }
}

// Adds dependences to/from refNode from/to all other defs
// in the basic block.  refNode may be a use, a def, or both.
// We do not consider other uses because we are not building use-use deps.
//
void SchedGraph::addEdgesForValue(SchedGraphNode* refNode,
                                  const RefVec& defVec,
                                  const Value* defValue,
                                  bool refNodeIsDef,
                                  bool refNodeIsDefAndUse,
                                  const TargetMachine& target) {
  bool refNodeIsUse = !refNodeIsDef || refNodeIsDefAndUse;

  // Add true or output dep edges from all def nodes before refNode in BB.
  // Add anti or output dep edges to all def nodes after refNode.
  for (RefVec::const_iterator I = defVec.begin(), E = defVec.end(); I != E; ++I) {
    if ((*I).first == refNode)
      continue;                            // Don't add any self-loops

    if ((*I).first->getOrigIndexInBB() < refNode->getOrigIndexInBB()) {
      // (*I).first is before refNode
      if (refNodeIsDef)
        (void) new SchedGraphEdge((*I).first, refNode, defValue,
                                  SchedGraphEdge::OutputDep);
      if (refNodeIsUse)
        (void) new SchedGraphEdge((*I).first, refNode, defValue,
                                  SchedGraphEdge::TrueDep);
    } else {
      // (*I).first is after refNode
      if (refNodeIsDef)
        (void) new SchedGraphEdge(refNode, (*I).first, defValue,
                                  SchedGraphEdge::OutputDep);
      if (refNodeIsUse)
        (void) new SchedGraphEdge(refNode, (*I).first, defValue,
                                  SchedGraphEdge::AntiDep);
    }
  }
}

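// Add incoming def-use (SSA) edges for each operand of the machine
// instruction, including values used implicitly through its implicit
// references.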
void SchedGraph::addEdgesForInstruction(const MachineInstr& MI,
                                        const ValueToDefVecMap& valueToDefVecMap,
                                        const TargetMachine& target) {
  SchedGraphNode* node = getGraphNodeForInstr(&MI);
  if (node == NULL)
    return;

  // Add edges for all operands of the machine instruction.
  //
  for (unsigned i = 0, numOps = MI.getNumOperands(); i != numOps; ++i) {
    switch (MI.getOperand(i).getType()) {
    case MachineOperand::MO_VirtualRegister:
    case MachineOperand::MO_CCRegister:
      if (const Value* srcI = MI.getOperand(i).getVRegValue()) {
        ValueToDefVecMap::const_iterator I = valueToDefVecMap.find(srcI);
        if (I != valueToDefVecMap.end())
          addEdgesForValue(node, I->second, srcI,
                           MI.getOperand(i).opIsDefOnly(),
                           MI.getOperand(i).opIsDefAndUse(), target);
      }
      break;

    case MachineOperand::MO_MachineRegister:
      break;

    case MachineOperand::MO_SignExtendedImmed:
    case MachineOperand::MO_UnextendedImmed:
    case MachineOperand::MO_PCRelativeDisp:
      break;                        // nothing to do for immediate fields

    default:
      assert(0 && "Unknown machine operand type in SchedGraph builder");
      break;
    }
  }

  // Add edges for values implicitly used by the machine instruction.
  // Examples include function arguments to a Call instruction or the return
  // value of a Ret instruction.
  //
  for (unsigned i = 0, N = MI.getNumImplicitRefs(); i < N; ++i)
    if (MI.getImplicitOp(i).opIsUse() || MI.getImplicitOp(i).opIsDefAndUse())
      if (const Value* srcI = MI.getImplicitRef(i)) {
        ValueToDefVecMap::const_iterator I = valueToDefVecMap.find(srcI);
        if (I != valueToDefVecMap.end())
          addEdgesForValue(node, I->second, srcI,
                           MI.getImplicitOp(i).opIsDefOnly(),
                           MI.getImplicitOp(i).opIsDefAndUse(), target);
      }
}

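// Collect the def/use information needed to build edges for this node:
// remember memory and call-dependent instructions, record references to
// allocated machine registers, and record the values defined by the
// instruction's explicit and implicit operands.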
void SchedGraph::findDefUseInfoAtInstr(const TargetMachine& target,
                                       SchedGraphNode* node,
                                       std::vector<SchedGraphNode*>& memNodeVec,
                                       std::vector<SchedGraphNode*>& callDepNodeVec,
                                       RegToRefVecMap& regToRefVecMap,
                                       ValueToDefVecMap& valueToDefVecMap) {
  const TargetInstrInfo& mii = target.getInstrInfo();

  MachineOpCode opCode = node->getOpCode();

  if (mii.isCall(opCode) || mii.isCCInstr(opCode))
    callDepNodeVec.push_back(node);

  if (mii.isLoad(opCode) || mii.isStore(opCode) || mii.isCall(opCode))
    memNodeVec.push_back(node);

  // Collect the register references and value defs. for explicit operands
  //
  const MachineInstr& MI = *node->getMachineInstr();
  for (int i = 0, numOps = (int) MI.getNumOperands(); i < numOps; i++) {
    const MachineOperand& mop = MI.getOperand(i);

    // If this references a register other than the hardwired
    // "zero" register, record the reference.
    if (mop.hasAllocatedReg()) {
      int regNum = mop.getAllocatedRegNum();

      // If this is not the dummy zero register, record the reference.
      if (regNum != target.getRegInfo().getZeroRegNum())
        regToRefVecMap[mop.getAllocatedRegNum()]
          .push_back(std::make_pair(node, i));

      // If this is a volatile register, add the instruction to callDepNodeVec
      // (only if the node is not already on the callDepNodeVec!)
      if (callDepNodeVec.size() == 0 || callDepNodeVec.back() != node) {
        unsigned rcid;
        int regInClass = target.getRegInfo().getClassRegNum(regNum, rcid);
        if (target.getRegInfo().getMachineRegClass(rcid)
            ->isRegVolatile(regInClass))
          callDepNodeVec.push_back(node);
      }

      continue;                     // nothing more to do
    }

    // Ignore all other non-def operands.
    if (!mop.opIsDefOnly() && !mop.opIsDefAndUse())
      continue;

    // We must be defining a value.
    assert((mop.getType() == MachineOperand::MO_VirtualRegister ||
            mop.getType() == MachineOperand::MO_CCRegister)
           && "Do not expect any other kind of operand to be defined!");
    assert(mop.getVRegValue() != NULL && "Null value being defined?");

    valueToDefVecMap[mop.getVRegValue()].push_back(std::make_pair(node, i));
  }

  //
  // Collect value defs. for implicit operands.  They may have allocated
  // physical registers also.
  //
  for (unsigned i = 0, N = MI.getNumImplicitRefs(); i != N; ++i) {
    const MachineOperand& mop = MI.getImplicitOp(i);
    if (mop.hasAllocatedReg()) {
      int regNum = mop.getAllocatedRegNum();
      if (regNum != target.getRegInfo().getZeroRegNum())
        regToRefVecMap[mop.getAllocatedRegNum()]
          .push_back(std::make_pair(node, i + MI.getNumOperands()));
      continue;                     // nothing more to do
    }

    if (mop.opIsDefOnly() || mop.opIsDefAndUse()) {
      assert(MI.getImplicitRef(i) != NULL && "Null value being defined?");
      valueToDefVecMap[MI.getImplicitRef(i)].push_back(std::make_pair(node,
                                                                      -i));
    }
  }
}

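// Create a SchedGraphNode for every machine instruction in the basic block
// (skipping dummy Phi instructions) and gather the def/use information
// needed later for adding edges.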
void SchedGraph::buildNodesForBB(const TargetMachine& target,
                                 MachineBasicBlock& MBB,
                                 std::vector<SchedGraphNode*>& memNodeVec,
                                 std::vector<SchedGraphNode*>& callDepNodeVec,
                                 RegToRefVecMap& regToRefVecMap,
                                 ValueToDefVecMap& valueToDefVecMap) {
  const TargetInstrInfo& mii = target.getInstrInfo();

  // Build graph nodes for each VM instruction and gather def/use info.
  // Do both those together in a single pass over all machine instructions.
  for (unsigned i = 0; i < MBB.size(); i++)
    if (!mii.isDummyPhiInstr(MBB[i]->getOpCode())) {
      SchedGraphNode* node = new SchedGraphNode(getNumNodes(), &MBB, i, target);
      noteGraphNodeForInstr(MBB[i], node);

      // Remember all register references and value defs
      findDefUseInfoAtInstr(target, node, memNodeVec, callDepNodeVec,
                            regToRefVecMap, valueToDefVecMap);
    }
}

void SchedGraph::buildGraph(const TargetMachine& target) {
  // Use this data structure to note all machine operands that compute
  // ordinary LLVM values.  These must be computed defs (i.e., instructions).
  // Note that there may be multiple machine instructions that define
  // each Value.
  ValueToDefVecMap valueToDefVecMap;

  // Use this data structure to note all memory instructions.
  // We use this to add memory dependence edges without a second full walk.
  std::vector<SchedGraphNode*> memNodeVec;

  // Use this data structure to note all instructions that access physical
  // registers that can be modified by a call (including call instructions)
  std::vector<SchedGraphNode*> callDepNodeVec;

  // Use this data structure to note any uses or definitions of
  // machine registers so we can add edges for those later without
  // extra passes over the nodes.
  // The vector holds an ordered list of references to the machine reg,
  // ordered according to control-flow order.  This only works for a
  // single basic block, hence the assertion.  Each reference is identified
  // by the pair: <node, operand-number>.
  //
  RegToRefVecMap regToRefVecMap;

  // Make a dummy root node.  We'll add edges to the real roots later.
  graphRoot = new SchedGraphNode(0, NULL, -1, target);
  graphLeaf = new SchedGraphNode(1, NULL, -1, target);

  //----------------------------------------------------------------
  // First add nodes for all the machine instructions in the basic block
  // because this greatly simplifies identifying which edges to add.
  // Do this one VM instruction at a time since the SchedGraphNode needs that.
  // Also, remember the load/store instructions to add memory deps later.
  //----------------------------------------------------------------

  buildNodesForBB(target, MBB, memNodeVec, callDepNodeVec,
                  regToRefVecMap, valueToDefVecMap);

  //----------------------------------------------------------------
  // Now add edges for the following (all are incoming edges except (5)):
  // (1) operands of the machine instruction, including hidden operands
  // (2) machine register dependences
  // (3) memory load/store dependences
  // (4) other resource dependences for the machine instruction, if any
  // (5) output dependences when multiple machine instructions define the
  //     same value; all must have been generated from a single VM instrn
  // (6) control dependences to branch instructions generated for the
  //     terminator instruction of the BB.  Because of delay slots and
  //     2-way conditional branches, multiple CD edges are needed
  //     (see addCDEdges for details).
  // Also, note any uses or defs of machine registers.
  //
  //----------------------------------------------------------------

  // First, add edges to the terminator instruction of the basic block.
  this->addCDEdges(MBB.getBasicBlock()->getTerminator(), target);

  // Then add memory dep edges: store->load, load->store, and store->store.
  // Call instructions are treated as both load and store.
  this->addMemEdges(memNodeVec, target);

  // Then add edges between call instructions and CC set/use instructions
  this->addCallDepEdges(callDepNodeVec, target);

  // Then add incoming def-use (SSA) edges for each machine instruction.
  for (unsigned i = 0, N = MBB.size(); i < N; i++)
    addEdgesForInstruction(*MBB[i], valueToDefVecMap, target);

#ifdef NEED_SEPARATE_NONSSA_EDGES_CODE
  // Then add non-SSA edges for all VM instructions in the block.
  // We assume that all machine instructions that define a value are
  // generated from the VM instruction corresponding to that value.
  // TODO: This could probably be done much more efficiently.
  for (BasicBlock::const_iterator II = bb->begin(); II != bb->end(); ++II)
    this->addNonSSAEdgesForValue(*II, target);
#endif //NEED_SEPARATE_NONSSA_EDGES_CODE

  // Then add edges for dependences on machine registers
  this->addMachineRegEdges(regToRefVecMap, target);

  // Finally, add edges from the dummy root and to dummy leaf
  this->addDummyEdges();
}

//
// class SchedGraphSet
//
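// Build a scheduling graph for every machine basic block in the function.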
SchedGraphSet::SchedGraphSet(const Function* _function,
                             const TargetMachine& target)
  : function(_function) {
  buildGraphsForMethod(function, target);
}

SchedGraphSet::~SchedGraphSet() {
  // delete all the graphs
  for (iterator I = begin(), E = end(); I != E; ++I)
    delete *I;  // destructor is a friend
}


void SchedGraphSet::dump() const {
  std::cerr << "======== Sched graphs for function `" << function->getName()
            << "' ========\n\n";

  for (const_iterator I = begin(); I != end(); ++I)
    (*I)->dump();

  std::cerr << "\n====== End graphs for function `" << function->getName()
            << "' ========\n\n";
}


void SchedGraphSet::buildGraphsForMethod(const Function *F,
                                         const TargetMachine& target) {
  MachineFunction &MF = MachineFunction::get(F);
  for (MachineFunction::iterator I = MF.begin(), E = MF.end(); I != E; ++I)
    addGraph(new SchedGraph(*I, target));
}


void SchedGraphEdge::print(std::ostream &os) const {
  os << "edge [" << src->getNodeId() << "] -> ["
     << sink->getNodeId() << "] : ";

  switch (depType) {
  case SchedGraphEdge::CtrlDep:
    os << "Control Dep";
    break;
  case SchedGraphEdge::ValueDep:
    os << "Reg Value " << val;
    break;
  case SchedGraphEdge::MemoryDep:
    os << "Memory Dep";
    break;
  case SchedGraphEdge::MachineRegister:
    os << "Reg " << machineRegNum;
    break;
  case SchedGraphEdge::MachineResource:
    os << "Resource " << resourceId;
    break;
  default:
    assert(0);
    break;
  }

  os << " : delay = " << minDelay << "\n";
}

void SchedGraphNode::print(std::ostream &os) const {
  os << std::string(8, ' ')
     << "Node " << ID << " : "
     << "latency = " << latency << "\n" << std::string(12, ' ');

  if (getMachineInstr() == NULL)
    os << "(Dummy node)\n";
  else {
    os << *getMachineInstr() << "\n" << std::string(12, ' ');
    os << inEdges.size() << " Incoming Edges:\n";
    for (unsigned i = 0, N = inEdges.size(); i < N; i++)
      os << std::string(16, ' ') << *inEdges[i];

    os << std::string(12, ' ') << outEdges.size()
       << " Outgoing Edges:\n";
    for (unsigned i = 0, N = outEdges.size(); i < N; i++)
      os << std::string(16, ' ') << *outEdges[i];
  }
}