//===- SchedGraph.cpp - Scheduling Graph Implementation ------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file was developed by the LLVM research group and is distributed under
// the University of Illinois Open Source License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Scheduling graph based on SSA graph plus extra dependence edges capturing
// dependences due to machine resources (machine registers, CC registers, and
// any others).
//
//===----------------------------------------------------------------------===//

#include "SchedGraph.h"
#include "llvm/Function.h"
#include "llvm/iOther.h"
#include "llvm/CodeGen/MachineCodeForInstruction.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegInfo.h"
#include "Support/STLExtras.h"

namespace llvm {

//*********************** Internal Data Structures *************************/

// The following types need to be classes, not typedefs, so we can use
// opaque declarations of them in SchedGraph.h
//
struct RefVec: public std::vector<std::pair<SchedGraphNode*, int> > {
  typedef std::vector<std::pair<SchedGraphNode*, int> >::iterator iterator;
  typedef
  std::vector<std::pair<SchedGraphNode*, int> >::const_iterator const_iterator;
};

struct RegToRefVecMap: public hash_map<int, RefVec> {
  typedef hash_map<int, RefVec>::iterator iterator;
  typedef hash_map<int, RefVec>::const_iterator const_iterator;
};

struct ValueToDefVecMap: public hash_map<const Value*, RefVec> {
  typedef hash_map<const Value*, RefVec>::iterator iterator;
  typedef hash_map<const Value*, RefVec>::const_iterator const_iterator;
};


//
// class SchedGraphNode
//

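//
// Method: SchedGraphNode Constructor
//
// Description:
//   Build the graph node for the machine instruction at position indexInBB
//   in mbb (or a dummy node if mbb is null), taking its latency from the
//   target's instruction timing information.
//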
SchedGraphNode::SchedGraphNode(unsigned NID, MachineBasicBlock *mbb,
                               int indexInBB, const TargetMachine& Target)
  : SchedGraphNodeCommon(NID, indexInBB), MBB(mbb),
    MI(mbb ? (*mbb)[indexInBB] : 0) {
  if (MI) {
    MachineOpCode mopCode = MI->getOpcode();
    latency = Target.getInstrInfo().hasResultInterlock(mopCode)
      ? Target.getInstrInfo().minLatency(mopCode)
      : Target.getInstrInfo().maxLatency(mopCode);
  }
}

//
// Method: SchedGraphNode Destructor
//
// Description:
//   Free memory allocated by the SchedGraphNode object.
//
// Notes:
//   Do not delete the edges here. The base class will take care of that.
//   Only handle subclass-specific stuff here (where currently there is
//   none).
//
SchedGraphNode::~SchedGraphNode() {
}

//
// class SchedGraph
//
SchedGraph::SchedGraph(MachineBasicBlock &mbb, const TargetMachine& target)
  : MBB(mbb) {
  buildGraph(target);
}

//
// Method: SchedGraph Destructor
//
// Description:
//   This method deletes memory allocated by the SchedGraph object.
//
// Notes:
//   Do not delete the graphRoot or graphLeaf here. The base class handles
//   that bit of work.
//
SchedGraph::~SchedGraph() {
  for (const_iterator I = begin(); I != end(); ++I)
    delete I->second;
}

void SchedGraph::dump() const {
  std::cerr << " Sched Graph for Basic Block: "
            << MBB.getBasicBlock()->getName()
            << " (" << MBB.getBasicBlock() << ")"
            << "\n\n Actual Root nodes: ";
  for (SchedGraphNodeCommon::const_iterator I = graphRoot->beginOutEdges(),
                                            E = graphRoot->endOutEdges();
       I != E; ++I) {
    std::cerr << (*I)->getSink()->getNodeId();
    if (I + 1 != E) { std::cerr << ", "; }
  }
  std::cerr << "\n Graph Nodes:\n";
  for (const_iterator I = begin(), E = end(); I != E; ++I)
    std::cerr << "\n" << *I->second;
  std::cerr << "\n";
}

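//
// Method: SchedGraph::addDummyEdges
//
// Description:
//   Hook every node that has no incoming edges to the dummy graphRoot, and
//   every node that has no outgoing edges to the dummy graphLeaf, so the
//   graph has a single entry and a single exit.
//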
void SchedGraph::addDummyEdges() {
  assert(graphRoot->getNumOutEdges() == 0);

  for (const_iterator I = begin(); I != end(); ++I) {
    SchedGraphNode* node = (*I).second;
    assert(node != graphRoot && node != graphLeaf);
    if (node->beginInEdges() == node->endInEdges())
      (void) new SchedGraphEdge(graphRoot, node, SchedGraphEdge::CtrlDep,
                                SchedGraphEdge::NonDataDep, 0);
    if (node->beginOutEdges() == node->endOutEdges())
      (void) new SchedGraphEdge(node, graphLeaf, SchedGraphEdge::CtrlDep,
                                SchedGraphEdge::NonDataDep, 0);
  }
}

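//
// Method: SchedGraph::addCDEdges
//
// Description:
//   Add control-dependence edges for the machine instructions generated for
//   the terminator: each instruction in the sequence depends on the last
//   preceding branch/return, every preceding instruction in the block gets
//   an edge to the first branch, and any instruction with delay slots gets
//   edges to the instructions filling those slots.
//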
void SchedGraph::addCDEdges(const TerminatorInst* term,
                            const TargetMachine& target) {
  const TargetInstrInfo& mii = target.getInstrInfo();
  MachineCodeForInstruction &termMvec = MachineCodeForInstruction::get(term);

  // Find the first branch instr in the sequence of machine instrs for term
  //
  unsigned first = 0;
  while (!mii.isBranch(termMvec[first]->getOpcode()) &&
         !mii.isReturn(termMvec[first]->getOpcode()))
    ++first;
  assert(first < termMvec.size() &&
         "No branch instructions for terminator?  Ok, but weird!");
  if (first == termMvec.size())
    return;

  SchedGraphNode* firstBrNode = getGraphNodeForInstr(termMvec[first]);

  // Add CD edges from each instruction in the sequence to the
  // *last preceding* branch instr. in the sequence.
  // Use a latency of 0 because we only need to prevent out-of-order issue.
  //
  for (unsigned i = termMvec.size(); i > first+1; --i) {
    SchedGraphNode* toNode = getGraphNodeForInstr(termMvec[i-1]);
    assert(toNode && "No node for instr generated for branch/ret?");

    for (unsigned j = i-1; j != 0; --j)
      if (mii.isBranch(termMvec[j-1]->getOpcode()) ||
          mii.isReturn(termMvec[j-1]->getOpcode())) {
        SchedGraphNode* brNode = getGraphNodeForInstr(termMvec[j-1]);
        assert(brNode && "No node for instr generated for branch/ret?");
        (void) new SchedGraphEdge(brNode, toNode, SchedGraphEdge::CtrlDep,
                                  SchedGraphEdge::NonDataDep, 0);
        break;                  // only one incoming edge is enough
      }
  }

  // Add CD edges from each instruction preceding the first branch
  // to the first branch.  Use a latency of 0 as above.
  //
  for (unsigned i = first; i != 0; --i) {
    SchedGraphNode* fromNode = getGraphNodeForInstr(termMvec[i-1]);
    assert(fromNode && "No node for instr generated for branch?");
    (void) new SchedGraphEdge(fromNode, firstBrNode, SchedGraphEdge::CtrlDep,
                              SchedGraphEdge::NonDataDep, 0);
  }

  // Now add CD edges to the first branch instruction in the sequence from
  // all preceding instructions in the basic block.  Use 0 latency again.
  //
  for (unsigned i = 0, N = MBB.size(); i < N; i++) {
    if (MBB[i] == termMvec[first])  // reached the first branch
      break;

    SchedGraphNode* fromNode = this->getGraphNodeForInstr(MBB[i]);
    if (fromNode == NULL)
      continue;                     // dummy instruction, e.g., PHI

    (void) new SchedGraphEdge(fromNode, firstBrNode,
                              SchedGraphEdge::CtrlDep,
                              SchedGraphEdge::NonDataDep, 0);

    // If we find any other machine instructions (other than due to
    // the terminator) that also have delay slots, add an outgoing edge
    // from the instruction to the instructions in the delay slots.
    //
    unsigned d = mii.getNumDelaySlots(MBB[i]->getOpcode());
    assert(i+d < N && "Insufficient delay slots for instruction?");

    for (unsigned j = 1; j <= d; j++) {
      SchedGraphNode* toNode = this->getGraphNodeForInstr(MBB[i+j]);
      assert(toNode && "No node for machine instr in delay slot?");
      (void) new SchedGraphEdge(fromNode, toNode,
                                SchedGraphEdge::CtrlDep,
                                SchedGraphEdge::NonDataDep, 0);
    }
  }
}

static const int SG_LOAD_REF  = 0;
static const int SG_STORE_REF = 1;
static const int SG_CALL_REF  = 2;

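// SG_DepOrderArray is indexed as [fromType][toType] using the SG_*_REF
// constants above; each entry gives the dependence kind(s) to use for an
// edge from a memory operation of fromType to a later one of toType
// (see its use in addMemEdges below).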
static const unsigned int SG_DepOrderArray[][3] = {
  { SchedGraphEdge::NonDataDep,
    SchedGraphEdge::AntiDep,
    SchedGraphEdge::AntiDep },
  { SchedGraphEdge::TrueDep,
    SchedGraphEdge::OutputDep,
    SchedGraphEdge::TrueDep | SchedGraphEdge::OutputDep },
  { SchedGraphEdge::TrueDep,
    SchedGraphEdge::AntiDep | SchedGraphEdge::OutputDep,
    SchedGraphEdge::TrueDep | SchedGraphEdge::AntiDep
                            | SchedGraphEdge::OutputDep }
};


// Add a dependence edge between every pair of machine load/store/call
// instructions, where at least one is a store or a call.
// Use latency 1 just to ensure that memory operations are ordered;
// latency does not otherwise matter (true dependences enforce that).
//
void SchedGraph::addMemEdges(const std::vector<SchedGraphNode*>& memNodeVec,
                             const TargetMachine& target) {
  const TargetInstrInfo& mii = target.getInstrInfo();

  // Instructions in memNodeVec are in execution order within the basic block,
  // so simply look at all pairs <memNodeVec[i], memNodeVec[j: j > i]>.
  //
  for (unsigned im = 0, NM = memNodeVec.size(); im < NM; im++) {
    MachineOpCode fromOpCode = memNodeVec[im]->getOpcode();
    int fromType = (mii.isCall(fromOpCode) ? SG_CALL_REF
                    : (mii.isLoad(fromOpCode) ? SG_LOAD_REF
                       : SG_STORE_REF));
    for (unsigned jm = im+1; jm < NM; jm++) {
      MachineOpCode toOpCode = memNodeVec[jm]->getOpcode();
      int toType = (mii.isCall(toOpCode) ? SG_CALL_REF
                    : (mii.isLoad(toOpCode) ? SG_LOAD_REF
                       : SG_STORE_REF));

      if (fromType != SG_LOAD_REF || toType != SG_LOAD_REF)
        (void) new SchedGraphEdge(memNodeVec[im], memNodeVec[jm],
                                  SchedGraphEdge::MemoryDep,
                                  SG_DepOrderArray[fromType][toType], 1);
    }
  }
}

// Add edges from/to CC reg instrs to/from call instrs.
// Essentially this prevents anything that sets or uses a CC reg from being
// reordered w.r.t. a call.
// Use a latency of 0 because we only need to prevent out-of-order issue,
// like with control dependences.
//
void SchedGraph::addCallDepEdges(const std::vector<SchedGraphNode*>& callDepNodeVec,
                                 const TargetMachine& target) {
  const TargetInstrInfo& mii = target.getInstrInfo();

  // Instructions in callDepNodeVec are in execution order within the basic
  // block, so simply look at all pairs
  // <callDepNodeVec[i], callDepNodeVec[j: j > i]>.
  //
  for (unsigned ic = 0, NC = callDepNodeVec.size(); ic < NC; ic++)
    if (mii.isCall(callDepNodeVec[ic]->getOpcode())) {
      // Add SG_CALL_REF edges from all preds to this instruction.
      for (unsigned jc = 0; jc < ic; jc++)
        (void) new SchedGraphEdge(callDepNodeVec[jc], callDepNodeVec[ic],
                                  SchedGraphEdge::MachineRegister,
                                  MachineIntRegsRID, 0);

      // And do the same from this instruction to all successors.
      for (unsigned jc = ic+1; jc < NC; jc++)
        (void) new SchedGraphEdge(callDepNodeVec[ic], callDepNodeVec[jc],
                                  SchedGraphEdge::MachineRegister,
                                  MachineIntRegsRID, 0);
    }

#ifdef CALL_DEP_NODE_VEC_CANNOT_WORK
  // Find the call instruction nodes and put them in a vector.
  std::vector<SchedGraphNode*> callNodeVec;
  for (unsigned im = 0, NM = memNodeVec.size(); im < NM; im++)
    if (mii.isCall(memNodeVec[im]->getOpcode()))
      callNodeVec.push_back(memNodeVec[im]);

  // Now walk the entire basic block, looking for CC instructions *and*
  // call instructions, and keep track of the order of the instructions.
  // Use the call node vec to quickly find earlier and later call nodes
  // relative to the current CC instruction.
  //
  int lastCallNodeIdx = -1;
  for (unsigned i = 0, N = bbMvec.size(); i < N; i++)
    if (mii.isCall(bbMvec[i]->getOpcode())) {
      ++lastCallNodeIdx;
      for ( ; lastCallNodeIdx < (int)callNodeVec.size(); ++lastCallNodeIdx)
        if (callNodeVec[lastCallNodeIdx]->getMachineInstr() == bbMvec[i])
          break;
      assert(lastCallNodeIdx < (int)callNodeVec.size() && "Missed Call?");
    }
    else if (mii.isCCInstr(bbMvec[i]->getOpcode())) {
      // Add incoming/outgoing edges from/to preceding/later calls
      SchedGraphNode* ccNode = this->getGraphNodeForInstr(bbMvec[i]);
      int j = 0;
      for ( ; j <= lastCallNodeIdx; j++)
        (void) new SchedGraphEdge(callNodeVec[j], ccNode,
                                  MachineCCRegsRID, 0);
      for ( ; j < (int) callNodeVec.size(); j++)
        (void) new SchedGraphEdge(ccNode, callNodeVec[j],
                                  MachineCCRegsRID, 0);
    }
#endif
}

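//
// Method: SchedGraph::addMachineRegEdges
//
// Description:
//   For each machine register, walk its references in control-flow order and
//   add output, anti, and true dependence edges between earlier and later
//   references of that register.
//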
void SchedGraph::addMachineRegEdges(RegToRefVecMap& regToRefVecMap,
                                    const TargetMachine& target) {
  // This code assumes that two registers with different numbers are
  // not aliased!
  //
  for (RegToRefVecMap::iterator I = regToRefVecMap.begin();
       I != regToRefVecMap.end(); ++I) {
    int regNum        = (*I).first;
    RefVec& regRefVec = (*I).second;

    // regRefVec is ordered by control flow order in the basic block
    for (unsigned i = 0; i < regRefVec.size(); ++i) {
      SchedGraphNode* node = regRefVec[i].first;
      unsigned int opNum   = regRefVec[i].second;
      const MachineOperand& mop =
        node->getMachineInstr()->getExplOrImplOperand(opNum);
      bool isDef       = mop.isDef() && !mop.isUse();
      bool isDefAndUse = mop.isDef() && mop.isUse();

      for (unsigned p = 0; p < i; ++p) {
        SchedGraphNode* prevNode = regRefVec[p].first;
        if (prevNode != node) {
          unsigned int prevOpNum = regRefVec[p].second;
          const MachineOperand& prevMop =
            prevNode->getMachineInstr()->getExplOrImplOperand(prevOpNum);
          bool prevIsDef       = prevMop.isDef() && !prevMop.isUse();
          bool prevIsDefAndUse = prevMop.isDef() && prevMop.isUse();
          if (isDef) {
            if (prevIsDef)
              new SchedGraphEdge(prevNode, node, regNum,
                                 SchedGraphEdge::OutputDep);
            if (!prevIsDef || prevIsDefAndUse)
              new SchedGraphEdge(prevNode, node, regNum,
                                 SchedGraphEdge::AntiDep);
          }

          if (prevIsDef)
            if (!isDef || isDefAndUse)
              new SchedGraphEdge(prevNode, node, regNum,
                                 SchedGraphEdge::TrueDep);
        }
      }
    }
  }
}


// Adds dependences to/from refNode from/to all other defs
// in the basic block.  refNode may be a use, a def, or both.
// We do not consider other uses because we are not building use-use deps.
//
void SchedGraph::addEdgesForValue(SchedGraphNode* refNode,
                                  const RefVec& defVec,
                                  const Value* defValue,
                                  bool refNodeIsDef,
                                  bool refNodeIsUse,
                                  const TargetMachine& target) {
  // Add true or output dep edges from all def nodes before refNode in BB.
  // Add anti or output dep edges to all def nodes after refNode.
  for (RefVec::const_iterator I = defVec.begin(), E = defVec.end(); I != E; ++I) {
    if ((*I).first == refNode)
      continue;                 // Don't add any self-loops

    if ((*I).first->getOrigIndexInBB() < refNode->getOrigIndexInBB()) {
      // (*I).first is before refNode
      if (refNodeIsDef && !refNodeIsUse)
        (void) new SchedGraphEdge((*I).first, refNode, defValue,
                                  SchedGraphEdge::OutputDep);
      if (refNodeIsUse)
        (void) new SchedGraphEdge((*I).first, refNode, defValue,
                                  SchedGraphEdge::TrueDep);
    } else {
      // (*I).first is after refNode
      if (refNodeIsDef && !refNodeIsUse)
        (void) new SchedGraphEdge(refNode, (*I).first, defValue,
                                  SchedGraphEdge::OutputDep);
      if (refNodeIsUse)
        (void) new SchedGraphEdge(refNode, (*I).first, defValue,
                                  SchedGraphEdge::AntiDep);
    }
  }
}

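//
// Method: SchedGraph::addEdgesForInstruction
//
// Description:
//   Add the value-dependence edges for one machine instruction: for every
//   explicit or implicit operand that refers to an LLVM value, connect this
//   node to the recorded defs of that value via addEdgesForValue.
//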
void SchedGraph::addEdgesForInstruction(const MachineInstr& MI,
                                        const ValueToDefVecMap& valueToDefVecMap,
                                        const TargetMachine& target) {
  SchedGraphNode* node = getGraphNodeForInstr(&MI);
  if (node == NULL)
    return;

  // Add edges for all operands of the machine instruction.
  //
  for (unsigned i = 0, numOps = MI.getNumOperands(); i != numOps; ++i) {
    switch (MI.getOperand(i).getType()) {
    case MachineOperand::MO_VirtualRegister:
    case MachineOperand::MO_CCRegister:
      if (const Value* srcI = MI.getOperand(i).getVRegValue()) {
        ValueToDefVecMap::const_iterator I = valueToDefVecMap.find(srcI);
        if (I != valueToDefVecMap.end())
          addEdgesForValue(node, I->second, srcI,
                           MI.getOperand(i).isDef(), MI.getOperand(i).isUse(),
                           target);
      }
      break;

    case MachineOperand::MO_MachineRegister:
      break;

    case MachineOperand::MO_SignExtendedImmed:
    case MachineOperand::MO_UnextendedImmed:
    case MachineOperand::MO_PCRelativeDisp:
    case MachineOperand::MO_ConstantPoolIndex:
      break;                    // nothing to do for immediate fields

    default:
      assert(0 && "Unknown machine operand type in SchedGraph builder");
      break;
    }
  }

  // Add edges for values implicitly used by the machine instruction.
  // Examples include function arguments to a Call instruction or the return
  // value of a Ret instruction.
  //
  for (unsigned i = 0, N = MI.getNumImplicitRefs(); i < N; ++i)
    if (MI.getImplicitOp(i).isUse())
      if (const Value* srcI = MI.getImplicitRef(i)) {
        ValueToDefVecMap::const_iterator I = valueToDefVecMap.find(srcI);
        if (I != valueToDefVecMap.end())
          addEdgesForValue(node, I->second, srcI,
                           MI.getImplicitOp(i).isDef(),
                           MI.getImplicitOp(i).isUse(), target);
      }
}

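//
// Method: SchedGraph::findDefUseInfoAtInstr
//
// Description:
//   Record the information needed to add edges for one node later: remember
//   memory and call-dependence candidates, note every explicit or implicit
//   machine-register reference in regToRefVecMap, and note every value
//   defined by the instruction in valueToDefVecMap.
//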
void SchedGraph::findDefUseInfoAtInstr(const TargetMachine& target,
                                       SchedGraphNode* node,
                                       std::vector<SchedGraphNode*>& memNodeVec,
                                       std::vector<SchedGraphNode*>& callDepNodeVec,
                                       RegToRefVecMap& regToRefVecMap,
                                       ValueToDefVecMap& valueToDefVecMap) {
  const TargetInstrInfo& mii = target.getInstrInfo();

  MachineOpCode opCode = node->getOpcode();

  if (mii.isCall(opCode) || mii.isCCInstr(opCode))
    callDepNodeVec.push_back(node);

  if (mii.isLoad(opCode) || mii.isStore(opCode) || mii.isCall(opCode))
    memNodeVec.push_back(node);

  // Collect the register references and value defs. for explicit operands
  //
  const MachineInstr& MI = *node->getMachineInstr();
  for (int i = 0, numOps = (int) MI.getNumOperands(); i < numOps; i++) {
    const MachineOperand& mop = MI.getOperand(i);

    // If this references a register other than the hardwired
    // "zero" register, record the reference.
    if (mop.hasAllocatedReg()) {
      int regNum = mop.getAllocatedRegNum();

      // If this is not a dummy zero register, record the reference in order
      if (regNum != target.getRegInfo().getZeroRegNum())
        regToRefVecMap[mop.getAllocatedRegNum()]
          .push_back(std::make_pair(node, i));

      // If this is a volatile register, add the instruction to callDepVec
      // (only if the node is not already on the callDepVec!)
      if (callDepNodeVec.size() == 0 || callDepNodeVec.back() != node) {
        unsigned rcid;
        int regInClass = target.getRegInfo().getClassRegNum(regNum, rcid);
        if (target.getRegInfo().getMachineRegClass(rcid)
            ->isRegVolatile(regInClass))
          callDepNodeVec.push_back(node);
      }

      continue;                     // nothing more to do
    }

    // ignore all other non-def operands
    if (!MI.getOperand(i).isDef())
      continue;

    // We must be defining a value.
    assert((mop.getType() == MachineOperand::MO_VirtualRegister ||
            mop.getType() == MachineOperand::MO_CCRegister)
           && "Do not expect any other kind of operand to be defined!");
    assert(mop.getVRegValue() != NULL && "Null value being defined?");

    valueToDefVecMap[mop.getVRegValue()].push_back(std::make_pair(node, i));
  }

  //
  // Collect value defs. for implicit operands.  They may have allocated
  // physical registers also.
  //
  for (unsigned i = 0, N = MI.getNumImplicitRefs(); i != N; ++i) {
    const MachineOperand& mop = MI.getImplicitOp(i);
    if (mop.hasAllocatedReg()) {
      int regNum = mop.getAllocatedRegNum();
      if (regNum != target.getRegInfo().getZeroRegNum())
        regToRefVecMap[mop.getAllocatedRegNum()]
          .push_back(std::make_pair(node, i + MI.getNumOperands()));
      continue;                     // nothing more to do
    }

    if (mop.isDef()) {
      assert(MI.getImplicitRef(i) != NULL && "Null value being defined?");
      valueToDefVecMap[MI.getImplicitRef(i)].push_back(
        std::make_pair(node, -i));
    }
  }
}

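//
// Method: SchedGraph::buildNodesForBB
//
// Description:
//   Create a SchedGraphNode for every machine instruction in the basic block
//   (except dummy phi instructions) and gather the def/use information that
//   the edge-adding passes need, all in a single walk over the instructions.
//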
void SchedGraph::buildNodesForBB(const TargetMachine& target,
                                 MachineBasicBlock& MBB,
                                 std::vector<SchedGraphNode*>& memNodeVec,
                                 std::vector<SchedGraphNode*>& callDepNodeVec,
                                 RegToRefVecMap& regToRefVecMap,
                                 ValueToDefVecMap& valueToDefVecMap) {
  const TargetInstrInfo& mii = target.getInstrInfo();

  // Build graph nodes for each VM instruction and gather def/use info.
  // Do both those together in a single pass over all machine instructions.
  for (unsigned i = 0; i < MBB.size(); i++)
    if (!mii.isDummyPhiInstr(MBB[i]->getOpcode())) {
      SchedGraphNode* node = new SchedGraphNode(getNumNodes(), &MBB, i, target);
      noteGraphNodeForInstr(MBB[i], node);

      // Remember all register references and value defs
      findDefUseInfoAtInstr(target, node, memNodeVec, callDepNodeVec,
                            regToRefVecMap, valueToDefVecMap);
    }
}


void SchedGraph::buildGraph(const TargetMachine& target) {
  // Use this data structure to note all machine operands that compute
  // ordinary LLVM values.  These must be computed defs (i.e., instructions).
  // Note that there may be multiple machine instructions that define
  // each Value.
  ValueToDefVecMap valueToDefVecMap;

  // Use this data structure to note all memory instructions.
  // We use this to add memory dependence edges without a second full walk.
  std::vector<SchedGraphNode*> memNodeVec;

  // Use this data structure to note all instructions that access physical
  // registers that can be modified by a call (including call instructions)
  std::vector<SchedGraphNode*> callDepNodeVec;

  // Use this data structure to note any uses or definitions of
  // machine registers so we can add edges for those later without
  // extra passes over the nodes.
  // The vector holds an ordered list of references to the machine reg,
  // ordered according to control-flow order.  This only works for a
  // single basic block, hence the assertion.  Each reference is identified
  // by the pair: <node, operand-number>.
  //
  RegToRefVecMap regToRefVecMap;

  // Make a dummy root node.  We'll add edges to the real roots later.
  graphRoot = new SchedGraphNode(0, NULL, -1, target);
  graphLeaf = new SchedGraphNode(1, NULL, -1, target);

  //----------------------------------------------------------------
  // First add nodes for all the machine instructions in the basic block
  // because this greatly simplifies identifying which edges to add.
  // Do this one VM instruction at a time since the SchedGraphNode needs that.
  // Also, remember the load/store instructions to add memory deps later.
  //----------------------------------------------------------------

  buildNodesForBB(target, MBB, memNodeVec, callDepNodeVec,
                  regToRefVecMap, valueToDefVecMap);

  //----------------------------------------------------------------
  // Now add edges for the following (all are incoming edges except (5)):
  // (1) operands of the machine instruction, including hidden operands
  // (2) machine register dependences
  // (3) memory load/store dependences
  // (4) other resource dependences for the machine instruction, if any
  // (5) output dependences when multiple machine instructions define the
  //     same value; all must have been generated from a single VM instrn
  // (6) control dependences to branch instructions generated for the
  //     terminator instruction of the BB.  Because of delay slots and
  //     2-way conditional branches, multiple CD edges are needed
  //     (see addCDEdges for details).
  // Also, note any uses or defs of machine registers.
  //
  //----------------------------------------------------------------

  // First, add edges to the terminator instruction of the basic block.
  this->addCDEdges(MBB.getBasicBlock()->getTerminator(), target);

  // Then add memory dep edges: store->load, load->store, and store->store.
  // Call instructions are treated as both load and store.
  this->addMemEdges(memNodeVec, target);

  // Then add edges between call instructions and CC set/use instructions
  this->addCallDepEdges(callDepNodeVec, target);

  // Then add incoming def-use (SSA) edges for each machine instruction.
  for (unsigned i = 0, N = MBB.size(); i < N; i++)
    addEdgesForInstruction(*MBB[i], valueToDefVecMap, target);

  // Then add edges for dependences on machine registers
  this->addMachineRegEdges(regToRefVecMap, target);

  // Finally, add edges from the dummy root and to dummy leaf
  this->addDummyEdges();
}


//
// class SchedGraphSet
//
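// A minimal usage sketch (hypothetical client code; assumes a Function* F and
// a TargetMachine& Target are available from the surrounding code generator):
//
//   SchedGraphSet graphSet(F, Target); // one SchedGraph per MachineBasicBlock
//   graphSet.dump();                   // print every graph to std::cerr
//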
SchedGraphSet::SchedGraphSet(const Function* _function,
                             const TargetMachine& target) :
  function(_function) {
  buildGraphsForMethod(function, target);
}

SchedGraphSet::~SchedGraphSet() {
  // delete all the graphs
  for (iterator I = begin(), E = end(); I != E; ++I)
    delete *I;  // destructor is a friend
}


void SchedGraphSet::dump() const {
  std::cerr << "======== Sched graphs for function `" << function->getName()
            << "' ========\n\n";

  for (const_iterator I = begin(); I != end(); ++I)
    (*I)->dump();

  std::cerr << "\n====== End graphs for function `" << function->getName()
            << "' ========\n\n";
}


void SchedGraphSet::buildGraphsForMethod(const Function *F,
                                         const TargetMachine& target) {
  MachineFunction &MF = MachineFunction::get(F);
  for (MachineFunction::iterator I = MF.begin(), E = MF.end(); I != E; ++I)
    addGraph(new SchedGraph(*I, target));
}


void SchedGraphEdge::print(std::ostream &os) const {
  os << "edge [" << src->getNodeId() << "] -> ["
     << sink->getNodeId() << "] : ";

  switch (depType) {
  case SchedGraphEdge::CtrlDep:
    os << "Control Dep";
    break;
  case SchedGraphEdge::ValueDep:
    os << "Reg Value " << val;
    break;
  case SchedGraphEdge::MemoryDep:
    os << "Memory Dep";
    break;
  case SchedGraphEdge::MachineRegister:
    os << "Reg " << machineRegNum;
    break;
  case SchedGraphEdge::MachineResource:
    os << "Resource " << resourceId;
    break;
  default:
    assert(0);
    break;
  }

  os << " : delay = " << minDelay << "\n";
}

void SchedGraphNode::print(std::ostream &os) const {
  os << std::string(8, ' ')
     << "Node " << ID << " : "
     << "latency = " << latency << "\n" << std::string(12, ' ');

  if (getMachineInstr() == NULL)
    os << "(Dummy node)\n";
  else {
    os << *getMachineInstr() << "\n" << std::string(12, ' ');
    os << inEdges.size() << " Incoming Edges:\n";
    for (unsigned i = 0, N = inEdges.size(); i < N; i++)
      os << std::string(16, ' ') << *inEdges[i];

    os << std::string(12, ' ') << outEdges.size()
       << " Outgoing Edges:\n";
    for (unsigned i = 0, N = outEdges.size(); i < N; i++)
      os << std::string(16, ' ') << *outEdges[i];
  }
}

} // End llvm namespace