//===---- ScheduleDAGInstrs.cpp - MachineInstr Rescheduling ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file This implements the ScheduleDAGInstrs class, which implements
/// re-scheduling of MachineInstrs.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/ScheduleDAGInstrs.h"
#include "llvm/ADT/IntEqClasses.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/RegisterPressure.h"
#include "llvm/CodeGen/ScheduleDFS.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"

using namespace llvm;

#define DEBUG_TYPE "misched"

static cl::opt<bool> EnableAASchedMI("enable-aa-sched-mi", cl::Hidden,
    cl::ZeroOrMore, cl::init(false),
    cl::desc("Enable use of AA during MI DAG construction"));

static cl::opt<bool> UseTBAA("use-tbaa-in-sched-mi", cl::Hidden,
    cl::init(true), cl::desc("Enable use of TBAA during MI DAG construction"));

// Note: the two options below might be used in tuning compile time vs
// output quality. Setting HugeRegion so large that it will never be
// reached means best-effort, but may be slow.
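// (For example, passing a very large value such as -dag-maps-huge-region=100000
// effectively disables the map reduction, trading compile time for precision.)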

// When Stores and Loads maps (or NonAliasStores and NonAliasLoads)
// together hold this many SUs, a reduction of maps will be done.
static cl::opt<unsigned> HugeRegion("dag-maps-huge-region", cl::Hidden,
    cl::init(1000), cl::desc("The limit to use while constructing the DAG "
                             "prior to scheduling, at which point a trade-off "
                             "is made to avoid excessive compile time."));

static cl::opt<unsigned> ReductionSize(
    "dag-maps-reduction-size", cl::Hidden,
    cl::desc("A huge scheduling region will have maps reduced by this many "
             "nodes at a time. Defaults to HugeRegion / 2."));

static unsigned getReductionSize() {
  // Always reduce a huge region with half of the elements, except
  // when user sets this number explicitly.
  if (ReductionSize.getNumOccurrences() == 0)
    return HugeRegion / 2;
  return ReductionSize;
}

static void dumpSUList(ScheduleDAGInstrs::SUList &L) {
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  dbgs() << "{ ";
  for (const SUnit *su : L) {
    dbgs() << "SU(" << su->NodeNum << ")";
    if (su != L.back())
      dbgs() << ", ";
  }
  dbgs() << "}\n";
#endif
}

ScheduleDAGInstrs::ScheduleDAGInstrs(MachineFunction &mf,
                                     const MachineLoopInfo *mli,
                                     bool RemoveKillFlags)
    : ScheduleDAG(mf), MLI(mli), MFI(mf.getFrameInfo()),
      RemoveKillFlags(RemoveKillFlags), CanHandleTerminators(false),
      TrackLaneMasks(false), AAForDep(nullptr), BarrierChain(nullptr),
      UnknownValue(UndefValue::get(
          Type::getVoidTy(mf.getFunction()->getContext()))),
      FirstDbgValue(nullptr) {
  DbgValues.clear();

  const TargetSubtargetInfo &ST = mf.getSubtarget();
  SchedModel.init(ST.getSchedModel(), &ST, TII);
}

/// This is the function that does the work of looking through basic
/// ptrtoint+arithmetic+inttoptr sequences.
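/// For example, given IR like "%i = ptrtoint i8* %obj to i64" followed by
/// "%j = add i64 %i, 16", walking from %j steps through the add and the
/// ptrtoint and yields %obj. (The %obj/%i/%j names are illustrative only.)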
static const Value *getUnderlyingObjectFromInt(const Value *V) {
  do {
    if (const Operator *U = dyn_cast<Operator>(V)) {
      // If we find a ptrtoint, we can transfer control back to the
      // regular getUnderlyingObjectFromInt.
      if (U->getOpcode() == Instruction::PtrToInt)
        return U->getOperand(0);
      // If we find an add of a constant, a multiplied value, or a phi, it's
      // likely that the other operand will lead us to the base
      // object. We don't have to worry about the case where the
      // object address is somehow being computed by the multiply,
      // because our callers only care when the result is an
      // identifiable object.
      if (U->getOpcode() != Instruction::Add ||
          (!isa<ConstantInt>(U->getOperand(1)) &&
           Operator::getOpcode(U->getOperand(1)) != Instruction::Mul &&
           !isa<PHINode>(U->getOperand(1))))
        return V;
      V = U->getOperand(0);
    } else {
      return V;
    }
    assert(V->getType()->isIntegerTy() && "Unexpected operand type!");
  } while (1);
}

/// This is a wrapper around GetUnderlyingObjects and adds support for basic
/// ptrtoint+arithmetic+inttoptr sequences.
static void getUnderlyingObjects(const Value *V,
                                 SmallVectorImpl<Value *> &Objects,
                                 const DataLayout &DL) {
  SmallPtrSet<const Value *, 16> Visited;
  SmallVector<const Value *, 4> Working(1, V);
  do {
    V = Working.pop_back_val();

    SmallVector<Value *, 4> Objs;
    GetUnderlyingObjects(const_cast<Value *>(V), Objs, DL);

    for (Value *V : Objs) {
      if (!Visited.insert(V).second)
        continue;
      if (Operator::getOpcode(V) == Instruction::IntToPtr) {
        const Value *O =
          getUnderlyingObjectFromInt(cast<User>(V)->getOperand(0));
        if (O->getType()->isPointerTy()) {
          Working.push_back(O);
          continue;
        }
      }
      Objects.push_back(const_cast<Value *>(V));
    }
  } while (!Working.empty());
}

/// If this machine instr has memory reference information and it can be
/// tracked to normal references to known objects, collect the Values for those
/// objects in Objects; otherwise Objects is left empty.
static void getUnderlyingObjectsForInstr(const MachineInstr *MI,
                                         const MachineFrameInfo &MFI,
                                         UnderlyingObjectsVector &Objects,
                                         const DataLayout &DL) {
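  // Inspect every memory operand of MI; as soon as one is volatile or cannot
  // be traced back to a distinct, unaliased object, give up and report no
  // known objects (Objects is cleared below).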
  auto allMMOsOkay = [&]() {
    for (const MachineMemOperand *MMO : MI->memoperands()) {
      if (MMO->isVolatile())
        return false;

      if (const PseudoSourceValue *PSV = MMO->getPseudoValue()) {
        // Functions that contain tail calls don't have unique PseudoSourceValue
        // objects. Two PseudoSourceValues might refer to the same or
        // overlapping locations. The client code calling this function assumes
        // this is not the case. So return a conservative answer of no known
        // object.
        if (MFI.hasTailCall())
          return false;

        // For now, ignore PseudoSourceValues which may alias LLVM IR values
        // because the code that uses this function has no way to cope with
        // such aliases.
        if (PSV->isAliased(&MFI))
          return false;

        bool MayAlias = PSV->mayAlias(&MFI);
        Objects.push_back(UnderlyingObjectsVector::value_type(PSV, MayAlias));
      } else if (const Value *V = MMO->getValue()) {
        SmallVector<Value *, 4> Objs;
        getUnderlyingObjects(V, Objs, DL);

        for (Value *V : Objs) {
          if (!isIdentifiedObject(V))
            return false;

          Objects.push_back(UnderlyingObjectsVector::value_type(V, true));
        }
      } else
        return false;
    }
    return true;
  };

  if (!allMMOsOkay())
    Objects.clear();
}

void ScheduleDAGInstrs::startBlock(MachineBasicBlock *bb) {
  BB = bb;
}

void ScheduleDAGInstrs::finishBlock() {
  // Subclasses should no longer refer to the old block.
  BB = nullptr;
}

void ScheduleDAGInstrs::enterRegion(MachineBasicBlock *bb,
                                    MachineBasicBlock::iterator begin,
                                    MachineBasicBlock::iterator end,
                                    unsigned regioninstrs) {
  assert(bb == BB && "startBlock should set BB");
  RegionBegin = begin;
  RegionEnd = end;
  NumRegionInstrs = regioninstrs;
}

void ScheduleDAGInstrs::exitRegion() {
  // Nothing to do.
}

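// Add dependencies from the instructions in the scheduling region to ExitSU,
// which acts as a scheduling barrier at the end of the region: either the
// register uses of the exiting instruction itself, or the live-ins of all
// successor blocks for fall-through and conditional exits.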
void ScheduleDAGInstrs::addSchedBarrierDeps() {
  MachineInstr *ExitMI = RegionEnd != BB->end() ? &*RegionEnd : nullptr;
  ExitSU.setInstr(ExitMI);
  // Add dependencies on the defs and uses of the instruction.
  if (ExitMI) {
    for (const MachineOperand &MO : ExitMI->operands()) {
      if (!MO.isReg() || MO.isDef()) continue;
      unsigned Reg = MO.getReg();
      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        Uses.insert(PhysRegSUOper(&ExitSU, -1, Reg));
      } else if (TargetRegisterInfo::isVirtualRegister(Reg) && MO.readsReg()) {
        addVRegUseDeps(&ExitSU, ExitMI->getOperandNo(&MO));
      }
    }
  }
  if (!ExitMI || (!ExitMI->isCall() && !ExitMI->isBarrier())) {
    // For others, e.g. fallthrough, conditional branch, assume the exit
    // uses all the registers that are livein to the successor blocks.
    for (const MachineBasicBlock *Succ : BB->successors()) {
      for (const auto &LI : Succ->liveins()) {
        if (!Uses.contains(LI.PhysReg))
          Uses.insert(PhysRegSUOper(&ExitSU, -1, LI.PhysReg));
      }
    }
  }
}

/// MO is an operand of SU's instruction that defines a physical register. Adds
/// data dependencies from SU to any uses of the physical register.
void ScheduleDAGInstrs::addPhysRegDataDeps(SUnit *SU, unsigned OperIdx) {
  const MachineOperand &MO = SU->getInstr()->getOperand(OperIdx);
  assert(MO.isDef() && "expect physreg def");

  // Ask the target if address-backscheduling is desirable, and if so how much.
  const TargetSubtargetInfo &ST = MF.getSubtarget();

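  // Walk the recorded uses of this register and of every aliasing register.
  // The DAG is built bottom-up, so these uses belong to instructions that
  // come later in the region than SU.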
  for (MCRegAliasIterator Alias(MO.getReg(), TRI, true);
       Alias.isValid(); ++Alias) {
    if (!Uses.contains(*Alias))
      continue;
    for (Reg2SUnitsMap::iterator I = Uses.find(*Alias); I != Uses.end(); ++I) {
      SUnit *UseSU = I->SU;
      if (UseSU == SU)
        continue;

      // Adjust the dependence latency using operand def/use information,
      // then allow the target to perform its own adjustments.
      int UseOp = I->OpIdx;
      MachineInstr *RegUse = nullptr;
      SDep Dep;
      if (UseOp < 0)
        Dep = SDep(SU, SDep::Artificial);
      else {
        // Set the hasPhysRegDefs only for physreg defs that have a use within
        // the scheduling region.
        SU->hasPhysRegDefs = true;
        Dep = SDep(SU, SDep::Data, *Alias);
        RegUse = UseSU->getInstr();
      }
      Dep.setLatency(
          SchedModel.computeOperandLatency(SU->getInstr(), OperIdx, RegUse,
                                           UseOp));

      ST.adjustSchedDependency(SU, UseSU, Dep);
      UseSU->addPred(Dep);
    }
  }
}

/// \brief Adds register dependencies (data, anti, and output) from this SUnit
/// to following instructions in the same scheduling region that depend on the
/// physical register referenced at OperIdx.
void ScheduleDAGInstrs::addPhysRegDeps(SUnit *SU, unsigned OperIdx) {
  MachineInstr *MI = SU->getInstr();
  MachineOperand &MO = MI->getOperand(OperIdx);
  unsigned Reg = MO.getReg();
  // We do not need to track any dependencies for constant registers.
  if (MRI.isConstantPhysReg(Reg))
    return;

  // Optionally add output and anti dependencies. For anti
  // dependencies we use a latency of 0 because for a multi-issue
  // target we want to allow the defining instruction to issue
  // in the same cycle as the using instruction.
  // TODO: Using a latency of 1 here for output dependencies assumes
  // there's no cost for reusing registers.
  SDep::Kind Kind = MO.isUse() ? SDep::Anti : SDep::Output;
  for (MCRegAliasIterator Alias(Reg, TRI, true); Alias.isValid(); ++Alias) {
    if (!Defs.contains(*Alias))
      continue;
    for (Reg2SUnitsMap::iterator I = Defs.find(*Alias); I != Defs.end(); ++I) {
      SUnit *DefSU = I->SU;
      if (DefSU == &ExitSU)
        continue;
      if (DefSU != SU &&
          (Kind != SDep::Output || !MO.isDead() ||
           !DefSU->getInstr()->registerDefIsDead(*Alias))) {
        if (Kind == SDep::Anti)
          DefSU->addPred(SDep(SU, Kind, /*Reg=*/*Alias));
        else {
          SDep Dep(SU, Kind, /*Reg=*/*Alias);
          Dep.setLatency(
              SchedModel.computeOutputLatency(MI, OperIdx, DefSU->getInstr()));
          DefSU->addPred(Dep);
        }
      }
    }
  }

  if (!MO.isDef()) {
    SU->hasPhysRegUses = true;
    // Either insert a new Reg2SUnits entry with an empty SUnits list, or
    // retrieve the existing SUnits list for this register's uses.
    // Push this SUnit on the use list.
    Uses.insert(PhysRegSUOper(SU, OperIdx, Reg));
    if (RemoveKillFlags)
      MO.setIsKill(false);
  } else {
    addPhysRegDataDeps(SU, OperIdx);

    // clear this register's use list
    if (Uses.contains(Reg))
      Uses.eraseAll(Reg);

    if (!MO.isDead()) {
      Defs.eraseAll(Reg);
    } else if (SU->isCall) {
      // Calls will not be reordered because of chain dependencies (see
      // below). Since call operands are dead, calls may continue to be added
      // to the DefList making dependence checking quadratic in the size of
      // the block. Instead, we leave only one call at the back of the
      // DefList.
      Reg2SUnitsMap::RangePair P = Defs.equal_range(Reg);
      Reg2SUnitsMap::iterator B = P.first;
      Reg2SUnitsMap::iterator I = P.second;
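      // Walk the def list backward, erasing older call entries until a
      // non-call def is found; the current call is pushed below, so at most
      // one call remains at the back of the list.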
      for (bool isBegin = I == B; !isBegin; /* empty */) {
        isBegin = (--I) == B;
        if (!I->SU->isCall)
          break;
        I = Defs.erase(I);
      }
    }

    // Defs are pushed in the order they are visited and never reordered.
    Defs.insert(PhysRegSUOper(SU, OperIdx, Reg));
  }
}

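/// Returns the lane mask covered by the (sub)register operand MO, or all lanes
/// when the register class has no interesting (disjunct) subregisters.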
LaneBitmask ScheduleDAGInstrs::getLaneMaskForMO(const MachineOperand &MO) const
{
  unsigned Reg = MO.getReg();
  // No point in tracking lanemasks if we don't have interesting subregisters.
  const TargetRegisterClass &RC = *MRI.getRegClass(Reg);
  if (!RC.HasDisjunctSubRegs)
    return LaneBitmask::getAll();

  unsigned SubReg = MO.getSubReg();
  if (SubReg == 0)
    return RC.getLaneMask();
  return TRI->getSubRegIndexLaneMask(SubReg);
}

/// Adds register output and data dependencies from this SUnit to instructions
/// that occur later in the same scheduling region if they read from or write to
/// the virtual register defined at OperIdx.
///
/// TODO: Hoist loop induction variable increments. This has to be
/// reevaluated. Generally, IV scheduling should be done before coalescing.
void ScheduleDAGInstrs::addVRegDefDeps(SUnit *SU, unsigned OperIdx) {
  MachineInstr *MI = SU->getInstr();
  MachineOperand &MO = MI->getOperand(OperIdx);
  unsigned Reg = MO.getReg();

  LaneBitmask DefLaneMask;
  LaneBitmask KillLaneMask;
  if (TrackLaneMasks) {
    bool IsKill = MO.getSubReg() == 0 || MO.isUndef();
    DefLaneMask = getLaneMaskForMO(MO);
    // If we have a <read-undef> flag, none of the lane values comes from an
    // earlier instruction.
    KillLaneMask = IsKill ? LaneBitmask::getAll() : DefLaneMask;

    // Clear undef flag, we'll re-add it later once we know which subregister
    // Def is first.
    MO.setIsUndef(false);
  } else {
    DefLaneMask = LaneBitmask::getAll();
    KillLaneMask = LaneBitmask::getAll();
  }

  if (MO.isDead()) {
    assert(CurrentVRegUses.find(Reg) == CurrentVRegUses.end() &&
           "Dead defs should have no uses");
  } else {
    // Add data dependence to all uses we found so far.
    const TargetSubtargetInfo &ST = MF.getSubtarget();
    for (VReg2SUnitOperIdxMultiMap::iterator I = CurrentVRegUses.find(Reg),
         E = CurrentVRegUses.end(); I != E; /*empty*/) {
      LaneBitmask LaneMask = I->LaneMask;
      // Ignore uses of other lanes.
      if ((LaneMask & KillLaneMask).none()) {
        ++I;
        continue;
      }

      if ((LaneMask & DefLaneMask).any()) {
        SUnit *UseSU = I->SU;
        MachineInstr *Use = UseSU->getInstr();
        SDep Dep(SU, SDep::Data, Reg);
        Dep.setLatency(SchedModel.computeOperandLatency(MI, OperIdx, Use,
                                                        I->OperandIndex));
        ST.adjustSchedDependency(SU, UseSU, Dep);
        UseSU->addPred(Dep);
      }

      LaneMask &= ~KillLaneMask;
      // If we found a Def for all lanes of this use, remove it from the list.
      if (LaneMask.any()) {
        I->LaneMask = LaneMask;
        ++I;
      } else
        I = CurrentVRegUses.erase(I);
    }
  }

  // Shortcut: Singly defined vregs do not have output/anti dependencies.
  if (MRI.hasOneDef(Reg))
    return;

  // Add output dependence to the next nearest defs of this vreg.
  //
  // Unless this definition is dead, the output dependence should be
  // transitively redundant with antidependencies from this definition's
  // uses. We're conservative for now until we have a way to guarantee the uses
  // are not eliminated sometime during scheduling. The output dependence edge
  // is also useful if output latency exceeds def-use latency.
  LaneBitmask LaneMask = DefLaneMask;
  for (VReg2SUnit &V2SU : make_range(CurrentVRegDefs.find(Reg),
                                     CurrentVRegDefs.end())) {
    // Ignore defs for other lanes.
    if ((V2SU.LaneMask & LaneMask).none())
      continue;
    // Add an output dependence.
    SUnit *DefSU = V2SU.SU;
    // Ignore additional defs of the same lanes in one instruction. This can
    // happen because lanemasks are shared for targets with too many
    // subregisters. We also use some representation tricks/hacks where we
    // add super-register defs/uses, to imply that although we only access
    // parts of the register we care about the full one.
    if (DefSU == SU)
      continue;
    SDep Dep(SU, SDep::Output, Reg);
    Dep.setLatency(
        SchedModel.computeOutputLatency(MI, OperIdx, DefSU->getInstr()));
    DefSU->addPred(Dep);

    // Update current definition. This can get tricky if the def was about a
    // bigger lanemask before. We then have to shrink it and create a new
    // VReg2SUnit for the non-overlapping part.
    LaneBitmask OverlapMask = V2SU.LaneMask & LaneMask;
    LaneBitmask NonOverlapMask = V2SU.LaneMask & ~LaneMask;
    V2SU.SU = SU;
    V2SU.LaneMask = OverlapMask;
    if (NonOverlapMask.any())
      CurrentVRegDefs.insert(VReg2SUnit(Reg, NonOverlapMask, DefSU));
  }
  // If there was no CurrentVRegDefs entry for some lanes yet, create one.
  if (LaneMask.any())
    CurrentVRegDefs.insert(VReg2SUnit(Reg, LaneMask, SU));
}

/// \brief Adds a register data dependency if the instruction that defines the
/// virtual register used at OperIdx is mapped to an SUnit. Adds a register
/// antidependency from this SUnit to instructions that occur later in the same
/// scheduling region if they write the virtual register.
///
/// TODO: Handle ExitSU "uses" properly.
void ScheduleDAGInstrs::addVRegUseDeps(SUnit *SU, unsigned OperIdx) {
  const MachineInstr *MI = SU->getInstr();
  const MachineOperand &MO = MI->getOperand(OperIdx);
  unsigned Reg = MO.getReg();

  // Remember the use. Data dependencies will be added when we find the def.
  LaneBitmask LaneMask = TrackLaneMasks ? getLaneMaskForMO(MO)
                                        : LaneBitmask::getAll();
  CurrentVRegUses.insert(VReg2SUnitOperIdx(Reg, LaneMask, OperIdx, SU));

  // Add antidependences to the following defs of the vreg.
  for (VReg2SUnit &V2SU : make_range(CurrentVRegDefs.find(Reg),
                                     CurrentVRegDefs.end())) {
    // Ignore defs for unrelated lanes.
    LaneBitmask PrevDefLaneMask = V2SU.LaneMask;
    if ((PrevDefLaneMask & LaneMask).none())
      continue;
    if (V2SU.SU == SU)
      continue;

    V2SU.SU->addPred(SDep(SU, SDep::Anti, Reg));
  }
}

/// Returns true if MI is an instruction we are unable to reason about
/// (like a call or something with unmodeled side effects).
static inline bool isGlobalMemoryObject(AliasAnalysis *AA, MachineInstr *MI) {
  return MI->isCall() || MI->hasUnmodeledSideEffects() ||
         (MI->hasOrderedMemoryRef() && !MI->isDereferenceableInvariantLoad(AA));
}

void ScheduleDAGInstrs::addChainDependency(SUnit *SUa, SUnit *SUb,
                                           unsigned Latency) {
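  // Only add an edge when the two instructions' memory accesses may alias;
  // SUb then gets an order (MayAliasMem) dependence on SUa.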
  if (SUa->getInstr()->mayAlias(AAForDep, *SUb->getInstr(), UseTBAA)) {
    SDep Dep(SUa, SDep::MayAliasMem);
    Dep.setLatency(Latency);
    SUb->addPred(Dep);
  }
}

/// \brief Creates an SUnit for each real instruction, numbered in top-down
/// topological order. The instruction order A < B implies that no edge exists
/// from B to A.
///
/// Map each real instruction to its SUnit.
///
/// After initSUnits, the SUnits vector cannot be resized and the scheduler may
/// hang onto SUnit pointers. We may relax this in the future by using SUnit IDs
/// instead of pointers.
///
/// MachineScheduler relies on initSUnits numbering the nodes by their order in
/// the original instruction list.
void ScheduleDAGInstrs::initSUnits() {
  // We'll be allocating one SUnit for each real instruction in the region,
  // which is contained within a basic block.
  SUnits.reserve(NumRegionInstrs);

  for (MachineInstr &MI : llvm::make_range(RegionBegin, RegionEnd)) {
    if (MI.isDebugValue())
      continue;

    SUnit *SU = newSUnit(&MI);
    MISUnitMap[&MI] = SU;

    SU->isCall = MI.isCall();
    SU->isCommutable = MI.isCommutable();

    // Assign the Latency field of SU using target-provided information.
    SU->Latency = SchedModel.computeInstrLatency(SU->getInstr());

    // If this SUnit uses a reserved or unbuffered resource, mark it as such.
    //
    // Reserved resources block an instruction from issuing and stall the
    // entire pipeline. These are identified by BufferSize=0.
    //
    // Unbuffered resources prevent execution of subsequent instructions that
    // require the same resources. This is used for in-order execution pipelines
    // within an out-of-order core. These are identified by BufferSize=1.
    if (SchedModel.hasInstrSchedModel()) {
      const MCSchedClassDesc *SC = getSchedClass(SU);
      for (const MCWriteProcResEntry &PRE :
           make_range(SchedModel.getWriteProcResBegin(SC),
                      SchedModel.getWriteProcResEnd(SC))) {
        switch (SchedModel.getProcResource(PRE.ProcResourceIdx)->BufferSize) {
        case 0:
          SU->hasReservedResource = true;
          break;
        case 1:
          SU->isUnbuffered = true;
          break;
        default:
          break;
        }
      }
    }
  }
}

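/// A map from a memory-access value (an IR Value or a PseudoSourceValue) to
/// the list of SUnits that access it, used while building the memory
/// dependencies of the scheduling DAG.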
class ScheduleDAGInstrs::Value2SUsMap : public MapVector<ValueType, SUList> {
  /// Current total number of SUs in map.
  unsigned NumNodes;

  /// 1 for loads, 0 for stores. (see comment in SUList)
  unsigned TrueMemOrderLatency;

public:
  Value2SUsMap(unsigned lat = 0) : NumNodes(0), TrueMemOrderLatency(lat) {}

  /// To keep NumNodes up to date, insert() is used instead of
  /// this operator w/ push_back().
  ValueType &operator[](const SUList &Key) {
    llvm_unreachable("Don't use. Use insert() instead."); };

  /// Adds SU to the SUList of V. If Map grows huge, reduce its size by calling
  /// reduce().
  void inline insert(SUnit *SU, ValueType V) {
    MapVector::operator[](V).push_back(SU);
    NumNodes++;
  }

  /// Clears the list of SUs mapped to V.
  void inline clearList(ValueType V) {
    iterator Itr = find(V);
    if (Itr != end()) {
      assert (NumNodes >= Itr->second.size());
      NumNodes -= Itr->second.size();

      Itr->second.clear();
    }
  }

  /// Clears map from all contents.
  void clear() {
    MapVector<ValueType, SUList>::clear();
    NumNodes = 0;
  }

  unsigned inline size() const { return NumNodes; }

  /// Counts the number of SUs in this map after a reduction.
  void reComputeSize(void) {
    NumNodes = 0;
    for (auto &I : *this)
      NumNodes += I.second.size();
  }

  unsigned inline getTrueMemOrderLatency() const {
    return TrueMemOrderLatency;
  }

  void dump();
};

void ScheduleDAGInstrs::addChainDependencies(SUnit *SU,
                                             Value2SUsMap &Val2SUsMap) {
  for (auto &I : Val2SUsMap)
    addChainDependencies(SU, I.second,
                         Val2SUsMap.getTrueMemOrderLatency());
}

void ScheduleDAGInstrs::addChainDependencies(SUnit *SU,
                                             Value2SUsMap &Val2SUsMap,
                                             ValueType V) {
  Value2SUsMap::iterator Itr = Val2SUsMap.find(V);
  if (Itr != Val2SUsMap.end())
    addChainDependencies(SU, Itr->second,
                         Val2SUsMap.getTrueMemOrderLatency());
}

void ScheduleDAGInstrs::addBarrierChain(Value2SUsMap &map) {
  assert (BarrierChain != nullptr);

  for (auto &I : map) {
    SUList &sus = I.second;
    for (auto *SU : sus)
      SU->addPredBarrier(BarrierChain);
  }
  map.clear();
}

void ScheduleDAGInstrs::insertBarrierChain(Value2SUsMap &map) {
  assert (BarrierChain != nullptr);

  // Go through all lists of SUs.
  for (Value2SUsMap::iterator I = map.begin(), EE = map.end(); I != EE;) {
    Value2SUsMap::iterator CurrItr = I++;
    SUList &sus = CurrItr->second;
    SUList::iterator SUItr = sus.begin(), SUEE = sus.end();
    for (; SUItr != SUEE; ++SUItr) {
      // Stop on BarrierChain or any instruction above it.
      if ((*SUItr)->NodeNum <= BarrierChain->NodeNum)
        break;

      (*SUItr)->addPredBarrier(BarrierChain);
    }

    // Remove also the BarrierChain from list if present.
    if (SUItr != SUEE && *SUItr == BarrierChain)
      SUItr++;

    // Remove all SUs that are now successors of BarrierChain.
    if (SUItr != sus.begin())
      sus.erase(sus.begin(), SUItr);
  }

  // Remove all entries with empty su lists.
  map.remove_if([&](std::pair<ValueType, SUList> &mapEntry) {
      return (mapEntry.second.empty()); });

  // Recompute the size of the map (NumNodes).
  map.reComputeSize();
}

void ScheduleDAGInstrs::buildSchedGraph(AliasAnalysis *AA,
                                        RegPressureTracker *RPTracker,
                                        PressureDiffs *PDiffs,
                                        LiveIntervals *LIS,
                                        bool TrackLaneMasks) {
  const TargetSubtargetInfo &ST = MF.getSubtarget();
  bool UseAA = EnableAASchedMI.getNumOccurrences() > 0 ? EnableAASchedMI
                                                       : ST.useAA();
  AAForDep = UseAA ? AA : nullptr;

  BarrierChain = nullptr;

  this->TrackLaneMasks = TrackLaneMasks;
  MISUnitMap.clear();
  ScheduleDAG::clearDAG();

  // Create an SUnit for each real instruction.
  initSUnits();

  if (PDiffs)
    PDiffs->init(SUnits.size());

  // We build scheduling units by walking a block's instruction list
  // from bottom to top.

  // Each MI's memory operand(s) is analyzed to a list of underlying
  // objects. The SU is then inserted in the SUList(s) mapped from the
  // Value(s). Each Value thus gets mapped to lists of SUs depending
  // on it, stores and loads kept separately. Two SUs are trivially
  // non-aliasing if they both depend on only identified Values and do
  // not share any common Value.
  Value2SUsMap Stores, Loads(1 /*TrueMemOrderLatency*/);

  // Certain memory accesses are known to not alias any SU in Stores
  // or Loads, and therefore have their own 'NonAlias'
  // domain. E.g. spill / reload instructions never alias LLVM IR
  // Values. It would be nice to assume that this type of memory
  // access always has proper memory operand modelling, and is
  // therefore never unanalyzable, but this is conservatively not
  // done.
  Value2SUsMap NonAliasStores, NonAliasLoads(1 /*TrueMemOrderLatency*/);

  // Remove any stale debug info; sometimes BuildSchedGraph is called again
  // without emitting the info from the previous call.
  DbgValues.clear();
  FirstDbgValue = nullptr;

  assert(Defs.empty() && Uses.empty() &&
         "Only BuildGraph should update Defs/Uses");
  Defs.setUniverse(TRI->getNumRegs());
  Uses.setUniverse(TRI->getNumRegs());

  assert(CurrentVRegDefs.empty() && "nobody else should use CurrentVRegDefs");
  assert(CurrentVRegUses.empty() && "nobody else should use CurrentVRegUses");
  unsigned NumVirtRegs = MRI.getNumVirtRegs();
  CurrentVRegDefs.setUniverse(NumVirtRegs);
  CurrentVRegUses.setUniverse(NumVirtRegs);

  // Model data dependencies between instructions being scheduled and the
  // ExitSU.
  addSchedBarrierDeps();

  // Walk the list of instructions, from bottom moving up.
  MachineInstr *DbgMI = nullptr;
  for (MachineBasicBlock::iterator MII = RegionEnd, MIE = RegionBegin;
       MII != MIE; --MII) {
    MachineInstr &MI = *std::prev(MII);
    if (DbgMI) {
      DbgValues.push_back(std::make_pair(DbgMI, &MI));
      DbgMI = nullptr;
    }

    if (MI.isDebugValue()) {
      DbgMI = &MI;
      continue;
    }
    SUnit *SU = MISUnitMap[&MI];
    assert(SU && "No SUnit mapped to this MI");

    if (RPTracker) {
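      // Update register pressure bottom-up as the DAG is built; when lane
      // masks are tracked, subregister liveness is refined via LiveIntervals.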
Matthias Braun | b505c76 | 2016-01-12 22:57:35 +0000 | [diff] [blame] | 802 | RegisterOperands RegOpers; |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 803 | RegOpers.collect(MI, *TRI, MRI, TrackLaneMasks, false); |
Matthias Braun | d4f6409 | 2016-01-20 00:23:32 +0000 | [diff] [blame] | 804 | if (TrackLaneMasks) { |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 805 | SlotIndex SlotIdx = LIS->getInstructionIndex(MI); |
Matthias Braun | d4f6409 | 2016-01-20 00:23:32 +0000 | [diff] [blame] | 806 | RegOpers.adjustLaneLiveness(*LIS, MRI, SlotIdx); |
| 807 | } |
Matthias Braun | b505c76 | 2016-01-12 22:57:35 +0000 | [diff] [blame] | 808 | if (PDiffs != nullptr) |
| 809 | PDiffs->addInstruction(SU->NodeNum, RegOpers, MRI); |
| 810 | |
| 811 | RPTracker->recedeSkipDebugValues(); |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 812 | assert(&*RPTracker->getPos() == &MI && "RPTracker in sync"); |
Matthias Braun | b505c76 | 2016-01-12 22:57:35 +0000 | [diff] [blame] | 813 | RPTracker->recede(RegOpers); |
Andrew Trick | 8863992 | 2012-04-24 17:56:43 +0000 | [diff] [blame] | 814 | } |
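| | // Note: RegOpers is collected once per instruction and reused both for
| | // the optional pressure diffs (PDiffs) and for receding the pressure
| | // tracker.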
Devang Patel | e5feef0 | 2011-06-02 20:07:12 +0000 | [diff] [blame] | 815 | |
Rafael Espindola | b1f25f1 | 2014-03-07 06:08:31 +0000 | [diff] [blame] | 816 | assert( |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 817 | (CanHandleTerminators || (!MI.isTerminator() && !MI.isPosition())) && |
Rafael Espindola | b1f25f1 | 2014-03-07 06:08:31 +0000 | [diff] [blame] | 818 | "Cannot schedule terminators or labels!"); |
Dan Gohman | 60cb69e | 2008-11-19 23:18:57 +0000 | [diff] [blame] | 819 | |
Dan Gohman | 3aab10b | 2008-12-04 01:35:46 +0000 | [diff] [blame] | 820 | // Add register-based dependencies (data, anti, and output). |
Krzysztof Parzyszek | a356bb7 | 2016-05-10 16:50:30 +0000 | [diff] [blame] | 821 | // For some instructions (calls, returns, inline-asm, etc.) there can |
| 822 | // be explicit uses and implicit defs, in which case the use will appear |
| 823 | // on the operand list before the def. Do two passes over the operand |
| 824 | // list to make sure that defs are processed before any uses. |
Andrew Trick | ec25648 | 2012-12-18 20:53:01 +0000 | [diff] [blame] | 825 | bool HasVRegDef = false; |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 826 | for (unsigned j = 0, n = MI.getNumOperands(); j != n; ++j) { |
| 827 | const MachineOperand &MO = MI.getOperand(j); |
Krzysztof Parzyszek | a356bb7 | 2016-05-10 16:50:30 +0000 | [diff] [blame] | 828 | if (!MO.isReg() || !MO.isDef()) |
| 829 | continue; |
Dan Gohman | 60cb69e | 2008-11-19 23:18:57 +0000 | [diff] [blame] | 830 | unsigned Reg = MO.getReg(); |
Matthias Braun | 111603f | 2016-11-10 22:11:00 +0000 | [diff] [blame] | 831 | if (TargetRegisterInfo::isPhysicalRegister(Reg)) { |
Andrew Trick | dbee9d8 | 2012-01-14 02:17:15 +0000 | [diff] [blame] | 832 | addPhysRegDeps(SU, j); |
Matthias Braun | 111603f | 2016-11-10 22:11:00 +0000 | [diff] [blame] | 833 | } else if (TargetRegisterInfo::isVirtualRegister(Reg)) { |
Krzysztof Parzyszek | a356bb7 | 2016-05-10 16:50:30 +0000 | [diff] [blame] | 834 | HasVRegDef = true; |
| 835 | addVRegDefDeps(SU, j); |
Dan Gohman | 60cb69e | 2008-11-19 23:18:57 +0000 | [diff] [blame] | 836 | } |
| 837 | } |
Krzysztof Parzyszek | a356bb7 | 2016-05-10 16:50:30 +0000 | [diff] [blame] | 838 | // Now process all uses. |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 839 | for (unsigned j = 0, n = MI.getNumOperands(); j != n; ++j) { |
| 840 | const MachineOperand &MO = MI.getOperand(j); |
Matthias Braun | 8a5b467 | 2016-05-10 20:11:58 +0000 | [diff] [blame] | 841 | // Only look at use operands. |
| 842 | // We do not need to check for MO.readsReg() here because subsequent |
| 843 | // subregister defs will get output dependence edges and need no |
| 844 | // additional use dependencies. |
Krzysztof Parzyszek | a356bb7 | 2016-05-10 16:50:30 +0000 | [diff] [blame] | 845 | if (!MO.isReg() || !MO.isUse()) |
| 846 | continue; |
| 847 | unsigned Reg = MO.getReg(); |
Matthias Braun | 111603f | 2016-11-10 22:11:00 +0000 | [diff] [blame] | 848 | if (TargetRegisterInfo::isPhysicalRegister(Reg)) { |
Krzysztof Parzyszek | a356bb7 | 2016-05-10 16:50:30 +0000 | [diff] [blame] | 849 | addPhysRegDeps(SU, j); |
Matthias Braun | 111603f | 2016-11-10 22:11:00 +0000 | [diff] [blame] | 850 | } else if (TargetRegisterInfo::isVirtualRegister(Reg) && MO.readsReg()) { |
Krzysztof Parzyszek | a356bb7 | 2016-05-10 16:50:30 +0000 | [diff] [blame] | 851 | addVRegUseDeps(SU, j); |
Matthias Braun | 111603f | 2016-11-10 22:11:00 +0000 | [diff] [blame] | 852 | } |
Krzysztof Parzyszek | a356bb7 | 2016-05-10 16:50:30 +0000 | [diff] [blame] | 853 | } |
| 854 | |
Andrew Trick | ec25648 | 2012-12-18 20:53:01 +0000 | [diff] [blame] | 855 | // If we haven't seen any uses in this scheduling region, create a |
| 856 | // dependence edge to ExitSU to model the live-out latency. This is required |
| 857 | // for vreg defs with no in-region use, and prefetches with no vreg def. |
| 858 | // |
| 859 | // FIXME: NumDataSuccs would be more precise than NumSuccs here. This |
| 860 | // check currently relies on being called before adding chain deps. |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 861 | if (SU->NumSuccs == 0 && SU->Latency > 1 && (HasVRegDef || MI.mayLoad())) { |
Andrew Trick | ec25648 | 2012-12-18 20:53:01 +0000 | [diff] [blame] | 862 | SDep Dep(SU, SDep::Artificial); |
| 863 | Dep.setLatency(SU->Latency - 1); |
| 864 | ExitSU.addPred(Dep); |
| 865 | } |
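| | // For example (illustrative): a vreg def with Latency 3 and no use in
| | // this region gets an artificial edge to ExitSU with latency 2 here,
| | // so most of its live-out latency stays visible to the scheduler.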
Dan Gohman | 3aab10b | 2008-12-04 01:35:46 +0000 | [diff] [blame] | 866 | |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 867 | // Add memory dependencies (Note: isStoreToStackSlot and |
| 868 | // isLoadFromStackSlot are not usable after stack slots are lowered to
| 869 | // actual addresses). |
| 870 | |
| 871 | // This is a barrier event that acts as a pivotal node in the DAG. |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 872 | if (isGlobalMemoryObject(AA, &MI)) { |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 873 | |
| 874 | // Become the barrier chain. |
David Goodwin | d2f9c04 | 2009-11-09 19:22:17 +0000 | [diff] [blame] | 875 | if (BarrierChain) |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 876 | BarrierChain->addPredBarrier(SU); |
David Goodwin | d2f9c04 | 2009-11-09 19:22:17 +0000 | [diff] [blame] | 877 | BarrierChain = SU; |
| 878 | |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 879 | DEBUG(dbgs() << "Global memory object and new barrier chain: SU(" |
| 880 | << BarrierChain->NodeNum << ").\n";); |
Tom Stellard | 3e01d47 | 2014-12-08 23:36:48 +0000 | [diff] [blame] | 881 | |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 882 | // Add dependencies against everything below it and clear maps. |
| 883 | addBarrierChain(Stores); |
| 884 | addBarrierChain(Loads); |
| 885 | addBarrierChain(NonAliasStores); |
| 886 | addBarrierChain(NonAliasLoads); |
Hal Finkel | 66859ae | 2012-12-10 18:49:16 +0000 | [diff] [blame] | 887 | |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 888 | continue; |
| 889 | } |
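| | // For example, a call or a volatile memory access is typically reported
| | // as a global memory object; any store or non-invariant load encountered
| | // later in this bottom-up walk (i.e. above the barrier in the block)
| | // then becomes a predecessor of this barrier SU via addPredBarrier.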
| 890 | |
| 891 | // If it's not a store or a non-invariant load, we're done.
Justin Lebar | d98cf00 | 2016-09-10 01:03:20 +0000 | [diff] [blame] | 892 | if (!MI.mayStore() && |
| 893 | !(MI.mayLoad() && !MI.isDereferenceableInvariantLoad(AA))) |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 894 | continue; |
| 895 | |
| 896 | // Always add a dependency edge to BarrierChain if present.
| 897 | if (BarrierChain) |
| 898 | BarrierChain->addPredBarrier(SU); |
| 899 | |
| 900 | // Find the underlying objects for MI. The Objs vector is either |
| 901 | // empty, or filled with the Values of memory locations which this |
| 902 | // SU depends on. An empty vector means the memory location is |
Jonas Paulsson | 98963fe | 2016-02-15 16:43:15 +0000 | [diff] [blame] | 903 | // unknown, and may alias anything. |
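| | // As a rough illustration: two stores to the same alloca map to the
| | // same Value and get a chain dependency between them below, while a
| | // load from an unrelated object only consults the entries mapped to its
| | // own Value plus the unanalyzable (UnknownValue) stores.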
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 904 | UnderlyingObjectsVector Objs; |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 905 | getUnderlyingObjectsForInstr(&MI, MFI, Objs, MF.getDataLayout()); |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 906 | |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 907 | if (MI.mayStore()) { |
Hal Finkel | 66859ae | 2012-12-10 18:49:16 +0000 | [diff] [blame] | 908 | if (Objs.empty()) { |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 909 | // An unknown store depends on all stores and loads. |
| 910 | addChainDependencies(SU, Stores); |
| 911 | addChainDependencies(SU, NonAliasStores); |
| 912 | addChainDependencies(SU, Loads); |
| 913 | addChainDependencies(SU, NonAliasLoads); |
| 914 | |
| 915 | // Map this store to 'UnknownValue'. |
| 916 | Stores.insert(SU, UnknownValue); |
Chandler Carruth | b472856 | 2016-03-31 21:55:58 +0000 | [diff] [blame] | 917 | } else { |
| 918 | // Add precise dependencies against all previously seen memory |
| 919 | // accesses mapped to the same Value(s). |
Geoff Berry | 6381713 | 2016-04-14 21:31:07 +0000 | [diff] [blame] | 920 | for (const UnderlyingObject &UnderlObj : Objs) { |
| 921 | ValueType V = UnderlObj.getValue(); |
| 922 | bool ThisMayAlias = UnderlObj.mayAlias(); |
Chandler Carruth | b472856 | 2016-03-31 21:55:58 +0000 | [diff] [blame] | 923 | |
| 924 | // Add dependencies to previous stores and loads mapped to V. |
Geoff Berry | 6381713 | 2016-04-14 21:31:07 +0000 | [diff] [blame] | 925 | addChainDependencies(SU, (ThisMayAlias ? Stores : NonAliasStores), V); |
Chandler Carruth | b472856 | 2016-03-31 21:55:58 +0000 | [diff] [blame] | 926 | addChainDependencies(SU, (ThisMayAlias ? Loads : NonAliasLoads), V); |
Geoff Berry | c0739d8 | 2016-04-12 15:50:19 +0000 | [diff] [blame] | 927 | } |
| 928 | // Update the store map after all chains have been added to avoid adding |
| 929 | // a self-loop edge if multiple underlying objects are present.
Geoff Berry | 6381713 | 2016-04-14 21:31:07 +0000 | [diff] [blame] | 930 | for (const UnderlyingObject &UnderlObj : Objs) { |
| 931 | ValueType V = UnderlObj.getValue(); |
| 932 | bool ThisMayAlias = UnderlObj.mayAlias(); |
Chandler Carruth | b472856 | 2016-03-31 21:55:58 +0000 | [diff] [blame] | 933 | |
| 934 | // Map this store to V. |
Geoff Berry | 6381713 | 2016-04-14 21:31:07 +0000 | [diff] [blame] | 935 | (ThisMayAlias ? Stores : NonAliasStores).insert(SU, V); |
Chandler Carruth | b472856 | 2016-03-31 21:55:58 +0000 | [diff] [blame] | 936 | } |
| 937 | // The store may have dependencies to unanalyzable loads and |
| 938 | // stores. |
| 939 | addChainDependencies(SU, Loads, UnknownValue); |
| 940 | addChainDependencies(SU, Stores, UnknownValue); |
Hal Finkel | 66859ae | 2012-12-10 18:49:16 +0000 | [diff] [blame] | 941 | } |
Chandler Carruth | b472856 | 2016-03-31 21:55:58 +0000 | [diff] [blame] | 942 | } else { // SU is a load. |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 943 | if (Objs.empty()) { |
| 944 | // An unknown load depends on all stores. |
| 945 | addChainDependencies(SU, Stores); |
| 946 | addChainDependencies(SU, NonAliasStores); |
| 947 | |
| 948 | Loads.insert(SU, UnknownValue); |
Chandler Carruth | b472856 | 2016-03-31 21:55:58 +0000 | [diff] [blame] | 949 | } else { |
Geoff Berry | 6381713 | 2016-04-14 21:31:07 +0000 | [diff] [blame] | 950 | for (const UnderlyingObject &UnderlObj : Objs) { |
| 951 | ValueType V = UnderlObj.getValue(); |
| 952 | bool ThisMayAlias = UnderlObj.mayAlias(); |
Chandler Carruth | b472856 | 2016-03-31 21:55:58 +0000 | [diff] [blame] | 953 | |
| 954 | // Add precise dependencies against all previously seen stores |
| 955 | // mapping to the same Value(s). |
| 956 | addChainDependencies(SU, (ThisMayAlias ? Stores : NonAliasStores), V); |
| 957 | |
| 958 | // Map this load to V. |
| 959 | (ThisMayAlias ? Loads : NonAliasLoads).insert(SU, V); |
| 960 | } |
| 961 | // The load may have dependencies to unanalyzable stores. |
| 962 | addChainDependencies(SU, Stores, UnknownValue); |
Hal Finkel | 66859ae | 2012-12-10 18:49:16 +0000 | [diff] [blame] | 963 | } |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 964 | } |
| 965 | |
| 966 | // Reduce maps if they grow huge. |
| 967 | if (Stores.size() + Loads.size() >= HugeRegion) { |
| 968 | DEBUG(dbgs() << "Reducing Stores and Loads maps.\n";); |
Mehdi Amini | 59ae854 | 2016-04-16 04:58:30 +0000 | [diff] [blame] | 969 | reduceHugeMemNodeMaps(Stores, Loads, getReductionSize()); |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 970 | } |
| 971 | if (NonAliasStores.size() + NonAliasLoads.size() >= HugeRegion) { |
| 972 | DEBUG(dbgs() << "Reducing NonAliasStores and NonAliasLoads maps.\n";); |
Mehdi Amini | 59ae854 | 2016-04-16 04:58:30 +0000 | [diff] [blame] | 973 | reduceHugeMemNodeMaps(NonAliasStores, NonAliasLoads, getReductionSize()); |
Dan Gohman | 60cb69e | 2008-11-19 23:18:57 +0000 | [diff] [blame] | 974 | } |
Dan Gohman | 60cb69e | 2008-11-19 23:18:57 +0000 | [diff] [blame] | 975 | } |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 976 | |
Andrew Trick | b767d1e | 2012-12-01 01:22:49 +0000 | [diff] [blame] | 977 | if (DbgMI) |
| 978 | FirstDbgValue = DbgMI; |
Dan Gohman | 619ef48 | 2009-01-15 19:20:50 +0000 | [diff] [blame] | 979 | |
Andrew Trick | d675a4c | 2012-02-23 01:52:38 +0000 | [diff] [blame] | 980 | Defs.clear(); |
| 981 | Uses.clear(); |
Matthias Braun | 97d0ffb | 2015-12-04 01:51:19 +0000 | [diff] [blame] | 982 | CurrentVRegDefs.clear(); |
| 983 | CurrentVRegUses.clear(); |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 984 | } |
| 985 | |
| 986 | raw_ostream &llvm::operator<<(raw_ostream &OS, const PseudoSourceValue* PSV) { |
| 987 | PSV->printCustom(OS); |
| 988 | return OS; |
| 989 | } |
| 990 | |
| 991 | void ScheduleDAGInstrs::Value2SUsMap::dump() { |
| 992 | for (auto &Itr : *this) { |
| 993 | if (Itr.first.is<const Value*>()) { |
| 994 | const Value *V = Itr.first.get<const Value*>(); |
| 995 | if (isa<UndefValue>(V)) |
| 996 | dbgs() << "Unknown"; |
| 997 | else |
| 998 | V->printAsOperand(dbgs()); |
| 999 | } |
| 1000 | else if (Itr.first.is<const PseudoSourceValue*>()) |
| 1001 | dbgs() << Itr.first.get<const PseudoSourceValue*>(); |
| 1002 | else |
| 1003 | llvm_unreachable("Unknown Value type."); |
| 1004 | |
| 1005 | dbgs() << " : "; |
| 1006 | dumpSUList(Itr.second); |
| 1007 | } |
| 1008 | } |
| 1009 | |
Jonas Paulsson | ac29f01 | 2016-02-03 17:52:29 +0000 | [diff] [blame] | 1010 | void ScheduleDAGInstrs::reduceHugeMemNodeMaps(Value2SUsMap &stores, |
| 1011 | Value2SUsMap &loads, unsigned N) { |
| 1012 | DEBUG(dbgs() << "Before reduction:\nStoring SUnits:\n"; |
| 1013 | stores.dump(); |
| 1014 | dbgs() << "Loading SUnits:\n"; |
| 1015 | loads.dump()); |
| 1016 | |
| 1017 | // Insert the NodeNums of all SUs into a vector and sort it.
| 1018 | std::vector<unsigned> NodeNums; |
| 1019 | NodeNums.reserve(stores.size() + loads.size()); |
| 1020 | for (auto &I : stores) |
| 1021 | for (auto *SU : I.second) |
| 1022 | NodeNums.push_back(SU->NodeNum); |
| 1023 | for (auto &I : loads) |
| 1024 | for (auto *SU : I.second) |
| 1025 | NodeNums.push_back(SU->NodeNum); |
| 1026 | std::sort(NodeNums.begin(), NodeNums.end()); |
| 1027 | |
| 1028 | // The last N elements in NodeNums will be removed, and the SU with
| 1029 | // the lowest NodeNum among them will become the new BarrierChain so
| 1030 | // that SUs not yet seen get a dependency to the removed SUs.
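| | // For example, with NodeNums == {3, 5, 7, 9, 12} and N == 2, SU(9) and
| | // SU(12) are dropped from the maps and SU(9) becomes the candidate for
| | // the new BarrierChain (subject to the check against the old one below).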
| 1031 | assert(N <= NodeNums.size());
| 1032 | SUnit *newBarrierChain = &SUnits[*(NodeNums.end() - N)]; |
| 1033 | if (BarrierChain) { |
| 1034 | // The aliasing and non-aliasing maps reduce independently of each |
| 1035 | // other, but share a common BarrierChain. Check if the |
| 1036 | // newBarrierChain is above the former one. If it is not, using
| 1037 | // newBarrierChain could introduce a cycle, so keep the old one.
| 1038 | if (newBarrierChain->NodeNum < BarrierChain->NodeNum) { |
| 1039 | BarrierChain->addPredBarrier(newBarrierChain); |
| 1040 | BarrierChain = newBarrierChain; |
| 1041 | DEBUG(dbgs() << "Inserting new barrier chain: SU(" |
| 1042 | << BarrierChain->NodeNum << ").\n";); |
| 1043 | } |
| 1044 | else |
| 1045 | DEBUG(dbgs() << "Keeping old barrier chain: SU(" |
| 1046 | << BarrierChain->NodeNum << ").\n";); |
| 1047 | } |
| 1048 | else |
| 1049 | BarrierChain = newBarrierChain; |
| 1050 | |
| 1051 | insertBarrierChain(stores); |
| 1052 | insertBarrierChain(loads); |
| 1053 | |
| 1054 | DEBUG(dbgs() << "After reduction:\nStoring SUnits:\n"; |
| 1055 | stores.dump(); |
| 1056 | dbgs() << "Loading SUnits:\n"; |
| 1057 | loads.dump()); |
Dan Gohman | 60cb69e | 2008-11-19 23:18:57 +0000 | [diff] [blame] | 1058 | } |
| 1059 | |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1060 | void ScheduleDAGInstrs::startBlockForKills(MachineBasicBlock *BB) { |
| 1061 | // Start with no live registers. |
| 1062 | LiveRegs.reset(); |
| 1063 | |
| 1064 | // Examine the live-in regs of all successors. |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1065 | for (const MachineBasicBlock *Succ : BB->successors()) { |
| 1066 | for (const auto &LI : Succ->liveins()) { |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1067 | // Repeat, for reg and all subregs. |
Matthias Braun | d9da162 | 2015-09-09 18:08:03 +0000 | [diff] [blame] | 1068 | for (MCSubRegIterator SubRegs(LI.PhysReg, TRI, /*IncludeSelf=*/true); |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1069 | SubRegs.isValid(); ++SubRegs) |
| 1070 | LiveRegs.set(*SubRegs); |
| 1071 | } |
| 1072 | } |
| 1073 | } |
| 1074 | |
Pete Cooper | 300069a | 2015-05-04 16:52:06 +0000 | [diff] [blame] | 1075 | /// \brief If we change a kill flag on the bundle instruction implicit register |
| 1076 | /// operands, then we also need to propagate that to any instructions inside |
| 1077 | /// the bundle which had the same kill state. |
| 1078 | static void toggleBundleKillFlag(MachineInstr *MI, unsigned Reg, |
Mandeep Singh Grang | e5a2f11 | 2016-05-10 17:57:27 +0000 | [diff] [blame] | 1079 | bool NewKillState, |
| 1080 | const TargetRegisterInfo *TRI) { |
Pete Cooper | 300069a | 2015-05-04 16:52:06 +0000 | [diff] [blame] | 1081 | if (MI->getOpcode() != TargetOpcode::BUNDLE) |
| 1082 | return; |
| 1083 | |
| 1084 | // Walk backwards from the last instruction in the bundle to the first. |
| 1085 | // Once we set a kill flag on an instruction, we bail out, as otherwise we |
| 1086 | // might set it on too many operands. We will clear as many flags as we |
| 1087 | // can though. |
Duncan P. N. Exon Smith | c5b668d | 2016-02-22 20:49:58 +0000 | [diff] [blame] | 1088 | MachineBasicBlock::instr_iterator Begin = MI->getIterator(); |
Matthias Braun | c8440dd | 2016-10-25 02:55:17 +0000 | [diff] [blame] | 1089 | MachineBasicBlock::instr_iterator End = getBundleEnd(Begin); |
Pete Cooper | 300069a | 2015-05-04 16:52:06 +0000 | [diff] [blame] | 1090 | while (Begin != End) { |
Mandeep Singh Grang | e5a2f11 | 2016-05-10 17:57:27 +0000 | [diff] [blame] | 1091 | if (NewKillState) { |
| 1092 | if ((--End)->addRegisterKilled(Reg, TRI, /* addIfNotFound= */ false)) |
| 1093 | return; |
| 1094 | } else |
Matthias Braun | 26e8c35 | 2017-01-27 18:53:05 +0000 | [diff] [blame] | 1095 | (--End)->clearRegisterKills(Reg, TRI); |
Pete Cooper | 300069a | 2015-05-04 16:52:06 +0000 | [diff] [blame] | 1096 | } |
| 1097 | } |
| 1098 | |
Matthias Braun | 26e8c35 | 2017-01-27 18:53:05 +0000 | [diff] [blame] | 1099 | void ScheduleDAGInstrs::toggleKillFlag(MachineInstr &MI, MachineOperand &MO) { |
Matthias Braun | c91e28a | 2017-01-27 18:53:07 +0000 | [diff] [blame] | 1100 | if (MO.isDebug()) |
| 1101 | return; |
| 1102 | |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1103 | // Setting kill flag... |
| 1104 | if (!MO.isKill()) { |
| 1105 | MO.setIsKill(true); |
Matthias Braun | 26e8c35 | 2017-01-27 18:53:05 +0000 | [diff] [blame] | 1106 | toggleBundleKillFlag(&MI, MO.getReg(), true, TRI); |
| 1107 | return; |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1108 | } |
| 1109 | |
| 1110 | // If MO itself is live, clear the kill flag... |
| 1111 | if (LiveRegs.test(MO.getReg())) { |
| 1112 | MO.setIsKill(false); |
Matthias Braun | 26e8c35 | 2017-01-27 18:53:05 +0000 | [diff] [blame] | 1113 | toggleBundleKillFlag(&MI, MO.getReg(), false, TRI); |
| 1114 | return; |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1115 | } |
| 1116 | |
| 1117 | // Create an imp-def for each live subreg of MO. MO is only marked
| 1118 | // as killed again below if all of its subregs are dead.
| 1119 | MO.setIsKill(false); |
Matthias Braun | 26e8c35 | 2017-01-27 18:53:05 +0000 | [diff] [blame] | 1120 | toggleBundleKillFlag(&MI, MO.getReg(), false, TRI); |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1121 | bool AllDead = true; |
| 1122 | const unsigned SuperReg = MO.getReg(); |
Matthias Braun | 26e8c35 | 2017-01-27 18:53:05 +0000 | [diff] [blame] | 1123 | MachineInstrBuilder MIB(MF, &MI); |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1124 | for (MCSubRegIterator SubRegs(SuperReg, TRI); SubRegs.isValid(); ++SubRegs) { |
| 1125 | if (LiveRegs.test(*SubRegs)) { |
| 1126 | MIB.addReg(*SubRegs, RegState::ImplicitDefine); |
| 1127 | AllDead = false; |
| 1128 | } |
| 1129 | } |
| 1130 | |
Pete Cooper | 300069a | 2015-05-04 16:52:06 +0000 | [diff] [blame] | 1131 | if (AllDead) {
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1132 | MO.setIsKill(true); |
Matthias Braun | 26e8c35 | 2017-01-27 18:53:05 +0000 | [diff] [blame] | 1133 | toggleBundleKillFlag(&MI, MO.getReg(), true, TRI); |
Pete Cooper | 300069a | 2015-05-04 16:52:06 +0000 | [diff] [blame] | 1134 | } |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1135 | } |
| 1136 | |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1137 | void ScheduleDAGInstrs::fixupKills(MachineBasicBlock *MBB) { |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1138 | // FIXME: Reuse the LivePhysRegs utility for this. |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1139 | DEBUG(dbgs() << "Fixup kills for BB#" << MBB->getNumber() << '\n'); |
| 1140 | |
| 1141 | LiveRegs.resize(TRI->getNumRegs()); |
| 1142 | BitVector killedRegs(TRI->getNumRegs()); |
| 1143 | |
| 1144 | startBlockForKills(MBB); |
| 1145 | |
| 1146 | // Examine block from end to start... |
| 1147 | unsigned Count = MBB->size(); |
| 1148 | for (MachineBasicBlock::iterator I = MBB->end(), E = MBB->begin(); |
| 1149 | I != E; --Count) { |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 1150 | MachineInstr &MI = *--I; |
| 1151 | if (MI.isDebugValue()) |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1152 | continue; |
| 1153 | |
| 1154 | // Update liveness. Registers that are defined but not used in this
| 1155 | // instruction are now dead: clear the register and all of its subregs
| 1156 | // from LiveRegs, since they are completely defined here.
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 1157 | for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) { |
| 1158 | MachineOperand &MO = MI.getOperand(i); |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1159 | if (MO.isRegMask()) |
| 1160 | LiveRegs.clearBitsNotInMask(MO.getRegMask()); |
| 1161 | if (!MO.isReg()) continue; |
| 1162 | unsigned Reg = MO.getReg(); |
| 1163 | if (Reg == 0) continue; |
| 1164 | if (!MO.isDef()) continue; |
| 1165 | // Ignore two-addr defs. |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 1166 | if (MI.isRegTiedToUseOperand(i)) continue; |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1167 | |
| 1168 | // Repeat for reg and all subregs. |
| 1169 | for (MCSubRegIterator SubRegs(Reg, TRI, /*IncludeSelf=*/true); |
| 1170 | SubRegs.isValid(); ++SubRegs) |
| 1171 | LiveRegs.reset(*SubRegs); |
| 1172 | } |
| 1173 | |
| 1174 | // Examine all used registers and set/clear kill flag. When a |
| 1175 | // register is used multiple times we only set the kill flag on |
| 1176 | // the first use. Don't set kill flags on undef operands. |
| 1177 | killedRegs.reset(); |
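| | // killedRegs tracks the registers already handled within this
| | // instruction's operand list, so only the first use operand of a
| | // register can receive the kill flag.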
Krzysztof Parzyszek | e7c72cd | 2016-10-05 13:15:06 +0000 | [diff] [blame] | 1178 | |
| 1179 | // toggleKillFlag can append new operands (implicit defs), so using |
| 1180 | // a range-based loop is not safe. The new operands will be appended |
| 1181 | // at the end of the operand list and they don't need to be visited, |
| 1182 | // so iterating until the currently last operand is ok. |
| 1183 | for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) { |
| 1184 | MachineOperand &MO = MI.getOperand(i); |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1185 | if (!MO.isReg() || !MO.isUse() || MO.isUndef()) continue; |
| 1186 | unsigned Reg = MO.getReg(); |
| 1187 | if ((Reg == 0) || MRI.isReserved(Reg)) continue; |
| 1188 | |
| 1189 | bool kill = false; |
| 1190 | if (!killedRegs.test(Reg)) { |
| 1191 | kill = true; |
| 1192 | // A register is not killed if any subregs are live... |
| 1193 | for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) { |
| 1194 | if (LiveRegs.test(*SubRegs)) { |
| 1195 | kill = false; |
| 1196 | break; |
| 1197 | } |
| 1198 | } |
| 1199 | |
| 1200 | // If no subreg is live, then the register is killed if it became
| 1201 | // live in this instruction.
| 1202 | if (kill) |
| 1203 | kill = !LiveRegs.test(Reg); |
| 1204 | } |
| 1205 | |
| 1206 | if (MO.isKill() != kill) { |
| 1207 | DEBUG(dbgs() << "Fixing " << MO << " in "); |
Matthias Braun | 26e8c35 | 2017-01-27 18:53:05 +0000 | [diff] [blame] | 1208 | toggleKillFlag(MI, MO); |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 1209 | DEBUG(MI.dump()); |
| 1210 | DEBUG({ |
| 1211 | if (MI.getOpcode() == TargetOpcode::BUNDLE) { |
| 1212 | MachineBasicBlock::instr_iterator Begin = MI.getIterator(); |
Matthias Braun | c8440dd | 2016-10-25 02:55:17 +0000 | [diff] [blame] | 1213 | MachineBasicBlock::instr_iterator End = getBundleEnd(Begin); |
Duncan P. N. Exon Smith | b77911b | 2016-07-01 16:21:48 +0000 | [diff] [blame] | 1214 | while (++Begin != End) |
| 1215 | DEBUG(Begin->dump()); |
| 1216 | } |
Pete Cooper | 300069a | 2015-05-04 16:52:06 +0000 | [diff] [blame] | 1217 | }); |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1218 | } |
| 1219 | |
| 1220 | killedRegs.set(Reg); |
| 1221 | } |
| 1222 | |
| 1223 | // Mark any used register (that is not undef) and its subregs as
| 1224 | // now live.
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1225 | for (const MachineOperand &MO : MI.operands()) { |
Andrew Trick | 6b104f8 | 2013-12-28 21:56:55 +0000 | [diff] [blame] | 1226 | if (!MO.isReg() || !MO.isUse() || MO.isUndef()) continue; |
| 1227 | unsigned Reg = MO.getReg(); |
| 1228 | if ((Reg == 0) || MRI.isReserved(Reg)) continue; |
| 1229 | |
| 1230 | for (MCSubRegIterator SubRegs(Reg, TRI, /*IncludeSelf=*/true); |
| 1231 | SubRegs.isValid(); ++SubRegs) |
| 1232 | LiveRegs.set(*SubRegs); |
| 1233 | } |
| 1234 | } |
| 1235 | } |
| 1236 | |
Dan Gohman | 60cb69e | 2008-11-19 23:18:57 +0000 | [diff] [blame] | 1237 | void ScheduleDAGInstrs::dumpNode(const SUnit *SU) const { |
Matthias Braun | 8c209aa | 2017-01-28 02:02:38 +0000 | [diff] [blame] | 1238 | // Cannot completely remove virtual function even in release mode. |
Manman Ren | 19f49ac | 2012-09-11 22:23:19 +0000 | [diff] [blame] | 1239 | #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) |
Dan Gohman | 60cb69e | 2008-11-19 23:18:57 +0000 | [diff] [blame] | 1240 | SU->getInstr()->dump(); |
Manman Ren | 742534c | 2012-09-06 19:06:06 +0000 | [diff] [blame] | 1241 | #endif |
Dan Gohman | 60cb69e | 2008-11-19 23:18:57 +0000 | [diff] [blame] | 1242 | } |
| 1243 | |
| 1244 | std::string ScheduleDAGInstrs::getGraphNodeLabel(const SUnit *SU) const { |
Alp Toker | e69170a | 2014-06-26 22:52:05 +0000 | [diff] [blame] | 1245 | std::string s; |
| 1246 | raw_string_ostream oss(s); |
Dan Gohman | b954343 | 2009-02-10 23:27:53 +0000 | [diff] [blame] | 1247 | if (SU == &EntrySU) |
| 1248 | oss << "<entry>"; |
| 1249 | else if (SU == &ExitSU) |
| 1250 | oss << "<exit>"; |
| 1251 | else |
Eric Christopher | 1cdefae | 2015-02-27 00:11:34 +0000 | [diff] [blame] | 1252 | SU->getInstr()->print(oss, /*SkipOpers=*/true); |
Dan Gohman | 60cb69e | 2008-11-19 23:18:57 +0000 | [diff] [blame] | 1253 | return oss.str(); |
| 1254 | } |
| 1255 | |
Andrew Trick | 1b2324d | 2012-03-07 00:18:22 +0000 | [diff] [blame] | 1256 | /// Return the basic block label. It is not necessarily unique because a block
| 1257 | /// contains multiple scheduling regions. But it is fine for visualization. |
| 1258 | std::string ScheduleDAGInstrs::getDAGName() const { |
| 1259 | return "dag." + BB->getFullName(); |
| 1260 | } |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1261 | |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1262 | //===----------------------------------------------------------------------===// |
| 1263 | // SchedDFSResult Implementation |
| 1264 | //===----------------------------------------------------------------------===// |
| 1265 | |
| 1266 | namespace llvm { |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1267 | /// Internal state used to compute SchedDFSResult. |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1268 | class SchedDFSImpl { |
| 1269 | SchedDFSResult &R; |
| 1270 | |
| 1271 | /// Join DAG nodes into equivalence classes by their subtree. |
| 1272 | IntEqClasses SubtreeClasses; |
| 1273 | /// List PredSU, SuccSU pairs that represent data edges between subtrees. |
| 1274 | std::vector<std::pair<const SUnit*, const SUnit*> > ConnectionPairs; |
| 1275 | |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1276 | struct RootData { |
| 1277 | unsigned NodeID; |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1278 | unsigned ParentNodeID; ///< Parent node (member of the parent subtree). |
| 1279 | unsigned SubInstrCount; ///< Instr count in this tree only, not children. |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1280 | |
| 1281 | RootData(unsigned id): NodeID(id), |
| 1282 | ParentNodeID(SchedDFSResult::InvalidSubtreeID), |
| 1283 | SubInstrCount(0) {} |
| 1284 | |
| 1285 | unsigned getSparseSetIndex() const { return NodeID; } |
| 1286 | }; |
| 1287 | |
| 1288 | SparseSet<RootData> RootSet; |
| 1289 | |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1290 | public: |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1291 | SchedDFSImpl(SchedDFSResult &r): R(r), SubtreeClasses(R.DFSNodeData.size()) { |
| 1292 | RootSet.setUniverse(R.DFSNodeData.size()); |
| 1293 | } |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1294 | |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1295 | /// Returns true if this node has been visited by the DFS traversal.
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1296 | /// |
| 1297 | /// During visitPostorderNode the Node's SubtreeID is assigned to the Node |
| 1298 | /// ID. Later, SubtreeID is updated but remains valid. |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1299 | bool isVisited(const SUnit *SU) const { |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1300 | return R.DFSNodeData[SU->NodeNum].SubtreeID |
| 1301 | != SchedDFSResult::InvalidSubtreeID; |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1302 | } |
| 1303 | |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1304 | /// Initializes this node's instruction count. We don't need to flag the node |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1305 | /// visited until visitPostorder because the DAG cannot have cycles. |
| 1306 | void visitPreorder(const SUnit *SU) { |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1307 | R.DFSNodeData[SU->NodeNum].InstrCount = |
| 1308 | SU->getInstr()->isTransient() ? 0 : 1; |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1309 | } |
| 1310 | |
| 1311 | /// Called once for each node after all predecessors are visited. Revisit this |
| 1312 | /// node's predecessors and potentially join them now that we know the ILP of |
| 1313 | /// the other predecessors. |
| 1314 | void visitPostorderNode(const SUnit *SU) { |
| 1315 | // Mark this node as the root of a subtree. It may be joined with its |
| 1316 | // successors later. |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1317 | R.DFSNodeData[SU->NodeNum].SubtreeID = SU->NodeNum; |
| 1318 | RootData RData(SU->NodeNum); |
| 1319 | RData.SubInstrCount = SU->getInstr()->isTransient() ? 0 : 1; |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1320 | |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1321 | // If any predecessors are still in their own subtree, they either cannot be |
| 1322 | // joined or are large enough to remain separate. If this parent node's |
| 1323 | // total instruction count is not greater than a child subtree by at least |
| 1324 | // the subtree limit, then try to join it now since splitting subtrees is |
| 1325 | // only useful if multiple high-pressure paths are possible. |
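| | // For instance, if this node's running InstrCount is 5 and a data
| | // predecessor's subtree holds 3 instructions, the difference (2) is
| | // compared against SubtreeLimit below to decide whether to join now.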
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1326 | unsigned InstrCount = R.DFSNodeData[SU->NodeNum].InstrCount; |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1327 | for (const SDep &PredDep : SU->Preds) { |
| 1328 | if (PredDep.getKind() != SDep::Data) |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1329 | continue; |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1330 | unsigned PredNum = PredDep.getSUnit()->NodeNum; |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1331 | if ((InstrCount - R.DFSNodeData[PredNum].InstrCount) < R.SubtreeLimit) |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1332 | joinPredSubtree(PredDep, SU, /*CheckLimit=*/false); |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1333 | |
| 1334 | // Either link or merge the TreeData entry from the child to the parent. |
Andrew Trick | 646eeb6 | 2013-01-25 06:52:30 +0000 | [diff] [blame] | 1335 | if (R.DFSNodeData[PredNum].SubtreeID == PredNum) { |
| 1336 | // If the predecessor's parent is invalid, this is a tree edge and the |
| 1337 | // current node is the parent. |
| 1338 | if (RootSet[PredNum].ParentNodeID == SchedDFSResult::InvalidSubtreeID) |
| 1339 | RootSet[PredNum].ParentNodeID = SU->NodeNum; |
| 1340 | } |
| 1341 | else if (RootSet.count(PredNum)) { |
| 1342 | // The predecessor is not a root, but is still in the root set. This |
| 1343 | // must be the new parent that it was just joined to. Note that |
| 1344 | // RootSet[PredNum].ParentNodeID may either be invalid or may still be |
| 1345 | // set to the original parent. |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1346 | RData.SubInstrCount += RootSet[PredNum].SubInstrCount; |
| 1347 | RootSet.erase(PredNum); |
| 1348 | } |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1349 | } |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1350 | RootSet[SU->NodeNum] = RData; |
| 1351 | } |
| 1352 | |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1353 | /// \brief Called once for each tree edge after calling visitPostorderNode on
| 1354 | /// the predecessor. Increment the parent node's instruction count and |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1355 | /// preemptively join this subtree to its parent's if it is small enough. |
| 1356 | void visitPostorderEdge(const SDep &PredDep, const SUnit *Succ) { |
| 1357 | R.DFSNodeData[Succ->NodeNum].InstrCount |
| 1358 | += R.DFSNodeData[PredDep.getSUnit()->NodeNum].InstrCount; |
| 1359 | joinPredSubtree(PredDep, Succ); |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1360 | } |
| 1361 | |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1362 | /// Adds a connection for cross edges. |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1363 | void visitCrossEdge(const SDep &PredDep, const SUnit *Succ) { |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1364 | ConnectionPairs.push_back(std::make_pair(PredDep.getSUnit(), Succ)); |
| 1365 | } |
| 1366 | |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1367 | /// Sets each node's subtree ID to the representative ID and records
| 1368 | /// connections between trees. |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1369 | void finalize() { |
| 1370 | SubtreeClasses.compress(); |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1371 | R.DFSTreeData.resize(SubtreeClasses.getNumClasses()); |
| 1372 | assert(SubtreeClasses.getNumClasses() == RootSet.size() |
| 1373 | && "number of roots should match trees"); |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1374 | for (const RootData &Root : RootSet) { |
| 1375 | unsigned TreeID = SubtreeClasses[Root.NodeID]; |
| 1376 | if (Root.ParentNodeID != SchedDFSResult::InvalidSubtreeID) |
| 1377 | R.DFSTreeData[TreeID].ParentTreeID = SubtreeClasses[Root.ParentNodeID]; |
| 1378 | R.DFSTreeData[TreeID].SubInstrCount = Root.SubInstrCount; |
Andrew Trick | 646eeb6 | 2013-01-25 06:52:30 +0000 | [diff] [blame] | 1379 | // Note that SubInstrCount may be greater than InstrCount if we joined |
| 1380 | // subtrees across a cross edge. InstrCount will be attributed to the |
| 1381 | // original parent, while SubInstrCount will be attributed to the joined |
| 1382 | // parent. |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1383 | } |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1384 | R.SubtreeConnections.resize(SubtreeClasses.getNumClasses()); |
| 1385 | R.SubtreeConnectLevels.resize(SubtreeClasses.getNumClasses()); |
| 1386 | DEBUG(dbgs() << R.getNumSubtrees() << " subtrees:\n"); |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1387 | for (unsigned Idx = 0, End = R.DFSNodeData.size(); Idx != End; ++Idx) { |
| 1388 | R.DFSNodeData[Idx].SubtreeID = SubtreeClasses[Idx]; |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1389 | DEBUG(dbgs() << " SU(" << Idx << ") in tree " |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1390 | << R.DFSNodeData[Idx].SubtreeID << '\n'); |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1391 | } |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1392 | for (const std::pair<const SUnit*, const SUnit*> &P : ConnectionPairs) { |
| 1393 | unsigned PredTree = SubtreeClasses[P.first->NodeNum]; |
| 1394 | unsigned SuccTree = SubtreeClasses[P.second->NodeNum]; |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1395 | if (PredTree == SuccTree) |
| 1396 | continue; |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1397 | unsigned Depth = P.first->getDepth(); |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1398 | addConnection(PredTree, SuccTree, Depth); |
| 1399 | addConnection(SuccTree, PredTree, Depth); |
| 1400 | } |
| 1401 | } |
| 1402 | |
| 1403 | protected: |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1404 | /// Joins the predecessor subtree with the successor that is its DFS parent. |
| 1405 | /// Applies some heuristics before joining. |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1406 | bool joinPredSubtree(const SDep &PredDep, const SUnit *Succ, |
| 1407 | bool CheckLimit = true) { |
| 1408 | assert(PredDep.getKind() == SDep::Data && "Subtrees are for data edges"); |
| 1409 | |
| 1410 | // Check if the predecessor is already joined. |
| 1411 | const SUnit *PredSU = PredDep.getSUnit(); |
| 1412 | unsigned PredNum = PredSU->NodeNum; |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1413 | if (R.DFSNodeData[PredNum].SubtreeID != PredNum) |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1414 | return false; |
Andrew Trick | b52a856 | 2013-01-25 00:12:57 +0000 | [diff] [blame] | 1415 | |
| 1416 | // Four is the magic number of successors before a node is considered a |
| 1417 | // pinch point. |
| 1418 | unsigned NumDataSucs = 0; |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1419 | for (const SDep &SuccDep : PredSU->Succs) { |
| 1420 | if (SuccDep.getKind() == SDep::Data) { |
Andrew Trick | b52a856 | 2013-01-25 00:12:57 +0000 | [diff] [blame] | 1421 | if (++NumDataSucs >= 4) |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1422 | return false; |
Andrew Trick | b52a856 | 2013-01-25 00:12:57 +0000 | [diff] [blame] | 1423 | } |
| 1424 | } |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1425 | if (CheckLimit && R.DFSNodeData[PredNum].InstrCount > R.SubtreeLimit) |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1426 | return false; |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1427 | R.DFSNodeData[PredNum].SubtreeID = Succ->NodeNum; |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1428 | SubtreeClasses.join(Succ->NodeNum, PredNum); |
| 1429 | return true; |
Andrew Trick | b52a856 | 2013-01-25 00:12:57 +0000 | [diff] [blame] | 1430 | } |
| 1431 | |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1432 | /// Called by finalize() to record a connection between trees. |
| 1433 | void addConnection(unsigned FromTree, unsigned ToTree, unsigned Depth) { |
| 1434 | if (!Depth) |
| 1435 | return; |
| 1436 | |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1437 | do { |
| 1438 | SmallVectorImpl<SchedDFSResult::Connection> &Connections = |
| 1439 | R.SubtreeConnections[FromTree]; |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1440 | for (SchedDFSResult::Connection &C : Connections) { |
| 1441 | if (C.TreeID == ToTree) { |
| 1442 | C.Level = std::max(C.Level, Depth); |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1443 | return; |
| 1444 | } |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1445 | } |
Andrew Trick | ffc8097 | 2013-01-25 06:52:27 +0000 | [diff] [blame] | 1446 | Connections.push_back(SchedDFSResult::Connection(ToTree, Depth)); |
| 1447 | FromTree = R.DFSTreeData[FromTree].ParentTreeID; |
| 1448 | } while (FromTree != SchedDFSResult::InvalidSubtreeID); |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1449 | } |
| 1450 | }; |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1451 | } // end namespace llvm |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1452 | |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1453 | namespace { |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1454 | /// Manage the stack used by a reverse depth-first search over the DAG. |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1455 | class SchedDAGReverseDFS { |
| 1456 | std::vector<std::pair<const SUnit*, SUnit::const_pred_iterator> > DFSStack; |
| 1457 | public: |
| 1458 | bool isComplete() const { return DFSStack.empty(); } |
| 1459 | |
| 1460 | void follow(const SUnit *SU) { |
| 1461 | DFSStack.push_back(std::make_pair(SU, SU->Preds.begin())); |
| 1462 | } |
| 1463 | void advance() { ++DFSStack.back().second; } |
| 1464 | |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1465 | const SDep *backtrack() { |
| 1466 | DFSStack.pop_back(); |
Craig Topper | c0196b1 | 2014-04-14 00:51:57 +0000 | [diff] [blame] | 1467 | return DFSStack.empty() ? nullptr : std::prev(DFSStack.back().second); |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1468 | } |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1469 | |
| 1470 | const SUnit *getCurr() const { return DFSStack.back().first; } |
| 1471 | |
| 1472 | SUnit::const_pred_iterator getPred() const { return DFSStack.back().second; } |
| 1473 | |
| 1474 | SUnit::const_pred_iterator getPredEnd() const { |
| 1475 | return getCurr()->Preds.end(); |
| 1476 | } |
| 1477 | }; |
Alexander Kornienko | f00654e | 2015-06-23 09:49:53 +0000 | [diff] [blame] | 1478 | } // end anonymous namespace
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1479 | |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1480 | static bool hasDataSucc(const SUnit *SU) { |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1481 | for (const SDep &SuccDep : SU->Succs) { |
| 1482 | if (SuccDep.getKind() == SDep::Data && |
| 1483 | !SuccDep.getSUnit()->isBoundaryNode()) |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1484 | return true; |
| 1485 | } |
| 1486 | return false; |
| 1487 | } |
| 1488 | |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1489 | /// Computes an ILP metric for all nodes in the subDAG reachable via depth-first |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1490 | /// search from this root. |
Andrew Trick | e2c3f5c | 2013-01-25 06:33:57 +0000 | [diff] [blame] | 1491 | void SchedDFSResult::compute(ArrayRef<SUnit> SUnits) { |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1492 | if (!IsBottomUp) |
| 1493 | llvm_unreachable("Top-down ILP metric is unimplemented");
| 1494 | |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1495 | SchedDFSImpl Impl(*this); |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1496 | for (const SUnit &SU : SUnits) { |
| 1497 | if (Impl.isVisited(&SU) || hasDataSucc(&SU)) |
Andrew Trick | e2c3f5c | 2013-01-25 06:33:57 +0000 | [diff] [blame] | 1498 | continue; |
| 1499 | |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1500 | SchedDAGReverseDFS DFS; |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1501 | Impl.visitPreorder(&SU); |
| 1502 | DFS.follow(&SU); |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1503 | for (;;) { |
| 1504 | // Traverse the leftmost path as far as possible. |
| 1505 | while (DFS.getPred() != DFS.getPredEnd()) { |
| 1506 | const SDep &PredDep = *DFS.getPred(); |
| 1507 | DFS.advance(); |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1508 | // Ignore non-data edges. |
Andrew Trick | 646eeb6 | 2013-01-25 06:52:30 +0000 | [diff] [blame] | 1509 | if (PredDep.getKind() != SDep::Data |
| 1510 | || PredDep.getSUnit()->isBoundaryNode()) { |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1511 | continue; |
Andrew Trick | 646eeb6 | 2013-01-25 06:52:30 +0000 | [diff] [blame] | 1512 | } |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1513 | // An already visited edge is a cross edge, assuming an acyclic DAG. |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1514 | if (Impl.isVisited(PredDep.getSUnit())) { |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1515 | Impl.visitCrossEdge(PredDep, DFS.getCurr()); |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1516 | continue; |
| 1517 | } |
| 1518 | Impl.visitPreorder(PredDep.getSUnit()); |
| 1519 | DFS.follow(PredDep.getSUnit()); |
| 1520 | } |
| 1521 | // Visit the top of the stack in postorder and backtrack. |
| 1522 | const SUnit *Child = DFS.getCurr(); |
| 1523 | const SDep *PredDep = DFS.backtrack(); |
Andrew Trick | 5b07eeb | 2013-01-25 06:02:44 +0000 | [diff] [blame] | 1524 | Impl.visitPostorderNode(Child); |
| 1525 | if (PredDep) |
| 1526 | Impl.visitPostorderEdge(*PredDep, DFS.getCurr()); |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1527 | if (DFS.isComplete()) |
| 1528 | break; |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1529 | } |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1530 | } |
| 1531 | Impl.finalize(); |
| 1532 | } |
| 1533 | |
| 1534 | /// The root of the given SubtreeID was just scheduled. For all subtrees |
| 1535 | /// connected to this tree, record the depth of the connection so that the |
| 1536 | /// nearest connected subtrees can be prioritized. |
| 1537 | void SchedDFSResult::scheduleTree(unsigned SubtreeID) { |
Matthias Braun | 298e007 | 2016-09-30 23:08:07 +0000 | [diff] [blame] | 1538 | for (const Connection &C : SubtreeConnections[SubtreeID]) { |
| 1539 | SubtreeConnectLevels[C.TreeID] = |
| 1540 | std::max(SubtreeConnectLevels[C.TreeID], C.Level); |
| 1541 | DEBUG(dbgs() << " Tree: " << C.TreeID |
| 1542 | << " @" << SubtreeConnectLevels[C.TreeID] << '\n'); |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1543 | } |
| 1544 | } |
| 1545 | |
Matthias Braun | 8c209aa | 2017-01-28 02:02:38 +0000 | [diff] [blame] | 1546 | #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) |
| 1547 | LLVM_DUMP_METHOD void ILPValue::print(raw_ostream &OS) const { |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1548 | OS << InstrCount << " / " << Length << " = "; |
| 1549 | if (!Length) |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1550 | OS << "BADILP"; |
Andrew Trick | 48d392e | 2012-11-28 05:13:28 +0000 | [diff] [blame] | 1551 | else |
| 1552 | OS << format("%g", ((double)InstrCount / Length)); |
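| | // For example, InstrCount == 6 and Length == 2 prints as "6 / 2 = 3".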
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1553 | } |
| 1554 | |
Matthias Braun | 8c209aa | 2017-01-28 02:02:38 +0000 | [diff] [blame] | 1555 | LLVM_DUMP_METHOD void ILPValue::dump() const { |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1556 | dbgs() << *this << '\n'; |
| 1557 | } |
| 1558 | |
| 1559 | namespace llvm { |
| 1560 | |
Alp Toker | d8d510a | 2014-07-01 21:19:13 +0000 | [diff] [blame] | 1561 | LLVM_DUMP_METHOD |
Andrew Trick | 90f711d | 2012-10-15 18:02:27 +0000 | [diff] [blame] | 1562 | raw_ostream &operator<<(raw_ostream &OS, const ILPValue &Val) { |
| 1563 | Val.print(OS); |
| 1564 | return OS; |
| 1565 | } |
| 1566 | |
Matthias Braun | bd7d918 | 2017-01-27 18:53:00 +0000 | [diff] [blame] | 1567 | } // end namespace llvm |
Matthias Braun | 8c209aa | 2017-01-28 02:02:38 +0000 | [diff] [blame] | 1568 | #endif |