//===-- R600MachineScheduler.cpp - R600 Scheduler Interface -*- C++ -*-----===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// \brief R600 Machine Scheduler interface
//
//===----------------------------------------------------------------------===//

#include "R600MachineScheduler.h"
#include "R600InstrInfo.h"
#include "AMDGPUSubtarget.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/IR/LegacyPassManager.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "misched"

void R600SchedStrategy::initialize(ScheduleDAGMI *dag) {
  assert(dag->hasVRegLiveness() && "R600SchedStrategy needs vreg liveness");
  DAG = static_cast<ScheduleDAGMILive*>(dag);
  const R600Subtarget &ST = DAG->MF.getSubtarget<R600Subtarget>();
  TII = static_cast<const R600InstrInfo*>(DAG->TII);
  TRI = static_cast<const R600RegisterInfo*>(DAG->TRI);
  VLIW5 = !ST.hasCaymanISA();
  MRI = &DAG->MRI;
  CurInstKind = IDOther;
  CurEmitted = 0;
  OccupedSlotsMask = 31;
  InstKindLimit[IDAlu] = TII->getMaxAlusPerClause();
  InstKindLimit[IDOther] = 32;
  InstKindLimit[IDFetch] = ST.getTexVTXClauseSize();
  AluInstCount = 0;
  FetchInstCount = 0;
}

void R600SchedStrategy::MoveUnits(std::vector<SUnit *> &QSrc,
                                  std::vector<SUnit *> &QDst)
{
  QDst.insert(QDst.end(), QSrc.begin(), QSrc.end());
  QSrc.clear();
}

static unsigned getWFCountLimitedByGPR(unsigned GPRCount) {
  assert(GPRCount && "GPRCount cannot be 0");
  return 248 / GPRCount;
}
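
// For illustration (numbers made up): a fetch clause needing 8 GPRs allows at
// most 248 / 8 = 31 resident wavefronts, while one needing 100 GPRs allows
// only 2. The constant 248 is presumably the per-SIMD GPR budget this
// heuristic assumes for the R600 family.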

SUnit* R600SchedStrategy::pickNode(bool &IsTopNode) {
  SUnit *SU = nullptr;
  NextInstKind = IDOther;

  IsTopNode = false;

  // Check if we might want to switch the current clause type.
  bool AllowSwitchToAlu = (CurEmitted >= InstKindLimit[CurInstKind]) ||
      (Available[CurInstKind].empty());
  bool AllowSwitchFromAlu = (CurEmitted >= InstKindLimit[CurInstKind]) &&
      (!Available[IDFetch].empty() || !Available[IDOther].empty());

  if (CurInstKind == IDAlu && !Available[IDFetch].empty()) {
    // We use the heuristic provided by the AMD Accelerated Parallel Processing
    // OpenCL Programming Guide:
    // The approx. number of WF that allows TEX inst to hide ALU inst is:
    // 500 (cycles for TEX) / (AluFetchRatio * 8 (cycles for ALU))
    float ALUFetchRationEstimate =
        (AluInstCount + AvailablesAluCount() + Pending[IDAlu].size()) /
        (FetchInstCount + Available[IDFetch].size());
    if (ALUFetchRationEstimate == 0) {
      AllowSwitchFromAlu = true;
    } else {
      unsigned NeededWF = 62.5f / ALUFetchRationEstimate;
      DEBUG(dbgs() << NeededWF << " approx. Wavefronts Required\n");
      // We assume the local GPR requirements are "dominated" by the
      // requirement of the TEX clause (which consumes 128-bit regs); ALU
      // instructions before and after the TEX clause are indeed likely to
      // consume or generate values from/for it.
      // Available[IDFetch].size() * 2 : GPRs required in the Fetch clause.
      // We assume that fetch instructions are either TnXYZW = TEX TnXYZW
      // (need one GPR) or TmXYZW = TEX TnXYZW (need 2 GPRs).
      // (TODO: use RegisterPressure)
      // If we are going to use too many GPRs, we flush the Fetch instructions
      // to lower register pressure on 128-bit regs.
      unsigned NearRegisterRequirement = 2 * Available[IDFetch].size();
      if (NeededWF > getWFCountLimitedByGPR(NearRegisterRequirement))
        AllowSwitchFromAlu = true;
    }
  }
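
  // Worked example (numbers made up): with 60 instructions counted on the ALU
  // side and 50 fetches available (FetchInstCount == 0), the ratio is
  // 60 / 50 == 1 (both operands are integers, so the division truncates
  // before the float assignment). NeededWF = 62.5 / 1 = 62, while the roughly
  // 2 * 50 = 100 GPRs the fetch clause would need cap occupancy at
  // 248 / 100 = 2 wavefronts, so AllowSwitchFromAlu is forced to true and the
  // fetch clause gets flushed to relieve 128-bit register pressure.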

  if (!SU && ((AllowSwitchToAlu && CurInstKind != IDAlu) ||
      (!AllowSwitchFromAlu && CurInstKind == IDAlu))) {
    // try to pick ALU
    SU = pickAlu();
    if (!SU && !PhysicalRegCopy.empty()) {
      SU = PhysicalRegCopy.front();
      PhysicalRegCopy.erase(PhysicalRegCopy.begin());
    }
    if (SU) {
      if (CurEmitted >= InstKindLimit[IDAlu])
        CurEmitted = 0;
      NextInstKind = IDAlu;
    }
  }

  if (!SU) {
    // try to pick FETCH
    SU = pickOther(IDFetch);
    if (SU)
      NextInstKind = IDFetch;
  }

  // try to pick other
  if (!SU) {
    SU = pickOther(IDOther);
    if (SU)
      NextInstKind = IDOther;
  }

  DEBUG(
      if (SU) {
        dbgs() << " ** Pick node **\n";
        SU->dump(DAG);
      } else {
        dbgs() << "NO NODE \n";
        for (unsigned i = 0; i < DAG->SUnits.size(); i++) {
          const SUnit &S = DAG->SUnits[i];
          if (!S.isScheduled)
            S.dump(DAG);
        }
      }
  );

  return SU;
}

void R600SchedStrategy::schedNode(SUnit *SU, bool IsTopNode) {
  if (NextInstKind != CurInstKind) {
    DEBUG(dbgs() << "Instruction Type Switch\n");
    if (NextInstKind != IDAlu)
      OccupedSlotsMask |= 31;
    CurEmitted = 0;
    CurInstKind = NextInstKind;
  }

  if (CurInstKind == IDAlu) {
    AluInstCount++;
    switch (getAluKind(SU)) {
    case AluT_XYZW:
      CurEmitted += 4;
      break;
    case AluDiscarded:
      break;
    default: {
      ++CurEmitted;
      for (MachineInstr::mop_iterator It = SU->getInstr()->operands_begin(),
          E = SU->getInstr()->operands_end(); It != E; ++It) {
        MachineOperand &MO = *It;
        if (MO.isReg() && MO.getReg() == AMDGPU::ALU_LITERAL_X)
          ++CurEmitted;
      }
    }
    }
  } else {
    ++CurEmitted;
  }

  DEBUG(dbgs() << CurEmitted << " Instructions Emitted in this clause\n");

  if (CurInstKind != IDFetch) {
    MoveUnits(Pending[IDFetch], Available[IDFetch]);
  } else
    FetchInstCount++;
}
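
// Clause-size accounting above: a full-vector op (AluT_XYZW) advances
// CurEmitted by 4, a discarded copy by 0, and every other ALU op by 1 plus
// one per ALU_LITERAL_X operand, presumably because literal constants take
// their own slots in the clause. For example, a scalar op encoding one
// literal counts as 2 toward InstKindLimit[IDAlu].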

static bool
isPhysicalRegCopy(MachineInstr *MI) {
  if (MI->getOpcode() != AMDGPU::COPY)
    return false;

  return !TargetRegisterInfo::isVirtualRegister(MI->getOperand(1).getReg());
}

void R600SchedStrategy::releaseTopNode(SUnit *SU) {
  DEBUG(dbgs() << "Top Releasing "; SU->dump(DAG););
}

void R600SchedStrategy::releaseBottomNode(SUnit *SU) {
  DEBUG(dbgs() << "Bottom Releasing "; SU->dump(DAG););
  if (isPhysicalRegCopy(SU->getInstr())) {
    PhysicalRegCopy.push_back(SU);
    return;
  }

  int IK = getInstKind(SU);

  // There is no export clause, so we can schedule one as soon as it's ready.
  if (IK == IDOther)
    Available[IDOther].push_back(SU);
  else
    Pending[IK].push_back(SU);

}

bool R600SchedStrategy::regBelongsToClass(unsigned Reg,
                                          const TargetRegisterClass *RC) const {
  if (!TargetRegisterInfo::isVirtualRegister(Reg)) {
    return RC->contains(Reg);
  } else {
    return MRI->getRegClass(Reg) == RC;
  }
}

R600SchedStrategy::AluKind R600SchedStrategy::getAluKind(SUnit *SU) const {
  MachineInstr *MI = SU->getInstr();

  if (TII->isTransOnly(MI))
    return AluTrans;

  switch (MI->getOpcode()) {
  case AMDGPU::PRED_X:
    return AluPredX;
  case AMDGPU::INTERP_PAIR_XY:
  case AMDGPU::INTERP_PAIR_ZW:
  case AMDGPU::INTERP_VEC_LOAD:
  case AMDGPU::DOT_4:
    return AluT_XYZW;
  case AMDGPU::COPY:
    if (MI->getOperand(1).isUndef()) {
      // MI will become a KILL; don't consider it in scheduling.
      return AluDiscarded;
    }
  default:
    break;
  }

  // Does the instruction take a whole IG ?
  // XXX: Is it possible to add a helper function in R600InstrInfo that can
  // be used here and in R600PacketizerList::isSoloInstruction() ?
  if (TII->isVector(*MI) ||
      TII->isCubeOp(MI->getOpcode()) ||
      TII->isReductionOp(MI->getOpcode()) ||
      MI->getOpcode() == AMDGPU::GROUP_BARRIER) {
    return AluT_XYZW;
  }

  if (TII->isLDSInstr(MI->getOpcode())) {
    return AluT_X;
  }

  // Is the result already assigned to a channel ?
  unsigned DestSubReg = MI->getOperand(0).getSubReg();
  switch (DestSubReg) {
  case AMDGPU::sub0:
    return AluT_X;
  case AMDGPU::sub1:
    return AluT_Y;
  case AMDGPU::sub2:
    return AluT_Z;
  case AMDGPU::sub3:
    return AluT_W;
  default:
    break;
  }

  // Is the result already a member of a X/Y/Z/W class ?
  unsigned DestReg = MI->getOperand(0).getReg();
  if (regBelongsToClass(DestReg, &AMDGPU::R600_TReg32_XRegClass) ||
      regBelongsToClass(DestReg, &AMDGPU::R600_AddrRegClass))
    return AluT_X;
  if (regBelongsToClass(DestReg, &AMDGPU::R600_TReg32_YRegClass))
    return AluT_Y;
  if (regBelongsToClass(DestReg, &AMDGPU::R600_TReg32_ZRegClass))
    return AluT_Z;
  if (regBelongsToClass(DestReg, &AMDGPU::R600_TReg32_WRegClass))
    return AluT_W;
  if (regBelongsToClass(DestReg, &AMDGPU::R600_Reg128RegClass))
    return AluT_XYZW;

  // LDS src registers cannot be used in the Trans slot.
  if (TII->readsLDSSrcReg(MI))
    return AluT_XYZW;

  return AluAny;
}
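
// getAluKind() effectively orders the constraints from most to least
// restrictive: trans-only ops, ops that take a whole instruction group
// (PRED_X, interpolation and DOT_4 ops, vector/cube/reduction ops,
// GROUP_BARRIER), ops whose destination is already tied to a channel
// (through a subregister index or a per-channel register class), and finally
// AluAny, which AttemptFillSlot() can place in whichever slot it is currently
// trying to fill via AssignSlot(). For example, an op writing a vreg already
// constrained to R600_TReg32_XRegClass is AluT_X and can only fill the X
// slot, while an unconstrained 32-bit op stays AluAny.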

int R600SchedStrategy::getInstKind(SUnit* SU) {
  int Opcode = SU->getInstr()->getOpcode();

  if (TII->usesTextureCache(Opcode) || TII->usesVertexCache(Opcode))
    return IDFetch;

  if (TII->isALUInstr(Opcode)) {
    return IDAlu;
  }

  switch (Opcode) {
  case AMDGPU::PRED_X:
  case AMDGPU::COPY:
  case AMDGPU::CONST_COPY:
  case AMDGPU::INTERP_PAIR_XY:
  case AMDGPU::INTERP_PAIR_ZW:
  case AMDGPU::INTERP_VEC_LOAD:
  case AMDGPU::DOT_4:
    return IDAlu;
  default:
    return IDOther;
  }
}

SUnit *R600SchedStrategy::PopInst(std::vector<SUnit *> &Q, bool AnyALU) {
  if (Q.empty())
    return nullptr;
  for (std::vector<SUnit *>::reverse_iterator It = Q.rbegin(), E = Q.rend();
      It != E; ++It) {
    SUnit *SU = *It;
    InstructionsGroupCandidate.push_back(SU->getInstr());
    if (TII->fitsConstReadLimitations(InstructionsGroupCandidate) &&
        (!AnyALU || !TII->isVectorOnly(SU->getInstr()))) {
      InstructionsGroupCandidate.pop_back();
      Q.erase((It + 1).base());
      return SU;
    } else {
      InstructionsGroupCandidate.pop_back();
    }
  }
  return nullptr;
}
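
// PopInst() scans Q from the back and tentatively appends each candidate to
// InstructionsGroupCandidate so that fitsConstReadLimitations() can check the
// constant-read budget of the group being formed; the candidate is popped
// again either way and only erased from Q once accepted. With AnyALU set
// (filling the Trans slot via AttemptFillSlot(3, true)), vector-only
// instructions are also rejected, since they cannot go to the Trans slot.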

void R600SchedStrategy::LoadAlu() {
  std::vector<SUnit *> &QSrc = Pending[IDAlu];
  for (unsigned i = 0, e = QSrc.size(); i < e; ++i) {
    AluKind AK = getAluKind(QSrc[i]);
    AvailableAlus[AK].push_back(QSrc[i]);
  }
  QSrc.clear();
}

void R600SchedStrategy::PrepareNextSlot() {
  DEBUG(dbgs() << "New Slot\n");
  assert(OccupedSlotsMask && "Slot wasn't filled");
  OccupedSlotsMask = 0;
//  if (HwGen == R600Subtarget::NORTHERN_ISLANDS)
//    OccupedSlotsMask |= 16;
  InstructionsGroupCandidate.clear();
  LoadAlu();
}

void R600SchedStrategy::AssignSlot(MachineInstr* MI, unsigned Slot) {
  int DstIndex = TII->getOperandIdx(MI->getOpcode(), AMDGPU::OpName::dst);
  if (DstIndex == -1) {
    return;
  }
  unsigned DestReg = MI->getOperand(DstIndex).getReg();
  // PressureRegister crashes if an operand is both defined and used in the
  // same instruction and we try to constrain its regclass.
  for (MachineInstr::mop_iterator It = MI->operands_begin(),
      E = MI->operands_end(); It != E; ++It) {
    MachineOperand &MO = *It;
    if (MO.isReg() && !MO.isDef() &&
        MO.getReg() == DestReg)
      return;
  }
  // Constrain the regclass of DestReg to assign it to Slot.
  switch (Slot) {
  case 0:
    MRI->constrainRegClass(DestReg, &AMDGPU::R600_TReg32_XRegClass);
    break;
  case 1:
    MRI->constrainRegClass(DestReg, &AMDGPU::R600_TReg32_YRegClass);
    break;
  case 2:
    MRI->constrainRegClass(DestReg, &AMDGPU::R600_TReg32_ZRegClass);
    break;
  case 3:
    MRI->constrainRegClass(DestReg, &AMDGPU::R600_TReg32_WRegClass);
    break;
  }
}
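
// Example: when AttemptFillSlot(1, false) places an AluAny instruction in the
// Y slot, AssignSlot(MI, 1) constrains its destination vreg to
// R600_TReg32_YRegClass, so that register allocation later picks a .Y channel
// register and the write stays in the lane the scheduler chose.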

SUnit *R600SchedStrategy::AttemptFillSlot(unsigned Slot, bool AnyAlu) {
  static const AluKind IndexToID[] = {AluT_X, AluT_Y, AluT_Z, AluT_W};
  SUnit *SlotedSU = PopInst(AvailableAlus[IndexToID[Slot]], AnyAlu);
  if (SlotedSU)
    return SlotedSU;
  SUnit *UnslotedSU = PopInst(AvailableAlus[AluAny], AnyAlu);
  if (UnslotedSU)
    AssignSlot(UnslotedSU->getInstr(), Slot);
  return UnslotedSU;
}

unsigned R600SchedStrategy::AvailablesAluCount() const {
  return AvailableAlus[AluAny].size() + AvailableAlus[AluT_XYZW].size() +
      AvailableAlus[AluT_X].size() + AvailableAlus[AluT_Y].size() +
      AvailableAlus[AluT_Z].size() + AvailableAlus[AluT_W].size() +
      AvailableAlus[AluTrans].size() + AvailableAlus[AluDiscarded].size() +
      AvailableAlus[AluPredX].size();
}

SUnit* R600SchedStrategy::pickAlu() {
  while (AvailablesAluCount() || !Pending[IDAlu].empty()) {
    if (!OccupedSlotsMask) {
      // Bottom-up scheduling: PRED_X must come first.
      if (!AvailableAlus[AluPredX].empty()) {
        OccupedSlotsMask |= 31;
        return PopInst(AvailableAlus[AluPredX], false);
      }
      // Flush physical reg copies (RA will discard them).
      if (!AvailableAlus[AluDiscarded].empty()) {
        OccupedSlotsMask |= 31;
        return PopInst(AvailableAlus[AluDiscarded], false);
      }
      // If there is a T_XYZW alu available, use it.
      if (!AvailableAlus[AluT_XYZW].empty()) {
        OccupedSlotsMask |= 15;
        return PopInst(AvailableAlus[AluT_XYZW], false);
      }
    }
    bool TransSlotOccuped = OccupedSlotsMask & 16;
    if (!TransSlotOccuped && VLIW5) {
      if (!AvailableAlus[AluTrans].empty()) {
        OccupedSlotsMask |= 16;
        return PopInst(AvailableAlus[AluTrans], false);
      }
      SUnit *SU = AttemptFillSlot(3, true);
      if (SU) {
        OccupedSlotsMask |= 16;
        return SU;
      }
    }
    for (int Chan = 3; Chan > -1; --Chan) {
      bool isOccupied = OccupedSlotsMask & (1 << Chan);
      if (!isOccupied) {
        SUnit *SU = AttemptFillSlot(Chan, false);
        if (SU) {
          OccupedSlotsMask |= (1 << Chan);
          InstructionsGroupCandidate.push_back(SU->getInstr());
          return SU;
        }
      }
    }
    PrepareNextSlot();
  }
  return nullptr;
}
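
// Group filling proceeds bottom-up: a PRED_X or a soon-to-be-discarded copy
// claims the whole group (mask |= 31); a full-vector op claims X..W (|= 15);
// on VLIW5 parts the Trans slot (|= 16) is tried next, first with a
// trans-only instruction, then via AttemptFillSlot(3, true), which accepts a
// W-slotted or unconstrained op that is not vector-only; the remaining
// channels are then filled from W down to X. If nothing fits,
// PrepareNextSlot() resets the mask and reloads the pending ALU instructions
// for the next group.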

SUnit* R600SchedStrategy::pickOther(int QID) {
  SUnit *SU = nullptr;
  std::vector<SUnit *> &AQ = Available[QID];

  if (AQ.empty()) {
    MoveUnits(Pending[QID], AQ);
  }
  if (!AQ.empty()) {
    SU = AQ.back();
    AQ.resize(AQ.size() - 1);
  }
  return SU;
}