//===-- R600MachineScheduler.cpp - R600 Scheduler Interface -*- C++ -*-----===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// \brief R600 Machine Scheduler interface
//
//===----------------------------------------------------------------------===//

#include "R600MachineScheduler.h"
#include "AMDGPUSubtarget.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/PassManager.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "misched"

void R600SchedStrategy::initialize(ScheduleDAGMI *dag) {
  assert(dag->hasVRegLiveness() && "R600SchedStrategy needs vreg liveness");
  DAG = static_cast<ScheduleDAGMILive*>(dag);
  TII = static_cast<const R600InstrInfo*>(DAG->TII);
  TRI = static_cast<const R600RegisterInfo*>(DAG->TRI);
  VLIW5 = !DAG->MF.getTarget().getSubtarget<AMDGPUSubtarget>().hasCaymanISA();
  MRI = &DAG->MRI;
  CurInstKind = IDOther;
  CurEmitted = 0;
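  // OccupedSlotsMask tracks the five VLIW issue slots of the instruction group
  // being built: bits 0-3 are the X/Y/Z/W vector slots and bit 4 the Trans
  // slot (see pickAlu()). Starting with every bit set forces PrepareNextSlot()
  // to open a fresh group before the first ALU pick.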
  OccupedSlotsMask = 31;
  InstKindLimit[IDAlu] = TII->getMaxAlusPerClause();
  InstKindLimit[IDOther] = 32;

  const AMDGPUSubtarget &ST = DAG->TM.getSubtarget<AMDGPUSubtarget>();
  InstKindLimit[IDFetch] = ST.getTexVTXClauseSize();
  AluInstCount = 0;
  FetchInstCount = 0;
}

void R600SchedStrategy::MoveUnits(std::vector<SUnit *> &QSrc,
                                  std::vector<SUnit *> &QDst)
{
  QDst.insert(QDst.end(), QSrc.begin(), QSrc.end());
  QSrc.clear();
}

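// Rough occupancy estimate used by pickNode(): with GPRCount registers live
// per thread, the register file can hold roughly 248 / GPRCount wavefronts.
// The 248 figure presumably reflects the usable per-SIMD GPR budget on these
// parts; it is kept as-is here.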
static
unsigned getWFCountLimitedByGPR(unsigned GPRCount) {
  assert (GPRCount && "GPRCount cannot be 0");
  return 248 / GPRCount;
}

SUnit* R600SchedStrategy::pickNode(bool &IsTopNode) {
  SUnit *SU = nullptr;
  NextInstKind = IDOther;

  IsTopNode = false;

  // check if we might want to switch current clause type
  bool AllowSwitchToAlu = (CurEmitted >= InstKindLimit[CurInstKind]) ||
      (Available[CurInstKind].empty());
  bool AllowSwitchFromAlu = (CurEmitted >= InstKindLimit[CurInstKind]) &&
      (!Available[IDFetch].empty() || !Available[IDOther].empty());

  if (CurInstKind == IDAlu && !Available[IDFetch].empty()) {
    // We use the heuristic provided by the AMD Accelerated Parallel Processing
    // OpenCL Programming Guide:
    // the approx. number of WF that allows TEX inst to hide ALU inst is:
    // 500 (cycles for TEX) / (AluFetchRatio * 8 (cycles for ALU))
    float ALUFetchRationEstimate =
        (AluInstCount + AvailablesAluCount() + Pending[IDAlu].size()) /
        (FetchInstCount + Available[IDFetch].size());
    if (ALUFetchRationEstimate == 0) {
      AllowSwitchFromAlu = true;
    } else {
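      // 62.5 = 500 TEX cycles / 8 ALU cycles, per the formula above.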
      unsigned NeededWF = 62.5f / ALUFetchRationEstimate;
      DEBUG( dbgs() << NeededWF << " approx. Wavefronts Required\n" );
      // We assume the local GPR requirements to be "dominated" by the
      // requirements of the TEX clause (which consumes 128-bit regs); ALU
      // instructions before and after TEX are indeed likely to consume or
      // generate values from/for the TEX clause.
      // Available[IDFetch].size() * 2 : GPRs required by the Fetch clause.
      // We assume that fetch instructions are either TnXYZW = TEX TnXYZW (need
      // one GPR) or TmXYZW = TnXYZW (need 2 GPRs).
      // (TODO: use RegisterPressure)
      // If we are going to use too many GPRs, we flush fetch instructions to
      // lower register pressure on 128-bit regs.
      unsigned NearRegisterRequirement = 2 * Available[IDFetch].size();
      if (NeededWF > getWFCountLimitedByGPR(NearRegisterRequirement))
        AllowSwitchFromAlu = true;
    }
  }

  if (!SU && ((AllowSwitchToAlu && CurInstKind != IDAlu) ||
      (!AllowSwitchFromAlu && CurInstKind == IDAlu))) {
    // try to pick ALU
    SU = pickAlu();
    if (!SU && !PhysicalRegCopy.empty()) {
      SU = PhysicalRegCopy.front();
      PhysicalRegCopy.erase(PhysicalRegCopy.begin());
    }
    if (SU) {
      if (CurEmitted >= InstKindLimit[IDAlu])
        CurEmitted = 0;
      NextInstKind = IDAlu;
    }
  }

  if (!SU) {
    // try to pick FETCH
    SU = pickOther(IDFetch);
    if (SU)
      NextInstKind = IDFetch;
  }

  // try to pick other
  if (!SU) {
    SU = pickOther(IDOther);
    if (SU)
      NextInstKind = IDOther;
  }

  DEBUG(
      if (SU) {
        dbgs() << " ** Pick node **\n";
        SU->dump(DAG);
      } else {
        dbgs() << "NO NODE \n";
        for (unsigned i = 0; i < DAG->SUnits.size(); i++) {
          const SUnit &S = DAG->SUnits[i];
          if (!S.isScheduled)
            S.dump(DAG);
        }
      }
  );

  return SU;
}

void R600SchedStrategy::schedNode(SUnit *SU, bool IsTopNode) {
  if (NextInstKind != CurInstKind) {
    DEBUG(dbgs() << "Instruction Type Switch\n");
    if (NextInstKind != IDAlu)
      OccupedSlotsMask |= 31;
    CurEmitted = 0;
    CurInstKind = NextInstKind;
  }

  if (CurInstKind == IDAlu) {
    AluInstCount ++;
    switch (getAluKind(SU)) {
    case AluT_XYZW:
      CurEmitted += 4;
      break;
    case AluDiscarded:
      break;
    default: {
      ++CurEmitted;
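      // Each ALU_LITERAL_X operand takes space in the clause, so count it
      // toward CurEmitted as well.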
      for (MachineInstr::mop_iterator It = SU->getInstr()->operands_begin(),
          E = SU->getInstr()->operands_end(); It != E; ++It) {
        MachineOperand &MO = *It;
        if (MO.isReg() && MO.getReg() == AMDGPU::ALU_LITERAL_X)
          ++CurEmitted;
      }
    }
    }
  } else {
    ++CurEmitted;
  }

  DEBUG(dbgs() << CurEmitted << " Instructions Emitted in this clause\n");

  if (CurInstKind != IDFetch) {
    MoveUnits(Pending[IDFetch], Available[IDFetch]);
  } else
    FetchInstCount++;
}

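// Copies from a physical register are kept in their own list (see
// releaseBottomNode) and only emitted when no other ALU candidate is
// available, presumably so that they end up at the top of the block once the
// bottom-up schedule is reversed.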
static bool
isPhysicalRegCopy(MachineInstr *MI) {
  if (MI->getOpcode() != AMDGPU::COPY)
    return false;

  return !TargetRegisterInfo::isVirtualRegister(MI->getOperand(1).getReg());
}

void R600SchedStrategy::releaseTopNode(SUnit *SU) {
  DEBUG(dbgs() << "Top Releasing ";SU->dump(DAG););
}

void R600SchedStrategy::releaseBottomNode(SUnit *SU) {
  DEBUG(dbgs() << "Bottom Releasing ";SU->dump(DAG););
  if (isPhysicalRegCopy(SU->getInstr())) {
    PhysicalRegCopy.push_back(SU);
    return;
  }

  int IK = getInstKind(SU);

  // There is no export clause, so we can schedule one as soon as it's ready.
  if (IK == IDOther)
    Available[IDOther].push_back(SU);
  else
    Pending[IK].push_back(SU);

}

bool R600SchedStrategy::regBelongsToClass(unsigned Reg,
                                          const TargetRegisterClass *RC) const {
  if (!TargetRegisterInfo::isVirtualRegister(Reg)) {
    return RC->contains(Reg);
  } else {
    return MRI->getRegClass(Reg) == RC;
  }
}

R600SchedStrategy::AluKind R600SchedStrategy::getAluKind(SUnit *SU) const {
  MachineInstr *MI = SU->getInstr();

  if (TII->isTransOnly(MI))
    return AluTrans;

  switch (MI->getOpcode()) {
  case AMDGPU::PRED_X:
    return AluPredX;
  case AMDGPU::INTERP_PAIR_XY:
  case AMDGPU::INTERP_PAIR_ZW:
  case AMDGPU::INTERP_VEC_LOAD:
  case AMDGPU::DOT_4:
    return AluT_XYZW;
  case AMDGPU::COPY:
    if (MI->getOperand(1).isUndef()) {
      // MI will become a KILL; don't consider it in scheduling.
      return AluDiscarded;
    }
  default:
    break;
  }

  // Does the instruction take a whole IG?
  // XXX: Is it possible to add a helper function in R600InstrInfo that can
  // be used here and in R600PacketizerList::isSoloInstruction()?
  if (TII->isVector(*MI) ||
      TII->isCubeOp(MI->getOpcode()) ||
      TII->isReductionOp(MI->getOpcode()) ||
      MI->getOpcode() == AMDGPU::GROUP_BARRIER) {
    return AluT_XYZW;
  }

  if (TII->isLDSInstr(MI->getOpcode())) {
    return AluT_X;
  }

  // Is the result already assigned to a channel?
  unsigned DestSubReg = MI->getOperand(0).getSubReg();
  switch (DestSubReg) {
  case AMDGPU::sub0:
    return AluT_X;
  case AMDGPU::sub1:
    return AluT_Y;
  case AMDGPU::sub2:
    return AluT_Z;
  case AMDGPU::sub3:
    return AluT_W;
  default:
    break;
  }

  // Is the result already a member of an X/Y/Z/W class?
  unsigned DestReg = MI->getOperand(0).getReg();
  if (regBelongsToClass(DestReg, &AMDGPU::R600_TReg32_XRegClass) ||
      regBelongsToClass(DestReg, &AMDGPU::R600_AddrRegClass))
    return AluT_X;
  if (regBelongsToClass(DestReg, &AMDGPU::R600_TReg32_YRegClass))
    return AluT_Y;
  if (regBelongsToClass(DestReg, &AMDGPU::R600_TReg32_ZRegClass))
    return AluT_Z;
  if (regBelongsToClass(DestReg, &AMDGPU::R600_TReg32_WRegClass))
    return AluT_W;
  if (regBelongsToClass(DestReg, &AMDGPU::R600_Reg128RegClass))
    return AluT_XYZW;

  // LDS src registers cannot be used in the Trans slot.
  if (TII->readsLDSSrcReg(MI))
    return AluT_XYZW;

  return AluAny;

}

int R600SchedStrategy::getInstKind(SUnit* SU) {
  int Opcode = SU->getInstr()->getOpcode();

  if (TII->usesTextureCache(Opcode) || TII->usesVertexCache(Opcode))
    return IDFetch;

  if (TII->isALUInstr(Opcode)) {
    return IDAlu;
  }

  switch (Opcode) {
  case AMDGPU::PRED_X:
  case AMDGPU::COPY:
  case AMDGPU::CONST_COPY:
  case AMDGPU::INTERP_PAIR_XY:
  case AMDGPU::INTERP_PAIR_ZW:
  case AMDGPU::INTERP_VEC_LOAD:
  case AMDGPU::DOT_4:
    return IDAlu;
  default:
    return IDOther;
  }
}

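// Pop the most recently released unit from Q that still fits the constant-read
// limits of the group under construction; when AnyALU is set (Trans-slot
// candidates), vector-only instructions are skipped as well.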
SUnit *R600SchedStrategy::PopInst(std::vector<SUnit *> &Q, bool AnyALU) {
  if (Q.empty())
    return nullptr;
  for (std::vector<SUnit *>::reverse_iterator It = Q.rbegin(), E = Q.rend();
      It != E; ++It) {
    SUnit *SU = *It;
    InstructionsGroupCandidate.push_back(SU->getInstr());
    if (TII->fitsConstReadLimitations(InstructionsGroupCandidate)
        && (!AnyALU || !TII->isVectorOnly(SU->getInstr()))
    ) {
      InstructionsGroupCandidate.pop_back();
      Q.erase((It + 1).base());
      return SU;
    } else {
      InstructionsGroupCandidate.pop_back();
    }
  }
  return nullptr;
}

void R600SchedStrategy::LoadAlu() {
  std::vector<SUnit *> &QSrc = Pending[IDAlu];
  for (unsigned i = 0, e = QSrc.size(); i < e; ++i) {
    AluKind AK = getAluKind(QSrc[i]);
    AvailableAlus[AK].push_back(QSrc[i]);
  }
  QSrc.clear();
}

void R600SchedStrategy::PrepareNextSlot() {
  DEBUG(dbgs() << "New Slot\n");
  assert (OccupedSlotsMask && "Slot wasn't filled");
  OccupedSlotsMask = 0;
//  if (HwGen == AMDGPUSubtarget::NORTHERN_ISLANDS)
//    OccupedSlotsMask |= 16;
  InstructionsGroupCandidate.clear();
  LoadAlu();
}

void R600SchedStrategy::AssignSlot(MachineInstr* MI, unsigned Slot) {
  int DstIndex = TII->getOperandIdx(MI->getOpcode(), AMDGPU::OpName::dst);
  if (DstIndex == -1) {
    return;
  }
  unsigned DestReg = MI->getOperand(DstIndex).getReg();
  // PressureRegister crashes if an operand is defined and used in the same
  // instruction and we try to constrain its regclass.
  for (MachineInstr::mop_iterator It = MI->operands_begin(),
      E = MI->operands_end(); It != E; ++It) {
    MachineOperand &MO = *It;
    if (MO.isReg() && !MO.isDef() &&
        MO.getReg() == DestReg)
      return;
  }
  // Constrain the regclass of DestReg so that it is assigned to Slot.
  switch (Slot) {
  case 0:
    MRI->constrainRegClass(DestReg, &AMDGPU::R600_TReg32_XRegClass);
    break;
  case 1:
    MRI->constrainRegClass(DestReg, &AMDGPU::R600_TReg32_YRegClass);
    break;
  case 2:
    MRI->constrainRegClass(DestReg, &AMDGPU::R600_TReg32_ZRegClass);
    break;
  case 3:
    MRI->constrainRegClass(DestReg, &AMDGPU::R600_TReg32_WRegClass);
    break;
  }
}

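// Try to fill the given vector slot: prefer a unit already tied to that
// channel, otherwise take an AluAny unit and constrain its destination
// register to the slot's channel via AssignSlot().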
SUnit *R600SchedStrategy::AttemptFillSlot(unsigned Slot, bool AnyAlu) {
  static const AluKind IndexToID[] = {AluT_X, AluT_Y, AluT_Z, AluT_W};
  SUnit *SlotedSU = PopInst(AvailableAlus[IndexToID[Slot]], AnyAlu);
  if (SlotedSU)
    return SlotedSU;
  SUnit *UnslotedSU = PopInst(AvailableAlus[AluAny], AnyAlu);
  if (UnslotedSU)
    AssignSlot(UnslotedSU->getInstr(), Slot);
  return UnslotedSU;
}

unsigned R600SchedStrategy::AvailablesAluCount() const {
  return AvailableAlus[AluAny].size() + AvailableAlus[AluT_XYZW].size() +
      AvailableAlus[AluT_X].size() + AvailableAlus[AluT_Y].size() +
      AvailableAlus[AluT_Z].size() + AvailableAlus[AluT_W].size() +
      AvailableAlus[AluTrans].size() + AvailableAlus[AluDiscarded].size() +
      AvailableAlus[AluPredX].size();
}

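// Build VLIW groups bottom-up: solo instructions (PRED_X, discarded copies,
// full-XYZW ops) take a whole group; otherwise the Trans slot (VLIW5 only) and
// then the W..X vector slots are filled, and PrepareNextSlot() starts a new
// group once no remaining slot can be filled.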
SUnit* R600SchedStrategy::pickAlu() {
  while (AvailablesAluCount() || !Pending[IDAlu].empty()) {
    if (!OccupedSlotsMask) {
      // Bottom-up scheduling: PRED_X must come first.
      if (!AvailableAlus[AluPredX].empty()) {
        OccupedSlotsMask |= 31;
        return PopInst(AvailableAlus[AluPredX], false);
      }
      // Flush physical reg copies (RA will discard them)
      if (!AvailableAlus[AluDiscarded].empty()) {
        OccupedSlotsMask |= 31;
        return PopInst(AvailableAlus[AluDiscarded], false);
      }
      // If there is a T_XYZW alu available, use it
      if (!AvailableAlus[AluT_XYZW].empty()) {
        OccupedSlotsMask |= 15;
        return PopInst(AvailableAlus[AluT_XYZW], false);
      }
    }
    bool TransSlotOccuped = OccupedSlotsMask & 16;
    if (!TransSlotOccuped && VLIW5) {
      if (!AvailableAlus[AluTrans].empty()) {
        OccupedSlotsMask |= 16;
        return PopInst(AvailableAlus[AluTrans], false);
      }
      SUnit *SU = AttemptFillSlot(3, true);
      if (SU) {
        OccupedSlotsMask |= 16;
        return SU;
      }
    }
    for (int Chan = 3; Chan > -1; --Chan) {
      bool isOccupied = OccupedSlotsMask & (1 << Chan);
      if (!isOccupied) {
        SUnit *SU = AttemptFillSlot(Chan, false);
        if (SU) {
          OccupedSlotsMask |= (1 << Chan);
          InstructionsGroupCandidate.push_back(SU->getInstr());
          return SU;
        }
      }
    }
    PrepareNextSlot();
  }
  return nullptr;
}

SUnit* R600SchedStrategy::pickOther(int QID) {
  SUnit *SU = nullptr;
  std::vector<SUnit *> &AQ = Available[QID];

  if (AQ.empty()) {
    MoveUnits(Pending[QID], AQ);
  }
  if (!AQ.empty()) {
    SU = AQ.back();
    AQ.resize(AQ.size() - 1);
  }
  return SU;
}