// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/bit-vector.h"
#include "src/compiler/instruction.h"
#include "src/compiler/register-allocator-verifier.h"

namespace v8 {
namespace internal {
namespace compiler {

namespace {

size_t OperandCount(const Instruction* instr) {
  return instr->InputCount() + instr->OutputCount() + instr->TempCount();
}

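// Checks that no parallel move has been attached to any gap position of
// |instr| yet; the verifier builds its constraints before allocation, when
// all gaps must still be empty.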
void VerifyEmptyGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    CHECK(instr->GetParallelMove(inner_pos) == nullptr);
  }
}

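// Checks that, after allocation, every non-redundant move in |instr|'s gap
// positions reads from an allocated operand or a constant and writes to an
// allocated operand.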
void VerifyAllocatedGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    const ParallelMove* moves = instr->GetParallelMove(inner_pos);
    if (moves == nullptr) continue;
    for (const MoveOperands* move : *moves) {
      if (move->IsRedundant()) continue;
      CHECK(move->source().IsAllocated() || move->source().IsConstant());
      CHECK(move->destination().IsAllocated());
    }
  }
}

}  // namespace

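// The constructor snapshots, for every instruction, the constraints of all
// operands as they are before register allocation; VerifyAssignment() later
// checks the allocator's output against this snapshot.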
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone),
      config_(config),
      sequence_(sequence),
      constraints_(zone),
      assessments_(zone),
      outstanding_assessments_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        CHECK(instr->InputCount() > 0);
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}

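// VerifyInput/VerifyTemp/VerifyOutput sanity-check the constraints built
// above: kSameAsFirst must already have been rewritten away, temps may not
// be immediates, explicit operands, or constants, and inputs/outputs other
// than immediates and explicit operands must carry a valid virtual register.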
void RegisterAllocatorVerifier::VerifyInput(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
             constraint.virtual_register_);
  }
}

void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}

void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}

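// Walks the allocated code and checks every operand of every instruction
// against the constraint recorded for it in the constructor.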
void RegisterAllocatorVerifier::VerifyAssignment() {
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}

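// Translates a single pre-allocation operand into an OperandConstraint: the
// constraint type mirrors the operand's allocation policy, and value_ holds
// the associated payload (virtual register, immediate value, or fixed
// register/slot index).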
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsExplicit()) {
    constraint->type_ = kExplicit;
  } else if (op->IsImmediate()) {
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
                                                        : imm->indexed_value();
    constraint->type_ = kImmediate;
    constraint->value_ = value;
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = sequence()->IsFloat(vreg) ? kDoubleSlot : kSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::ANY:
        case UnallocatedOperand::NONE:
          if (sequence()->IsFloat(vreg)) {
            constraint->type_ = kNoneDouble;
          } else {
            constraint->type_ = kNone;
          }
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_DOUBLE_REGISTER:
          constraint->type_ = kFixedDoubleRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFloat(vreg)) {
            constraint->type_ = kDoubleRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = sequence()->IsFloat(vreg) ? kDoubleSlot : kSlot;
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}

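// Checks a single post-allocation operand against the constraint that was
// built for it before allocation.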
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK(op->IsConstant());
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK(op->IsImmediate());
      const ImmediateOperand* imm = ImmediateOperand::cast(op);
      int value = imm->type() == ImmediateOperand::INLINE
                      ? imm->inline_value()
                      : imm->indexed_value();
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK(op->IsRegister());
      return;
    case kDoubleRegister:
      CHECK(op->IsFPRegister());
      return;
    case kExplicit:
      CHECK(op->IsExplicit());
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      CHECK(op->IsRegister());
      CHECK_EQ(LocationOperand::cast(op)->GetRegister().code(),
               constraint->value_);
      return;
    case kFixedDoubleRegister:
      CHECK(op->IsFPRegister());
      CHECK_EQ(LocationOperand::cast(op)->GetDoubleRegister().code(),
               constraint->value_);
      return;
    case kFixedSlot:
      CHECK(op->IsStackSlot());
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK(op->IsStackSlot());
      return;
    case kDoubleSlot:
      CHECK(op->IsFPStackSlot());
      return;
    case kNone:
      CHECK(op->IsRegister() || op->IsStackSlot());
      return;
    case kNoneDouble:
      CHECK(op->IsFPRegister() || op->IsFPStackSlot());
      return;
    case kSameAsFirst:
      CHECK(false);
      return;
  }
}

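// Applies the parallel moves attached to |instruction|'s START and END gap
// positions to the block's assessment map.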
void BlockAssessments::PerformMoves(const Instruction* instruction) {
  const ParallelMove* first =
      instruction->GetParallelMove(Instruction::GapPosition::START);
  PerformParallelMoves(first);
  const ParallelMove* last =
      instruction->GetParallelMove(Instruction::GapPosition::END);
  PerformParallelMoves(last);
}

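// Applies one parallel move: every destination inherits the assessment of
// its source. map_for_moves_ is used as scratch space so the moves are
// observed as happening in parallel rather than sequentially.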
void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) {
  if (moves == nullptr) return;

  CHECK(map_for_moves_.empty());
  for (MoveOperands* move : *moves) {
    if (move->IsEliminated() || move->IsRedundant()) continue;
    auto it = map_.find(move->source());
    // The RHS of a parallel move should have been already assessed.
    CHECK(it != map_.end());
    // The LHS of a parallel move should not have been assigned in this
    // parallel move.
    CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end());
    // Copy the assessment to the destination.
    map_for_moves_[move->destination()] = it->second;
  }
  for (auto pair : map_for_moves_) {
    map_[pair.first] = pair.second;
  }
  map_for_moves_.clear();
}

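// Removes all register operands from the assessment map; used at call
// instructions, which clobber registers.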
void BlockAssessments::DropRegisters() {
  for (auto iterator = map().begin(), end = map().end(); iterator != end;) {
    auto current = iterator;
    ++iterator;
    InstructionOperand op = current->first;
    if (op.IsAnyRegister()) map().erase(current);
  }
}

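// Builds the initial assessments for |block| from its predecessors: an empty
// map for entry blocks, a copy of the single predecessor's map for blocks
// without phis, and a PendingAssessment per incoming operand otherwise.
// Predecessors that have not been processed yet must be loop back edges and
// are skipped here.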
BlockAssessments* RegisterAllocatorVerifier::CreateForBlock(
    const InstructionBlock* block) {
  RpoNumber current_block_id = block->rpo_number();

  BlockAssessments* ret = new (zone()) BlockAssessments(zone());
  if (block->PredecessorCount() == 0) {
    // TODO(mtrofin): the following check should hold, however, in certain
    // unit tests it is invalidated by the last block. Investigate and
    // normalize the CFG.
    // CHECK(current_block_id.ToInt() == 0);
    // The phi size test below is because we can, technically, have phi
    // instructions with one argument. Some tests expose that, too.
  } else if (block->PredecessorCount() == 1 && block->phis().size() == 0) {
    const BlockAssessments* prev_block = assessments_[block->predecessors()[0]];
    ret->CopyFrom(prev_block);
  } else {
    for (RpoNumber pred_id : block->predecessors()) {
      // For every operand coming from any of the predecessors, create an
      // Unfinalized assessment.
      auto iterator = assessments_.find(pred_id);
      if (iterator == assessments_.end()) {
        // This block is the head of a loop, and this predecessor is the
        // loopback arc. Validate this is a loop case, otherwise the CFG is
        // malformed.
        CHECK(pred_id >= current_block_id);
        CHECK(block->IsLoopHeader());
        continue;
      }
      const BlockAssessments* pred_assessments = iterator->second;
      CHECK_NOT_NULL(pred_assessments);
      for (auto pair : pred_assessments->map()) {
        InstructionOperand operand = pair.first;
        if (ret->map().find(operand) == ret->map().end()) {
          ret->map().insert(std::make_pair(
              operand, new (zone()) PendingAssessment(block, operand)));
        }
      }
    }
  }
  return ret;
}

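// Resolves a pending assessment for |op|: for every predecessor of the
// block that originated the assessment, check that the operand carried the
// expected virtual register (or the matching phi input). Predecessors that
// have not been processed yet (loop back edges) get a delayed assessment
// that is re-checked once their block is done.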
void RegisterAllocatorVerifier::ValidatePendingAssessment(
    RpoNumber block_id, InstructionOperand op,
    BlockAssessments* current_assessments, const PendingAssessment* assessment,
    int virtual_register) {
  // When validating a pending assessment, it is possible that some of the
  // assessments for the original operand (the one the assessment was first
  // created for) are also pending. To avoid recursion, we use a work list.
  // To deal with cycles, we keep a set of seen nodes.
  ZoneQueue<std::pair<const PendingAssessment*, int>> worklist(zone());
  ZoneSet<RpoNumber> seen(zone());
  worklist.push(std::make_pair(assessment, virtual_register));
  seen.insert(block_id);

  while (!worklist.empty()) {
    auto work = worklist.front();
    const PendingAssessment* current_assessment = work.first;
    int current_virtual_register = work.second;
    InstructionOperand current_operand = current_assessment->operand();
    worklist.pop();

    const InstructionBlock* origin = current_assessment->origin();
    CHECK(origin->PredecessorCount() > 1 || origin->phis().size() > 0);

    // Check if the virtual register is a phi first, instead of relying on
    // the incoming assessments. In particular, this handles the case
    // v1 = phi v0 v0, which structurally is identical to v0 having been
    // defined at the top of a diamond, and arriving at the node joining the
    // diamond's branches.
    const PhiInstruction* phi = nullptr;
    for (const PhiInstruction* candidate : origin->phis()) {
      if (candidate->virtual_register() == current_virtual_register) {
        phi = candidate;
        break;
      }
    }

    int op_index = 0;
    for (RpoNumber pred : origin->predecessors()) {
      int expected =
          phi != nullptr ? phi->operands()[op_index] : current_virtual_register;

      ++op_index;
      auto pred_assignment = assessments_.find(pred);
      if (pred_assignment == assessments_.end()) {
        CHECK(origin->IsLoopHeader());
        auto todo_iter = outstanding_assessments_.find(pred);
        DelayedAssessments* set = nullptr;
        if (todo_iter == outstanding_assessments_.end()) {
          set = new (zone()) DelayedAssessments(zone());
          outstanding_assessments_.insert(std::make_pair(pred, set));
        } else {
          set = todo_iter->second;
        }
        set->AddDelayedAssessment(current_operand, expected);
        continue;
      }

      const BlockAssessments* pred_assessments = pred_assignment->second;
      auto found_contribution = pred_assessments->map().find(current_operand);
      CHECK(found_contribution != pred_assessments->map().end());
      Assessment* contribution = found_contribution->second;

      switch (contribution->kind()) {
        case Final:
          ValidateFinalAssessment(
              block_id, current_operand, current_assessments,
              FinalAssessment::cast(contribution), expected);
          break;
        case Pending: {
          // This happens if we have a diamond feeding into another one, and
          // the inner one is never used - other than for carrying the value.
          const PendingAssessment* next = PendingAssessment::cast(contribution);
          if (seen.find(pred) == seen.end()) {
            worklist.push({next, expected});
            seen.insert(pred);
          }
          // Note that we do not want to finalize pending assessments at the
          // beginning of a block - which is the information we'd have
          // available here. This is because this operand may be reused to
          // define duplicate phis.
          break;
        }
      }
    }
  }
  // If everything checks out, we may make the assessment.
  current_assessments->map()[op] =
      new (zone()) FinalAssessment(virtual_register, assessment);
}

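// A final assessment is accepted if it carries the expected virtual
// register; otherwise validation falls back to the pending assessment it
// was finalized from, which covers the duplicate-phi scenario described in
// the body.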
void RegisterAllocatorVerifier::ValidateFinalAssessment(
    RpoNumber block_id, InstructionOperand op,
    BlockAssessments* current_assessments, const FinalAssessment* assessment,
    int virtual_register) {
  if (assessment->virtual_register() == virtual_register) return;
  // If we have 2 phis with the exact same operand list, and the first phi is
  // used before the second one, via the operand incoming to the block,
  // and the second one's operand is defined (via a parallel move) after the
  // use, then the original operand will be assigned to the first phi. We
  // then look at the original pending assessment to ascertain if op
  // is virtual_register.
  const PendingAssessment* old = assessment->original_pending_assessment();
  CHECK_NOT_NULL(old);
  ValidatePendingAssessment(block_id, op, current_assessments, old,
                            virtual_register);
}

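// Checks that a use of |op| sees the expected virtual register, dispatching
// on whether the operand's current assessment is final or still pending.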
void RegisterAllocatorVerifier::ValidateUse(
    RpoNumber block_id, BlockAssessments* current_assessments,
    InstructionOperand op, int virtual_register) {
  auto iterator = current_assessments->map().find(op);
  // We should have seen this operand before.
  CHECK(iterator != current_assessments->map().end());
  Assessment* assessment = iterator->second;

  switch (assessment->kind()) {
    case Final:
      ValidateFinalAssessment(block_id, op, current_assessments,
                              FinalAssessment::cast(assessment),
                              virtual_register);
      break;
    case Pending: {
      const PendingAssessment* pending = PendingAssessment::cast(assessment);
      ValidatePendingAssessment(block_id, op, current_assessments, pending,
                                virtual_register);
      break;
    }
  }
}

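// Main driver of the gap-move verification: walks the instruction blocks in
// order, replays each instruction's parallel moves, validates input uses
// against the recorded constraints, records output definitions, and finally
// resolves any assessments that were delayed on loop back edges into this
// block.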
void RegisterAllocatorVerifier::VerifyGapMoves() {
  CHECK(assessments_.empty());
  CHECK(outstanding_assessments_.empty());
  const size_t block_count = sequence()->instruction_blocks().size();
  for (size_t block_index = 0; block_index < block_count; ++block_index) {
    const InstructionBlock* block =
        sequence()->instruction_blocks()[block_index];
    BlockAssessments* block_assessments = CreateForBlock(block);

    for (int instr_index = block->code_start(); instr_index < block->code_end();
         ++instr_index) {
      const InstructionConstraint& instr_constraint = constraints_[instr_index];
      const Instruction* instr = instr_constraint.instruction_;
      block_assessments->PerformMoves(instr);

      const OperandConstraint* op_constraints =
          instr_constraint.operand_constraints_;
      size_t count = 0;
      for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
        if (op_constraints[count].type_ == kImmediate ||
            op_constraints[count].type_ == kExplicit) {
          continue;
        }
        int virtual_register = op_constraints[count].virtual_register_;
        InstructionOperand op = *instr->InputAt(i);
        ValidateUse(block->rpo_number(), block_assessments, op,
                    virtual_register);
      }
      for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
        block_assessments->Drop(*instr->TempAt(i));
      }
      if (instr->IsCall()) {
        block_assessments->DropRegisters();
      }
      for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
        int virtual_register = op_constraints[count].virtual_register_;
        block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register);
        if (op_constraints[count].type_ == kRegisterAndSlot) {
          const AllocatedOperand* reg_op =
              AllocatedOperand::cast(instr->OutputAt(i));
          MachineRepresentation rep = reg_op->representation();
          const AllocatedOperand* stack_op = AllocatedOperand::New(
              zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
              op_constraints[count].spilled_slot_);
          block_assessments->AddDefinition(*stack_op, virtual_register);
        }
      }
    }
    // Now commit the assessments for this block. If there are any delayed
    // assessments, ValidatePendingAssessment should see this block, too.
    assessments_[block->rpo_number()] = block_assessments;

    auto todo_iter = outstanding_assessments_.find(block->rpo_number());
    if (todo_iter == outstanding_assessments_.end()) continue;
    DelayedAssessments* todo = todo_iter->second;
    for (auto pair : todo->map()) {
      InstructionOperand op = pair.first;
      int vreg = pair.second;
      auto found_op = block_assessments->map().find(op);
      CHECK(found_op != block_assessments->map().end());
      switch (found_op->second->kind()) {
        case Final:
          ValidateFinalAssessment(block->rpo_number(), op, block_assessments,
                                  FinalAssessment::cast(found_op->second),
                                  vreg);
          break;
        case Pending:
          const PendingAssessment* pending =
              PendingAssessment::cast(found_op->second);
          ValidatePendingAssessment(block->rpo_number(), op, block_assessments,
                                    pending, vreg);
          block_assessments->map()[op] =
              new (zone()) FinalAssessment(vreg, pending);
          break;
      }
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8