// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/bit-vector.h"
#include "src/compiler/instruction.h"
#include "src/compiler/register-allocator-verifier.h"

namespace v8 {
namespace internal {
namespace compiler {

namespace {

size_t OperandCount(const Instruction* instr) {
  return instr->InputCount() + instr->OutputCount() + instr->TempCount();
}

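// Checks that an instruction carries no parallel moves yet; the verifier
// expects completely empty gap positions before allocation has run.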
void VerifyEmptyGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    CHECK(instr->GetParallelMove(inner_pos) == nullptr);
  }
}

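// Checks that, after allocation, every non-redundant move in an
// instruction's gap positions connects allocated (or constant-source)
// operands only.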
void VerifyAllocatedGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    const ParallelMove* moves = instr->GetParallelMove(inner_pos);
    if (moves == nullptr) continue;
    for (const MoveOperands* move : *moves) {
      if (move->IsRedundant()) continue;
      CHECK(move->source().IsAllocated() || move->source().IsConstant());
      CHECK(move->destination().IsAllocated());
    }
  }
}

}  // namespace

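// The constructor runs before register allocation: it records an
// OperandConstraint for every operand of every instruction, so that
// VerifyAssignment and VerifyGapMoves can check the allocator's output
// against them afterwards.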
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone),
      config_(config),
      sequence_(sequence),
      constraints_(zone),
      assessments_(zone),
      outstanding_assessments_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        CHECK(instr->InputCount() > 0);
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}

void RegisterAllocatorVerifier::VerifyInput(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
             constraint.virtual_register_);
  }
}

void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}

void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}

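// Called after allocation: walks every instruction again and checks each
// allocated operand against the constraint recorded for it in the
// constructor.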
void RegisterAllocatorVerifier::VerifyAssignment() {
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}

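// Derives the OperandConstraint for a single pre-allocation operand:
// constant, explicit and immediate operands record their value directly,
// while unallocated operands are classified by their allocation policy
// (fixed slot/register, must-have register/slot, any/none, same-as-first).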
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsExplicit()) {
    constraint->type_ = kExplicit;
  } else if (op->IsImmediate()) {
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
                                                        : imm->indexed_value();
    constraint->type_ = kImmediate;
    constraint->value_ = value;
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = sequence()->IsFP(vreg) ? kFPSlot : kSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::ANY:
        case UnallocatedOperand::NONE:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kNoneFP;
          } else {
            constraint->type_ = kNone;
          }
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_FP_REGISTER:
          constraint->type_ = kFixedFPRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kFPRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = sequence()->IsFP(vreg) ? kFPSlot : kSlot;
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}

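// Checks one allocated operand against the constraint recorded for it
// before allocation, e.g. that a kFixedRegister constraint ended up in the
// expected register and a kSlot constraint ended up on the stack.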
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400206void RegisterAllocatorVerifier::CheckConstraint(
207 const InstructionOperand* op, const OperandConstraint* constraint) {
208 switch (constraint->type_) {
209 case kConstant:
210 CHECK(op->IsConstant());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000211 CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
212 constraint->value_);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400213 return;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000214 case kImmediate: {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400215 CHECK(op->IsImmediate());
Ben Murdochda12d292016-06-02 14:46:10 +0100216 const ImmediateOperand* imm = ImmediateOperand::cast(op);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000217 int value = imm->type() == ImmediateOperand::INLINE
218 ? imm->inline_value()
219 : imm->indexed_value();
220 CHECK_EQ(value, constraint->value_);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400221 return;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000222 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400223 case kRegister:
224 CHECK(op->IsRegister());
225 return;
Ben Murdoch61f157c2016-09-16 13:49:30 +0100226 case kFPRegister:
Ben Murdochc5610432016-08-08 18:44:38 +0100227 CHECK(op->IsFPRegister());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400228 return;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000229 case kExplicit:
230 CHECK(op->IsExplicit());
231 return;
232 case kFixedRegister:
233 case kRegisterAndSlot:
234 CHECK(op->IsRegister());
Ben Murdoch61f157c2016-09-16 13:49:30 +0100235 CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000236 return;
Ben Murdoch61f157c2016-09-16 13:49:30 +0100237 case kFixedFPRegister:
Ben Murdochc5610432016-08-08 18:44:38 +0100238 CHECK(op->IsFPRegister());
Ben Murdoch61f157c2016-09-16 13:49:30 +0100239 CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400240 return;
241 case kFixedSlot:
242 CHECK(op->IsStackSlot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000243 CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
244 return;
245 case kSlot:
246 CHECK(op->IsStackSlot());
247 return;
Ben Murdoch61f157c2016-09-16 13:49:30 +0100248 case kFPSlot:
Ben Murdochc5610432016-08-08 18:44:38 +0100249 CHECK(op->IsFPStackSlot());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400250 return;
251 case kNone:
252 CHECK(op->IsRegister() || op->IsStackSlot());
253 return;
Ben Murdoch61f157c2016-09-16 13:49:30 +0100254 case kNoneFP:
Ben Murdochc5610432016-08-08 18:44:38 +0100255 CHECK(op->IsFPRegister() || op->IsFPStackSlot());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400256 return;
257 case kSameAsFirst:
258 CHECK(false);
259 return;
260 }
261}
262
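// Applies both gap positions (START and END) of an instruction to the
// block's current operand-to-assessment map.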
void BlockAssessments::PerformMoves(const Instruction* instruction) {
  const ParallelMove* first =
      instruction->GetParallelMove(Instruction::GapPosition::START);
  PerformParallelMoves(first);
  const ParallelMove* last =
      instruction->GetParallelMove(Instruction::GapPosition::END);
  PerformParallelMoves(last);
}

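// Parallel-move semantics: all sources are read before any destination is
// written, so assessments are first staged in map_for_moves_ and only then
// committed to map_.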
void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) {
  if (moves == nullptr) return;

  CHECK(map_for_moves_.empty());
  for (MoveOperands* move : *moves) {
    if (move->IsEliminated() || move->IsRedundant()) continue;
    auto it = map_.find(move->source());
    // The RHS of a parallel move should have been already assessed.
    CHECK(it != map_.end());
    // The LHS of a parallel move should not have been assigned in this
    // parallel move.
    CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end());
    // Copy the assessment to the destination.
    map_for_moves_[move->destination()] = it->second;
  }
  for (auto pair : map_for_moves_) {
    map_[pair.first] = pair.second;
  }
  map_for_moves_.clear();
}

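// Removes every register-based assessment; used at call sites, which
// clobber all registers.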
void BlockAssessments::DropRegisters() {
  for (auto iterator = map().begin(), end = map().end(); iterator != end;) {
    auto current = iterator;
    ++iterator;
    InstructionOperand op = current->first;
    if (op.IsAnyRegister()) map().erase(current);
  }
}

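// Builds the initial assessments for a block from its predecessors: a
// single already-processed predecessor is copied wholesale, while merge
// points (and loop headers) start every incoming operand as a
// PendingAssessment to be resolved when the operand is actually used.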
BlockAssessments* RegisterAllocatorVerifier::CreateForBlock(
    const InstructionBlock* block) {
  RpoNumber current_block_id = block->rpo_number();

  BlockAssessments* ret = new (zone()) BlockAssessments(zone());
  if (block->PredecessorCount() == 0) {
    // TODO(mtrofin): the following check should hold, however, in certain
    // unit tests it is invalidated by the last block. Investigate and
    // normalize the CFG.
    // CHECK(current_block_id.ToInt() == 0);
    // The phi size test below is because we can, technically, have phi
    // instructions with one argument. Some tests expose that, too.
  } else if (block->PredecessorCount() == 1 && block->phis().size() == 0) {
    const BlockAssessments* prev_block = assessments_[block->predecessors()[0]];
    ret->CopyFrom(prev_block);
  } else {
    for (RpoNumber pred_id : block->predecessors()) {
      // For every operand coming from any of the predecessors, create an
      // unfinalized assessment.
      auto iterator = assessments_.find(pred_id);
      if (iterator == assessments_.end()) {
        // This block is the head of a loop, and this predecessor is the
        // loopback arc. Validate this is a loop case, otherwise the CFG is
        // malformed.
        CHECK(pred_id >= current_block_id);
        CHECK(block->IsLoopHeader());
        continue;
      }
      const BlockAssessments* pred_assessments = iterator->second;
      CHECK_NOT_NULL(pred_assessments);
      for (auto pair : pred_assessments->map()) {
        InstructionOperand operand = pair.first;
        if (ret->map().find(operand) == ret->map().end()) {
          ret->map().insert(std::make_pair(
              operand, new (zone()) PendingAssessment(block, operand)));
        }
      }
    }
  }
  return ret;
}

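// Resolves a pending assessment by walking back through the predecessors
// of the block that created it, checking that each incoming value matches
// the expected virtual register (or the corresponding phi input).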
void RegisterAllocatorVerifier::ValidatePendingAssessment(
    RpoNumber block_id, InstructionOperand op,
    BlockAssessments* current_assessments, const PendingAssessment* assessment,
    int virtual_register) {
  // When validating a pending assessment, it is possible that some of the
  // assessments for the original operand (the one the assessment was first
  // created for) are also pending. To avoid recursion, we use a work list.
  // To deal with cycles, we keep a set of seen nodes.
  ZoneQueue<std::pair<const PendingAssessment*, int>> worklist(zone());
  ZoneSet<RpoNumber> seen(zone());
  worklist.push(std::make_pair(assessment, virtual_register));
  seen.insert(block_id);

  while (!worklist.empty()) {
    auto work = worklist.front();
    const PendingAssessment* current_assessment = work.first;
    int current_virtual_register = work.second;
    InstructionOperand current_operand = current_assessment->operand();
    worklist.pop();

    const InstructionBlock* origin = current_assessment->origin();
    CHECK(origin->PredecessorCount() > 1 || origin->phis().size() > 0);

    // Check if the virtual register is a phi first, instead of relying on
    // the incoming assessments. In particular, this handles the case
    // v1 = phi v0 v0, which structurally is identical to v0 having been
    // defined at the top of a diamond, and arriving at the node joining the
    // diamond's branches.
    const PhiInstruction* phi = nullptr;
    for (const PhiInstruction* candidate : origin->phis()) {
      if (candidate->virtual_register() == current_virtual_register) {
        phi = candidate;
        break;
      }
    }

    int op_index = 0;
    for (RpoNumber pred : origin->predecessors()) {
      int expected =
          phi != nullptr ? phi->operands()[op_index] : current_virtual_register;

      ++op_index;
      auto pred_assignment = assessments_.find(pred);
      if (pred_assignment == assessments_.end()) {
        CHECK(origin->IsLoopHeader());
        auto todo_iter = outstanding_assessments_.find(pred);
        DelayedAssessments* set = nullptr;
        if (todo_iter == outstanding_assessments_.end()) {
          set = new (zone()) DelayedAssessments(zone());
          outstanding_assessments_.insert(std::make_pair(pred, set));
        } else {
          set = todo_iter->second;
        }
        set->AddDelayedAssessment(current_operand, expected);
        continue;
      }

      const BlockAssessments* pred_assessments = pred_assignment->second;
      auto found_contribution = pred_assessments->map().find(current_operand);
      CHECK(found_contribution != pred_assessments->map().end());
      Assessment* contribution = found_contribution->second;

      switch (contribution->kind()) {
        case Final:
          ValidateFinalAssessment(
              block_id, current_operand, current_assessments,
              FinalAssessment::cast(contribution), expected);
          break;
        case Pending: {
          // This happens if we have a diamond feeding into another one, and
          // the inner one is never used other than for carrying the value.
          const PendingAssessment* next = PendingAssessment::cast(contribution);
          if (seen.find(pred) == seen.end()) {
            worklist.push({next, expected});
            seen.insert(pred);
          }
          // Note that we do not want to finalize pending assessments at the
          // beginning of a block - which is the information we'd have
          // available here. This is because this operand may be reused to
          // define duplicate phis.
          break;
        }
      }
    }
  }
  // If everything checks out, we may make the assessment.
  current_assessments->map()[op] =
      new (zone()) FinalAssessment(virtual_register, assessment);
}

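// A final assessment with a different virtual register may still be valid
// if it originated from a pending assessment for a duplicate phi; fall back
// to validating that original pending assessment.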
void RegisterAllocatorVerifier::ValidateFinalAssessment(
    RpoNumber block_id, InstructionOperand op,
    BlockAssessments* current_assessments, const FinalAssessment* assessment,
    int virtual_register) {
  if (assessment->virtual_register() == virtual_register) return;
  // If we have 2 phis with the exact same operand list, and the first phi is
  // used before the second one, via the operand incoming to the block,
  // and the second one's operand is defined (via a parallel move) after the
  // use, then the original operand will be assigned to the first phi. We
  // then look at the original pending assessment to ascertain if op
  // is virtual_register.
  const PendingAssessment* old = assessment->original_pending_assessment();
  CHECK_NOT_NULL(old);
  ValidatePendingAssessment(block_id, op, current_assessments, old,
                            virtual_register);
}

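// Checks that a used operand currently holds the expected virtual register,
// dispatching on whether its assessment is already final or still pending.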
void RegisterAllocatorVerifier::ValidateUse(
    RpoNumber block_id, BlockAssessments* current_assessments,
    InstructionOperand op, int virtual_register) {
  auto iterator = current_assessments->map().find(op);
  // We should have seen this operand before.
  CHECK(iterator != current_assessments->map().end());
  Assessment* assessment = iterator->second;

  switch (assessment->kind()) {
    case Final:
      ValidateFinalAssessment(block_id, op, current_assessments,
                              FinalAssessment::cast(assessment),
                              virtual_register);
      break;
    case Pending: {
      const PendingAssessment* pending = PendingAssessment::cast(assessment);
      ValidatePendingAssessment(block_id, op, current_assessments, pending,
                                virtual_register);
      break;
    }
  }
}

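// Walks all blocks in RPO order, simulating the gap moves of every
// instruction and checking that each use sees the virtual register the
// constraints promised; definitions and calls update the simulated state.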
void RegisterAllocatorVerifier::VerifyGapMoves() {
  CHECK(assessments_.empty());
  CHECK(outstanding_assessments_.empty());
  const size_t block_count = sequence()->instruction_blocks().size();
  for (size_t block_index = 0; block_index < block_count; ++block_index) {
    const InstructionBlock* block =
        sequence()->instruction_blocks()[block_index];
    BlockAssessments* block_assessments = CreateForBlock(block);

    for (int instr_index = block->code_start(); instr_index < block->code_end();
         ++instr_index) {
      const InstructionConstraint& instr_constraint = constraints_[instr_index];
      const Instruction* instr = instr_constraint.instruction_;
      block_assessments->PerformMoves(instr);

      const OperandConstraint* op_constraints =
          instr_constraint.operand_constraints_;
      size_t count = 0;
      for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
        if (op_constraints[count].type_ == kImmediate ||
            op_constraints[count].type_ == kExplicit) {
          continue;
        }
        int virtual_register = op_constraints[count].virtual_register_;
        InstructionOperand op = *instr->InputAt(i);
        ValidateUse(block->rpo_number(), block_assessments, op,
                    virtual_register);
      }
      for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
        block_assessments->Drop(*instr->TempAt(i));
      }
      if (instr->IsCall()) {
        block_assessments->DropRegisters();
      }
      for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
        int virtual_register = op_constraints[count].virtual_register_;
        block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register);
        if (op_constraints[count].type_ == kRegisterAndSlot) {
          const AllocatedOperand* reg_op =
              AllocatedOperand::cast(instr->OutputAt(i));
          MachineRepresentation rep = reg_op->representation();
          const AllocatedOperand* stack_op = AllocatedOperand::New(
              zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
              op_constraints[count].spilled_slot_);
          block_assessments->AddDefinition(*stack_op, virtual_register);
        }
      }
    }
    // Now commit the assessments for this block. If there are any delayed
    // assessments, ValidatePendingAssessment should see this block, too.
    assessments_[block->rpo_number()] = block_assessments;

    auto todo_iter = outstanding_assessments_.find(block->rpo_number());
    if (todo_iter == outstanding_assessments_.end()) continue;
    DelayedAssessments* todo = todo_iter->second;
    for (auto pair : todo->map()) {
      InstructionOperand op = pair.first;
      int vreg = pair.second;
      auto found_op = block_assessments->map().find(op);
      CHECK(found_op != block_assessments->map().end());
      switch (found_op->second->kind()) {
        case Final:
          ValidateFinalAssessment(block->rpo_number(), op, block_assessments,
                                  FinalAssessment::cast(found_op->second),
                                  vreg);
          break;
        case Pending:
          const PendingAssessment* pending =
              PendingAssessment::cast(found_op->second);
          ValidatePendingAssessment(block->rpo_number(), op, block_assessments,
                                    pending, vreg);
          block_assessments->map()[op] =
              new (zone()) FinalAssessment(vreg, pending);
          break;
      }
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8