blob: f2160f52ce7a788530be617526b25f5a7d286740 [file] [log] [blame]
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include "src/bit-vector.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -04006#include "src/compiler/instruction.h"
7#include "src/compiler/register-allocator-verifier.h"
8
9namespace v8 {
10namespace internal {
11namespace compiler {
12
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013namespace {
14
15size_t OperandCount(const Instruction* instr) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -040016 return instr->InputCount() + instr->OutputCount() + instr->TempCount();
17}
18
19
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000020void VerifyEmptyGaps(const Instruction* instr) {
21 for (int i = Instruction::FIRST_GAP_POSITION;
22 i <= Instruction::LAST_GAP_POSITION; i++) {
23 Instruction::GapPosition inner_pos =
24 static_cast<Instruction::GapPosition>(i);
25 CHECK(instr->GetParallelMove(inner_pos) == nullptr);
Emily Bernierd0a1eb72015-03-24 16:35:39 -040026 }
27}
28
29
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000030void VerifyAllocatedGaps(const Instruction* instr) {
31 for (int i = Instruction::FIRST_GAP_POSITION;
32 i <= Instruction::LAST_GAP_POSITION; i++) {
33 Instruction::GapPosition inner_pos =
34 static_cast<Instruction::GapPosition>(i);
Ben Murdochda12d292016-06-02 14:46:10 +010035 const ParallelMove* moves = instr->GetParallelMove(inner_pos);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000036 if (moves == nullptr) continue;
Ben Murdochda12d292016-06-02 14:46:10 +010037 for (const MoveOperands* move : *moves) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000038 if (move->IsRedundant()) continue;
39 CHECK(move->source().IsAllocated() || move->source().IsConstant());
40 CHECK(move->destination().IsAllocated());
41 }
42 }
43}
44
45} // namespace
46
47
Emily Bernierd0a1eb72015-03-24 16:35:39 -040048void RegisterAllocatorVerifier::VerifyInput(
49 const OperandConstraint& constraint) {
50 CHECK_NE(kSameAsFirst, constraint.type_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000051 if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
52 CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
Emily Bernierd0a1eb72015-03-24 16:35:39 -040053 constraint.virtual_register_);
54 }
55}
56
57
// Checks that a temp operand's constraint is legal for a temp position:
// temps may not alias the first input and may not be immediates, explicit
// operands, or constants.
void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}
65
66
// Checks that an output operand's constraint is legal for an output
// position: outputs cannot be immediates or explicit operands, and must
// always name a valid virtual register.
void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}
74
75
// Captures a snapshot of every instruction's operand constraints BEFORE
// register allocation runs, so VerifyAssignment() can later compare the
// allocator's output against them.
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone), config_(config), sequence_(sequence), constraints_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    // Constraints are stored flat in instruction operand order:
    // inputs first, then temps, then outputs; |count| is the flat index.
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        // Rewrite kSameAsFirst into a copy of the first input's constraint
        // so later checks never have to handle it.
        CHECK(instr->InputCount() > 0);
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}
113
114
// Walks the (now allocated) instruction sequence in lockstep with the
// constraint snapshot captured by the constructor and checks every operand
// against its recorded constraint.
void RegisterAllocatorVerifier::VerifyAssignment() {
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr);
    // NOTE: "constaints" is a pre-existing typo in the field name declared
    // in the header; kept for consistency.
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    // The sequence must not have been reordered or resized since snapshot.
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    // Same flat layout as in the constructor: inputs, temps, outputs.
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}
140
141
// Translates a pre-allocation operand into an OperandConstraint describing
// what the allocator is allowed to turn it into.
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;  // sentinel: "no associated value".
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsExplicit()) {
    constraint->type_ = kExplicit;
  } else if (op->IsImmediate()) {
    // An immediate's identity is its inline value or its index into the
    // immediates table, depending on its encoding.
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
                                                        : imm->indexed_value();
    constraint->type_ = kImmediate;
    constraint->value_ = value;
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = sequence()->IsFloat(vreg) ? kDoubleSlot : kSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::ANY:
        case UnallocatedOperand::NONE:
          // No placement requirement; only the representation (float vs.
          // not) is remembered so the check can pick the right predicate.
          if (sequence()->IsFloat(vreg)) {
            constraint->type_ = kNoneDouble;
          } else {
            constraint->type_ = kNone;
          }
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            // Value must land in the fixed register AND be mirrored into a
            // specific spill slot.
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_DOUBLE_REGISTER:
          constraint->type_ = kFixedDoubleRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFloat(vreg)) {
            constraint->type_ = kDoubleRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = sequence()->IsFloat(vreg) ? kDoubleSlot : kSlot;
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          // Resolved to the first input's constraint by the constructor.
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}
206
207
// Checks a post-allocation operand against the constraint recorded for it
// before allocation. CHECK-fails on any mismatch.
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK(op->IsConstant());
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK(op->IsImmediate());
      // Compare by the same inline-or-indexed value BuildConstraint stored.
      const ImmediateOperand* imm = ImmediateOperand::cast(op);
      int value = imm->type() == ImmediateOperand::INLINE
                      ? imm->inline_value()
                      : imm->indexed_value();
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK(op->IsRegister());
      return;
    case kDoubleRegister:
      CHECK(op->IsDoubleRegister());
      return;
    case kExplicit:
      CHECK(op->IsExplicit());
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      // kRegisterAndSlot's slot copy is verified separately during gap-move
      // verification; here only the register half is checked.
      CHECK(op->IsRegister());
      CHECK_EQ(LocationOperand::cast(op)->GetRegister().code(),
               constraint->value_);
      return;
    case kFixedDoubleRegister:
      CHECK(op->IsDoubleRegister());
      CHECK_EQ(LocationOperand::cast(op)->GetDoubleRegister().code(),
               constraint->value_);
      return;
    case kFixedSlot:
      CHECK(op->IsStackSlot());
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK(op->IsStackSlot());
      return;
    case kDoubleSlot:
      CHECK(op->IsDoubleStackSlot());
      return;
    case kNone:
      // "Anywhere" for a non-float value: register or stack slot.
      CHECK(op->IsRegister() || op->IsStackSlot());
      return;
    case kNoneDouble:
      CHECK(op->IsDoubleRegister() || op->IsDoubleStackSlot());
      return;
    case kSameAsFirst:
      // Eliminated in the constructor; must never reach verification.
      CHECK(false);
      return;
  }
}
266
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000267namespace {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400268
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000269typedef RpoNumber Rpo;
270
271static const int kInvalidVreg = InstructionOperand::kInvalidVirtualRegister;
272
// Immutable description of one phi: where it is defined, its own vreg, and
// the vreg reaching it along the first predecessor (with the first-pred phi
// chain flattened so first_pred_vreg is never itself a phi).
struct PhiData : public ZoneObject {
  PhiData(Rpo definition_rpo, const PhiInstruction* phi, int first_pred_vreg,
          const PhiData* first_pred_phi, Zone* zone)
      : definition_rpo(definition_rpo),
        virtual_register(phi->virtual_register()),
        first_pred_vreg(first_pred_vreg),
        first_pred_phi(first_pred_phi),
        operands(zone) {
    // Copy the phi's per-predecessor input vregs into zone storage.
    operands.reserve(phi->operands().size());
    operands.insert(operands.begin(), phi->operands().begin(),
                    phi->operands().end());
  }
  const Rpo definition_rpo;       // block (RPO) in which the phi is defined.
  const int virtual_register;     // the phi's own output vreg.
  const int first_pred_vreg;      // non-phi vreg along the first predecessor.
  const PhiData* first_pred_phi;  // phi chain link, if input 0 was a phi.
  IntVector operands;             // input vreg per predecessor.
};
291
// Zone-allocated map from a phi's virtual register to its PhiData.
class PhiMap : public ZoneMap<int, PhiData*>, public ZoneObject {
 public:
  explicit PhiMap(Zone* zone) : ZoneMap<int, PhiData*>(zone) {}
};
296
// Strict-weak ordering for InstructionOperand keys, comparing by canonical
// value so aliasing operand encodings map to the same key.
struct OperandLess {
  bool operator()(const InstructionOperand* a,
                  const InstructionOperand* b) const {
    return a->CompareCanonicalized(*b);
  }
};
303
// Per-block map from locations (registers / stack slots, keyed as
// InstructionOperands) to the virtual-register facts known about the value
// currently stored there. Gap moves permute entries; defs/uses are checked
// against the constraint snapshot.
class OperandMap : public ZoneObject {
 public:
  struct MapValue : public ZoneObject {
    MapValue()
        : incoming(nullptr),
          define_vreg(kInvalidVreg),
          use_vreg(kInvalidVreg),
          succ_vreg(kInvalidVreg) {}
    MapValue* incoming;  // value from first predecessor block.
    int define_vreg;     // valid if this value was defined in this block.
    int use_vreg;        // valid if this value was used in this block.
    int succ_vreg;       // valid if propagated back from successor block.
  };

  class Map
      : public ZoneMap<const InstructionOperand*, MapValue*, OperandLess> {
   public:
    explicit Map(Zone* zone)
        : ZoneMap<const InstructionOperand*, MapValue*, OperandLess>(zone) {}

    // Remove all entries with keys not in other.
    // Relies on both maps being sorted by OperandLess: a single merge-style
    // sweep erases local keys that other lacks.
    void Intersect(const Map& other) {
      if (this->empty()) return;
      auto it = this->begin();
      OperandLess less;
      for (const std::pair<const InstructionOperand*, MapValue*>& o : other) {
        while (less(it->first, o.first)) {
          this->erase(it++);
          if (it == this->end()) return;
        }
        if (it->first->EqualsCanonicalized(*o.first)) {
          ++it;
          if (it == this->end()) return;
        } else {
          // o.first is not present locally; keep scanning other.
          CHECK(less(o.first, it->first));
        }
      }
    }
  };

  explicit OperandMap(Zone* zone) : map_(zone) {}

  Map& map() { return map_; }

  // Applies a parallel move: all sources are read before any destination is
  // written, so the whole move set is computed first, then installed.
  void RunParallelMoves(Zone* zone, const ParallelMove* moves) {
    // Compute outgoing mappings.
    Map to_insert(zone);
    for (const MoveOperands* move : *moves) {
      if (move->IsEliminated()) continue;
      auto cur = map().find(&move->source());
      CHECK(cur != map().end());
      auto res =
          to_insert.insert(std::make_pair(&move->destination(), cur->second));
      // Ensure injectivity of moves.
      CHECK(res.second);
    }
    // Drop current mappings.
    for (const MoveOperands* move : *moves) {
      if (move->IsEliminated()) continue;
      auto cur = map().find(&move->destination());
      if (cur != map().end()) map().erase(cur);
    }
    // Insert new values.
    map().insert(to_insert.begin(), to_insert.end());
  }

  // Applies both gap positions of |instr| in order.
  void RunGaps(Zone* zone, const Instruction* instr) {
    for (int i = Instruction::FIRST_GAP_POSITION;
         i <= Instruction::LAST_GAP_POSITION; i++) {
      Instruction::GapPosition inner_pos =
          static_cast<Instruction::GapPosition>(i);
      const ParallelMove* move = instr->GetParallelMove(inner_pos);
      if (move == nullptr) continue;
      RunParallelMoves(zone, move);
    }
  }

  // Forgets whatever value was stored at |op| (used for temps, which are
  // clobbered by the instruction).
  void Drop(const InstructionOperand* op) {
    auto it = map().find(op);
    if (it != map().end()) map().erase(it);
  }

  // Forgets all register-resident values (calls clobber every register).
  void DropRegisters(const RegisterConfiguration* config) {
    // TODO(dcarney): sort map by kind and drop range.
    for (auto it = map().begin(); it != map().end();) {
      const InstructionOperand* op = it->first;
      if (op->IsRegister() || op->IsDoubleRegister()) {
        map().erase(it++);
      } else {
        ++it;
      }
    }
  }

  // Records that |op| now holds a fresh definition of |virtual_register|,
  // replacing any previous entry for that location.
  MapValue* Define(Zone* zone, const InstructionOperand* op,
                   int virtual_register) {
    MapValue* value = new (zone) MapValue();
    value->define_vreg = virtual_register;
    auto res = map().insert(std::make_pair(op, value));
    if (!res.second) res.first->second = value;
    return value;
  }

  // Checks that reading |op| really yields |use_vreg|. In the initial pass
  // the check walks the incoming chain of predecessor values; in the second
  // pass it relies on succ_vreg propagated by PropagateUsesBackwards().
  void Use(const InstructionOperand* op, int use_vreg, bool initial_pass) {
    auto it = map().find(op);
    CHECK(it != map().end());
    MapValue* v = it->second;
    if (v->define_vreg != kInvalidVreg) {
      CHECK_EQ(v->define_vreg, use_vreg);
    }
    // Already used this vreg in this block.
    if (v->use_vreg != kInvalidVreg) {
      CHECK_EQ(v->use_vreg, use_vreg);
      return;
    }
    if (!initial_pass) {
      // A value may be defined and used in this block or the use must have
      // propagated up.
      if (v->succ_vreg != kInvalidVreg) {
        CHECK_EQ(v->succ_vreg, use_vreg);
      } else {
        CHECK_EQ(v->define_vreg, use_vreg);
      }
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Go up block list and ensure the correct definition is reached.
    for (; v != nullptr; v = v->incoming) {
      // Value unused in block.
      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
        continue;
      }
      // Found correct definition or use.
      CHECK(v->define_vreg == use_vreg || v->use_vreg == use_vreg);
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Use of a non-phi value without definition.
    CHECK(false);
  }

  // Like Use(), but for phi inputs: the value read may be the phi's own vreg
  // or (transitively) the vreg flowing in along the first predecessor.
  void UsePhi(const InstructionOperand* op, const PhiData* phi,
              bool initial_pass) {
    auto it = map().find(op);
    CHECK(it != map().end());
    MapValue* v = it->second;
    int use_vreg = phi->virtual_register;
    // Phis are not defined.
    CHECK_EQ(kInvalidVreg, v->define_vreg);
    // Already used this vreg in this block.
    if (v->use_vreg != kInvalidVreg) {
      CHECK_EQ(v->use_vreg, use_vreg);
      return;
    }
    if (!initial_pass) {
      // A used phi must have propagated its use to a predecessor.
      CHECK_EQ(v->succ_vreg, use_vreg);
      // Mark the use.
      v->use_vreg = use_vreg;
      return;
    }
    // Go up the block list starting at the first predecessor and ensure this
    // phi has a correct use or definition.
    for (v = v->incoming; v != nullptr; v = v->incoming) {
      // Value unused in block.
      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
        continue;
      }
      // Found correct definition or use.
      if (v->define_vreg != kInvalidVreg) {
        CHECK(v->define_vreg == phi->first_pred_vreg);
      } else if (v->use_vreg != phi->first_pred_vreg) {
        // Walk the phi chain, hunting for a matching phi use.
        const PhiData* p = phi;
        for (; p != nullptr; p = p->first_pred_phi) {
          if (p->virtual_register == v->use_vreg) break;
        }
        CHECK(p);
      }
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Use of a phi value without definition.
    UNREACHABLE();
  }

 private:
  Map map_;
  DISALLOW_COPY_AND_ASSIGN(OperandMap);
};
497
498} // namespace
499
500
// Owns one incoming and one outgoing OperandMap per block, plus the phi
// table, and implements the backwards fixpoint that pushes observed uses
// from successor blocks to the blocks that define the values.
class RegisterAllocatorVerifier::BlockMaps {
 public:
  BlockMaps(Zone* zone, const InstructionSequence* sequence)
      : zone_(zone),
        sequence_(sequence),
        phi_map_guard_(sequence->VirtualRegisterCount(), zone),
        phi_map_(zone),
        incoming_maps_(zone),
        outgoing_maps_(zone) {
    InitializePhis();
    InitializeOperandMaps();
  }

  // Fast bit-vector test; avoids a map lookup for non-phi vregs.
  bool IsPhi(int virtual_register) {
    return phi_map_guard_.Contains(virtual_register);
  }

  const PhiData* GetPhi(int virtual_register) {
    auto it = phi_map_.find(virtual_register);
    CHECK(it != phi_map_.end());
    return it->second;
  }

  // First pass seeds a block's state from its first predecessor; second
  // pass reuses the intersected incoming map computed between the passes.
  OperandMap* InitializeIncoming(size_t block_index, bool initial_pass) {
    return initial_pass ? InitializeFromFirstPredecessor(block_index)
                        : InitializeFromIntersection(block_index);
  }

  void PropagateUsesBackwards() {
    // Worklist ordered by descending block id so successors are processed
    // before their predecessors.
    typedef std::set<size_t, std::greater<size_t>, zone_allocator<size_t>>
        BlockIds;
    BlockIds block_ids((BlockIds::key_compare()),
                       zone_allocator<size_t>(zone()));
    // First ensure that incoming contains only keys in all predecessors.
    for (const InstructionBlock* block : sequence()->instruction_blocks()) {
      size_t index = block->rpo_number().ToSize();
      block_ids.insert(index);
      OperandMap::Map& succ_map = incoming_maps_[index]->map();
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        RpoNumber pred_rpo = block->predecessors()[i];
        succ_map.Intersect(outgoing_maps_[pred_rpo.ToSize()]->map());
      }
    }
    // Back propagation fixpoint.
    while (!block_ids.empty()) {
      // Pop highest block_id.
      auto block_id_it = block_ids.begin();
      const size_t succ_index = *block_id_it;
      block_ids.erase(block_id_it);
      // Propagate uses back to their definition blocks using succ_vreg.
      const InstructionBlock* block =
          sequence()->instruction_blocks()[succ_index];
      OperandMap::Map& succ_map = incoming_maps_[succ_index]->map();
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        for (auto& succ_val : succ_map) {
          // An incoming map contains no defines.
          CHECK_EQ(kInvalidVreg, succ_val.second->define_vreg);
          // Compute succ_vreg.
          int succ_vreg = succ_val.second->succ_vreg;
          if (succ_vreg == kInvalidVreg) {
            succ_vreg = succ_val.second->use_vreg;
            // Initialize succ_vreg in back propagation chain.
            succ_val.second->succ_vreg = succ_vreg;
          }
          if (succ_vreg == kInvalidVreg) continue;
          // May need to transition phi.
          if (IsPhi(succ_vreg)) {
            const PhiData* phi = GetPhi(succ_vreg);
            if (phi->definition_rpo.ToSize() == succ_index) {
              // phi definition block, transition to pred value.
              succ_vreg = phi->operands[i];
            }
          }
          // Push succ_vreg up to all predecessors.
          RpoNumber pred_rpo = block->predecessors()[i];
          OperandMap::Map& pred_map = outgoing_maps_[pred_rpo.ToSize()]->map();
          auto& pred_val = *pred_map.find(succ_val.first);
          if (pred_val.second->use_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->use_vreg);
          }
          if (pred_val.second->define_vreg != kInvalidVreg) {
            CHECK_EQ(succ_vreg, pred_val.second->define_vreg);
          }
          if (pred_val.second->succ_vreg != kInvalidVreg) {
            if (succ_vreg != pred_val.second->succ_vreg) {
              // When a block introduces 2 identical phis A and B, and both are
              // operands to other phis C and D, and we optimized the moves
              // defining A or B such that they now appear in the block defining
              // A and B, the back propagation will get confused when visiting
              // upwards from C and D. The operand in the block defining A and B
              // will be attributed to C (or D, depending which of these is
              // visited first).
              CHECK(IsPhi(pred_val.second->succ_vreg));
              CHECK(IsPhi(succ_vreg));
              const PhiData* current_phi = GetPhi(succ_vreg);
              const PhiData* assigned_phi = GetPhi(pred_val.second->succ_vreg);
              // The two phis must be structurally identical.
              CHECK_EQ(current_phi->operands.size(),
                       assigned_phi->operands.size());
              CHECK_EQ(current_phi->definition_rpo,
                       assigned_phi->definition_rpo);
              for (size_t i = 0; i < current_phi->operands.size(); ++i) {
                CHECK_EQ(current_phi->operands[i], assigned_phi->operands[i]);
              }
            }
          } else {
            // New information for the predecessor: requeue it.
            pred_val.second->succ_vreg = succ_vreg;
            block_ids.insert(pred_rpo.ToSize());
          }
        }
      }
    }
    // Clear uses and back links for second pass.
    for (OperandMap* operand_map : incoming_maps_) {
      for (auto& succ_val : operand_map->map()) {
        succ_val.second->incoming = nullptr;
        succ_val.second->use_vreg = kInvalidVreg;
      }
    }
  }

 private:
  // Seeds block |block_index|'s outgoing map with fresh MapValues whose
  // |incoming| pointers chain to the first predecessor's values, and copies
  // the result into the incoming map for the second pass.
  OperandMap* InitializeFromFirstPredecessor(size_t block_index) {
    OperandMap* to_init = outgoing_maps_[block_index];
    CHECK(to_init->map().empty());
    const InstructionBlock* block =
        sequence()->instruction_blocks()[block_index];
    if (block->predecessors().empty()) return to_init;
    size_t predecessor_index = block->predecessors()[0].ToSize();
    // Ensure not a backedge.
    CHECK(predecessor_index < block->rpo_number().ToSize());
    OperandMap* incoming = outgoing_maps_[predecessor_index];
    // Copy map and replace values.
    to_init->map() = incoming->map();
    for (auto& it : to_init->map()) {
      OperandMap::MapValue* incoming = it.second;
      it.second = new (zone()) OperandMap::MapValue();
      it.second->incoming = incoming;
    }
    // Copy to incoming map for second pass.
    incoming_maps_[block_index]->map() = to_init->map();
    return to_init;
  }

  OperandMap* InitializeFromIntersection(size_t block_index) {
    return incoming_maps_[block_index];
  }

  void InitializeOperandMaps() {
    size_t block_count = sequence()->instruction_blocks().size();
    incoming_maps_.reserve(block_count);
    outgoing_maps_.reserve(block_count);
    for (size_t i = 0; i < block_count; ++i) {
      incoming_maps_.push_back(new (zone()) OperandMap(zone()));
      outgoing_maps_.push_back(new (zone()) OperandMap(zone()));
    }
  }

  // Builds PhiData for every phi, flattening chains where a phi's first
  // input is itself a phi so first_pred_vreg is always a non-phi vreg.
  void InitializePhis() {
    const size_t block_count = sequence()->instruction_blocks().size();
    for (size_t block_index = 0; block_index < block_count; ++block_index) {
      const InstructionBlock* block =
          sequence()->instruction_blocks()[block_index];
      for (const PhiInstruction* phi : block->phis()) {
        int first_pred_vreg = phi->operands()[0];
        const PhiData* first_pred_phi = nullptr;
        if (IsPhi(first_pred_vreg)) {
          first_pred_phi = GetPhi(first_pred_vreg);
          first_pred_vreg = first_pred_phi->first_pred_vreg;
        }
        CHECK(!IsPhi(first_pred_vreg));
        PhiData* phi_data = new (zone()) PhiData(
            block->rpo_number(), phi, first_pred_vreg, first_pred_phi, zone());
        auto res =
            phi_map_.insert(std::make_pair(phi->virtual_register(), phi_data));
        CHECK(res.second);
        phi_map_guard_.Add(phi->virtual_register());
      }
    }
  }

  typedef ZoneVector<OperandMap*> OperandMaps;
  typedef ZoneVector<PhiData*> PhiVector;

  Zone* zone() const { return zone_; }
  const InstructionSequence* sequence() const { return sequence_; }

  Zone* const zone_;
  const InstructionSequence* const sequence_;
  BitVector phi_map_guard_;  // bit set for every vreg defined by a phi.
  PhiMap phi_map_;
  OperandMaps incoming_maps_;
  OperandMaps outgoing_maps_;
};
694
695
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400696void RegisterAllocatorVerifier::VerifyGapMoves() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000697 BlockMaps block_maps(zone(), sequence());
698 VerifyGapMoves(&block_maps, true);
699 block_maps.PropagateUsesBackwards();
700 VerifyGapMoves(&block_maps, false);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400701}
702
703
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000704// Compute and verify outgoing values for every block.
705void RegisterAllocatorVerifier::VerifyGapMoves(BlockMaps* block_maps,
706 bool initial_pass) {
707 const size_t block_count = sequence()->instruction_blocks().size();
708 for (size_t block_index = 0; block_index < block_count; ++block_index) {
Ben Murdochda12d292016-06-02 14:46:10 +0100709 OperandMap* current =
710 block_maps->InitializeIncoming(block_index, initial_pass);
711 const InstructionBlock* block =
712 sequence()->instruction_blocks()[block_index];
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400713 for (int instr_index = block->code_start(); instr_index < block->code_end();
714 ++instr_index) {
Ben Murdochda12d292016-06-02 14:46:10 +0100715 const InstructionConstraint& instr_constraint = constraints_[instr_index];
716 const Instruction* instr = instr_constraint.instruction_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000717 current->RunGaps(zone(), instr);
Ben Murdochda12d292016-06-02 14:46:10 +0100718 const OperandConstraint* op_constraints =
719 instr_constraint.operand_constraints_;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400720 size_t count = 0;
721 for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000722 if (op_constraints[count].type_ == kImmediate ||
723 op_constraints[count].type_ == kExplicit) {
724 continue;
725 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400726 int virtual_register = op_constraints[count].virtual_register_;
Ben Murdochda12d292016-06-02 14:46:10 +0100727 const InstructionOperand* op = instr->InputAt(i);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000728 if (!block_maps->IsPhi(virtual_register)) {
729 current->Use(op, virtual_register, initial_pass);
730 } else {
Ben Murdochda12d292016-06-02 14:46:10 +0100731 const PhiData* phi = block_maps->GetPhi(virtual_register);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000732 current->UsePhi(op, phi, initial_pass);
733 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400734 }
735 for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
736 current->Drop(instr->TempAt(i));
737 }
738 if (instr->IsCall()) {
739 current->DropRegisters(config());
740 }
741 for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400742 int virtual_register = op_constraints[count].virtual_register_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000743 OperandMap::MapValue* value =
744 current->Define(zone(), instr->OutputAt(i), virtual_register);
745 if (op_constraints[count].type_ == kRegisterAndSlot) {
746 const AllocatedOperand* reg_op =
747 AllocatedOperand::cast(instr->OutputAt(i));
748 MachineRepresentation rep = reg_op->representation();
749 const AllocatedOperand* stack_op = AllocatedOperand::New(
750 zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
751 op_constraints[i].spilled_slot_);
752 auto insert_result =
753 current->map().insert(std::make_pair(stack_op, value));
754 DCHECK(insert_result.second);
755 USE(insert_result);
756 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400757 }
758 }
759 }
760}
761
762} // namespace compiler
763} // namespace internal
764} // namespace v8