Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 1 | // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 | // Use of this source code is governed by a BSD-style license that can be |
| 3 | // found in the LICENSE file. |
| 4 | |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 5 | #include "src/bit-vector.h" |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 6 | #include "src/compiler/instruction.h" |
| 7 | #include "src/compiler/register-allocator-verifier.h" |
| 8 | |
| 9 | namespace v8 { |
| 10 | namespace internal { |
| 11 | namespace compiler { |
| 12 | |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 13 | namespace { |
| 14 | |
| 15 | size_t OperandCount(const Instruction* instr) { |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 16 | return instr->InputCount() + instr->OutputCount() + instr->TempCount(); |
| 17 | } |
| 18 | |
| 19 | |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 20 | void VerifyEmptyGaps(const Instruction* instr) { |
| 21 | for (int i = Instruction::FIRST_GAP_POSITION; |
| 22 | i <= Instruction::LAST_GAP_POSITION; i++) { |
| 23 | Instruction::GapPosition inner_pos = |
| 24 | static_cast<Instruction::GapPosition>(i); |
| 25 | CHECK(instr->GetParallelMove(inner_pos) == nullptr); |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 26 | } |
| 27 | } |
| 28 | |
| 29 | |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 30 | void VerifyAllocatedGaps(const Instruction* instr) { |
| 31 | for (int i = Instruction::FIRST_GAP_POSITION; |
| 32 | i <= Instruction::LAST_GAP_POSITION; i++) { |
| 33 | Instruction::GapPosition inner_pos = |
| 34 | static_cast<Instruction::GapPosition>(i); |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 35 | const ParallelMove* moves = instr->GetParallelMove(inner_pos); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 36 | if (moves == nullptr) continue; |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 37 | for (const MoveOperands* move : *moves) { |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 38 | if (move->IsRedundant()) continue; |
| 39 | CHECK(move->source().IsAllocated() || move->source().IsConstant()); |
| 40 | CHECK(move->destination().IsAllocated()); |
| 41 | } |
| 42 | } |
| 43 | } |
| 44 | |
| 45 | } // namespace |
| 46 | |
| 47 | |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 48 | void RegisterAllocatorVerifier::VerifyInput( |
| 49 | const OperandConstraint& constraint) { |
| 50 | CHECK_NE(kSameAsFirst, constraint.type_); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 51 | if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) { |
| 52 | CHECK_NE(InstructionOperand::kInvalidVirtualRegister, |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 53 | constraint.virtual_register_); |
| 54 | } |
| 55 | } |
| 56 | |
| 57 | |
| 58 | void RegisterAllocatorVerifier::VerifyTemp( |
| 59 | const OperandConstraint& constraint) { |
| 60 | CHECK_NE(kSameAsFirst, constraint.type_); |
| 61 | CHECK_NE(kImmediate, constraint.type_); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 62 | CHECK_NE(kExplicit, constraint.type_); |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 63 | CHECK_NE(kConstant, constraint.type_); |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 64 | } |
| 65 | |
| 66 | |
| 67 | void RegisterAllocatorVerifier::VerifyOutput( |
| 68 | const OperandConstraint& constraint) { |
| 69 | CHECK_NE(kImmediate, constraint.type_); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 70 | CHECK_NE(kExplicit, constraint.type_); |
| 71 | CHECK_NE(InstructionOperand::kInvalidVirtualRegister, |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 72 | constraint.virtual_register_); |
| 73 | } |
| 74 | |
| 75 | |
// Snapshots the pre-allocation constraints of every instruction in
// |sequence| so VerifyAssignment() can later check the allocated code
// against them. Must run before the allocator touches the sequence: all
// gaps are checked to be empty here.
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone), config_(config), sequence_(sequence), constraints_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    // Zone-allocated; lives as long as the verifier itself.
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    // Constraints are laid out in build order: inputs, temps, outputs.
    // VerifyAssignment() relies on this exact ordering.
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        // Rewrite kSameAsFirst to a copy of the first input's constraint
        // (op_constraints[0] is the first input by construction above).
        CHECK(instr->InputCount() > 0);
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}
| 113 | |
| 114 | |
// Walks the (now allocated) instruction sequence in lockstep with the
// constraints recorded by the constructor and checks each operand against
// its saved constraint.
void RegisterAllocatorVerifier::VerifyAssignment() {
  // One constraint record was pushed per instruction; the counts must agree.
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    // Operands are checked in the same order the constructor built them:
    // inputs, then temps, then outputs.
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}
| 140 | |
| 141 | |
// Translates a single pre-allocation operand |op| into an OperandConstraint
// describing where the allocator is required to place it. type_ and value_
// are always set; virtual_register_ only for operands that carry one.
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    // Constants are identified by their virtual register.
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsExplicit()) {
    constraint->type_ = kExplicit;
  } else if (op->IsImmediate()) {
    // Record the immediate payload, whether stored inline or as an index
    // into the immediates table.
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
                                                        : imm->indexed_value();
    constraint->type_ = kImmediate;
    constraint->value_ = value;
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      // Slot flavor depends on the representation of the virtual register.
      constraint->type_ = sequence()->IsFloat(vreg) ? kDoubleSlot : kSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::ANY:
        case UnallocatedOperand::NONE:
          if (sequence()->IsFloat(vreg)) {
            constraint->type_ = kNoneDouble;
          } else {
            constraint->type_ = kNone;
          }
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            // The value must live in the fixed register AND be spilled to
            // the recorded secondary slot.
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_DOUBLE_REGISTER:
          constraint->type_ = kFixedDoubleRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFloat(vreg)) {
            constraint->type_ = kDoubleRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = sequence()->IsFloat(vreg) ? kDoubleSlot : kSlot;
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          // Resolved to the first input's constraint by the constructor.
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}
| 206 | |
| 207 | |
// Checks a single post-allocation operand |op| against the constraint that
// was recorded for it before allocation. CHECK-fails on any mismatch.
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK(op->IsConstant());
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK(op->IsImmediate());
      // Compare the payload the same way BuildConstraint extracted it.
      const ImmediateOperand* imm = ImmediateOperand::cast(op);
      int value = imm->type() == ImmediateOperand::INLINE
                      ? imm->inline_value()
                      : imm->indexed_value();
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK(op->IsRegister());
      return;
    case kDoubleRegister:
      CHECK(op->IsDoubleRegister());
      return;
    case kExplicit:
      CHECK(op->IsExplicit());
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      // value_ holds the required register code.
      CHECK(op->IsRegister());
      CHECK_EQ(LocationOperand::cast(op)->GetRegister().code(),
               constraint->value_);
      return;
    case kFixedDoubleRegister:
      CHECK(op->IsDoubleRegister());
      CHECK_EQ(LocationOperand::cast(op)->GetDoubleRegister().code(),
               constraint->value_);
      return;
    case kFixedSlot:
      // value_ holds the required stack-slot index.
      CHECK(op->IsStackSlot());
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK(op->IsStackSlot());
      return;
    case kDoubleSlot:
      CHECK(op->IsDoubleStackSlot());
      return;
    case kNone:
      CHECK(op->IsRegister() || op->IsStackSlot());
      return;
    case kNoneDouble:
      CHECK(op->IsDoubleRegister() || op->IsDoubleStackSlot());
      return;
    case kSameAsFirst:
      // Eliminated by the constructor; must never reach checking.
      CHECK(false);
      return;
  }
}
| 266 | |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 267 | namespace { |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 268 | |
// Shorthand for block numbers in reverse post-order.
typedef RpoNumber Rpo;

// Sentinel meaning "no virtual register recorded".
static const int kInvalidVreg = InstructionOperand::kInvalidVirtualRegister;
| 272 | |
| 273 | struct PhiData : public ZoneObject { |
| 274 | PhiData(Rpo definition_rpo, const PhiInstruction* phi, int first_pred_vreg, |
| 275 | const PhiData* first_pred_phi, Zone* zone) |
| 276 | : definition_rpo(definition_rpo), |
| 277 | virtual_register(phi->virtual_register()), |
| 278 | first_pred_vreg(first_pred_vreg), |
| 279 | first_pred_phi(first_pred_phi), |
| 280 | operands(zone) { |
| 281 | operands.reserve(phi->operands().size()); |
| 282 | operands.insert(operands.begin(), phi->operands().begin(), |
| 283 | phi->operands().end()); |
| 284 | } |
| 285 | const Rpo definition_rpo; |
| 286 | const int virtual_register; |
| 287 | const int first_pred_vreg; |
| 288 | const PhiData* first_pred_phi; |
| 289 | IntVector operands; |
| 290 | }; |
| 291 | |
// Maps a phi's virtual register to its PhiData record.
class PhiMap : public ZoneMap<int, PhiData*>, public ZoneObject {
 public:
  explicit PhiMap(Zone* zone) : ZoneMap<int, PhiData*>(zone) {}
};
| 296 | |
// Strict-weak ordering on operands by their canonicalized form; used as the
// comparator for OperandMap::Map below.
struct OperandLess {
  bool operator()(const InstructionOperand* a,
                  const InstructionOperand* b) const {
    return a->CompareCanonicalized(*b);
  }
};
| 303 | |
// Tracks, per block, which value (identified by virtual register) currently
// lives in each operand location, and checks uses against definitions while
// simulating the allocated code's parallel moves.
class OperandMap : public ZoneObject {
 public:
  // The dataflow facts known about one operand location within a block.
  struct MapValue : public ZoneObject {
    MapValue()
        : incoming(nullptr),
          define_vreg(kInvalidVreg),
          use_vreg(kInvalidVreg),
          succ_vreg(kInvalidVreg) {}
    MapValue* incoming;  // value from first predecessor block.
    int define_vreg;     // valid if this value was defined in this block.
    int use_vreg;        // valid if this value was used in this block.
    int succ_vreg;       // valid if propagated back from successor block.
  };

  // Ordered map from operand location to its MapValue, keyed by the
  // operand's canonicalized ordering (OperandLess).
  class Map
      : public ZoneMap<const InstructionOperand*, MapValue*, OperandLess> {
   public:
    explicit Map(Zone* zone)
        : ZoneMap<const InstructionOperand*, MapValue*, OperandLess>(zone) {}

    // Remove all entries with keys not in other.
    // Relies on both maps being sorted by the same comparator, so a single
    // merge-style pass suffices.
    // NOTE(review): entries of *this with keys greater than other's largest
    // key survive the loop un-erased — confirm that is intentional.
    void Intersect(const Map& other) {
      if (this->empty()) return;
      auto it = this->begin();
      OperandLess less;
      for (const std::pair<const InstructionOperand*, MapValue*>& o : other) {
        while (less(it->first, o.first)) {
          // Key only present in *this: drop it. Post-increment keeps the
          // iterator valid across erase.
          this->erase(it++);
          if (it == this->end()) return;
        }
        if (it->first->EqualsCanonicalized(*o.first)) {
          ++it;
          if (it == this->end()) return;
        } else {
          CHECK(less(o.first, it->first));
        }
      }
    }
  };

  explicit OperandMap(Zone* zone) : map_(zone) {}

  Map& map() { return map_; }

  // Applies one ParallelMove to the map: destinations take over the values
  // of their (canonicalized) sources, simultaneously.
  void RunParallelMoves(Zone* zone, const ParallelMove* moves) {
    // Compute outgoing mappings.
    Map to_insert(zone);
    for (const MoveOperands* move : *moves) {
      if (move->IsEliminated()) continue;
      auto cur = map().find(&move->source());
      CHECK(cur != map().end());
      auto res =
          to_insert.insert(std::make_pair(&move->destination(), cur->second));
      // Ensure injectivity of moves.
      CHECK(res.second);
    }
    // Drop current mappings.
    for (const MoveOperands* move : *moves) {
      if (move->IsEliminated()) continue;
      auto cur = map().find(&move->destination());
      if (cur != map().end()) map().erase(cur);
    }
    // Insert new values.
    map().insert(to_insert.begin(), to_insert.end());
  }

  // Applies every gap move of |instr| in gap-position order.
  void RunGaps(Zone* zone, const Instruction* instr) {
    for (int i = Instruction::FIRST_GAP_POSITION;
         i <= Instruction::LAST_GAP_POSITION; i++) {
      Instruction::GapPosition inner_pos =
          static_cast<Instruction::GapPosition>(i);
      const ParallelMove* move = instr->GetParallelMove(inner_pos);
      if (move == nullptr) continue;
      RunParallelMoves(zone, move);
    }
  }

  // Forgets whatever value is currently recorded for |op|, if any.
  void Drop(const InstructionOperand* op) {
    auto it = map().find(op);
    if (it != map().end()) map().erase(it);
  }

  // Forgets all register-resident values (e.g. across a call clobber).
  void DropRegisters(const RegisterConfiguration* config) {
    // TODO(dcarney): sort map by kind and drop range.
    for (auto it = map().begin(); it != map().end();) {
      const InstructionOperand* op = it->first;
      if (op->IsRegister() || op->IsDoubleRegister()) {
        map().erase(it++);
      } else {
        ++it;
      }
    }
  }

  // Records that |op| now holds |virtual_register|, replacing any previous
  // value recorded for that location. Returns the new MapValue.
  MapValue* Define(Zone* zone, const InstructionOperand* op,
                   int virtual_register) {
    MapValue* value = new (zone) MapValue();
    value->define_vreg = virtual_register;
    auto res = map().insert(std::make_pair(op, value));
    if (!res.second) res.first->second = value;
    return value;
  }

  // Checks a non-phi use: |op| must hold |use_vreg|. On the initial pass the
  // chain of incoming values is searched directly; on later passes the
  // back-propagated succ_vreg is consulted instead.
  void Use(const InstructionOperand* op, int use_vreg, bool initial_pass) {
    auto it = map().find(op);
    CHECK(it != map().end());
    MapValue* v = it->second;
    if (v->define_vreg != kInvalidVreg) {
      CHECK_EQ(v->define_vreg, use_vreg);
    }
    // Already used this vreg in this block.
    if (v->use_vreg != kInvalidVreg) {
      CHECK_EQ(v->use_vreg, use_vreg);
      return;
    }
    if (!initial_pass) {
      // A value may be defined and used in this block or the use must have
      // propagated up.
      if (v->succ_vreg != kInvalidVreg) {
        CHECK_EQ(v->succ_vreg, use_vreg);
      } else {
        CHECK_EQ(v->define_vreg, use_vreg);
      }
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Go up block list and ensure the correct definition is reached.
    for (; v != nullptr; v = v->incoming) {
      // Value unused in block.
      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
        continue;
      }
      // Found correct definition or use.
      CHECK(v->define_vreg == use_vreg || v->use_vreg == use_vreg);
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Use of a non-phi value without definition.
    CHECK(false);
  }

  // Checks a use of a phi's value: like Use(), but the value flowing in from
  // the first predecessor may be the phi's first operand (or a phi further
  // up the chain) rather than the phi's own vreg.
  void UsePhi(const InstructionOperand* op, const PhiData* phi,
              bool initial_pass) {
    auto it = map().find(op);
    CHECK(it != map().end());
    MapValue* v = it->second;
    int use_vreg = phi->virtual_register;
    // Phis are not defined.
    CHECK_EQ(kInvalidVreg, v->define_vreg);
    // Already used this vreg in this block.
    if (v->use_vreg != kInvalidVreg) {
      CHECK_EQ(v->use_vreg, use_vreg);
      return;
    }
    if (!initial_pass) {
      // A used phi must have propagated its use to a predecessor.
      CHECK_EQ(v->succ_vreg, use_vreg);
      // Mark the use.
      v->use_vreg = use_vreg;
      return;
    }
    // Go up the block list starting at the first predecessor and ensure this
    // phi has a correct use or definition.
    for (v = v->incoming; v != nullptr; v = v->incoming) {
      // Value unused in block.
      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
        continue;
      }
      // Found correct definition or use.
      if (v->define_vreg != kInvalidVreg) {
        CHECK(v->define_vreg == phi->first_pred_vreg);
      } else if (v->use_vreg != phi->first_pred_vreg) {
        // Walk the phi chain, hunting for a matching phi use.
        const PhiData* p = phi;
        for (; p != nullptr; p = p->first_pred_phi) {
          if (p->virtual_register == v->use_vreg) break;
        }
        CHECK(p);
      }
      // Mark the use.
      it->second->use_vreg = use_vreg;
      return;
    }
    // Use of a phi value without definition.
    UNREACHABLE();
  }

 private:
  Map map_;
  DISALLOW_COPY_AND_ASSIGN(OperandMap);
};
| 497 | |
| 498 | } // namespace |
| 499 | |
| 500 | |
| 501 | class RegisterAllocatorVerifier::BlockMaps { |
| 502 | public: |
| 503 | BlockMaps(Zone* zone, const InstructionSequence* sequence) |
| 504 | : zone_(zone), |
| 505 | sequence_(sequence), |
| 506 | phi_map_guard_(sequence->VirtualRegisterCount(), zone), |
| 507 | phi_map_(zone), |
| 508 | incoming_maps_(zone), |
| 509 | outgoing_maps_(zone) { |
| 510 | InitializePhis(); |
| 511 | InitializeOperandMaps(); |
| 512 | } |
| 513 | |
| 514 | bool IsPhi(int virtual_register) { |
| 515 | return phi_map_guard_.Contains(virtual_register); |
| 516 | } |
| 517 | |
| 518 | const PhiData* GetPhi(int virtual_register) { |
| 519 | auto it = phi_map_.find(virtual_register); |
| 520 | CHECK(it != phi_map_.end()); |
| 521 | return it->second; |
| 522 | } |
| 523 | |
| 524 | OperandMap* InitializeIncoming(size_t block_index, bool initial_pass) { |
| 525 | return initial_pass ? InitializeFromFirstPredecessor(block_index) |
| 526 | : InitializeFromIntersection(block_index); |
| 527 | } |
| 528 | |
| 529 | void PropagateUsesBackwards() { |
| 530 | typedef std::set<size_t, std::greater<size_t>, zone_allocator<size_t>> |
| 531 | BlockIds; |
| 532 | BlockIds block_ids((BlockIds::key_compare()), |
| 533 | zone_allocator<size_t>(zone())); |
| 534 | // First ensure that incoming contains only keys in all predecessors. |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 535 | for (const InstructionBlock* block : sequence()->instruction_blocks()) { |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 536 | size_t index = block->rpo_number().ToSize(); |
| 537 | block_ids.insert(index); |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 538 | OperandMap::Map& succ_map = incoming_maps_[index]->map(); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 539 | for (size_t i = 0; i < block->PredecessorCount(); ++i) { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 540 | RpoNumber pred_rpo = block->predecessors()[i]; |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 541 | succ_map.Intersect(outgoing_maps_[pred_rpo.ToSize()]->map()); |
| 542 | } |
| 543 | } |
| 544 | // Back propagation fixpoint. |
| 545 | while (!block_ids.empty()) { |
| 546 | // Pop highest block_id. |
| 547 | auto block_id_it = block_ids.begin(); |
| 548 | const size_t succ_index = *block_id_it; |
| 549 | block_ids.erase(block_id_it); |
| 550 | // Propagate uses back to their definition blocks using succ_vreg. |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 551 | const InstructionBlock* block = |
| 552 | sequence()->instruction_blocks()[succ_index]; |
| 553 | OperandMap::Map& succ_map = incoming_maps_[succ_index]->map(); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 554 | for (size_t i = 0; i < block->PredecessorCount(); ++i) { |
| 555 | for (auto& succ_val : succ_map) { |
| 556 | // An incoming map contains no defines. |
| 557 | CHECK_EQ(kInvalidVreg, succ_val.second->define_vreg); |
| 558 | // Compute succ_vreg. |
| 559 | int succ_vreg = succ_val.second->succ_vreg; |
| 560 | if (succ_vreg == kInvalidVreg) { |
| 561 | succ_vreg = succ_val.second->use_vreg; |
| 562 | // Initialize succ_vreg in back propagation chain. |
| 563 | succ_val.second->succ_vreg = succ_vreg; |
| 564 | } |
| 565 | if (succ_vreg == kInvalidVreg) continue; |
| 566 | // May need to transition phi. |
| 567 | if (IsPhi(succ_vreg)) { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 568 | const PhiData* phi = GetPhi(succ_vreg); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 569 | if (phi->definition_rpo.ToSize() == succ_index) { |
| 570 | // phi definition block, transition to pred value. |
| 571 | succ_vreg = phi->operands[i]; |
| 572 | } |
| 573 | } |
| 574 | // Push succ_vreg up to all predecessors. |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 575 | RpoNumber pred_rpo = block->predecessors()[i]; |
| 576 | OperandMap::Map& pred_map = outgoing_maps_[pred_rpo.ToSize()]->map(); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 577 | auto& pred_val = *pred_map.find(succ_val.first); |
| 578 | if (pred_val.second->use_vreg != kInvalidVreg) { |
| 579 | CHECK_EQ(succ_vreg, pred_val.second->use_vreg); |
| 580 | } |
| 581 | if (pred_val.second->define_vreg != kInvalidVreg) { |
| 582 | CHECK_EQ(succ_vreg, pred_val.second->define_vreg); |
| 583 | } |
| 584 | if (pred_val.second->succ_vreg != kInvalidVreg) { |
Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 585 | if (succ_vreg != pred_val.second->succ_vreg) { |
| 586 | // When a block introduces 2 identical phis A and B, and both are |
| 587 | // operands to other phis C and D, and we optimized the moves |
| 588 | // defining A or B such that they now appear in the block defining |
| 589 | // A and B, the back propagation will get confused when visiting |
| 590 | // upwards from C and D. The operand in the block defining A and B |
| 591 | // will be attributed to C (or D, depending which of these is |
| 592 | // visited first). |
| 593 | CHECK(IsPhi(pred_val.second->succ_vreg)); |
| 594 | CHECK(IsPhi(succ_vreg)); |
| 595 | const PhiData* current_phi = GetPhi(succ_vreg); |
| 596 | const PhiData* assigned_phi = GetPhi(pred_val.second->succ_vreg); |
| 597 | CHECK_EQ(current_phi->operands.size(), |
| 598 | assigned_phi->operands.size()); |
| 599 | CHECK_EQ(current_phi->definition_rpo, |
| 600 | assigned_phi->definition_rpo); |
| 601 | for (size_t i = 0; i < current_phi->operands.size(); ++i) { |
| 602 | CHECK_EQ(current_phi->operands[i], assigned_phi->operands[i]); |
| 603 | } |
| 604 | } |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 605 | } else { |
| 606 | pred_val.second->succ_vreg = succ_vreg; |
| 607 | block_ids.insert(pred_rpo.ToSize()); |
| 608 | } |
| 609 | } |
| 610 | } |
| 611 | } |
| 612 | // Clear uses and back links for second pass. |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 613 | for (OperandMap* operand_map : incoming_maps_) { |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 614 | for (auto& succ_val : operand_map->map()) { |
| 615 | succ_val.second->incoming = nullptr; |
| 616 | succ_val.second->use_vreg = kInvalidVreg; |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 617 | } |
| 618 | } |
| 619 | } |
| 620 | |
| 621 | private: |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 622 | OperandMap* InitializeFromFirstPredecessor(size_t block_index) { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 623 | OperandMap* to_init = outgoing_maps_[block_index]; |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 624 | CHECK(to_init->map().empty()); |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 625 | const InstructionBlock* block = |
| 626 | sequence()->instruction_blocks()[block_index]; |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 627 | if (block->predecessors().empty()) return to_init; |
| 628 | size_t predecessor_index = block->predecessors()[0].ToSize(); |
| 629 | // Ensure not a backedge. |
| 630 | CHECK(predecessor_index < block->rpo_number().ToSize()); |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 631 | OperandMap* incoming = outgoing_maps_[predecessor_index]; |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 632 | // Copy map and replace values. |
| 633 | to_init->map() = incoming->map(); |
| 634 | for (auto& it : to_init->map()) { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 635 | OperandMap::MapValue* incoming = it.second; |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 636 | it.second = new (zone()) OperandMap::MapValue(); |
| 637 | it.second->incoming = incoming; |
| 638 | } |
| 639 | // Copy to incoming map for second pass. |
| 640 | incoming_maps_[block_index]->map() = to_init->map(); |
| 641 | return to_init; |
| 642 | } |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 643 | |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 644 | OperandMap* InitializeFromIntersection(size_t block_index) { |
| 645 | return incoming_maps_[block_index]; |
| 646 | } |
| 647 | |
| 648 | void InitializeOperandMaps() { |
| 649 | size_t block_count = sequence()->instruction_blocks().size(); |
| 650 | incoming_maps_.reserve(block_count); |
| 651 | outgoing_maps_.reserve(block_count); |
| 652 | for (size_t i = 0; i < block_count; ++i) { |
| 653 | incoming_maps_.push_back(new (zone()) OperandMap(zone())); |
| 654 | outgoing_maps_.push_back(new (zone()) OperandMap(zone())); |
| 655 | } |
| 656 | } |
| 657 | |
| 658 | void InitializePhis() { |
| 659 | const size_t block_count = sequence()->instruction_blocks().size(); |
| 660 | for (size_t block_index = 0; block_index < block_count; ++block_index) { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 661 | const InstructionBlock* block = |
| 662 | sequence()->instruction_blocks()[block_index]; |
| 663 | for (const PhiInstruction* phi : block->phis()) { |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 664 | int first_pred_vreg = phi->operands()[0]; |
| 665 | const PhiData* first_pred_phi = nullptr; |
| 666 | if (IsPhi(first_pred_vreg)) { |
| 667 | first_pred_phi = GetPhi(first_pred_vreg); |
| 668 | first_pred_vreg = first_pred_phi->first_pred_vreg; |
| 669 | } |
| 670 | CHECK(!IsPhi(first_pred_vreg)); |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 671 | PhiData* phi_data = new (zone()) PhiData( |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 672 | block->rpo_number(), phi, first_pred_vreg, first_pred_phi, zone()); |
| 673 | auto res = |
| 674 | phi_map_.insert(std::make_pair(phi->virtual_register(), phi_data)); |
| 675 | CHECK(res.second); |
| 676 | phi_map_guard_.Add(phi->virtual_register()); |
| 677 | } |
| 678 | } |
| 679 | } |
| 680 | |
| 681 | typedef ZoneVector<OperandMap*> OperandMaps; |
| 682 | typedef ZoneVector<PhiData*> PhiVector; |
| 683 | |
| 684 | Zone* zone() const { return zone_; } |
| 685 | const InstructionSequence* sequence() const { return sequence_; } |
| 686 | |
| 687 | Zone* const zone_; |
| 688 | const InstructionSequence* const sequence_; |
| 689 | BitVector phi_map_guard_; |
| 690 | PhiMap phi_map_; |
| 691 | OperandMaps incoming_maps_; |
| 692 | OperandMaps outgoing_maps_; |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 693 | }; |
| 694 | |
| 695 | |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 696 | void RegisterAllocatorVerifier::VerifyGapMoves() { |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 697 | BlockMaps block_maps(zone(), sequence()); |
| 698 | VerifyGapMoves(&block_maps, true); |
| 699 | block_maps.PropagateUsesBackwards(); |
| 700 | VerifyGapMoves(&block_maps, false); |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 701 | } |
| 702 | |
| 703 | |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 704 | // Compute and verify outgoing values for every block. |
| 705 | void RegisterAllocatorVerifier::VerifyGapMoves(BlockMaps* block_maps, |
| 706 | bool initial_pass) { |
| 707 | const size_t block_count = sequence()->instruction_blocks().size(); |
| 708 | for (size_t block_index = 0; block_index < block_count; ++block_index) { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 709 | OperandMap* current = |
| 710 | block_maps->InitializeIncoming(block_index, initial_pass); |
| 711 | const InstructionBlock* block = |
| 712 | sequence()->instruction_blocks()[block_index]; |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 713 | for (int instr_index = block->code_start(); instr_index < block->code_end(); |
| 714 | ++instr_index) { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 715 | const InstructionConstraint& instr_constraint = constraints_[instr_index]; |
| 716 | const Instruction* instr = instr_constraint.instruction_; |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 717 | current->RunGaps(zone(), instr); |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 718 | const OperandConstraint* op_constraints = |
| 719 | instr_constraint.operand_constraints_; |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 720 | size_t count = 0; |
| 721 | for (size_t i = 0; i < instr->InputCount(); ++i, ++count) { |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 722 | if (op_constraints[count].type_ == kImmediate || |
| 723 | op_constraints[count].type_ == kExplicit) { |
| 724 | continue; |
| 725 | } |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 726 | int virtual_register = op_constraints[count].virtual_register_; |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 727 | const InstructionOperand* op = instr->InputAt(i); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 728 | if (!block_maps->IsPhi(virtual_register)) { |
| 729 | current->Use(op, virtual_register, initial_pass); |
| 730 | } else { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 731 | const PhiData* phi = block_maps->GetPhi(virtual_register); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 732 | current->UsePhi(op, phi, initial_pass); |
| 733 | } |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 734 | } |
| 735 | for (size_t i = 0; i < instr->TempCount(); ++i, ++count) { |
| 736 | current->Drop(instr->TempAt(i)); |
| 737 | } |
| 738 | if (instr->IsCall()) { |
| 739 | current->DropRegisters(config()); |
| 740 | } |
| 741 | for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) { |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 742 | int virtual_register = op_constraints[count].virtual_register_; |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 743 | OperandMap::MapValue* value = |
| 744 | current->Define(zone(), instr->OutputAt(i), virtual_register); |
| 745 | if (op_constraints[count].type_ == kRegisterAndSlot) { |
| 746 | const AllocatedOperand* reg_op = |
| 747 | AllocatedOperand::cast(instr->OutputAt(i)); |
| 748 | MachineRepresentation rep = reg_op->representation(); |
| 749 | const AllocatedOperand* stack_op = AllocatedOperand::New( |
| 750 | zone(), LocationOperand::LocationKind::STACK_SLOT, rep, |
| 751 | op_constraints[i].spilled_slot_); |
| 752 | auto insert_result = |
| 753 | current->map().insert(std::make_pair(stack_op, value)); |
| 754 | DCHECK(insert_result.second); |
| 755 | USE(insert_result); |
| 756 | } |
Emily Bernier | d0a1eb7 | 2015-03-24 16:35:39 -0400 | [diff] [blame] | 757 | } |
| 758 | } |
| 759 | } |
| 760 | } |
| 761 | |
| 762 | } // namespace compiler |
| 763 | } // namespace internal |
| 764 | } // namespace v8 |