Ben Murdoch | 692be65 | 2012-01-10 18:47:50 +0000 | [diff] [blame] | 1 | // Copyright 2012 the V8 project authors. All rights reserved. |
Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame] | 2 | // Use of this source code is governed by a BSD-style license that can be |
| 3 | // found in the LICENSE file. |
Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 4 | |
Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame] | 5 | #include "src/v8.h" |
Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 6 | |
Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame] | 7 | #include "src/arm/lithium-codegen-arm.h" |
| 8 | #include "src/arm/lithium-gap-resolver-arm.h" |
Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 9 | |
| 10 | namespace v8 { |
| 11 | namespace internal { |
| 12 | |
Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame] | 13 | // We use the root register to spill a value while breaking a cycle in parallel |
| 14 | // moves. We don't need access to roots while resolving the move list and using |
| 15 | // the root register has two advantages: |
| 16 | // - It is not in crankshaft allocatable registers list, so it can't interfere |
| 17 | // with any of the moves we are resolving. |
| 18 | // - We don't need to push it on the stack, as we can reload it with its value |
| 19 | // once we have resolved a cycle. |
| 20 | #define kSavedValueRegister kRootRegister |
| 21 | |
Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 22 | |
| 23 | LGapResolver::LGapResolver(LCodeGen* owner) |
Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame] | 24 | : cgen_(owner), moves_(32, owner->zone()), root_index_(0), in_cycle_(false), |
| 25 | saved_destination_(NULL), need_to_restore_root_(false) { } |
| 26 | |
| 27 | |
| 28 | #define __ ACCESS_MASM(cgen_->masm()) |
Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 29 | |
| 30 | |
// Emits code for all moves of |parallel_move|, resolving dependencies and
// breaking cycles as needed.  Constant-source moves are deferred to the end
// (they cannot block anything), and the root register is re-initialized if
// it was clobbered as a spill register along the way.
void LGapResolver::Resolve(LParallelMove* parallel_move) {
  DCHECK(moves_.is_empty());
  // Build up a worklist of moves.
  BuildInitialMoveList(parallel_move);

  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands move = moves_[i];
    // Skip constants to perform them last.  They don't block other moves
    // and skipping such moves with register destinations keeps those
    // registers free for the whole algorithm.
    if (!move.IsEliminated() && !move.source()->IsConstantOperand()) {
      root_index_ = i;  // Any cycle is found by reaching this move again.
      PerformMove(i);
      if (in_cycle_) {
        RestoreValue();
      }
    }
  }

  // Perform the moves with constant sources.
  for (int i = 0; i < moves_.length(); ++i) {
    if (!moves_[i].IsEliminated()) {
      DCHECK(moves_[i].source()->IsConstantOperand());
      EmitMove(i);
    }
  }

  // kSavedValueRegister aliases kRootRegister (see the #define above); if it
  // was used as a spill register, reload the root value before returning.
  if (need_to_restore_root_) {
    DCHECK(kSavedValueRegister.is(kRootRegister));
    __ InitializeRootRegister();
    need_to_restore_root_ = false;
  }

  // Reset the worklist for the next parallel move.
  moves_.Rewind(0);
}
| 66 | |
| 67 | |
| 68 | void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) { |
| 69 | // Perform a linear sweep of the moves to add them to the initial list of |
| 70 | // moves to perform, ignoring any move that is redundant (the source is |
| 71 | // the same as the destination, the destination is ignored and |
| 72 | // unallocated, or the move was already eliminated). |
| 73 | const ZoneList<LMoveOperands>* moves = parallel_move->move_operands(); |
| 74 | for (int i = 0; i < moves->length(); ++i) { |
| 75 | LMoveOperands move = moves->at(i); |
Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame] | 76 | if (!move.IsRedundant()) moves_.Add(move, cgen_->zone()); |
Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 77 | } |
| 78 | Verify(); |
| 79 | } |
| 80 | |
| 81 | |
// Recursively performs the move at |index| and every move that blocks it,
// emitting code for each and removing them from the move graph.  A move
// whose destination has been cleared is "pending": visited but not yet
// completed; re-encountering the pending root move signals a cycle.
void LGapResolver::PerformMove(int index) {
  // Each call to this function performs a move and deletes it from the move
  // graph.  We first recursively perform any move blocking this one.  We
  // mark a move as "pending" on entry to PerformMove in order to detect
  // cycles in the move graph.

  // We can only find a cycle, when doing a depth-first traversal of moves,
  // by encountering the starting move again.  So by spilling the source of
  // the starting move, we break the cycle.  All moves are then unblocked,
  // and the starting move is completed by writing the spilled value to
  // its destination.  All other moves from the spilled source have been
  // completed prior to breaking the cycle.
  // An additional complication is that moves to MemOperands with large
  // offsets (more than 1K or 4K) require us to spill this spilled value to
  // the stack, to free up the register.
  DCHECK(!moves_[index].IsPending());
  DCHECK(!moves_[index].IsRedundant());

  // Clear this move's destination to indicate a pending move.  The actual
  // destination is saved in a stack allocated local.  Multiple moves can
  // be pending because this function is recursive.
  DCHECK(moves_[index].source() != NULL);  // Or else it will look eliminated.
  LOperand* destination = moves_[index].destination();
  moves_[index].set_destination(NULL);

  // Perform a depth-first traversal of the move graph to resolve
  // dependencies.  Any unperformed, unpending move with a source the same
  // as this one's destination blocks this one so recursively perform all
  // such moves.
  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands other_move = moves_[i];
    if (other_move.Blocks(destination) && !other_move.IsPending()) {
      PerformMove(i);
      // If there is a blocking, pending move it must be moves_[root_index_]
      // and all other moves with the same source as moves_[root_index_] are
      // successfully executed (because they are cycle-free) by this loop.
    }
  }

  // We are about to resolve this move and don't need it marked as
  // pending, so restore its destination.
  moves_[index].set_destination(destination);

  // The move may be blocked on a pending move, which must be the starting
  // move.  In this case, we have a cycle, and we save the source of this
  // move to a scratch register to break it.
  LMoveOperands other_move = moves_[root_index_];
  if (other_move.Blocks(destination)) {
    DCHECK(other_move.IsPending());
    BreakCycle(index);
    return;
  }

  // This move is no longer blocked.
  EmitMove(index);
}
| 138 | |
| 139 | |
| 140 | void LGapResolver::Verify() { |
Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame] | 141 | #ifdef ENABLE_SLOW_DCHECKS |
Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 142 | // No operand should be the destination for more than one move. |
| 143 | for (int i = 0; i < moves_.length(); ++i) { |
| 144 | LOperand* destination = moves_[i].destination(); |
| 145 | for (int j = i + 1; j < moves_.length(); ++j) { |
Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame] | 146 | SLOW_DCHECK(!destination->Equals(moves_[j].destination())); |
Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 147 | } |
| 148 | } |
| 149 | #endif |
| 150 | } |
| 151 | |
Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 152 | |
// Breaks the cycle that the move at |index| closes back to the root move:
// spills that move's source into a scratch register, remembers its
// destination, and eliminates the move so the rest of the cycle unblocks.
// RestoreValue() later writes the spilled value to the saved destination.
void LGapResolver::BreakCycle(int index) {
  // We save in a register the source of that move and we remember its
  // destination. Then we mark this move as resolved so the cycle is
  // broken and we can perform the other moves.
  DCHECK(moves_[index].destination()->Equals(moves_[root_index_].source()));
  DCHECK(!in_cycle_);
  in_cycle_ = true;
  LOperand* source = moves_[index].source();
  saved_destination_ = moves_[index].destination();
  if (source->IsRegister()) {
    // Core values are spilled into kSavedValueRegister (the root register),
    // which must be re-initialized once all moves are done.
    need_to_restore_root_ = true;
    __ mov(kSavedValueRegister, cgen_->ToRegister(source));
  } else if (source->IsStackSlot()) {
    need_to_restore_root_ = true;
    __ ldr(kSavedValueRegister, cgen_->ToMemOperand(source));
  } else if (source->IsDoubleRegister()) {
    // Double values are spilled into the VFP scratch register instead; the
    // root register is left untouched in that case.
    __ vmov(kScratchDoubleReg, cgen_->ToDoubleRegister(source));
  } else if (source->IsDoubleStackSlot()) {
    __ vldr(kScratchDoubleReg, cgen_->ToMemOperand(source));
  } else {
    UNREACHABLE();
  }
  // This move will be done by restoring the saved value to the destination.
  moves_[index].Eliminate();
}
| 178 | |
| 179 | |
// Completes the cycle-breaking started in BreakCycle(): writes the value
// spilled there (in kSavedValueRegister for core values, kScratchDoubleReg
// for doubles) to the destination remembered in saved_destination_.
void LGapResolver::RestoreValue() {
  DCHECK(in_cycle_);
  DCHECK(saved_destination_ != NULL);

  if (saved_destination_->IsRegister()) {
    __ mov(cgen_->ToRegister(saved_destination_), kSavedValueRegister);
  } else if (saved_destination_->IsStackSlot()) {
    __ str(kSavedValueRegister, cgen_->ToMemOperand(saved_destination_));
  } else if (saved_destination_->IsDoubleRegister()) {
    __ vmov(cgen_->ToDoubleRegister(saved_destination_), kScratchDoubleReg);
  } else if (saved_destination_->IsDoubleStackSlot()) {
    __ vstr(kScratchDoubleReg, cgen_->ToMemOperand(saved_destination_));
  } else {
    UNREACHABLE();
  }

  // The cycle is fully resolved; clear the bookkeeping.
  in_cycle_ = false;
  saved_destination_ = NULL;
}
| 199 | |
| 200 | |
// Emits code for the (unblocked) move at |index| and eliminates it from the
// worklist.  Dispatches on the source/destination operand kinds; not every
// combination can occur.
void LGapResolver::EmitMove(int index) {
  LOperand* source = moves_[index].source();
  LOperand* destination = moves_[index].destination();

  // Dispatch on the source and destination operand kinds.  Not all
  // combinations are possible.

  if (source->IsRegister()) {
    Register source_register = cgen_->ToRegister(source);
    if (destination->IsRegister()) {
      __ mov(cgen_->ToRegister(destination), source_register);
    } else {
      DCHECK(destination->IsStackSlot());
      __ str(source_register, cgen_->ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    MemOperand source_operand = cgen_->ToMemOperand(source);
    if (destination->IsRegister()) {
      __ ldr(cgen_->ToRegister(destination), source_operand);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand destination_operand = cgen_->ToMemOperand(destination);
      if (!destination_operand.OffsetIsUint12Encodable()) {
        // The destination offset does not fit the ldr/str immediate field,
        // so materializing it needs ip: ip is overwritten while saving the
        // value to the destination.  Therefore we can't use ip.  It is OK if
        // the read from the source destroys ip, since that happens before
        // the value is read.  Use a VFP single-precision scratch instead.
        __ vldr(kScratchDoubleReg.low(), source_operand);
        __ vstr(kScratchDoubleReg.low(), destination_operand);
      } else {
        __ ldr(ip, source_operand);
        __ str(ip, destination_operand);
      }
    }

  } else if (source->IsConstantOperand()) {
    LConstantOperand* constant_source = LConstantOperand::cast(source);
    if (destination->IsRegister()) {
      Register dst = cgen_->ToRegister(destination);
      // Pick the representation (Smi vs. Integer32) used to encode an
      // integer constant.
      Representation r = cgen_->IsSmi(constant_source)
          ? Representation::Smi() : Representation::Integer32();
      if (cgen_->IsInteger32(constant_source)) {
        __ mov(dst, Operand(cgen_->ToRepresentation(constant_source, r)));
      } else {
        __ Move(dst, cgen_->ToHandle(constant_source));
      }
    } else if (destination->IsDoubleRegister()) {
      DwVfpRegister result = cgen_->ToDoubleRegister(destination);
      double v = cgen_->ToDouble(constant_source);
      __ Vmov(result, v, ip);
    } else {
      DCHECK(destination->IsStackSlot());
      DCHECK(!in_cycle_);  // Constant moves happen after all cycles are gone.
      // kSavedValueRegister doubles as a temporary here, so the root
      // register must be re-initialized afterwards.
      need_to_restore_root_ = true;
      Representation r = cgen_->IsSmi(constant_source)
          ? Representation::Smi() : Representation::Integer32();
      if (cgen_->IsInteger32(constant_source)) {
        __ mov(kSavedValueRegister,
               Operand(cgen_->ToRepresentation(constant_source, r)));
      } else {
        __ Move(kSavedValueRegister, cgen_->ToHandle(constant_source));
      }
      __ str(kSavedValueRegister, cgen_->ToMemOperand(destination));
    }

  } else if (source->IsDoubleRegister()) {
    DwVfpRegister source_register = cgen_->ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      __ vmov(cgen_->ToDoubleRegister(destination), source_register);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ vstr(source_register, cgen_->ToMemOperand(destination));
    }

  } else if (source->IsDoubleStackSlot()) {
    MemOperand source_operand = cgen_->ToMemOperand(source);
    if (destination->IsDoubleRegister()) {
      __ vldr(cgen_->ToDoubleRegister(destination), source_operand);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand destination_operand = cgen_->ToMemOperand(destination);
      if (in_cycle_) {
        // kScratchDoubleReg was used to break the cycle.  Preserve its
        // contents across this memory-to-memory copy by saving it on the
        // stack around the vldr/vstr pair.
        __ vstm(db_w, sp, kScratchDoubleReg, kScratchDoubleReg);
        __ vldr(kScratchDoubleReg, source_operand);
        __ vstr(kScratchDoubleReg, destination_operand);
        __ vldm(ia_w, sp, kScratchDoubleReg, kScratchDoubleReg);
      } else {
        __ vldr(kScratchDoubleReg, source_operand);
        __ vstr(kScratchDoubleReg, destination_operand);
      }
    }
  } else {
    UNREACHABLE();
  }

  // The move has been emitted; remove it from the worklist.
  moves_[index].Eliminate();
}
| 298 | |
| 299 | |
| 300 | #undef __ |
| 301 | |
| 302 | } } // namespace v8::internal |