blob: 2fceec9d21a4df6c7a647f2d9a8b331e8dccae77 [file] [log] [blame]
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#include "src/v8.h"
Steve Block44f0eee2011-05-26 01:26:41 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/arm/lithium-codegen-arm.h"
8#include "src/arm/lithium-gap-resolver-arm.h"
Ben Murdoche0cee9b2011-05-25 10:26:03 +01009
10namespace v8 {
11namespace internal {
12
// We use the root register to spill a value while breaking a cycle in parallel
// moves. We don't need access to roots while resolving the move list, and
// using the root register has two advantages:
//  - It is not in the crankshaft allocatable registers list, so it can't
//    interfere with any of the moves we are resolving.
//  - We don't need to push it on the stack, as we can reload it with its
//    value once we have resolved a cycle.
#define kSavedValueRegister kRootRegister
21
Ben Murdoche0cee9b2011-05-25 10:26:03 +010022
23LGapResolver::LGapResolver(LCodeGen* owner)
Ben Murdochb8a8cc12014-11-26 15:28:44 +000024 : cgen_(owner), moves_(32, owner->zone()), root_index_(0), in_cycle_(false),
25 saved_destination_(NULL), need_to_restore_root_(false) { }
26
27
28#define __ ACCESS_MASM(cgen_->masm())
Ben Murdoche0cee9b2011-05-25 10:26:03 +010029
30
31void LGapResolver::Resolve(LParallelMove* parallel_move) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000032 DCHECK(moves_.is_empty());
Ben Murdoche0cee9b2011-05-25 10:26:03 +010033 // Build up a worklist of moves.
34 BuildInitialMoveList(parallel_move);
35
36 for (int i = 0; i < moves_.length(); ++i) {
37 LMoveOperands move = moves_[i];
38 // Skip constants to perform them last. They don't block other moves
39 // and skipping such moves with register destinations keeps those
40 // registers free for the whole algorithm.
41 if (!move.IsEliminated() && !move.source()->IsConstantOperand()) {
42 root_index_ = i; // Any cycle is found when by reaching this move again.
43 PerformMove(i);
44 if (in_cycle_) {
45 RestoreValue();
46 }
47 }
48 }
49
50 // Perform the moves with constant sources.
51 for (int i = 0; i < moves_.length(); ++i) {
52 if (!moves_[i].IsEliminated()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000053 DCHECK(moves_[i].source()->IsConstantOperand());
Ben Murdoche0cee9b2011-05-25 10:26:03 +010054 EmitMove(i);
55 }
56 }
57
Ben Murdochb8a8cc12014-11-26 15:28:44 +000058 if (need_to_restore_root_) {
59 DCHECK(kSavedValueRegister.is(kRootRegister));
60 __ InitializeRootRegister();
61 need_to_restore_root_ = false;
62 }
63
Ben Murdoche0cee9b2011-05-25 10:26:03 +010064 moves_.Rewind(0);
65}
66
67
68void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
69 // Perform a linear sweep of the moves to add them to the initial list of
70 // moves to perform, ignoring any move that is redundant (the source is
71 // the same as the destination, the destination is ignored and
72 // unallocated, or the move was already eliminated).
73 const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
74 for (int i = 0; i < moves->length(); ++i) {
75 LMoveOperands move = moves->at(i);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000076 if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
Ben Murdoche0cee9b2011-05-25 10:26:03 +010077 }
78 Verify();
79}
80
81
82void LGapResolver::PerformMove(int index) {
83 // Each call to this function performs a move and deletes it from the move
84 // graph. We first recursively perform any move blocking this one. We
85 // mark a move as "pending" on entry to PerformMove in order to detect
86 // cycles in the move graph.
87
88 // We can only find a cycle, when doing a depth-first traversal of moves,
89 // be encountering the starting move again. So by spilling the source of
90 // the starting move, we break the cycle. All moves are then unblocked,
91 // and the starting move is completed by writing the spilled value to
92 // its destination. All other moves from the spilled source have been
93 // completed prior to breaking the cycle.
94 // An additional complication is that moves to MemOperands with large
95 // offsets (more than 1K or 4K) require us to spill this spilled value to
96 // the stack, to free up the register.
Ben Murdochb8a8cc12014-11-26 15:28:44 +000097 DCHECK(!moves_[index].IsPending());
98 DCHECK(!moves_[index].IsRedundant());
Ben Murdoche0cee9b2011-05-25 10:26:03 +010099
100 // Clear this move's destination to indicate a pending move. The actual
101 // destination is saved in a stack allocated local. Multiple moves can
102 // be pending because this function is recursive.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000103 DCHECK(moves_[index].source() != NULL); // Or else it will look eliminated.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100104 LOperand* destination = moves_[index].destination();
105 moves_[index].set_destination(NULL);
106
107 // Perform a depth-first traversal of the move graph to resolve
108 // dependencies. Any unperformed, unpending move with a source the same
109 // as this one's destination blocks this one so recursively perform all
110 // such moves.
111 for (int i = 0; i < moves_.length(); ++i) {
112 LMoveOperands other_move = moves_[i];
113 if (other_move.Blocks(destination) && !other_move.IsPending()) {
114 PerformMove(i);
115 // If there is a blocking, pending move it must be moves_[root_index_]
116 // and all other moves with the same source as moves_[root_index_] are
117 // sucessfully executed (because they are cycle-free) by this loop.
118 }
119 }
120
121 // We are about to resolve this move and don't need it marked as
122 // pending, so restore its destination.
123 moves_[index].set_destination(destination);
124
125 // The move may be blocked on a pending move, which must be the starting move.
126 // In this case, we have a cycle, and we save the source of this move to
127 // a scratch register to break it.
128 LMoveOperands other_move = moves_[root_index_];
129 if (other_move.Blocks(destination)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000130 DCHECK(other_move.IsPending());
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100131 BreakCycle(index);
132 return;
133 }
134
135 // This move is no longer blocked.
136 EmitMove(index);
137}
138
139
140void LGapResolver::Verify() {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000141#ifdef ENABLE_SLOW_DCHECKS
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100142 // No operand should be the destination for more than one move.
143 for (int i = 0; i < moves_.length(); ++i) {
144 LOperand* destination = moves_[i].destination();
145 for (int j = i + 1; j < moves_.length(); ++j) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000146 SLOW_DCHECK(!destination->Equals(moves_[j].destination()));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100147 }
148 }
149#endif
150}
151
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100152
153void LGapResolver::BreakCycle(int index) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000154 // We save in a register the source of that move and we remember its
155 // destination. Then we mark this move as resolved so the cycle is
156 // broken and we can perform the other moves.
157 DCHECK(moves_[index].destination()->Equals(moves_[root_index_].source()));
158 DCHECK(!in_cycle_);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100159 in_cycle_ = true;
160 LOperand* source = moves_[index].source();
161 saved_destination_ = moves_[index].destination();
162 if (source->IsRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000163 need_to_restore_root_ = true;
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100164 __ mov(kSavedValueRegister, cgen_->ToRegister(source));
165 } else if (source->IsStackSlot()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000166 need_to_restore_root_ = true;
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100167 __ ldr(kSavedValueRegister, cgen_->ToMemOperand(source));
168 } else if (source->IsDoubleRegister()) {
Ben Murdoch692be652012-01-10 18:47:50 +0000169 __ vmov(kScratchDoubleReg, cgen_->ToDoubleRegister(source));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100170 } else if (source->IsDoubleStackSlot()) {
Ben Murdoch692be652012-01-10 18:47:50 +0000171 __ vldr(kScratchDoubleReg, cgen_->ToMemOperand(source));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100172 } else {
173 UNREACHABLE();
174 }
175 // This move will be done by restoring the saved value to the destination.
176 moves_[index].Eliminate();
177}
178
179
// Completes the cycle-breaking move started by BreakCycle(): copies the
// spilled value (held in kSavedValueRegister for core values, or in
// kScratchDoubleReg for doubles) to the destination recorded in
// saved_destination_, then clears the cycle state.
void LGapResolver::RestoreValue() {
  DCHECK(in_cycle_);
  DCHECK(saved_destination_ != NULL);

  if (saved_destination_->IsRegister()) {
    __ mov(cgen_->ToRegister(saved_destination_), kSavedValueRegister);
  } else if (saved_destination_->IsStackSlot()) {
    __ str(kSavedValueRegister, cgen_->ToMemOperand(saved_destination_));
  } else if (saved_destination_->IsDoubleRegister()) {
    __ vmov(cgen_->ToDoubleRegister(saved_destination_), kScratchDoubleReg);
  } else if (saved_destination_->IsDoubleStackSlot()) {
    __ vstr(kScratchDoubleReg, cgen_->ToMemOperand(saved_destination_));
  } else {
    UNREACHABLE();
  }

  in_cycle_ = false;
  saved_destination_ = NULL;
}
199
200
// Emits the code for move |index| and eliminates it from the worklist.
// Dispatches on the source and destination operand kinds; not all
// combinations are possible.
void LGapResolver::EmitMove(int index) {
  LOperand* source = moves_[index].source();
  LOperand* destination = moves_[index].destination();

  if (source->IsRegister()) {
    // Core register -> core register or stack slot.
    Register source_register = cgen_->ToRegister(source);
    if (destination->IsRegister()) {
      __ mov(cgen_->ToRegister(destination), source_register);
    } else {
      DCHECK(destination->IsStackSlot());
      __ str(source_register, cgen_->ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    // Stack slot -> core register or stack slot.
    MemOperand source_operand = cgen_->ToMemOperand(source);
    if (destination->IsRegister()) {
      __ ldr(cgen_->ToRegister(destination), source_operand);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand destination_operand = cgen_->ToMemOperand(destination);
      if (!destination_operand.OffsetIsUint12Encodable()) {
        // The destination offset does not fit the ldr/str immediate field,
        // so the macro assembler will need ip to materialize the address:
        // ip is overwritten while saving the value to the destination.
        // Therefore we can't use ip. It is OK if the read from the source
        // destroys ip, since that happens before the value is read.
        // Move the word through the low half of the scratch VFP register
        // instead.
        __ vldr(kScratchDoubleReg.low(), source_operand);
        __ vstr(kScratchDoubleReg.low(), destination_operand);
      } else {
        __ ldr(ip, source_operand);
        __ str(ip, destination_operand);
      }
    }

  } else if (source->IsConstantOperand()) {
    LConstantOperand* constant_source = LConstantOperand::cast(source);
    if (destination->IsRegister()) {
      Register dst = cgen_->ToRegister(destination);
      // Smi constants must be encoded as Smis; everything else integral is
      // a raw 32-bit value.
      Representation r = cgen_->IsSmi(constant_source)
          ? Representation::Smi() : Representation::Integer32();
      if (cgen_->IsInteger32(constant_source)) {
        __ mov(dst, Operand(cgen_->ToRepresentation(constant_source, r)));
      } else {
        __ Move(dst, cgen_->ToHandle(constant_source));
      }
    } else if (destination->IsDoubleRegister()) {
      DwVfpRegister result = cgen_->ToDoubleRegister(destination);
      double v = cgen_->ToDouble(constant_source);
      __ Vmov(result, v, ip);  // ip serves as scratch for the bit pattern.
    } else {
      DCHECK(destination->IsStackSlot());
      DCHECK(!in_cycle_);  // Constant moves happen after all cycles are gone.
      // The root register is free to use as scratch here (constants are
      // emitted last, see Resolve); it must be restored afterwards.
      need_to_restore_root_ = true;
      Representation r = cgen_->IsSmi(constant_source)
          ? Representation::Smi() : Representation::Integer32();
      if (cgen_->IsInteger32(constant_source)) {
        __ mov(kSavedValueRegister,
               Operand(cgen_->ToRepresentation(constant_source, r)));
      } else {
        __ Move(kSavedValueRegister, cgen_->ToHandle(constant_source));
      }
      __ str(kSavedValueRegister, cgen_->ToMemOperand(destination));
    }

  } else if (source->IsDoubleRegister()) {
    // VFP register -> VFP register or double stack slot.
    DwVfpRegister source_register = cgen_->ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      __ vmov(cgen_->ToDoubleRegister(destination), source_register);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ vstr(source_register, cgen_->ToMemOperand(destination));
    }

  } else if (source->IsDoubleStackSlot()) {
    // Double stack slot -> VFP register or double stack slot.
    MemOperand source_operand = cgen_->ToMemOperand(source);
    if (destination->IsDoubleRegister()) {
      __ vldr(cgen_->ToDoubleRegister(destination), source_operand);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand destination_operand = cgen_->ToMemOperand(destination);
      if (in_cycle_) {
        // kScratchDoubleReg was used to break the cycle, so its current
        // value must be preserved: park it on the stack around the copy.
        __ vstm(db_w, sp, kScratchDoubleReg, kScratchDoubleReg);
        __ vldr(kScratchDoubleReg, source_operand);
        __ vstr(kScratchDoubleReg, destination_operand);
        __ vldm(ia_w, sp, kScratchDoubleReg, kScratchDoubleReg);
      } else {
        __ vldr(kScratchDoubleReg, source_operand);
        __ vstr(kScratchDoubleReg, destination_operand);
      }
    }
  } else {
    UNREACHABLE();
  }

  moves_[index].Eliminate();
}
298
299
300#undef __
301
302} } // namespace v8::internal