// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/crankshaft/x64/lithium-gap-resolver-x64.h"

#include "src/crankshaft/x64/lithium-codegen-x64.h"

namespace v8 {
namespace internal {

14LGapResolver::LGapResolver(LCodeGen* owner)
15 : cgen_(owner), moves_(32, owner->zone()) {}
16
17
18void LGapResolver::Resolve(LParallelMove* parallel_move) {
19 DCHECK(moves_.is_empty());
20 // Build up a worklist of moves.
21 BuildInitialMoveList(parallel_move);
22
23 for (int i = 0; i < moves_.length(); ++i) {
24 LMoveOperands move = moves_[i];
25 // Skip constants to perform them last. They don't block other moves
26 // and skipping such moves with register destinations keeps those
27 // registers free for the whole algorithm.
28 if (!move.IsEliminated() && !move.source()->IsConstantOperand()) {
29 PerformMove(i);
30 }
31 }
32
33 // Perform the moves with constant sources.
34 for (int i = 0; i < moves_.length(); ++i) {
35 if (!moves_[i].IsEliminated()) {
36 DCHECK(moves_[i].source()->IsConstantOperand());
37 EmitMove(i);
38 }
39 }
40
41 moves_.Rewind(0);
42}
43
44
45void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
46 // Perform a linear sweep of the moves to add them to the initial list of
47 // moves to perform, ignoring any move that is redundant (the source is
48 // the same as the destination, the destination is ignored and
49 // unallocated, or the move was already eliminated).
50 const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
51 for (int i = 0; i < moves->length(); ++i) {
52 LMoveOperands move = moves->at(i);
53 if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
54 }
55 Verify();
56}
57
58
59void LGapResolver::PerformMove(int index) {
60 // Each call to this function performs a move and deletes it from the move
61 // graph. We first recursively perform any move blocking this one. We
62 // mark a move as "pending" on entry to PerformMove in order to detect
63 // cycles in the move graph. We use operand swaps to resolve cycles,
64 // which means that a call to PerformMove could change any source operand
65 // in the move graph.
66
67 DCHECK(!moves_[index].IsPending());
68 DCHECK(!moves_[index].IsRedundant());
69
70 // Clear this move's destination to indicate a pending move. The actual
71 // destination is saved in a stack-allocated local. Recursion may allow
72 // multiple moves to be pending.
73 DCHECK(moves_[index].source() != NULL); // Or else it will look eliminated.
74 LOperand* destination = moves_[index].destination();
75 moves_[index].set_destination(NULL);
76
77 // Perform a depth-first traversal of the move graph to resolve
78 // dependencies. Any unperformed, unpending move with a source the same
79 // as this one's destination blocks this one so recursively perform all
80 // such moves.
81 for (int i = 0; i < moves_.length(); ++i) {
82 LMoveOperands other_move = moves_[i];
83 if (other_move.Blocks(destination) && !other_move.IsPending()) {
84 // Though PerformMove can change any source operand in the move graph,
85 // this call cannot create a blocking move via a swap (this loop does
86 // not miss any). Assume there is a non-blocking move with source A
87 // and this move is blocked on source B and there is a swap of A and
88 // B. Then A and B must be involved in the same cycle (or they would
89 // not be swapped). Since this move's destination is B and there is
90 // only a single incoming edge to an operand, this move must also be
91 // involved in the same cycle. In that case, the blocking move will
92 // be created but will be "pending" when we return from PerformMove.
93 PerformMove(i);
94 }
95 }
96
97 // We are about to resolve this move and don't need it marked as
98 // pending, so restore its destination.
99 moves_[index].set_destination(destination);
100
101 // This move's source may have changed due to swaps to resolve cycles and
102 // so it may now be the last move in the cycle. If so remove it.
103 if (moves_[index].source()->Equals(destination)) {
104 moves_[index].Eliminate();
105 return;
106 }
107
108 // The move may be blocked on a (at most one) pending move, in which case
109 // we have a cycle. Search for such a blocking move and perform a swap to
110 // resolve it.
111 for (int i = 0; i < moves_.length(); ++i) {
112 LMoveOperands other_move = moves_[i];
113 if (other_move.Blocks(destination)) {
114 DCHECK(other_move.IsPending());
115 EmitSwap(index);
116 return;
117 }
118 }
119
120 // This move is not blocked.
121 EmitMove(index);
122}
123
124
125void LGapResolver::Verify() {
126#ifdef ENABLE_SLOW_DCHECKS
127 // No operand should be the destination for more than one move.
128 for (int i = 0; i < moves_.length(); ++i) {
129 LOperand* destination = moves_[i].destination();
130 for (int j = i + 1; j < moves_.length(); ++j) {
131 SLOW_DCHECK(!destination->Equals(moves_[j].destination()));
132 }
133 }
134#endif
135}


#define __ ACCESS_MASM(cgen_->masm())


141void LGapResolver::EmitMove(int index) {
142 LOperand* source = moves_[index].source();
143 LOperand* destination = moves_[index].destination();
144
145 // Dispatch on the source and destination operand kinds. Not all
146 // combinations are possible.
147 if (source->IsRegister()) {
148 Register src = cgen_->ToRegister(source);
149 if (destination->IsRegister()) {
150 Register dst = cgen_->ToRegister(destination);
151 __ movp(dst, src);
152 } else {
153 DCHECK(destination->IsStackSlot());
154 Operand dst = cgen_->ToOperand(destination);
155 __ movp(dst, src);
156 }
157
158 } else if (source->IsStackSlot()) {
159 Operand src = cgen_->ToOperand(source);
160 if (destination->IsRegister()) {
161 Register dst = cgen_->ToRegister(destination);
162 __ movp(dst, src);
163 } else {
164 DCHECK(destination->IsStackSlot());
165 Operand dst = cgen_->ToOperand(destination);
166 __ movp(kScratchRegister, src);
167 __ movp(dst, kScratchRegister);
168 }
169
170 } else if (source->IsConstantOperand()) {
171 LConstantOperand* constant_source = LConstantOperand::cast(source);
172 if (destination->IsRegister()) {
173 Register dst = cgen_->ToRegister(destination);
174 if (cgen_->IsSmiConstant(constant_source)) {
175 __ Move(dst, cgen_->ToSmi(constant_source));
176 } else if (cgen_->IsInteger32Constant(constant_source)) {
177 int32_t constant = cgen_->ToInteger32(constant_source);
178 // Do sign extension only for constant used as de-hoisted array key.
179 // Others only need zero extension, which saves 2 bytes.
180 if (cgen_->IsDehoistedKeyConstant(constant_source)) {
181 __ Set(dst, constant);
182 } else {
183 __ Set(dst, static_cast<uint32_t>(constant));
184 }
185 } else {
186 __ Move(dst, cgen_->ToHandle(constant_source));
187 }
188 } else if (destination->IsDoubleRegister()) {
189 double v = cgen_->ToDouble(constant_source);
190 uint64_t int_val = bit_cast<uint64_t, double>(v);
191 XMMRegister dst = cgen_->ToDoubleRegister(destination);
192 if (int_val == 0) {
193 __ Xorpd(dst, dst);
194 } else {
195 __ Set(kScratchRegister, int_val);
196 __ Movq(dst, kScratchRegister);
197 }
198 } else {
199 DCHECK(destination->IsStackSlot());
200 Operand dst = cgen_->ToOperand(destination);
201 if (cgen_->IsSmiConstant(constant_source)) {
202 __ Move(dst, cgen_->ToSmi(constant_source));
203 } else if (cgen_->IsInteger32Constant(constant_source)) {
204 // Do sign extension to 64 bits when stored into stack slot.
205 __ movp(dst, Immediate(cgen_->ToInteger32(constant_source)));
206 } else {
207 __ Move(kScratchRegister, cgen_->ToHandle(constant_source));
208 __ movp(dst, kScratchRegister);
209 }
210 }
211
212 } else if (source->IsDoubleRegister()) {
213 XMMRegister src = cgen_->ToDoubleRegister(source);
214 if (destination->IsDoubleRegister()) {
215 __ Movapd(cgen_->ToDoubleRegister(destination), src);
216 } else {
217 DCHECK(destination->IsDoubleStackSlot());
218 __ Movsd(cgen_->ToOperand(destination), src);
219 }
220 } else if (source->IsDoubleStackSlot()) {
221 Operand src = cgen_->ToOperand(source);
222 if (destination->IsDoubleRegister()) {
223 __ Movsd(cgen_->ToDoubleRegister(destination), src);
224 } else {
225 DCHECK(destination->IsDoubleStackSlot());
Ben Murdoch61f157c2016-09-16 13:49:30 +0100226 __ Movsd(kScratchDoubleReg, src);
227 __ Movsd(cgen_->ToOperand(destination), kScratchDoubleReg);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000228 }
229 } else {
230 UNREACHABLE();
231 }
232
233 moves_[index].Eliminate();
234}
235
236
237void LGapResolver::EmitSwap(int index) {
238 LOperand* source = moves_[index].source();
239 LOperand* destination = moves_[index].destination();
240
241 // Dispatch on the source and destination operand kinds. Not all
242 // combinations are possible.
243 if (source->IsRegister() && destination->IsRegister()) {
244 // Swap two general-purpose registers.
245 Register src = cgen_->ToRegister(source);
246 Register dst = cgen_->ToRegister(destination);
247 __ movp(kScratchRegister, src);
248 __ movp(src, dst);
249 __ movp(dst, kScratchRegister);
250
251 } else if ((source->IsRegister() && destination->IsStackSlot()) ||
252 (source->IsStackSlot() && destination->IsRegister())) {
253 // Swap a general-purpose register and a stack slot.
254 Register reg =
255 cgen_->ToRegister(source->IsRegister() ? source : destination);
256 Operand mem =
257 cgen_->ToOperand(source->IsRegister() ? destination : source);
258 __ movp(kScratchRegister, mem);
259 __ movp(mem, reg);
260 __ movp(reg, kScratchRegister);
261
262 } else if ((source->IsStackSlot() && destination->IsStackSlot()) ||
263 (source->IsDoubleStackSlot() && destination->IsDoubleStackSlot())) {
264 // Swap two stack slots or two double stack slots.
265 Operand src = cgen_->ToOperand(source);
266 Operand dst = cgen_->ToOperand(destination);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100267 __ Movsd(kScratchDoubleReg, src);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000268 __ movp(kScratchRegister, dst);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100269 __ Movsd(dst, kScratchDoubleReg);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000270 __ movp(src, kScratchRegister);
271
272 } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) {
273 // Swap two double registers.
274 XMMRegister source_reg = cgen_->ToDoubleRegister(source);
275 XMMRegister destination_reg = cgen_->ToDoubleRegister(destination);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100276 __ Movapd(kScratchDoubleReg, source_reg);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000277 __ Movapd(source_reg, destination_reg);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100278 __ Movapd(destination_reg, kScratchDoubleReg);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000279
280 } else if (source->IsDoubleRegister() || destination->IsDoubleRegister()) {
281 // Swap a double register and a double stack slot.
282 DCHECK((source->IsDoubleRegister() && destination->IsDoubleStackSlot()) ||
283 (source->IsDoubleStackSlot() && destination->IsDoubleRegister()));
284 XMMRegister reg = cgen_->ToDoubleRegister(source->IsDoubleRegister()
285 ? source
286 : destination);
287 LOperand* other = source->IsDoubleRegister() ? destination : source;
288 DCHECK(other->IsDoubleStackSlot());
289 Operand other_operand = cgen_->ToOperand(other);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100290 __ Movapd(kScratchDoubleReg, reg);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000291 __ Movsd(reg, other_operand);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100292 __ Movsd(other_operand, kScratchDoubleReg);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000293
294 } else {
295 // No other combinations are possible.
296 UNREACHABLE();
297 }
298
299 // The swap of source and destination has executed a move from source to
300 // destination.
301 moves_[index].Eliminate();
302
303 // Any unperformed (including pending) move with a source of either
304 // this move's source or destination needs to have their source
305 // changed to reflect the state of affairs after the swap.
306 for (int i = 0; i < moves_.length(); ++i) {
307 LMoveOperands other_move = moves_[i];
308 if (other_move.Blocks(source)) {
309 moves_[i].set_source(destination);
310 } else if (other_move.Blocks(destination)) {
311 moves_[i].set_source(source);
312 }
313 }
314}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64