blob: c261b665e74750cf6ab289c23c75e8a660a6c090 [file] [log] [blame]
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
#include "src/v8.h"

#include "src/ppc/lithium-codegen-ppc.h"
#include "src/ppc/lithium-gap-resolver-ppc.h"

namespace v8 {
namespace internal {
12
// General-purpose scratch register used to hold a spilled value while a
// move cycle is being broken (see BreakCycle/RestoreValue) and as a
// memory-to-memory scratch in EmitMove.  Register code 11 — presumably
// r11 on PPC; confirm against the port's register definitions.
static const Register kSavedValueRegister = {11};
14
// Constructs a resolver bound to |owner|'s code generator.  The move
// worklist is zone-allocated with an initial capacity of 32 entries;
// cycle-tracking state starts cleared.
LGapResolver::LGapResolver(LCodeGen* owner)
    : cgen_(owner),
      moves_(32, owner->zone()),
      root_index_(0),
      in_cycle_(false),
      saved_destination_(NULL) {}
21
22
23void LGapResolver::Resolve(LParallelMove* parallel_move) {
24 DCHECK(moves_.is_empty());
25 // Build up a worklist of moves.
26 BuildInitialMoveList(parallel_move);
27
28 for (int i = 0; i < moves_.length(); ++i) {
29 LMoveOperands move = moves_[i];
30 // Skip constants to perform them last. They don't block other moves
31 // and skipping such moves with register destinations keeps those
32 // registers free for the whole algorithm.
33 if (!move.IsEliminated() && !move.source()->IsConstantOperand()) {
34 root_index_ = i; // Any cycle is found when by reaching this move again.
35 PerformMove(i);
36 if (in_cycle_) {
37 RestoreValue();
38 }
39 }
40 }
41
42 // Perform the moves with constant sources.
43 for (int i = 0; i < moves_.length(); ++i) {
44 if (!moves_[i].IsEliminated()) {
45 DCHECK(moves_[i].source()->IsConstantOperand());
46 EmitMove(i);
47 }
48 }
49
50 moves_.Rewind(0);
51}
52
53
54void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
55 // Perform a linear sweep of the moves to add them to the initial list of
56 // moves to perform, ignoring any move that is redundant (the source is
57 // the same as the destination, the destination is ignored and
58 // unallocated, or the move was already eliminated).
59 const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
60 for (int i = 0; i < moves->length(); ++i) {
61 LMoveOperands move = moves->at(i);
62 if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
63 }
64 Verify();
65}
66
67
68void LGapResolver::PerformMove(int index) {
69 // Each call to this function performs a move and deletes it from the move
70 // graph. We first recursively perform any move blocking this one. We
71 // mark a move as "pending" on entry to PerformMove in order to detect
72 // cycles in the move graph.
73
74 // We can only find a cycle, when doing a depth-first traversal of moves,
75 // be encountering the starting move again. So by spilling the source of
76 // the starting move, we break the cycle. All moves are then unblocked,
77 // and the starting move is completed by writing the spilled value to
78 // its destination. All other moves from the spilled source have been
79 // completed prior to breaking the cycle.
80 // An additional complication is that moves to MemOperands with large
81 // offsets (more than 1K or 4K) require us to spill this spilled value to
82 // the stack, to free up the register.
83 DCHECK(!moves_[index].IsPending());
84 DCHECK(!moves_[index].IsRedundant());
85
86 // Clear this move's destination to indicate a pending move. The actual
87 // destination is saved in a stack allocated local. Multiple moves can
88 // be pending because this function is recursive.
89 DCHECK(moves_[index].source() != NULL); // Or else it will look eliminated.
90 LOperand* destination = moves_[index].destination();
91 moves_[index].set_destination(NULL);
92
93 // Perform a depth-first traversal of the move graph to resolve
94 // dependencies. Any unperformed, unpending move with a source the same
95 // as this one's destination blocks this one so recursively perform all
96 // such moves.
97 for (int i = 0; i < moves_.length(); ++i) {
98 LMoveOperands other_move = moves_[i];
99 if (other_move.Blocks(destination) && !other_move.IsPending()) {
100 PerformMove(i);
101 // If there is a blocking, pending move it must be moves_[root_index_]
102 // and all other moves with the same source as moves_[root_index_] are
103 // sucessfully executed (because they are cycle-free) by this loop.
104 }
105 }
106
107 // We are about to resolve this move and don't need it marked as
108 // pending, so restore its destination.
109 moves_[index].set_destination(destination);
110
111 // The move may be blocked on a pending move, which must be the starting move.
112 // In this case, we have a cycle, and we save the source of this move to
113 // a scratch register to break it.
114 LMoveOperands other_move = moves_[root_index_];
115 if (other_move.Blocks(destination)) {
116 DCHECK(other_move.IsPending());
117 BreakCycle(index);
118 return;
119 }
120
121 // This move is no longer blocked.
122 EmitMove(index);
123}
124
125
126void LGapResolver::Verify() {
127#ifdef ENABLE_SLOW_DCHECKS
128 // No operand should be the destination for more than one move.
129 for (int i = 0; i < moves_.length(); ++i) {
130 LOperand* destination = moves_[i].destination();
131 for (int j = i + 1; j < moves_.length(); ++j) {
132 SLOW_DCHECK(!destination->Equals(moves_[j].destination()));
133 }
134 }
135#endif
136}
137
#define __ ACCESS_MASM(cgen_->masm())
139
// Breaks a move cycle by spilling the value that should end up in the
// source of moves_[root_index_] into a scratch register —
// kSavedValueRegister for GP data, kScratchDoubleReg for FP data.  After
// all moves in the tree rooted at that move have been performed,
// RestoreValue() writes the spilled value back to saved_destination_.
void LGapResolver::BreakCycle(int index) {
  // This move's destination must be exactly the root move's source, i.e.
  // the location whose value the rest of the cycle still needs.
  DCHECK(moves_[index].destination()->Equals(moves_[root_index_].source()));
  DCHECK(!in_cycle_);
  in_cycle_ = true;
  LOperand* source = moves_[index].source();
  // Remember where the spilled value must eventually be written.
  saved_destination_ = moves_[index].destination();
  if (source->IsRegister()) {
    __ mr(kSavedValueRegister, cgen_->ToRegister(source));
  } else if (source->IsStackSlot()) {
    __ LoadP(kSavedValueRegister, cgen_->ToMemOperand(source));
  } else if (source->IsDoubleRegister()) {
    __ fmr(kScratchDoubleReg, cgen_->ToDoubleRegister(source));
  } else if (source->IsDoubleStackSlot()) {
    __ lfd(kScratchDoubleReg, cgen_->ToMemOperand(source));
  } else {
    UNREACHABLE();
  }
  // This move completes later when RestoreValue() stores the saved value
  // to the destination, so drop it from the worklist now.
  moves_[index].Eliminate();
}
163
164
// Completes the move that was short-circuited by BreakCycle(): copies the
// spilled value (in kSavedValueRegister for GP data, kScratchDoubleReg for
// FP data) into saved_destination_, then clears the cycle state.
void LGapResolver::RestoreValue() {
  DCHECK(in_cycle_);
  DCHECK(saved_destination_ != NULL);

  if (saved_destination_->IsRegister()) {
    __ mr(cgen_->ToRegister(saved_destination_), kSavedValueRegister);
  } else if (saved_destination_->IsStackSlot()) {
    __ StoreP(kSavedValueRegister, cgen_->ToMemOperand(saved_destination_));
  } else if (saved_destination_->IsDoubleRegister()) {
    __ fmr(cgen_->ToDoubleRegister(saved_destination_), kScratchDoubleReg);
  } else if (saved_destination_->IsDoubleStackSlot()) {
    __ stfd(kScratchDoubleReg, cgen_->ToMemOperand(saved_destination_));
  } else {
    UNREACHABLE();
  }

  in_cycle_ = false;
  saved_destination_ = NULL;
}
185
186
// Emits the machine code for the single move at |index| and eliminates it
// from the worklist.  Dispatches on the source and destination operand
// kinds; not all combinations are possible.
void LGapResolver::EmitMove(int index) {
  LOperand* source = moves_[index].source();
  LOperand* destination = moves_[index].destination();

  if (source->IsRegister()) {
    Register source_register = cgen_->ToRegister(source);
    if (destination->IsRegister()) {
      __ mr(cgen_->ToRegister(destination), source_register);
    } else {
      DCHECK(destination->IsStackSlot());
      __ StoreP(source_register, cgen_->ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    MemOperand source_operand = cgen_->ToMemOperand(source);
    if (destination->IsRegister()) {
      __ LoadP(cgen_->ToRegister(destination), source_operand);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand destination_operand = cgen_->ToMemOperand(destination);
      if (in_cycle_) {
        // kSavedValueRegister may hold the spilled cycle value (see
        // BreakCycle), so use ip as the memory-to-memory scratch instead.
        __ LoadP(ip, source_operand);
        __ StoreP(ip, destination_operand);
      } else {
        __ LoadP(kSavedValueRegister, source_operand);
        __ StoreP(kSavedValueRegister, destination_operand);
      }
    }

  } else if (source->IsConstantOperand()) {
    LConstantOperand* constant_source = LConstantOperand::cast(source);
    if (destination->IsRegister()) {
      Register dst = cgen_->ToRegister(destination);
      if (cgen_->IsInteger32(constant_source)) {
        cgen_->EmitLoadIntegerConstant(constant_source, dst);
      } else {
        __ Move(dst, cgen_->ToHandle(constant_source));
      }
    } else if (destination->IsDoubleRegister()) {
      DoubleRegister result = cgen_->ToDoubleRegister(destination);
      double v = cgen_->ToDouble(constant_source);
      // ip is used here as the integer scratch for materializing the
      // double literal.
      __ LoadDoubleLiteral(result, v, ip);
    } else {
      DCHECK(destination->IsStackSlot());
      DCHECK(!in_cycle_);  // Constant moves happen after all cycles are gone.
      // kSavedValueRegister is free to use as a scratch because no cycle
      // is in flight at this point (see the DCHECK above).
      if (cgen_->IsInteger32(constant_source)) {
        cgen_->EmitLoadIntegerConstant(constant_source, kSavedValueRegister);
      } else {
        __ Move(kSavedValueRegister, cgen_->ToHandle(constant_source));
      }
      __ StoreP(kSavedValueRegister, cgen_->ToMemOperand(destination));
    }

  } else if (source->IsDoubleRegister()) {
    DoubleRegister source_register = cgen_->ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      __ fmr(cgen_->ToDoubleRegister(destination), source_register);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ stfd(source_register, cgen_->ToMemOperand(destination));
    }

  } else if (source->IsDoubleStackSlot()) {
    MemOperand source_operand = cgen_->ToMemOperand(source);
    if (destination->IsDoubleRegister()) {
      __ lfd(cgen_->ToDoubleRegister(destination), source_operand);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand destination_operand = cgen_->ToMemOperand(destination);
      if (in_cycle_) {
// In a cycle kScratchDoubleReg holds the spilled FP value (see
// BreakCycle), but kSavedValueRegister is free, so go through it.
#if V8_TARGET_ARCH_PPC64
        __ ld(kSavedValueRegister, source_operand);
        __ std(kSavedValueRegister, destination_operand);
#else
        // 32-bit: copy the 64-bit double as two 32-bit word moves.
        MemOperand source_high_operand = cgen_->ToHighMemOperand(source);
        MemOperand destination_high_operand =
            cgen_->ToHighMemOperand(destination);
        __ lwz(kSavedValueRegister, source_operand);
        __ stw(kSavedValueRegister, destination_operand);
        __ lwz(kSavedValueRegister, source_high_operand);
        __ stw(kSavedValueRegister, destination_high_operand);
#endif
      } else {
        __ lfd(kScratchDoubleReg, source_operand);
        __ stfd(kScratchDoubleReg, destination_operand);
      }
    }
  } else {
    UNREACHABLE();
  }

  moves_[index].Eliminate();
}
284
285
#undef __
}
}  // namespace v8::internal