// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen-inl.h"
#include "jump-target-inl.h"
#include "register-allocator-inl.h"

namespace v8 {
namespace internal {


bool JumpTarget::compiling_deferred_code_ = false;


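// Jump unconditionally to the target, carrying one result value on the
// virtual frame. Pushing the argument transfers it into the frame so that
// it is part of the frame state that DoJump() propagates to the target.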
void JumpTarget::Jump(Result* arg) {
  ASSERT(cgen()->has_valid_frame());

  cgen()->frame()->Push(arg);
  DoJump();
}


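// Conditionally branch to the target with one result value. The value is
// pushed so that it travels on the frame across the taken branch and is
// popped back out on the fall-through path; the DEBUG-only checks verify
// that it comes back with the same type and in the same register it went
// in with.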
void JumpTarget::Branch(Condition cc, Result* arg, Hint hint) {
  ASSERT(cgen()->has_valid_frame());

  // We want to check that values that are not in the frame at the call
  // site stay in the same registers on the fall-through path.
#ifdef DEBUG
  Result::Type arg_type = arg->type();
  Register arg_reg = arg->is_register() ? arg->reg() : no_reg;
#endif

  cgen()->frame()->Push(arg);
  DoBranch(cc, hint);
  *arg = cgen()->frame()->Pop();

  ASSERT(arg->type() == arg_type);
  ASSERT(!arg->is_register() || arg->reg().is(arg_reg));
}


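// Two-value variant of the conditional branch above; both values travel
// across the branch on the virtual frame and are popped back out in the
// reverse of the order in which they were pushed.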
void JumpTarget::Branch(Condition cc, Result* arg0, Result* arg1, Hint hint) {
  ASSERT(cgen()->has_valid_frame());

  // We want to check that values that are not in the frame at the call
  // site stay in the same registers on the fall-through path.
#ifdef DEBUG
  Result::Type arg0_type = arg0->type();
  Register arg0_reg = arg0->is_register() ? arg0->reg() : no_reg;
  Result::Type arg1_type = arg1->type();
  Register arg1_reg = arg1->is_register() ? arg1->reg() : no_reg;
#endif

  cgen()->frame()->Push(arg0);
  cgen()->frame()->Push(arg1);
  DoBranch(cc, hint);
  *arg1 = cgen()->frame()->Pop();
  *arg0 = cgen()->frame()->Pop();

  ASSERT(arg0->type() == arg0_type);
  ASSERT(!arg0->is_register() || arg0->reg().is(arg0_reg));
  ASSERT(arg1->type() == arg1_type);
  ASSERT(!arg1->is_register() || arg1->reg().is(arg1_reg));
}


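// Conditionally branch to a break target while carrying one result value.
// A break target may sit at a lower frame height than the current frame,
// typically because the statement being broken out of has pushed values
// the target does not expect, in which case merge code may have to be
// emitted on the taken branch.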
void BreakTarget::Branch(Condition cc, Result* arg, Hint hint) {
  ASSERT(cgen()->has_valid_frame());

  int count = cgen()->frame()->height() - expected_height_;
  if (count > 0) {
    // We negate and branch here rather than using DoBranch's negate
    // and branch. This gives us a hook to remove statement state
    // from the frame.
    JumpTarget fall_through;
    // The branch to fall_through will not itself negate and branch,
    // because fall_through is a forward-only target.
    fall_through.Branch(NegateCondition(cc), NegateHint(hint));
    Jump(arg);  // May emit merge code here.
    fall_through.Bind();
  } else {
#ifdef DEBUG
    Result::Type arg_type = arg->type();
    Register arg_reg = arg->is_register() ? arg->reg() : no_reg;
#endif
    cgen()->frame()->Push(arg);
    DoBranch(cc, hint);
    *arg = cgen()->frame()->Pop();
    ASSERT(arg->type() == arg_type);
    ASSERT(!arg->is_register() || arg->reg().is(arg_reg));
  }
}


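// Bind the target at the current code position with one result value live.
// If the fall-through frame is valid, the value is pushed so that it is
// merged like any other frame element; it is popped back out of the entry
// frame after binding.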
void JumpTarget::Bind(Result* arg) {
  if (cgen()->has_valid_frame()) {
    cgen()->frame()->Push(arg);
  }
  DoBind();
  *arg = cgen()->frame()->Pop();
}


void JumpTarget::Bind(Result* arg0, Result* arg1) {
  if (cgen()->has_valid_frame()) {
    cgen()->frame()->Push(arg0);
    cgen()->frame()->Push(arg1);
  }
  DoBind();
  *arg1 = cgen()->frame()->Pop();
  *arg0 = cgen()->frame()->Pop();
}


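// Overview of the merge performed below (a summary of the code that
// follows, not additional behavior):
//   1. Seed the candidate elements from the first reaching frame,
//      dropping constants and copies for BIDIRECTIONAL targets.
//   2. Combine each candidate with the corresponding element of every
//      other reaching frame; incompatible elements become undetermined.
//   3. Place each still-undetermined element in memory if it is synced in
//      every reaching frame, otherwise in the most frequently used free
//      register, falling back to any free register.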
void JumpTarget::ComputeEntryFrame() {
  // Given: a collection of frames reaching the block along forward CFG
  // edges and the directionality of the block. Compute: an entry frame
  // for the block.

  Counters::compute_entry_frame.Increment();
#ifdef DEBUG
  if (compiling_deferred_code_) {
    ASSERT(reaching_frames_.length() > 1);
    VirtualFrame* frame = reaching_frames_[0];
    bool all_identical = true;
    for (int i = 1; i < reaching_frames_.length(); i++) {
      if (!frame->Equals(reaching_frames_[i])) {
        all_identical = false;
        break;
      }
    }
    ASSERT(!all_identical || all_identical);
  }
#endif

  // Choose an initial frame.
  VirtualFrame* initial_frame = reaching_frames_[0];

  // A list of pointers to frame elements in the entry frame. NULL
  // indicates that the element has not yet been determined.
  int length = initial_frame->element_count();
  ZoneList<FrameElement*> elements(length);

  // Initially populate the list of elements based on the initial
  // frame.
  for (int i = 0; i < length; i++) {
    FrameElement element = initial_frame->elements_[i];
    // We do not allow copies or constants in bidirectional frames.
    if (direction_ == BIDIRECTIONAL) {
      if (element.is_constant() || element.is_copy()) {
        elements.Add(NULL);
        continue;
      }
    }
    elements.Add(&initial_frame->elements_[i]);
  }

  // Compute elements based on the other reaching frames.
  if (reaching_frames_.length() > 1) {
    for (int i = 0; i < length; i++) {
      FrameElement* element = elements[i];
      for (int j = 1; j < reaching_frames_.length(); j++) {
        // Element computation is monotonic: new information will not
        // change our decision about undetermined or invalid elements.
        if (element == NULL || !element->is_valid()) break;

        FrameElement* other = &reaching_frames_[j]->elements_[i];
        element = element->Combine(other);
        if (element != NULL && !element->is_copy()) {
          ASSERT(other != NULL);
          // We overwrite the number information of one of the incoming
          // frames. This is safe because we only use the frame for
          // emitting merge code. The number information of incoming
          // frames is not used anymore.
          element->set_type_info(TypeInfo::Combine(element->type_info(),
                                                   other->type_info()));
        }
      }
      elements[i] = element;
    }
  }

  // Build the new frame. A freshly allocated frame has memory elements
  // for the parameters and some platform-dependent elements (e.g.,
  // return address). Replace those first.
  entry_frame_ = new VirtualFrame();
  int index = 0;
  for (; index < entry_frame_->element_count(); index++) {
    FrameElement* target = elements[index];
    // If the element is determined, set it now. Count registers. Mark
    // elements as copied exactly when they have a copy. Undetermined
    // elements are initially recorded as if in memory.
    if (target != NULL) {
      entry_frame_->elements_[index] = *target;
      InitializeEntryElement(index, target);
    }
  }
  // Then fill in the rest of the frame with new elements.
  for (; index < length; index++) {
    FrameElement* target = elements[index];
    if (target == NULL) {
      entry_frame_->elements_.Add(
          FrameElement::MemoryElement(TypeInfo::Uninitialized()));
    } else {
      entry_frame_->elements_.Add(*target);
      InitializeEntryElement(index, target);
    }
  }

  // Allocate any still-undetermined frame elements to registers or
  // memory, from the top down.
  for (int i = length - 1; i >= 0; i--) {
    if (elements[i] == NULL) {
      // Loop over all the reaching frames to check whether the element
      // is synced on all frames and to count the registers it occupies.
      bool is_synced = true;
      RegisterFile candidate_registers;
      int best_count = kMinInt;
      int best_reg_num = RegisterAllocator::kInvalidRegister;
      TypeInfo info = TypeInfo::Uninitialized();

      for (int j = 0; j < reaching_frames_.length(); j++) {
        FrameElement element = reaching_frames_[j]->elements_[i];
        if (direction_ == BIDIRECTIONAL) {
          info = TypeInfo::Unknown();
        } else if (!element.is_copy()) {
          info = TypeInfo::Combine(info, element.type_info());
        } else {
          // New elements will not be copies, so get the number information
          // from the backing element in the reaching frame.
          info = TypeInfo::Combine(info,
              reaching_frames_[j]->elements_[element.index()].type_info());
        }
        is_synced = is_synced && element.is_synced();
        if (element.is_register() && !entry_frame_->is_used(element.reg())) {
          // Count this register occurrence and remember it if it is
          // better than the previous best.
          int num = RegisterAllocator::ToNumber(element.reg());
          candidate_registers.Use(num);
          if (candidate_registers.count(num) > best_count) {
            best_count = candidate_registers.count(num);
            best_reg_num = num;
          }
        }
      }

      // We must have number type information by now (except for copied
      // elements).
      ASSERT(entry_frame_->elements_[i].is_copy()
             || !info.IsUninitialized());

      // If the value is synced on all frames, put it in memory. This
      // costs nothing in the merge code but will incur a
      // memory-to-register move when the value is needed later.
      if (is_synced) {
        // Already recorded as a memory element.
        // Set the combined number info.
        entry_frame_->elements_[i].set_type_info(info);
        continue;
      }

      // Try to put it in a register. If there was no best choice,
      // consider any free register.
      if (best_reg_num == RegisterAllocator::kInvalidRegister) {
        for (int j = 0; j < RegisterAllocator::kNumRegisters; j++) {
          if (!entry_frame_->is_used(j)) {
            best_reg_num = j;
            break;
          }
        }
      }

      if (best_reg_num != RegisterAllocator::kInvalidRegister) {
        // If there was a register choice, use it. Preserve the copied
        // flag on the element.
        bool is_copied = entry_frame_->elements_[i].is_copied();
        Register reg = RegisterAllocator::ToRegister(best_reg_num);
        entry_frame_->elements_[i] =
            FrameElement::RegisterElement(reg, FrameElement::NOT_SYNCED,
                                          TypeInfo::Uninitialized());
        if (is_copied) entry_frame_->elements_[i].set_copied();
        entry_frame_->set_register_location(reg, i);
      }
      // Set the combined number info.
      entry_frame_->elements_[i].set_type_info(info);
    }
  }

  // If we have incoming backward edges, assert that we have forgotten all
  // number information.
#ifdef DEBUG
  if (direction_ == BIDIRECTIONAL) {
    for (int i = 0; i < length; ++i) {
      if (!entry_frame_->elements_[i].is_copy()) {
        ASSERT(entry_frame_->elements_[i].type_info().IsUnknown());
      }
    }
  }
#endif

  // The stack pointer is at the highest synced element or the base of
  // the expression stack.
  int stack_pointer = length - 1;
  while (stack_pointer >= entry_frame_->expression_base_index() &&
         !entry_frame_->elements_[stack_pointer].is_synced()) {
    stack_pointer--;
  }
  entry_frame_->stack_pointer_ = stack_pointer;
}


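// Record, for each register, how deferred code should treat it. The
// encoding of registers_, based on the code below (the constants are
// defined elsewhere in the code generator):
//   kIgnore              - the register does not appear in the frame.
//   offset | kSyncedFlag - the element is already synced to the frame slot
//                          at this fp-relative offset; it only needs to be
//                          restored on exit from the deferred code.
//   kPush                - the element lies beyond the current stack
//                          pointer and is pushed on entry (hence the name).
//   offset               - otherwise the register is spilled to and
//                          reloaded from the slot at this fp-relative
//                          offset.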
FrameRegisterState::FrameRegisterState(VirtualFrame* frame) {
  // Copy the register locations from the given frame. These are the
  // registers that will be spilled on entry to the deferred code and
  // restored on exit.
  int sp_offset = frame->fp_relative(frame->stack_pointer_);
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int loc = frame->register_location(i);
    if (loc == VirtualFrame::kIllegalIndex) {
      registers_[i] = kIgnore;
    } else if (frame->elements_[loc].is_synced()) {
      // Needs to be restored on exit but not saved on entry.
      registers_[i] = frame->fp_relative(loc) | kSyncedFlag;
    } else {
      int offset = frame->fp_relative(loc);
      registers_[i] = (offset < sp_offset) ? kPush : offset;
    }
  }
}


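// Reset the target to its unused state: forget all reaching frames, merge
// labels, and any computed entry frame or bound entry label.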
void JumpTarget::Unuse() {
  reaching_frames_.Clear();
  merge_labels_.Clear();
  entry_frame_ = NULL;
  entry_label_.Unuse();
}


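// Record a frame that reaches this target along a forward edge, paired
// with a fresh label at which merge code for that frame can be emitted.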
void JumpTarget::AddReachingFrame(VirtualFrame* frame) {
  ASSERT(reaching_frames_.length() == merge_labels_.length());
  ASSERT(entry_frame_ == NULL);
  Label fresh;
  merge_labels_.Add(fresh);
  reaching_frames_.Add(frame);
}


// -------------------------------------------------------------------------
// BreakTarget implementation.

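// Setting the direction also records the current frame height as the
// height the frame is expected to have when the target is reached.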
void BreakTarget::set_direction(Directionality direction) {
  JumpTarget::set_direction(direction);
  ASSERT(cgen()->has_valid_frame());
  expected_height_ = cgen()->frame()->height();
}


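// Copy the complete state of this break target into the destination so
// that the destination can be branched to or bound in place of this one.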
void BreakTarget::CopyTo(BreakTarget* destination) {
  ASSERT(destination != NULL);
  destination->direction_ = direction_;
  destination->reaching_frames_.Rewind(0);
  destination->reaching_frames_.AddAll(reaching_frames_);
  destination->merge_labels_.Rewind(0);
  destination->merge_labels_.AddAll(merge_labels_);
  destination->entry_frame_ = entry_frame_;
  destination->entry_label_ = entry_label_;
  destination->expected_height_ = expected_height_;
}


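// Conditional branch to the break target with no values. As in the
// Result-carrying version above, merge code may be needed when the frame
// is taller than the expected height.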
void BreakTarget::Branch(Condition cc, Hint hint) {
  ASSERT(cgen()->has_valid_frame());

  int count = cgen()->frame()->height() - expected_height_;
  if (count > 0) {
    // We negate and branch here rather than using DoBranch's negate
    // and branch. This gives us a hook to remove statement state
    // from the frame.
    JumpTarget fall_through;
    // The branch to fall_through will not itself negate and branch,
    // because fall_through is a forward-only target.
    fall_through.Branch(NegateCondition(cc), NegateHint(hint));
    Jump();  // May emit merge code here.
    fall_through.Bind();
  } else {
    DoBranch(cc, hint);
  }
}


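// Capture everything needed to generate the deferred code later: the
// current macro assembler, the source positions recorded at the point
// where the deferred code is created, and a snapshot of the frame's
// register state. The new instance registers itself with the current
// code generator.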
DeferredCode::DeferredCode()
    : masm_(CodeGeneratorScope::Current()->masm()),
      statement_position_(masm_->positions_recorder()->
                          current_statement_position()),
      position_(masm_->positions_recorder()->current_position()),
      frame_state_(CodeGeneratorScope::Current()->frame()) {
  ASSERT(statement_position_ != RelocInfo::kNoPosition);
  ASSERT(position_ != RelocInfo::kNoPosition);

  CodeGeneratorScope::Current()->AddDeferred(this);
#ifdef DEBUG
  comment_ = "";
#endif
}

} }  // namespace v8::internal