// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen-inl.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "stub-cache.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())
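// With this shorthand, "__ push(reg)" below simply emits code through
// masm(): ACCESS_MASM essentially routes the call to masm()->push(reg).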

void VirtualFrame::Enter() {
  // Registers live on entry to a JS frame:
  //   rsp: stack pointer, points to return address from this function.
  //   rbp: base pointer, points to previous JS, ArgumentsAdaptor, or
  //        Trampoline frame.
  //   rsi: context of this function call.
  //   rdi: pointer to this function object.
  Comment cmnt(masm(), "[ Enter JS frame");

#ifdef DEBUG
  if (FLAG_debug_code) {
    // Verify that rdi contains a JS function. The following code
    // relies on rax being available for use.
    Condition not_smi = NegateCondition(masm()->CheckSmi(rdi));
    __ Check(not_smi,
             "VirtualFrame::Enter - rdi is not a function (smi check).");
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
    __ Check(equal,
             "VirtualFrame::Enter - rdi is not a function (map check).");
  }
#endif

  EmitPush(rbp);

  __ movq(rbp, rsp);

  // Store the context in the frame. The context is kept in rsi and a
  // copy is stored in the frame. The external reference to rsi
  // remains.
  EmitPush(rsi);

  // Store the function in the frame. The frame owns the register
  // reference now (i.e., it can keep it in rdi or spill it later).
  Push(rdi);
  SyncElementAt(element_count() - 1);
  cgen()->allocator()->Unuse(rdi);
}


void VirtualFrame::Exit() {
  Comment cmnt(masm(), "[ Exit JS frame");
  // Record the location of the JS exit code for patching when setting
  // break point.
  __ RecordJSReturn();

  // Avoid using the leave instruction here, because it is too
  // short. We need the return sequence to be at least the size of a
  // call instruction to support patching the exit code in the
  // debugger. See GenerateReturnSequence for the full return sequence.
  // TODO(X64): A patched call will be very long now. Make sure we
  // have enough room.
  __ movq(rsp, rbp);
  stack_pointer_ = frame_pointer();
  for (int i = element_count() - 1; i > stack_pointer_; i--) {
    FrameElement last = elements_.RemoveLast();
    if (last.is_register()) {
      Unuse(last.reg());
    }
  }

  EmitPop(rbp);
}


void VirtualFrame::AllocateStackSlots() {
  int count = local_count();
  if (count > 0) {
    Comment cmnt(masm(), "[ Allocate space for locals");
    // The locals are initialized to a constant (the undefined value), but
    // we sync them with the actual frame to allocate space for spilling
    // them later. First sync everything above the stack pointer so we can
    // use pushes to allocate and initialize the locals.
    SyncRange(stack_pointer_ + 1, element_count() - 1);
    Handle<Object> undefined = Factory::undefined_value();
    FrameElement initial_value =
        FrameElement::ConstantElement(undefined, FrameElement::SYNCED);
    if (count < kLocalVarBound) {
      // For fewer locals the unrolled loop is more compact.

      // Hope for one of the first eight registers, where the push operation
      // takes only one byte (kScratchRegister needs a REX prefix).
      Result tmp = cgen()->allocator()->Allocate();
      ASSERT(tmp.is_valid());
      __ movq(tmp.reg(), undefined, RelocInfo::EMBEDDED_OBJECT);
      for (int i = 0; i < count; i++) {
        __ push(tmp.reg());
      }
    } else {
      // For more locals a loop in generated code is more compact.
      Label alloc_locals_loop;
      Result cnt = cgen()->allocator()->Allocate();
      ASSERT(cnt.is_valid());
      __ movq(kScratchRegister, undefined, RelocInfo::EMBEDDED_OBJECT);
#ifdef DEBUG
      Label loop_size;
      __ bind(&loop_size);
#endif
      if (is_uint8(count)) {
        // Loading imm8 is shorter than loading imm32.
        // Load only the partial byte register, and use decb below.
        __ movb(cnt.reg(), Immediate(count));
      } else {
        __ movl(cnt.reg(), Immediate(count));
      }
      __ bind(&alloc_locals_loop);
      __ push(kScratchRegister);
      if (is_uint8(count)) {
        __ decb(cnt.reg());
      } else {
        __ decl(cnt.reg());
      }
      __ j(not_zero, &alloc_locals_loop);
#ifdef DEBUG
      CHECK(masm()->SizeOfCodeGeneratedSince(&loop_size) < kLocalVarBound);
#endif
    }
    for (int i = 0; i < count; i++) {
      elements_.Add(initial_value);
      stack_pointer_++;
    }
  }
}


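// Keep the memory copy of the context in sync: rsi holds the live
// context, and this writes it back to the frame's context slot.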
void VirtualFrame::SaveContextRegister() {
  ASSERT(elements_[context_index()].is_memory());
  __ movq(Operand(rbp, fp_relative(context_index())), rsi);
}


void VirtualFrame::RestoreContextRegister() {
  ASSERT(elements_[context_index()].is_memory());
  __ movq(rsi, Operand(rbp, fp_relative(context_index())));
}


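// Compute the address of the receiver slot (ParameterAt(-1)) and push
// it on the frame.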
void VirtualFrame::PushReceiverSlotAddress() {
  Result temp = cgen()->allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ lea(temp.reg(), ParameterAt(-1));
  Push(&temp);
}


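// The EmitPop variants pop the top element off both the virtual and the
// actual stack; they require the frame to be fully synced at the top, so
// the element being popped is the one at the hardware stack pointer.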
void VirtualFrame::EmitPop(Register reg) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(reg);
}


void VirtualFrame::EmitPop(const Operand& operand) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(operand);
}


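// The EmitPush variants push a value on the actual stack and grow the
// virtual frame by one in-memory element carrying the given (or, below,
// inferred) type information.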
void VirtualFrame::EmitPush(Register reg, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(reg);
}


void VirtualFrame::EmitPush(const Operand& operand, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(operand);
}


void VirtualFrame::EmitPush(Immediate immediate, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(immediate);
}


void VirtualFrame::EmitPush(Smi* smi_value) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(TypeInfo::Smi()));
  stack_pointer_++;
  __ Push(smi_value);
}


void VirtualFrame::EmitPush(Handle<Object> value) {
  ASSERT(stack_pointer_ == element_count() - 1);
  TypeInfo info = TypeInfo::TypeFromValue(value);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ Push(value);
}


void VirtualFrame::EmitPush(Heap::RootListIndex index, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ PushRoot(index);
}


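// Push a trivial expression without emitting any code. Only literals and
// stack-allocated (local or parameter) variables are trivial enough to
// be handled here; anything else is a caller error.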
void VirtualFrame::Push(Expression* expr) {
  ASSERT(expr->IsTrivial());

  Literal* lit = expr->AsLiteral();
  if (lit != NULL) {
    Push(lit->handle());
    return;
  }

  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL) {
    Slot* slot = proxy->var()->AsSlot();
    if (slot->type() == Slot::LOCAL) {
      PushLocalAt(slot->index());
      return;
    }
    if (slot->type() == Slot::PARAMETER) {
      PushParameterAt(slot->index());
      return;
    }
  }
  UNREACHABLE();
}


void VirtualFrame::Drop(int count) {
  ASSERT(count >= 0);
  ASSERT(height() >= count);
  int num_virtual_elements = (element_count() - 1) - stack_pointer_;

  // Emit code to lower the stack pointer if necessary.
  if (num_virtual_elements < count) {
    int num_dropped = count - num_virtual_elements;
    stack_pointer_ -= num_dropped;
    __ addq(rsp, Immediate(num_dropped * kPointerSize));
  }

  // Discard elements from the virtual frame and free any registers.
  for (int i = 0; i < count; i++) {
    FrameElement dropped = elements_.RemoveLast();
    if (dropped.is_register()) {
      Unuse(dropped.reg());
    }
  }
}


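// Invalidate the frame slot at the given index. If other elements are
// copies backed by this slot, the first copy is promoted to the new
// backing store (loading it into a register if it was in memory) and the
// remaining copies are repointed at it. Returns the index of the new
// backing element, or kIllegalIndex if the slot had no copies.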
int VirtualFrame::InvalidateFrameSlotAt(int index) {
  FrameElement original = elements_[index];

  // Is this element the backing store of any copies?
  int new_backing_index = kIllegalIndex;
  if (original.is_copied()) {
    // Verify it is copied, and find first copy.
    for (int i = index + 1; i < element_count(); i++) {
      if (elements_[i].is_copy() && elements_[i].index() == index) {
        new_backing_index = i;
        break;
      }
    }
  }

  if (new_backing_index == kIllegalIndex) {
    // No copies found, return kIllegalIndex.
    if (original.is_register()) {
      Unuse(original.reg());
    }
    elements_[index] = FrameElement::InvalidElement();
    return kIllegalIndex;
  }

  // This is the backing store of copies.
  Register backing_reg;
  if (original.is_memory()) {
    Result fresh = cgen()->allocator()->Allocate();
    ASSERT(fresh.is_valid());
    Use(fresh.reg(), new_backing_index);
    backing_reg = fresh.reg();
    __ movq(backing_reg, Operand(rbp, fp_relative(index)));
  } else {
    // The original was in a register.
    backing_reg = original.reg();
    set_register_location(backing_reg, new_backing_index);
  }
  // Invalidate the element at index.
  elements_[index] = FrameElement::InvalidElement();
  // Set the new backing element.
  if (elements_[new_backing_index].is_synced()) {
    elements_[new_backing_index] =
        FrameElement::RegisterElement(backing_reg,
                                      FrameElement::SYNCED,
                                      original.type_info());
  } else {
    elements_[new_backing_index] =
        FrameElement::RegisterElement(backing_reg,
                                      FrameElement::NOT_SYNCED,
                                      original.type_info());
  }
  // Update the other copies.
  for (int i = new_backing_index + 1; i < element_count(); i++) {
    if (elements_[i].is_copy() && elements_[i].index() == index) {
      elements_[i].set_index(new_backing_index);
      elements_[new_backing_index].set_copied();
    }
  }
  return new_backing_index;
}


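// Move the value in the frame slot at the given index to the top of the
// frame, invalidating the slot. If the slot was the backing store of
// copies, the new top is a copy of the promoted backing element instead.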
void VirtualFrame::TakeFrameSlotAt(int index) {
  ASSERT(index >= 0);
  ASSERT(index <= element_count());
  FrameElement original = elements_[index];
  int new_backing_store_index = InvalidateFrameSlotAt(index);
  if (new_backing_store_index != kIllegalIndex) {
    elements_.Add(CopyElementAt(new_backing_store_index));
    return;
  }

  switch (original.type()) {
    case FrameElement::MEMORY: {
      // Emit code to load the original element's data into a register.
      // Push that register as a FrameElement on top of the frame.
      Result fresh = cgen()->allocator()->Allocate();
      ASSERT(fresh.is_valid());
      FrameElement new_element =
          FrameElement::RegisterElement(fresh.reg(),
                                        FrameElement::NOT_SYNCED,
                                        original.type_info());
      Use(fresh.reg(), element_count());
      elements_.Add(new_element);
      __ movq(fresh.reg(), Operand(rbp, fp_relative(index)));
      break;
    }
    case FrameElement::REGISTER:
      Use(original.reg(), element_count());
      // Fall through.
    case FrameElement::CONSTANT:
    case FrameElement::COPY:
      original.clear_sync();
      elements_.Add(original);
      break;
    case FrameElement::INVALID:
      UNREACHABLE();
      break;
  }
}


void VirtualFrame::StoreToFrameSlotAt(int index) {
  // Store the value on top of the frame to the virtual frame slot at
  // a given index. The value on top of the frame is left in place.
  // This is a duplicating operation, so it can create copies.
  ASSERT(index >= 0);
  ASSERT(index < element_count());

  int top_index = element_count() - 1;
  FrameElement top = elements_[top_index];
  FrameElement original = elements_[index];
  if (top.is_copy() && top.index() == index) return;
  ASSERT(top.is_valid());

  InvalidateFrameSlotAt(index);

  // InvalidateFrameSlotAt can potentially change any frame element, due
  // to spilling registers to allocate temporaries in order to preserve
  // the copy-on-write semantics of aliased elements. Reload top from
  // the frame.
  top = elements_[top_index];

  if (top.is_copy()) {
    // There are two cases based on the relative positions of the
    // stored-to slot and the backing slot of the top element.
    int backing_index = top.index();
    ASSERT(backing_index != index);
    if (backing_index < index) {
      // 1. The top element is a copy of a slot below the stored-to
      // slot. The stored-to slot becomes an unsynced copy of that
      // same backing slot.
      elements_[index] = CopyElementAt(backing_index);
    } else {
      // 2. The top element is a copy of a slot above the stored-to
      // slot. The stored-to slot becomes the new (unsynced) backing
      // slot and both the top element and the element at the former
      // backing slot become copies of it. The sync state of the top
      // and former backing elements is preserved.
      FrameElement backing_element = elements_[backing_index];
      ASSERT(backing_element.is_memory() || backing_element.is_register());
      if (backing_element.is_memory()) {
        // Because sets of copies are canonicalized to be backed by
        // their lowest frame element, and because memory frame
        // elements are backed by the corresponding stack address, we
        // have to move the actual value down in the stack.
        //
        // TODO(209): consider allocating the stored-to slot to the
        // temp register. Alternatively, allow copies to appear in
        // any order in the frame and lazily move the value down to
        // the slot.
        __ movq(kScratchRegister, Operand(rbp, fp_relative(backing_index)));
        __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
      } else {
        set_register_location(backing_element.reg(), index);
        if (backing_element.is_synced()) {
          // If the element is a register, we will not actually move
          // anything on the stack but only update the virtual frame
          // element.
          backing_element.clear_sync();
        }
      }
      elements_[index] = backing_element;

      // The old backing element becomes a copy of the new backing
      // element.
      FrameElement new_element = CopyElementAt(index);
      elements_[backing_index] = new_element;
      if (backing_element.is_synced()) {
        elements_[backing_index].set_sync();
      }

      // All the copies of the old backing element (including the top
      // element) become copies of the new backing element.
      for (int i = backing_index + 1; i < element_count(); i++) {
        if (elements_[i].is_copy() && elements_[i].index() == backing_index) {
          elements_[i].set_index(index);
        }
      }
    }
    return;
  }

  // Move the top element to the stored-to slot and replace it (the
  // top element) with a copy.
  elements_[index] = top;
  if (top.is_memory()) {
    // TODO(209): consider allocating the stored-to slot to the temp
    // register. Alternatively, allow copies to appear in any order
    // in the frame and lazily move the value down to the slot.
    FrameElement new_top = CopyElementAt(index);
    new_top.set_sync();
    elements_[top_index] = new_top;

    // The sync state of the former top element is correct (synced).
    // Emit code to move the value down in the frame.
    __ movq(kScratchRegister, Operand(rsp, 0));
    __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
  } else if (top.is_register()) {
    set_register_location(top.reg(), index);
    // The stored-to slot has the (unsynced) register reference and
    // the top element becomes a copy. The sync state of the top is
    // preserved.
    FrameElement new_top = CopyElementAt(index);
    if (top.is_synced()) {
      new_top.set_sync();
      elements_[index].clear_sync();
    }
    elements_[top_index] = new_top;
  } else {
    // The stored-to slot holds the same value as the top but
    // unsynced. (We do not have copies of constants yet.)
    ASSERT(top.is_constant());
    elements_[index].clear_sync();
  }
}


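// Make this frame a valid merge target for any frame with the same
// layout: eliminate constants and copies by materializing them, and
// forget all type information, since merge points can be reached along
// multiple paths (e.g. loop back edges).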
void VirtualFrame::MakeMergable() {
  for (int i = 0; i < element_count(); i++) {
    FrameElement element = elements_[i];

    // In all cases we have to reset the number type information
    // to unknown for a mergable frame because of incoming back edges.
    if (element.is_constant() || element.is_copy()) {
      if (element.is_synced()) {
        // Just spill.
        elements_[i] = FrameElement::MemoryElement(TypeInfo::Unknown());
      } else {
        // Allocate to a register.
        FrameElement backing_element;  // Invalid if not a copy.
        if (element.is_copy()) {
          backing_element = elements_[element.index()];
        }
        Result fresh = cgen()->allocator()->Allocate();
        ASSERT(fresh.is_valid());  // A register was spilled if all were in use.
        elements_[i] =
            FrameElement::RegisterElement(fresh.reg(),
                                          FrameElement::NOT_SYNCED,
                                          TypeInfo::Unknown());
        Use(fresh.reg(), i);

        // Emit a move.
        if (element.is_constant()) {
          __ Move(fresh.reg(), element.handle());
        } else {
          ASSERT(element.is_copy());
          // Copies are only backed by register or memory locations.
          if (backing_element.is_register()) {
            // The backing store may have been spilled by allocating,
            // but that's OK. If it was, the value is right where we
            // want it.
            if (!fresh.reg().is(backing_element.reg())) {
              __ movq(fresh.reg(), backing_element.reg());
            }
          } else {
            ASSERT(backing_element.is_memory());
            __ movq(fresh.reg(), Operand(rbp, fp_relative(element.index())));
          }
        }
      }
      // No need to set the copied flag --- there are no copies.
    } else {
      // Clear the copy flag of non-constant, non-copy elements.
      // They cannot be copied because copies are not allowed.
      // The copy flag is not relied on before the end of this loop,
      // including when registers are spilled.
      elements_[i].clear_copied();
      elements_[i].set_type_info(TypeInfo::Unknown());
    }
  }
}


void VirtualFrame::MergeTo(VirtualFrame* expected) {
  Comment cmnt(masm(), "[ Merge frame");
  // We should always be merging the code generator's current frame to an
  // expected frame.
  ASSERT(cgen()->frame() == this);

  // Adjust the stack pointer upward (toward the top of the virtual
  // frame) if necessary.
  if (stack_pointer_ < expected->stack_pointer_) {
    int difference = expected->stack_pointer_ - stack_pointer_;
    stack_pointer_ = expected->stack_pointer_;
    __ subq(rsp, Immediate(difference * kPointerSize));
  }

  MergeMoveRegistersToMemory(expected);
  MergeMoveRegistersToRegisters(expected);
  MergeMoveMemoryToRegisters(expected);

  // Adjust the stack pointer downward if necessary.
  if (stack_pointer_ > expected->stack_pointer_) {
    int difference = stack_pointer_ - expected->stack_pointer_;
    stack_pointer_ = expected->stack_pointer_;
    __ addq(rsp, Immediate(difference * kPointerSize));
  }

  // At this point, the frames should be identical.
  ASSERT(Equals(expected));
}


void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
  ASSERT(stack_pointer_ >= expected->stack_pointer_);

  // Move registers, constants, and copies to memory. Perform moves
  // from the top downward in the frame in order to leave the backing
  // stores of copies in registers.
  for (int i = element_count() - 1; i >= 0; i--) {
    FrameElement target = expected->elements_[i];
    if (target.is_register()) continue;  // Handle registers later.
    if (target.is_memory()) {
      FrameElement source = elements_[i];
      switch (source.type()) {
        case FrameElement::INVALID:
          // Not a legal merge move.
          UNREACHABLE();
          break;

        case FrameElement::MEMORY:
          // Already in place.
          break;

        case FrameElement::REGISTER:
          Unuse(source.reg());
          if (!source.is_synced()) {
            __ movq(Operand(rbp, fp_relative(i)), source.reg());
          }
          break;

        case FrameElement::CONSTANT:
          if (!source.is_synced()) {
            __ Move(Operand(rbp, fp_relative(i)), source.handle());
          }
          break;

        case FrameElement::COPY:
          if (!source.is_synced()) {
            int backing_index = source.index();
            FrameElement backing_element = elements_[backing_index];
            if (backing_element.is_memory()) {
              __ movq(kScratchRegister,
                      Operand(rbp, fp_relative(backing_index)));
              __ movq(Operand(rbp, fp_relative(i)), kScratchRegister);
            } else {
              ASSERT(backing_element.is_register());
              __ movq(Operand(rbp, fp_relative(i)), backing_element.reg());
            }
          }
          break;
      }
    }
    elements_[i] = target;
  }
}


void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) {
  // We have already done X-to-memory moves.
  ASSERT(stack_pointer_ >= expected->stack_pointer_);

  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    // Move the right value into register i if it is currently in a register.
    int index = expected->register_location(i);
    int use_index = register_location(i);
    // Skip if register i is unused in the target or else if source is
    // not a register (this is not a register-to-register move).
    if (index == kIllegalIndex || !elements_[index].is_register()) continue;

    Register target = RegisterAllocator::ToRegister(i);
    Register source = elements_[index].reg();
    if (index != use_index) {
      if (use_index == kIllegalIndex) {  // Target is currently unused.
        // Copy contents of source from source to target.
        // Set frame element register to target.
        Use(target, index);
        Unuse(source);
        __ movq(target, source);
      } else {
        // Exchange contents of registers source and target.
        // Nothing except the register backing use_index has changed.
        elements_[use_index].set_reg(source);
        set_register_location(target, index);
        set_register_location(source, use_index);
        __ xchg(source, target);
      }
    }

    if (!elements_[index].is_synced() &&
        expected->elements_[index].is_synced()) {
      __ movq(Operand(rbp, fp_relative(index)), target);
    }
    elements_[index] = expected->elements_[index];
  }
}


void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) {
  // Move memory, constants, and copies to registers. This is the
  // final step and since it is not done from the bottom up, but in
  // register code order, we have special code to ensure that the backing
  // elements of copies are in their correct locations when we
  // encounter the copies.
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int index = expected->register_location(i);
    if (index != kIllegalIndex) {
      FrameElement source = elements_[index];
      FrameElement target = expected->elements_[index];
      Register target_reg = RegisterAllocator::ToRegister(i);
      ASSERT(target.reg().is(target_reg));
      switch (source.type()) {
        case FrameElement::INVALID:  // Fall through.
          UNREACHABLE();
          break;
        case FrameElement::REGISTER:
          ASSERT(source.Equals(target));
          // Go to next iteration. Skips Use(target_reg) and syncing
          // below. It is safe to skip syncing because a target
          // register frame element would only be synced if all source
          // elements were.
          continue;
          break;
        case FrameElement::MEMORY:
          ASSERT(index <= stack_pointer_);
          __ movq(target_reg, Operand(rbp, fp_relative(index)));
          break;

        case FrameElement::CONSTANT:
          __ Move(target_reg, source.handle());
          break;

        case FrameElement::COPY: {
          int backing_index = source.index();
          FrameElement backing = elements_[backing_index];
          ASSERT(backing.is_memory() || backing.is_register());
          if (backing.is_memory()) {
            ASSERT(backing_index <= stack_pointer_);
            // Optimization: if the backing store should also move to a
            // register, move it to its register first.
            if (expected->elements_[backing_index].is_register()) {
              FrameElement new_backing = expected->elements_[backing_index];
              Register new_backing_reg = new_backing.reg();
              ASSERT(!is_used(new_backing_reg));
              elements_[backing_index] = new_backing;
              Use(new_backing_reg, backing_index);
              __ movq(new_backing_reg,
                      Operand(rbp, fp_relative(backing_index)));
              __ movq(target_reg, new_backing_reg);
            } else {
              __ movq(target_reg, Operand(rbp, fp_relative(backing_index)));
            }
          } else {
            __ movq(target_reg, backing.reg());
          }
        }
      }
      // Ensure the proper sync state.
      if (target.is_synced() && !source.is_synced()) {
        __ movq(Operand(rbp, fp_relative(index)), target_reg);
      }
      Use(target_reg, index);
      elements_[index] = target;
    }
  }
}


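// Pop the top element of the frame into a Result. A real pop instruction
// is emitted only when the element actually lives on the hardware stack;
// register, constant, and copy elements are resolved without touching
// memory where possible.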
Result VirtualFrame::Pop() {
  FrameElement element = elements_.RemoveLast();
  int index = element_count();
  ASSERT(element.is_valid());

  // Get the number type information of the result.
  TypeInfo info;
  if (!element.is_copy()) {
    info = element.type_info();
  } else {
    info = elements_[element.index()].type_info();
  }

  bool pop_needed = (stack_pointer_ == index);
  if (pop_needed) {
    stack_pointer_--;
    if (element.is_memory()) {
      Result temp = cgen()->allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ pop(temp.reg());
      temp.set_type_info(info);
      return temp;
    }

    __ addq(rsp, Immediate(kPointerSize));
  }
  ASSERT(!element.is_memory());

  // The top element is a register, constant, or a copy. Unuse
  // registers and follow copies to their backing store.
  if (element.is_register()) {
    Unuse(element.reg());
  } else if (element.is_copy()) {
    ASSERT(element.index() < index);
    index = element.index();
    element = elements_[index];
  }
  ASSERT(!element.is_copy());

  // The element is memory, a register, or a constant.
  if (element.is_memory()) {
    // Memory elements can only be the backing store of a copy.
    // Allocate the original to a register.
    ASSERT(index <= stack_pointer_);
    Result temp = cgen()->allocator()->Allocate();
    ASSERT(temp.is_valid());
    Use(temp.reg(), index);
    FrameElement new_element =
        FrameElement::RegisterElement(temp.reg(),
                                      FrameElement::SYNCED,
                                      element.type_info());
    // Preserve the copy flag on the element.
    if (element.is_copied()) new_element.set_copied();
    elements_[index] = new_element;
    __ movq(temp.reg(), Operand(rbp, fp_relative(index)));
    return Result(temp.reg(), info);
  } else if (element.is_register()) {
    return Result(element.reg(), info);
  } else {
    ASSERT(element.is_constant());
    return Result(element.handle());
  }
}


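// Call a code stub without touching the frame; callers must have
// prepared the frame beforehand. The stub's result is expected in rax,
// which is allocated to the returned Result.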
Result VirtualFrame::RawCallStub(CodeStub* stub) {
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallStub(stub);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


Result VirtualFrame::CallStub(CodeStub* stub, Result* arg) {
  PrepareForCall(0, 0);
  arg->ToRegister(rax);
  arg->Unuse();
  return RawCallStub(stub);
}


Result VirtualFrame::CallStub(CodeStub* stub, Result* arg0, Result* arg1) {
  PrepareForCall(0, 0);

  if (arg0->is_register() && arg0->reg().is(rax)) {
    if (arg1->is_register() && arg1->reg().is(rdx)) {
      // Wrong registers.
      __ xchg(rax, rdx);
    } else {
      // Register rdx is free for arg0, which frees rax for arg1.
      arg0->ToRegister(rdx);
      arg1->ToRegister(rax);
    }
  } else {
    // Register rax is free for arg1, which guarantees rdx is free for
    // arg0.
    arg1->ToRegister(rax);
    arg0->ToRegister(rdx);
  }

  arg0->Unuse();
  arg1->Unuse();
  return RawCallStub(stub);
}


Result VirtualFrame::CallJSFunction(int arg_count) {
  Result function = Pop();

  // InvokeFunction requires function in rdi. Move it in there.
  function.ToRegister(rdi);
  function.Unuse();

  // +1 for receiver.
  PrepareForCall(arg_count + 1, arg_count + 1);
  ASSERT(cgen()->HasValidEntryRegisters());
  ParameterCount count(arg_count);
  __ InvokeFunction(rdi, count, CALL_FUNCTION);
  RestoreContextRegister();
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


void VirtualFrame::SyncElementBelowStackPointer(int index) {
  // Emit code to write elements below the stack pointer to their
  // (already allocated) stack address.
  ASSERT(index <= stack_pointer_);
  FrameElement element = elements_[index];
  ASSERT(!element.is_synced());
  switch (element.type()) {
    case FrameElement::INVALID:
      break;

    case FrameElement::MEMORY:
      // This function should not be called with synced elements.
      // (memory elements are always synced).
      UNREACHABLE();
      break;

    case FrameElement::REGISTER:
      __ movq(Operand(rbp, fp_relative(index)), element.reg());
      break;

    case FrameElement::CONSTANT:
      __ Move(Operand(rbp, fp_relative(index)), element.handle());
      break;

    case FrameElement::COPY: {
      int backing_index = element.index();
      FrameElement backing_element = elements_[backing_index];
      if (backing_element.is_memory()) {
        __ movq(kScratchRegister, Operand(rbp, fp_relative(backing_index)));
        __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
      } else {
        ASSERT(backing_element.is_register());
        __ movq(Operand(rbp, fp_relative(index)), backing_element.reg());
      }
      break;
    }
  }
  elements_[index].set_sync();
}


void VirtualFrame::SyncElementByPushing(int index) {
  // Sync an element of the frame that is just above the stack pointer
  // by pushing it.
  ASSERT(index == stack_pointer_ + 1);
  stack_pointer_++;
  FrameElement element = elements_[index];

  switch (element.type()) {
    case FrameElement::INVALID:
      __ Push(Smi::FromInt(0));
      break;

    case FrameElement::MEMORY:
      // No memory elements exist above the stack pointer.
      UNREACHABLE();
      break;

    case FrameElement::REGISTER:
      __ push(element.reg());
      break;

    case FrameElement::CONSTANT:
      __ Move(kScratchRegister, element.handle());
      __ push(kScratchRegister);
      break;

    case FrameElement::COPY: {
      int backing_index = element.index();
      FrameElement backing = elements_[backing_index];
      ASSERT(backing.is_memory() || backing.is_register());
      if (backing.is_memory()) {
        __ push(Operand(rbp, fp_relative(backing_index)));
      } else {
        __ push(backing.reg());
      }
      break;
    }
  }
  elements_[index].set_sync();
}


// Clear the dirty bits for the range of elements in
// [min(stack_pointer_ + 1, begin), end].
void VirtualFrame::SyncRange(int begin, int end) {
  ASSERT(begin >= 0);
  ASSERT(end < element_count());
  // Sync elements below the range if they have not been materialized
  // on the stack.
  int start = Min(begin, stack_pointer_ + 1);
  int end_or_stack_pointer = Min(stack_pointer_, end);
  // Emit normal push instructions for elements above the stack pointer
  // and use mov instructions if we are below the stack pointer.
  int i = start;

  while (i <= end_or_stack_pointer) {
    if (!elements_[i].is_synced()) SyncElementBelowStackPointer(i);
    i++;
  }
  while (i <= end) {
    SyncElementByPushing(i);
    i++;
  }
}


//------------------------------------------------------------------------------
// Virtual frame stub and IC calling functions.

Result VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(f, arg_count);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


Result VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(id, arg_count);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void VirtualFrame::DebugBreak() {
  PrepareForCall(0, 0);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ DebugBreak();
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
}
#endif


Result VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ InvokeBuiltin(id, flag);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


Result VirtualFrame::RawCallCodeObject(Handle<Code> code,
                                       RelocInfo::Mode rmode) {
  ASSERT(cgen()->HasValidEntryRegisters());
  __ Call(code, rmode);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


// This function assumes that the only results that could be in a_reg or b_reg
// are a and b. Other results can be live, but must not be in a_reg or b_reg.
void VirtualFrame::MoveResultsToRegisters(Result* a,
                                          Result* b,
                                          Register a_reg,
                                          Register b_reg) {
  ASSERT(!a_reg.is(b_reg));
  // Assert that cgen()->allocator()->count(a_reg) is accounted for by a and b.
  ASSERT(cgen()->allocator()->count(a_reg) <= 2);
  ASSERT(cgen()->allocator()->count(a_reg) != 2 || a->reg().is(a_reg));
  ASSERT(cgen()->allocator()->count(a_reg) != 2 || b->reg().is(a_reg));
  ASSERT(cgen()->allocator()->count(a_reg) != 1 ||
         (a->is_register() && a->reg().is(a_reg)) ||
         (b->is_register() && b->reg().is(a_reg)));
  // Assert that cgen()->allocator()->count(b_reg) is accounted for by a and b.
  ASSERT(cgen()->allocator()->count(b_reg) <= 2);
  ASSERT(cgen()->allocator()->count(b_reg) != 2 || a->reg().is(b_reg));
  ASSERT(cgen()->allocator()->count(b_reg) != 2 || b->reg().is(b_reg));
  ASSERT(cgen()->allocator()->count(b_reg) != 1 ||
         (a->is_register() && a->reg().is(b_reg)) ||
         (b->is_register() && b->reg().is(b_reg)));

  if (a->is_register() && a->reg().is(a_reg)) {
    b->ToRegister(b_reg);
  } else if (!cgen()->allocator()->is_used(a_reg)) {
    a->ToRegister(a_reg);
    b->ToRegister(b_reg);
  } else if (cgen()->allocator()->is_used(b_reg)) {
    // a must be in b_reg, b in a_reg.
    __ xchg(a_reg, b_reg);
    // Results a and b will be invalidated, so it is ok if they are switched.
  } else {
    b->ToRegister(b_reg);
    a->ToRegister(a_reg);
  }
  a->Unuse();
  b->Unuse();
}


Result VirtualFrame::CallLoadIC(RelocInfo::Mode mode) {
  // Name and receiver are on the top of the frame. Both are dropped.
  // The IC expects name in rcx and receiver in rax.
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  Result name = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);
  MoveResultsToRegisters(&name, &receiver, rcx, rax);

  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
  // Key and receiver are on top of the frame. Put them in rax and rdx.
  Result key = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);
  MoveResultsToRegisters(&key, &receiver, rax, rdx);

  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallStoreIC(Handle<String> name,
                                 bool is_contextual,
                                 StrictModeFlag strict_mode) {
  // Value and (if not contextual) receiver are on top of the frame.
  // The IC expects name in rcx, value in rax, and receiver in rdx.
  Handle<Code> ic(Builtins::builtin(strict_mode == kStrictMode
      ? Builtins::StoreIC_Initialize_Strict
      : Builtins::StoreIC_Initialize));
  Result value = Pop();
  RelocInfo::Mode mode;
  if (is_contextual) {
    PrepareForCall(0, 0);
    value.ToRegister(rax);
    __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
    value.Unuse();
    mode = RelocInfo::CODE_TARGET_CONTEXT;
  } else {
    Result receiver = Pop();
    PrepareForCall(0, 0);
    MoveResultsToRegisters(&value, &receiver, rax, rdx);
    mode = RelocInfo::CODE_TARGET;
  }
  __ Move(rcx, name);
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallKeyedStoreIC() {
  // Value, key, and receiver are on the top of the frame. The IC
  // expects value in rax, key in rcx, and receiver in rdx.
  Result value = Pop();
  Result key = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);
  if (!cgen()->allocator()->is_used(rax) ||
      (value.is_register() && value.reg().is(rax))) {
    if (!cgen()->allocator()->is_used(rax)) {
      value.ToRegister(rax);
    }
    MoveResultsToRegisters(&key, &receiver, rcx, rdx);
    value.Unuse();
  } else if (!cgen()->allocator()->is_used(rcx) ||
             (key.is_register() && key.reg().is(rcx))) {
    if (!cgen()->allocator()->is_used(rcx)) {
      key.ToRegister(rcx);
    }
    MoveResultsToRegisters(&value, &receiver, rax, rdx);
    key.Unuse();
  } else if (!cgen()->allocator()->is_used(rdx) ||
             (receiver.is_register() && receiver.reg().is(rdx))) {
    if (!cgen()->allocator()->is_used(rdx)) {
      receiver.ToRegister(rdx);
    }
    MoveResultsToRegisters(&key, &value, rcx, rax);
    receiver.Unuse();
  } else {
    // All three registers are used, and no value is in the correct place.
    // We have one of the two circular permutations of rax, rcx, rdx.
    ASSERT(value.is_register());
    if (value.reg().is(rcx)) {
      __ xchg(rax, rdx);
      __ xchg(rax, rcx);
    } else {
      __ xchg(rax, rcx);
      __ xchg(rax, rdx);
    }
    value.Unuse();
    key.Unuse();
    receiver.Unuse();
  }

  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
}


Result VirtualFrame::CallCallIC(RelocInfo::Mode mode,
                                int arg_count,
                                int loop_nesting) {
  // Function name, arguments, and receiver are found on top of the frame
  // and dropped by the call. The IC expects the name in rcx and the rest
  // on the stack, and drops them all.
  InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
  Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
  Result name = Pop();
  // Spill args, receiver, and function. The call will drop args and
  // receiver.
  PrepareForCall(arg_count + 1, arg_count + 1);
  name.ToRegister(rcx);
  name.Unuse();
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallKeyedCallIC(RelocInfo::Mode mode,
                                     int arg_count,
                                     int loop_nesting) {
  // Function name, arguments, and receiver are found on top of the frame
  // and dropped by the call. The IC expects the name in rcx and the rest
  // on the stack, and drops them all.
  InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
  Handle<Code> ic =
      StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
  Result name = Pop();
  // Spill args, receiver, and function. The call will drop args and
  // receiver.
  PrepareForCall(arg_count + 1, arg_count + 1);
  name.ToRegister(rcx);
  name.Unuse();
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallConstructor(int arg_count) {
  // Arguments, receiver, and function are on top of the frame. The
  // IC expects arg count in rax, function in rdi, and the arguments
  // and receiver on the stack.
  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
  // Duplicate the function before preparing the frame.
  PushElementAt(arg_count);
  Result function = Pop();
  PrepareForCall(arg_count + 1, arg_count + 1);  // Spill function and args.
  function.ToRegister(rdi);

  // Constructors are called with the number of arguments in register
  // rax for now. Another option would be to have separate construct
  // call trampolines per different argument counts encountered.
  Result num_args = cgen()->allocator()->Allocate(rax);
  ASSERT(num_args.is_valid());
  __ Set(num_args.reg(), arg_count);

  function.Unuse();
  num_args.Unuse();
  return RawCallCodeObject(ic, RelocInfo::CONSTRUCT_CALL);
}


void VirtualFrame::PushTryHandler(HandlerType type) {
  ASSERT(cgen()->HasValidEntryRegisters());
  // Grow the expression stack by handler size less one (the return
  // address is already pushed by a call instruction).
  Adjust(kHandlerSize - 1);
  __ PushTryHandler(IN_JAVASCRIPT, type);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64