blob: ff9132cf71422f175713b38a9784ab99dab5c1da [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "codegen-inl.h"
33#include "register-allocator-inl.h"
34#include "scopes.h"
Steve Block6ded16b2010-05-10 14:33:55 +010035#include "virtual-frame-inl.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000036
37namespace v8 {
38namespace internal {
39
40#define __ ACCESS_MASM(masm())
41
// Sync the frame element at 'index', which lies at or below the stack
// pointer, by writing its value with a mov to its already-allocated
// ebp-relative stack slot.  Marks the element as synced on exit.
void VirtualFrame::SyncElementBelowStackPointer(int index) {
  // Emit code to write elements below the stack pointer to their
  // (already allocated) stack address.
  ASSERT(index <= stack_pointer_);
  FrameElement element = elements_[index];
  ASSERT(!element.is_synced());
  switch (element.type()) {
    case FrameElement::INVALID:
      // Nothing to write for an invalid element.
      break;

    case FrameElement::MEMORY:
      // This function should not be called with synced elements.
      // (memory elements are always synced).
      UNREACHABLE();
      break;

    case FrameElement::REGISTER:
      __ mov(Operand(ebp, fp_relative(index)), element.reg());
      break;

    case FrameElement::CONSTANT:
      if (cgen()->IsUnsafeSmi(element.handle())) {
        // Unsafe smi constants are handled by the code generator
        // instead of being embedded directly as immediates.
        cgen()->StoreUnsafeSmiToLocal(fp_relative(index), element.handle());
      } else {
        __ Set(Operand(ebp, fp_relative(index)),
               Immediate(element.handle()));
      }
      break;

    case FrameElement::COPY: {
      int backing_index = element.index();
      FrameElement backing_element = elements_[backing_index];
      if (backing_element.is_memory()) {
        // A memory-to-memory move requires a scratch register.
        Result temp = cgen()->allocator()->Allocate();
        ASSERT(temp.is_valid());
        __ mov(temp.reg(), Operand(ebp, fp_relative(backing_index)));
        __ mov(Operand(ebp, fp_relative(index)), temp.reg());
      } else {
        ASSERT(backing_element.is_register());
        __ mov(Operand(ebp, fp_relative(index)), backing_element.reg());
      }
      break;
    }
  }
  elements_[index].set_sync();
}
88
89
// Sync the frame element at 'index', which must be exactly one slot
// above the stack pointer, by pushing its value.  This grows the
// actual stack to cover the element and marks it as synced.
void VirtualFrame::SyncElementByPushing(int index) {
  // Sync an element of the frame that is just above the stack pointer
  // by pushing it.
  ASSERT(index == stack_pointer_ + 1);
  stack_pointer_++;
  FrameElement element = elements_[index];

  switch (element.type()) {
    case FrameElement::INVALID:
      // Push an arbitrary placeholder (smi zero) to reserve the slot
      // for an invalid element.
      __ push(Immediate(Smi::FromInt(0)));
      break;

    case FrameElement::MEMORY:
      // No memory elements exist above the stack pointer.
      UNREACHABLE();
      break;

    case FrameElement::REGISTER:
      __ push(element.reg());
      break;

    case FrameElement::CONSTANT:
      if (cgen()->IsUnsafeSmi(element.handle())) {
        // Unsafe smi constants are pushed via the code generator
        // instead of as an immediate.
        cgen()->PushUnsafeSmi(element.handle());
      } else {
        __ push(Immediate(element.handle()));
      }
      break;

    case FrameElement::COPY: {
      // A copy is pushed from its backing store's location.
      int backing_index = element.index();
      FrameElement backing = elements_[backing_index];
      ASSERT(backing.is_memory() || backing.is_register());
      if (backing.is_memory()) {
        __ push(Operand(ebp, fp_relative(backing_index)));
      } else {
        __ push(backing.reg());
      }
      break;
    }
  }
  elements_[index].set_sync();
}
133
134
135// Clear the dirty bits for the range of elements in
136// [min(stack_pointer_ + 1,begin), end].
137void VirtualFrame::SyncRange(int begin, int end) {
138 ASSERT(begin >= 0);
139 ASSERT(end < element_count());
140 // Sync elements below the range if they have not been materialized
141 // on the stack.
142 int start = Min(begin, stack_pointer_ + 1);
143
Steve Blockd0582a62009-12-15 09:54:21 +0000144 // Emit normal push instructions for elements above stack pointer
Steve Block3ce2e202009-11-05 08:53:23 +0000145 // and use mov instructions if we are below stack pointer.
Steve Blocka7e24c12009-10-30 11:49:00 +0000146 for (int i = start; i <= end; i++) {
Steve Block3ce2e202009-11-05 08:53:23 +0000147 if (!elements_[i].is_synced()) {
148 if (i <= stack_pointer_) {
149 SyncElementBelowStackPointer(i);
150 } else {
151 SyncElementByPushing(i);
152 }
153 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000154 }
155}
156
157
// Make this frame mergable: eliminate all constant and copy elements
// (spilling synced ones to memory, loading unsynced ones into fresh
// registers), clear the copied flag on the remaining elements, and
// reset all number type information to unknown.
void VirtualFrame::MakeMergable() {
  for (int i = 0; i < element_count(); i++) {
    FrameElement element = elements_[i];

    // All number type information is reset to unknown for a mergable frame
    // because of incoming back edges.
    if (element.is_constant() || element.is_copy()) {
      if (element.is_synced()) {
        // Just spill.
        elements_[i] = FrameElement::MemoryElement(TypeInfo::Unknown());
      } else {
        // Allocate to a register.
        FrameElement backing_element;  // Invalid if not a copy.
        if (element.is_copy()) {
          // Capture the backing element before allocation can disturb it.
          backing_element = elements_[element.index()];
        }
        Result fresh = cgen()->allocator()->Allocate();
        ASSERT(fresh.is_valid());  // A register was spilled if all were in use.
        elements_[i] =
            FrameElement::RegisterElement(fresh.reg(),
                                          FrameElement::NOT_SYNCED,
                                          TypeInfo::Unknown());
        Use(fresh.reg(), i);

        // Emit a move.
        if (element.is_constant()) {
          if (cgen()->IsUnsafeSmi(element.handle())) {
            cgen()->MoveUnsafeSmi(fresh.reg(), element.handle());
          } else {
            __ Set(fresh.reg(), Immediate(element.handle()));
          }
        } else {
          ASSERT(element.is_copy());
          // Copies are only backed by register or memory locations.
          if (backing_element.is_register()) {
            // The backing store may have been spilled by allocating,
            // but that's OK.  If it was, the value is right where we
            // want it.
            if (!fresh.reg().is(backing_element.reg())) {
              __ mov(fresh.reg(), backing_element.reg());
            }
          } else {
            ASSERT(backing_element.is_memory());
            __ mov(fresh.reg(), Operand(ebp, fp_relative(element.index())));
          }
        }
      }
      // No need to set the copied flag --- there are no copies.
    } else {
      // Clear the copy flag of non-constant, non-copy elements.
      // They cannot be copied because copies are not allowed.
      // The copy flag is not relied on before the end of this loop,
      // including when registers are spilled.
      elements_[i].clear_copied();
      elements_[i].set_type_info(TypeInfo::Unknown());
    }
  }
}
216
217
// Emit code that transforms this frame (the code generator's current
// frame) into the expected frame: adjust esp, then run the three
// merge-move passes (to memory, register-to-register, to registers).
// On exit the two frames are element-for-element identical.
void VirtualFrame::MergeTo(VirtualFrame* expected) {
  Comment cmnt(masm(), "[ Merge frame");
  // We should always be merging the code generator's current frame to an
  // expected frame.
  ASSERT(cgen()->frame() == this);

  // Adjust the stack pointer upward (toward the top of the virtual
  // frame) if necessary.
  if (stack_pointer_ < expected->stack_pointer_) {
    int difference = expected->stack_pointer_ - stack_pointer_;
    stack_pointer_ = expected->stack_pointer_;
    __ sub(Operand(esp), Immediate(difference * kPointerSize));
  }

  // The moves to memory come first; the register-targeting passes
  // run after registers have been freed and memory slots filled.
  MergeMoveRegistersToMemory(expected);
  MergeMoveRegistersToRegisters(expected);
  MergeMoveMemoryToRegisters(expected);

  // Adjust the stack pointer downward if necessary.
  if (stack_pointer_ > expected->stack_pointer_) {
    int difference = stack_pointer_ - expected->stack_pointer_;
    stack_pointer_ = expected->stack_pointer_;
    __ add(Operand(esp), Immediate(difference * kPointerSize));
  }

  // At this point, the frames should be identical.
  ASSERT(Equals(expected));
}
246
247
// First merge pass: move every element whose target in the expected
// frame is a memory slot into memory.  esi serves as the scratch
// register for memory-to-memory moves and is restored from the
// frame's context slot at the end if it was disturbed.
void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
  ASSERT(stack_pointer_ >= expected->stack_pointer_);

  // Move registers, constants, and copies to memory.  Perform moves
  // from the top downward in the frame in order to leave the backing
  // stores of copies in registers.
  //
  // Moving memory-backed copies to memory requires a spare register
  // for the memory-to-memory moves.  Since we are performing a merge,
  // we use esi (which is already saved in the frame).  We keep track
  // of the index of the frame element esi is caching or kIllegalIndex
  // if esi has not been disturbed.
  int esi_caches = kIllegalIndex;
  for (int i = element_count() - 1; i >= 0; i--) {
    FrameElement target = expected->elements_[i];
    if (target.is_register()) continue;  // Handle registers later.
    if (target.is_memory()) {
      FrameElement source = elements_[i];
      switch (source.type()) {
        case FrameElement::INVALID:
          // Not a legal merge move.
          UNREACHABLE();
          break;

        case FrameElement::MEMORY:
          // Already in place.
          break;

        case FrameElement::REGISTER:
          Unuse(source.reg());
          if (!source.is_synced()) {
            __ mov(Operand(ebp, fp_relative(i)), source.reg());
          }
          break;

        case FrameElement::CONSTANT:
          if (!source.is_synced()) {
            if (cgen()->IsUnsafeSmi(source.handle())) {
              // Build the unsafe smi in esi, then store it.
              esi_caches = i;
              cgen()->MoveUnsafeSmi(esi, source.handle());
              __ mov(Operand(ebp, fp_relative(i)), esi);
            } else {
              __ Set(Operand(ebp, fp_relative(i)), Immediate(source.handle()));
            }
          }
          break;

        case FrameElement::COPY:
          if (!source.is_synced()) {
            int backing_index = source.index();
            FrameElement backing_element = elements_[backing_index];
            if (backing_element.is_memory()) {
              // If we have to spill a register, we spill esi.
              if (esi_caches != backing_index) {
                esi_caches = backing_index;
                __ mov(esi, Operand(ebp, fp_relative(backing_index)));
              }
              __ mov(Operand(ebp, fp_relative(i)), esi);
            } else {
              ASSERT(backing_element.is_register());
              __ mov(Operand(ebp, fp_relative(i)), backing_element.reg());
            }
          }
          break;
      }
    }
    elements_[i] = target;
  }

  // Restore esi from the frame's context slot if it was used as a
  // scratch register above.
  if (esi_caches != kIllegalIndex) {
    __ mov(esi, Operand(ebp, fp_relative(context_index())));
  }
}
321
322
// Second merge pass: for each machine register code i, if the value
// that the expected frame wants in that register currently lives in
// some register, move (target unused) or exchange (target occupied)
// it into place, then sync it if the expected frame requires it.
void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) {
  // We have already done X-to-memory moves.
  ASSERT(stack_pointer_ >= expected->stack_pointer_);

  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    // Move the right value into register i if it is currently in a register.
    int index = expected->register_location(i);
    int use_index = register_location(i);
    // Skip if register i is unused in the target or else if source is
    // not a register (this is not a register-to-register move).
    if (index == kIllegalIndex || !elements_[index].is_register()) continue;

    Register target = RegisterAllocator::ToRegister(i);
    Register source = elements_[index].reg();
    if (index != use_index) {
      if (use_index == kIllegalIndex) {  // Target is currently unused.
        // Copy contents of source from source to target.
        // Set frame element register to target.
        Use(target, index);
        Unuse(source);
        __ mov(target, source);
      } else {
        // Exchange contents of registers source and target.
        // Nothing except the register backing use_index has changed.
        elements_[use_index].set_reg(source);
        set_register_location(target, index);
        set_register_location(source, use_index);
        __ xchg(source, target);
      }
    }

    // Write the value to its stack slot if the expected frame has the
    // element synced but the current frame does not.
    if (!elements_[index].is_synced() &&
        expected->elements_[index].is_synced()) {
      __ mov(Operand(ebp, fp_relative(index)), target);
    }
    elements_[index] = expected->elements_[index];
  }
}
361
362
// Third and final merge pass: load memory, constant, and copy
// elements into their expected target registers.  Processing happens
// in register code order rather than bottom-up, so the copy case
// contains special handling to get the backing element of a copy into
// its correct location before (or while) the copy is materialized.
void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) {
  // Move memory, constants, and copies to registers.  This is the
  // final step and since it is not done from the bottom up, but in
  // register code order, we have special code to ensure that the backing
  // elements of copies are in their correct locations when we
  // encounter the copies.
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int index = expected->register_location(i);
    if (index != kIllegalIndex) {
      FrameElement source = elements_[index];
      FrameElement target = expected->elements_[index];
      Register target_reg = RegisterAllocator::ToRegister(i);
      ASSERT(target.reg().is(target_reg));
      switch (source.type()) {
        case FrameElement::INVALID:  // Fall through.
          UNREACHABLE();
          break;
        case FrameElement::REGISTER:
          ASSERT(source.Equals(target));
          // Go to next iteration.  Skips Use(target_reg) and syncing
          // below.  It is safe to skip syncing because a target
          // register frame element would only be synced if all source
          // elements were.
          continue;
          break;
        case FrameElement::MEMORY:
          ASSERT(index <= stack_pointer_);
          __ mov(target_reg, Operand(ebp, fp_relative(index)));
          break;

        case FrameElement::CONSTANT:
          if (cgen()->IsUnsafeSmi(source.handle())) {
            cgen()->MoveUnsafeSmi(target_reg, source.handle());
          } else {
            __ Set(target_reg, Immediate(source.handle()));
          }
          break;

        case FrameElement::COPY: {
          int backing_index = source.index();
          FrameElement backing = elements_[backing_index];
          ASSERT(backing.is_memory() || backing.is_register());
          if (backing.is_memory()) {
            ASSERT(backing_index <= stack_pointer_);
            // Code optimization if backing store should also move
            // to a register: move backing store to its register first.
            if (expected->elements_[backing_index].is_register()) {
              FrameElement new_backing = expected->elements_[backing_index];
              Register new_backing_reg = new_backing.reg();
              ASSERT(!is_used(new_backing_reg));
              elements_[backing_index] = new_backing;
              Use(new_backing_reg, backing_index);
              __ mov(new_backing_reg,
                     Operand(ebp, fp_relative(backing_index)));
              __ mov(target_reg, new_backing_reg);
            } else {
              __ mov(target_reg, Operand(ebp, fp_relative(backing_index)));
            }
          } else {
            __ mov(target_reg, backing.reg());
          }
        }
      }
      // Ensure the proper sync state.
      if (target.is_synced() && !source.is_synced()) {
        __ mov(Operand(ebp, fp_relative(index)), target_reg);
      }
      Use(target_reg, index);
      elements_[index] = target;
    }
  }
}
435
436
// Emit the JS function prologue: push ebp and establish it as the
// frame pointer, then store the context (esi) and the function (edi)
// into the frame.  Registers live on entry: esp, ebp, esi, edi.
void VirtualFrame::Enter() {
  // Registers live on entry: esp, ebp, esi, edi.
  Comment cmnt(masm(), "[ Enter JS frame");

#ifdef DEBUG
  if (FLAG_debug_code) {
    // Verify that edi contains a JS function.  The following code
    // relies on eax being available for use.
    __ test(edi, Immediate(kSmiTagMask));
    __ Check(not_zero,
             "VirtualFrame::Enter - edi is not a function (smi check).");
    __ CmpObjectType(edi, JS_FUNCTION_TYPE, eax);
    __ Check(equal,
             "VirtualFrame::Enter - edi is not a function (map check).");
  }
#endif

  EmitPush(ebp);

  __ mov(ebp, Operand(esp));

  // Store the context in the frame.  The context is kept in esi and a
  // copy is stored in the frame.  The external reference to esi
  // remains.
  EmitPush(esi);

  // Store the function in the frame.  The frame owns the register
  // reference now (ie, it can keep it in edi or spill it later).
  Push(edi);
  SyncElementAt(element_count() - 1);
  cgen()->allocator()->Unuse(edi);
}
469
470
// Emit the JS function exit sequence up to the return: record the JS
// return site for debugger patching, reset esp to ebp, drop all
// virtual frame elements above the frame pointer (releasing their
// registers), and pop the caller's ebp.
void VirtualFrame::Exit() {
  Comment cmnt(masm(), "[ Exit JS frame");
  // Record the location of the JS exit code for patching when setting
  // break point.
  __ RecordJSReturn();

  // Avoid using the leave instruction here, because it is too
  // short.  We need the return sequence to be a least the size of a
  // call instruction to support patching the exit code in the
  // debugger.  See VisitReturnStatement for the full return sequence.
  __ mov(esp, Operand(ebp));
  stack_pointer_ = frame_pointer();
  // Discard frame elements above the frame pointer, freeing any
  // registers they reference.
  for (int i = element_count() - 1; i > stack_pointer_; i--) {
    FrameElement last = elements_.RemoveLast();
    if (last.is_register()) {
      Unuse(last.reg());
    }
  }

  EmitPop(ebp);
}
492
493
// Allocate and initialize stack space for the function's locals.
// Every local is initialized to the undefined value and recorded in
// the virtual frame as a synced constant element.  Three code shapes
// are emitted depending on the count: a single push, an unrolled run
// of pushes (count < kLocalVarBound), or a generated loop.
void VirtualFrame::AllocateStackSlots() {
  int count = local_count();
  if (count > 0) {
    Comment cmnt(masm(), "[ Allocate space for locals");
    // The locals are initialized to a constant (the undefined value), but
    // we sync them with the actual frame to allocate space for spilling
    // them later.  First sync everything above the stack pointer so we can
    // use pushes to allocate and initialize the locals.
    SyncRange(stack_pointer_ + 1, element_count() - 1);
    Handle<Object> undefined = Factory::undefined_value();
    FrameElement initial_value =
        FrameElement::ConstantElement(undefined, FrameElement::SYNCED);
    if (count == 1) {
      __ push(Immediate(undefined));
    } else if (count < kLocalVarBound) {
      // For less locals the unrolled loop is more compact.
      Result temp = cgen()->allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ Set(temp.reg(), Immediate(undefined));
      for (int i = 0; i < count; i++) {
        __ push(temp.reg());
      }
    } else {
      // For more locals a loop in generated code is more compact.
      Label alloc_locals_loop;
      Result cnt = cgen()->allocator()->Allocate();
      Result tmp = cgen()->allocator()->Allocate();
      ASSERT(cnt.is_valid());
      ASSERT(tmp.is_valid());
      __ mov(cnt.reg(), Immediate(count));
      __ mov(tmp.reg(), Immediate(undefined));
      __ bind(&alloc_locals_loop);
      __ push(tmp.reg());
      __ dec(cnt.reg());
      __ j(not_zero, &alloc_locals_loop);
    }
    // Record the newly pushed locals in the virtual frame.
    for (int i = 0; i < count; i++) {
      elements_.Add(initial_value);
      stack_pointer_++;
    }
  }
}
536
537
// Write the context register (esi) into its memory slot in the frame.
void VirtualFrame::SaveContextRegister() {
  ASSERT(elements_[context_index()].is_memory());
  __ mov(Operand(ebp, fp_relative(context_index())), esi);
}
542
543
// Reload the context register (esi) from its memory slot in the frame.
void VirtualFrame::RestoreContextRegister() {
  ASSERT(elements_[context_index()].is_memory());
  __ mov(esi, Operand(ebp, fp_relative(context_index())));
}
548
549
550void VirtualFrame::PushReceiverSlotAddress() {
551 Result temp = cgen()->allocator()->Allocate();
552 ASSERT(temp.is_valid());
553 __ lea(temp.reg(), ParameterAt(-1));
554 Push(&temp);
555}
556
557
// Invalidate the frame element at 'index'.  If the element backs
// copies, the first copy found becomes the new backing store: the
// value is kept in a register (allocated if the original was in
// memory) and all other copies are redirected to it.  Returns the
// index of the new backing store, or kIllegalIndex if there were no
// copies.
int VirtualFrame::InvalidateFrameSlotAt(int index) {
  FrameElement original = elements_[index];

  // Is this element the backing store of any copies?
  int new_backing_index = kIllegalIndex;
  if (original.is_copied()) {
    // Verify it is copied, and find first copy.
    for (int i = index + 1; i < element_count(); i++) {
      if (elements_[i].is_copy() && elements_[i].index() == index) {
        new_backing_index = i;
        break;
      }
    }
  }

  if (new_backing_index == kIllegalIndex) {
    // No copies found, return kIllegalIndex.
    if (original.is_register()) {
      Unuse(original.reg());
    }
    elements_[index] = FrameElement::InvalidElement();
    return kIllegalIndex;
  }

  // This is the backing store of copies.
  Register backing_reg;
  if (original.is_memory()) {
    // Load the old backing value into a fresh register.
    Result fresh = cgen()->allocator()->Allocate();
    ASSERT(fresh.is_valid());
    Use(fresh.reg(), new_backing_index);
    backing_reg = fresh.reg();
    __ mov(backing_reg, Operand(ebp, fp_relative(index)));
  } else {
    // The original was in a register.
    backing_reg = original.reg();
    set_register_location(backing_reg, new_backing_index);
  }
  // Invalidate the element at index.
  elements_[index] = FrameElement::InvalidElement();
  // Set the new backing element, preserving the sync state of the slot
  // that becomes the backing store.
  if (elements_[new_backing_index].is_synced()) {
    elements_[new_backing_index] =
        FrameElement::RegisterElement(backing_reg,
                                      FrameElement::SYNCED,
                                      original.type_info());
  } else {
    elements_[new_backing_index] =
        FrameElement::RegisterElement(backing_reg,
                                      FrameElement::NOT_SYNCED,
                                      original.type_info());
  }
  // Update the other copies.
  for (int i = new_backing_index + 1; i < element_count(); i++) {
    if (elements_[i].is_copy() && elements_[i].index() == index) {
      elements_[i].set_index(new_backing_index);
      elements_[new_backing_index].set_copied();
    }
  }
  return new_backing_index;
}
618
619
// Take the value out of the frame slot at 'index': the slot is
// invalidated and its value is pushed on top of the frame.  If the
// slot backed copies, the top of the frame becomes a copy of the new
// backing store instead.
void VirtualFrame::TakeFrameSlotAt(int index) {
  ASSERT(index >= 0);
  // NOTE(review): elements_[index] is read below, so this bound looks
  // like it should be strict ('<') -- confirm no caller passes
  // element_count().
  ASSERT(index <= element_count());
  FrameElement original = elements_[index];
  int new_backing_store_index = InvalidateFrameSlotAt(index);
  if (new_backing_store_index != kIllegalIndex) {
    // The slot backed copies; push a copy of the new backing store.
    elements_.Add(CopyElementAt(new_backing_store_index));
    return;
  }

  switch (original.type()) {
    case FrameElement::MEMORY: {
      // Emit code to load the original element's data into a register.
      // Push that register as a FrameElement on top of the frame.
      Result fresh = cgen()->allocator()->Allocate();
      ASSERT(fresh.is_valid());
      FrameElement new_element =
          FrameElement::RegisterElement(fresh.reg(),
                                        FrameElement::NOT_SYNCED,
                                        original.type_info());
      Use(fresh.reg(), element_count());
      elements_.Add(new_element);
      __ mov(fresh.reg(), Operand(ebp, fp_relative(index)));
      break;
    }
    case FrameElement::REGISTER:
      // The frame's reference to the register moves to the new top slot.
      Use(original.reg(), element_count());
      // Fall through.
    case FrameElement::CONSTANT:
    case FrameElement::COPY:
      original.clear_sync();
      elements_.Add(original);
      break;
    case FrameElement::INVALID:
      UNREACHABLE();
      break;
  }
}
658
659
// Store the value on top of the frame into the frame slot at 'index',
// leaving the top element in place.  This is a duplicating operation
// and may therefore create copies; the copy canonicalization rule
// (sets of copies are backed by their lowest frame element) is
// maintained throughout.
void VirtualFrame::StoreToFrameSlotAt(int index) {
  // Store the value on top of the frame to the virtual frame slot at
  // a given index.  The value on top of the frame is left in place.
  // This is a duplicating operation, so it can create copies.
  ASSERT(index >= 0);
  ASSERT(index < element_count());

  int top_index = element_count() - 1;
  FrameElement top = elements_[top_index];
  FrameElement original = elements_[index];
  // Storing a copy over its own backing store is a no-op.
  if (top.is_copy() && top.index() == index) return;
  ASSERT(top.is_valid());

  InvalidateFrameSlotAt(index);

  // InvalidateFrameSlotAt can potentially change any frame element, due
  // to spilling registers to allocate temporaries in order to preserve
  // the copy-on-write semantics of aliased elements.  Reload top from
  // the frame.
  top = elements_[top_index];

  if (top.is_copy()) {
    // There are two cases based on the relative positions of the
    // stored-to slot and the backing slot of the top element.
    int backing_index = top.index();
    ASSERT(backing_index != index);
    if (backing_index < index) {
      // 1. The top element is a copy of a slot below the stored-to
      // slot.  The stored-to slot becomes an unsynced copy of that
      // same backing slot.
      elements_[index] = CopyElementAt(backing_index);
    } else {
      // 2. The top element is a copy of a slot above the stored-to
      // slot.  The stored-to slot becomes the new (unsynced) backing
      // slot and both the top element and the element at the former
      // backing slot become copies of it.  The sync state of the top
      // and former backing elements is preserved.
      FrameElement backing_element = elements_[backing_index];
      ASSERT(backing_element.is_memory() || backing_element.is_register());
      if (backing_element.is_memory()) {
        // Because sets of copies are canonicalized to be backed by
        // their lowest frame element, and because memory frame
        // elements are backed by the corresponding stack address, we
        // have to move the actual value down in the stack.
        //
        // TODO(209): considering allocating the stored-to slot to the
        // temp register.  Alternatively, allow copies to appear in
        // any order in the frame and lazily move the value down to
        // the slot.
        Result temp = cgen()->allocator()->Allocate();
        ASSERT(temp.is_valid());
        __ mov(temp.reg(), Operand(ebp, fp_relative(backing_index)));
        __ mov(Operand(ebp, fp_relative(index)), temp.reg());
      } else {
        set_register_location(backing_element.reg(), index);
        if (backing_element.is_synced()) {
          // If the element is a register, we will not actually move
          // anything on the stack but only update the virtual frame
          // element.
          backing_element.clear_sync();
        }
      }
      elements_[index] = backing_element;

      // The old backing element becomes a copy of the new backing
      // element.
      FrameElement new_element = CopyElementAt(index);
      elements_[backing_index] = new_element;
      if (backing_element.is_synced()) {
        elements_[backing_index].set_sync();
      }

      // All the copies of the old backing element (including the top
      // element) become copies of the new backing element.
      for (int i = backing_index + 1; i < element_count(); i++) {
        if (elements_[i].is_copy() && elements_[i].index() == backing_index) {
          elements_[i].set_index(index);
        }
      }
    }
    return;
  }

  // Move the top element to the stored-to slot and replace it (the
  // top element) with a copy.
  elements_[index] = top;
  if (top.is_memory()) {
    // TODO(209): consider allocating the stored-to slot to the temp
    // register.  Alternatively, allow copies to appear in any order
    // in the frame and lazily move the value down to the slot.
    FrameElement new_top = CopyElementAt(index);
    new_top.set_sync();
    elements_[top_index] = new_top;

    // The sync state of the former top element is correct (synced).
    // Emit code to move the value down in the frame.
    Result temp = cgen()->allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), Operand(esp, 0));
    __ mov(Operand(ebp, fp_relative(index)), temp.reg());
  } else if (top.is_register()) {
    set_register_location(top.reg(), index);
    // The stored-to slot has the (unsynced) register reference and
    // the top element becomes a copy.  The sync state of the top is
    // preserved.
    FrameElement new_top = CopyElementAt(index);
    if (top.is_synced()) {
      new_top.set_sync();
      elements_[index].clear_sync();
    }
    elements_[top_index] = new_top;
  } else {
    // The stored-to slot holds the same value as the top but
    // unsynced.  (We do not have copies of constants yet.)
    ASSERT(top.is_constant());
    elements_[index].clear_sync();
  }
}
778
779
// Push the value of the frame slot at 'index' on top of the frame as
// an untagged int32 register element.  Smis are untagged in place;
// heap numbers are converted with SSE2, branching to
// cgen()->unsafe_bailout_ if the value is not a number, does not
// round-trip through int32, is NaN, or is negative zero.
void VirtualFrame::UntaggedPushFrameSlotAt(int index) {
  ASSERT(index >= 0);
  ASSERT(index <= element_count());
  FrameElement original = elements_[index];
  // Resolve a copy to its backing element before loading.
  if (original.is_copy()) {
    original = elements_[original.index()];
    index = original.index();
  }

  switch (original.type()) {
    case FrameElement::MEMORY:
    case FrameElement::REGISTER: {
      Label done;
      // Emit code to load the original element's data into a register.
      // Push that register as a FrameElement on top of the frame.
      Result fresh = cgen()->allocator()->Allocate();
      ASSERT(fresh.is_valid());
      Register fresh_reg = fresh.reg();
      FrameElement new_element =
          FrameElement::RegisterElement(fresh_reg,
                                        FrameElement::NOT_SYNCED,
                                        original.type_info());
      new_element.set_untagged_int32(true);
      Use(fresh_reg, element_count());
      fresh.Unuse();  // BreakTarget does not handle a live Result well.
      elements_.Add(new_element);
      if (original.is_register()) {
        __ mov(fresh_reg, original.reg());
      } else {
        ASSERT(original.is_memory());
        __ mov(fresh_reg, Operand(ebp, fp_relative(index)));
      }
      // Now convert the value to int32, or bail out.
      if (original.type_info().IsSmi()) {
        __ SmiUntag(fresh_reg);
        // Pushing the element is completely done.
      } else {
        __ test(fresh_reg, Immediate(kSmiTagMask));
        Label not_smi;
        __ j(not_zero, &not_smi);
        __ SmiUntag(fresh_reg);
        __ jmp(&done);

        __ bind(&not_smi);
        // Unless the type info already guarantees a number, verify
        // the map is the heap number map.
        if (!original.type_info().IsNumber()) {
          __ cmp(FieldOperand(fresh_reg, HeapObject::kMapOffset),
                 Factory::heap_number_map());
          cgen()->unsafe_bailout_->Branch(not_equal);
        }

        if (!CpuFeatures::IsSupported(SSE2)) {
          UNREACHABLE();
        } else {
          CpuFeatures::Scope use_sse2(SSE2);
          __ movdbl(xmm0, FieldOperand(fresh_reg, HeapNumber::kValueOffset));
          // Truncate to int32 and convert back; inequality means the
          // double did not fit in an int32 (or is NaN).
          __ cvttsd2si(fresh_reg, Operand(xmm0));
          __ cvtsi2sd(xmm1, Operand(fresh_reg));
          __ ucomisd(xmm0, xmm1);
          cgen()->unsafe_bailout_->Branch(not_equal);
          cgen()->unsafe_bailout_->Branch(parity_even);  // NaN.
          // Test for negative zero.
          __ test(fresh_reg, Operand(fresh_reg));
          __ j(not_zero, &done);
          __ movmskpd(fresh_reg, xmm0);
          __ and_(fresh_reg, 0x1);
          cgen()->unsafe_bailout_->Branch(not_equal);
        }
        __ bind(&done);
      }
      break;
    }
    case FrameElement::CONSTANT:
      // A constant is duplicated as a copy marked untagged.
      elements_.Add(CopyElementAt(index));
      elements_[element_count() - 1].set_untagged_int32(true);
      break;
    case FrameElement::COPY:
    case FrameElement::INVALID:
      UNREACHABLE();
      break;
  }
}
861
862
// Push a try handler of the given type on the frame.  The frame grows
// by kHandlerSize - 1 elements because the handler's first word (the
// return address) is pushed by the call instruction itself.
void VirtualFrame::PushTryHandler(HandlerType type) {
  ASSERT(cgen()->HasValidEntryRegisters());
  // Grow the expression stack by handler size less one (the return
  // address is already pushed by a call instruction).
  Adjust(kHandlerSize - 1);
  __ PushTryHandler(IN_JAVASCRIPT, type);
}
870
871
872Result VirtualFrame::RawCallStub(CodeStub* stub) {
873 ASSERT(cgen()->HasValidEntryRegisters());
874 __ CallStub(stub);
875 Result result = cgen()->allocator()->Allocate(eax);
876 ASSERT(result.is_valid());
877 return result;
878}
879
880
// Call a code stub that takes a single argument in register eax.  The
// argument Result is moved into eax and invalidated before the call.
Result VirtualFrame::CallStub(CodeStub* stub, Result* arg) {
  PrepareForCall(0, 0);
  arg->ToRegister(eax);
  arg->Unuse();
  return RawCallStub(stub);
}
887
888
// Call a code stub taking two register arguments: arg0 in edx and
// arg1 in eax.  The Results are shuffled into place (with an xchg when
// they occupy each other's target registers) and invalidated.
// NOTE(review): this appears to assume arg0 and arg1 are the only
// results that can be live in eax or edx here -- confirm with callers.
Result VirtualFrame::CallStub(CodeStub* stub, Result* arg0, Result* arg1) {
  PrepareForCall(0, 0);

  if (arg0->is_register() && arg0->reg().is(eax)) {
    if (arg1->is_register() && arg1->reg().is(edx)) {
      // Wrong registers.
      __ xchg(eax, edx);
    } else {
      // Register edx is free for arg0, which frees eax for arg1.
      arg0->ToRegister(edx);
      arg1->ToRegister(eax);
    }
  } else {
    // Register eax is free for arg1, which guarantees edx is free for
    // arg0.
    arg1->ToRegister(eax);
    arg0->ToRegister(edx);
  }

  // Both results die here; the xchg case above relies on this (the
  // Results' recorded registers are stale but are never read again).
  arg0->Unuse();
  arg1->Unuse();
  return RawCallStub(stub);
}
912
913
Steve Block6ded16b2010-05-10 14:33:55 +0100914Result VirtualFrame::CallJSFunction(int arg_count) {
915 Result function = Pop();
916
917 // InvokeFunction requires function in edi. Move it in there.
918 function.ToRegister(edi);
919 function.Unuse();
920
921 // +1 for receiver.
922 PrepareForCall(arg_count + 1, arg_count + 1);
923 ASSERT(cgen()->HasValidEntryRegisters());
924 ParameterCount count(arg_count);
925 __ InvokeFunction(edi, count, CALL_FUNCTION);
926 RestoreContextRegister();
927 Result result = cgen()->allocator()->Allocate(eax);
928 ASSERT(result.is_valid());
929 return result;
930}
931
932
Steve Blocka7e24c12009-10-30 11:49:00 +0000933Result VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
934 PrepareForCall(arg_count, arg_count);
935 ASSERT(cgen()->HasValidEntryRegisters());
936 __ CallRuntime(f, arg_count);
937 Result result = cgen()->allocator()->Allocate(eax);
938 ASSERT(result.is_valid());
939 return result;
940}
941
942
943Result VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
944 PrepareForCall(arg_count, arg_count);
945 ASSERT(cgen()->HasValidEntryRegisters());
946 __ CallRuntime(id, arg_count);
947 Result result = cgen()->allocator()->Allocate(eax);
948 ASSERT(result.is_valid());
949 return result;
950}
951
952
#ifdef ENABLE_DEBUGGER_SUPPORT
// Emit a debug break with the frame prepared and no stack arguments.
void VirtualFrame::DebugBreak() {
  PrepareForCall(0, 0);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ DebugBreak();
  // NOTE(review): the eax Result is never returned and dies at the end
  // of the function; presumably it exists to assert that eax is
  // allocatable after the break -- confirm intent.
  Result result = cgen()->allocator()->Allocate(eax);
  ASSERT(result.is_valid());
}
#endif
962
963
Steve Blocka7e24c12009-10-30 11:49:00 +0000964Result VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
965 InvokeFlag flag,
966 int arg_count) {
967 PrepareForCall(arg_count, arg_count);
968 ASSERT(cgen()->HasValidEntryRegisters());
969 __ InvokeBuiltin(id, flag);
970 Result result = cgen()->allocator()->Allocate(eax);
971 ASSERT(result.is_valid());
972 return result;
973}
974
975
976Result VirtualFrame::RawCallCodeObject(Handle<Code> code,
977 RelocInfo::Mode rmode) {
978 ASSERT(cgen()->HasValidEntryRegisters());
979 __ call(code, rmode);
980 Result result = cgen()->allocator()->Allocate(eax);
981 ASSERT(result.is_valid());
982 return result;
983}
984
985
// This function assumes that the only results that could be in a_reg or b_reg
// are a and b.  Other results can be live, but must not be in a_reg or b_reg.
// Moves a into a_reg and b into b_reg (emitting at most one xchg) and
// invalidates both results.
void VirtualFrame::MoveResultsToRegisters(Result* a,
                                          Result* b,
                                          Register a_reg,
                                          Register b_reg) {
  if (a->is_register() && a->reg().is(a_reg)) {
    // a is already in place; only b may need to move.
    b->ToRegister(b_reg);
  } else if (!cgen()->allocator()->is_used(a_reg)) {
    // a_reg is free: claim it for a first so moving b cannot collide.
    a->ToRegister(a_reg);
    b->ToRegister(b_reg);
  } else if (cgen()->allocator()->is_used(b_reg)) {
    // Both target registers are occupied, and (by the precondition
    // above) not by the right results, so:
    // a must be in b_reg, b in a_reg.
    __ xchg(a_reg, b_reg);
    // Results a and b will be invalidated, so it is ok if they are switched.
  } else {
    // a_reg is occupied -- by b, the only other candidate -- so move b
    // out to the free b_reg first, then a into the freed a_reg.
    b->ToRegister(b_reg);
    a->ToRegister(a_reg);
  }
  a->Unuse();
  b->Unuse();
}
1008
1009
Steve Blocka7e24c12009-10-30 11:49:00 +00001010Result VirtualFrame::CallLoadIC(RelocInfo::Mode mode) {
1011 // Name and receiver are on the top of the frame. The IC expects
Andrei Popescu402d9372010-02-26 13:31:12 +00001012 // name in ecx and receiver in eax.
Steve Blocka7e24c12009-10-30 11:49:00 +00001013 Result name = Pop();
Andrei Popescu402d9372010-02-26 13:31:12 +00001014 Result receiver = Pop();
1015 PrepareForCall(0, 0); // No stack arguments.
Steve Block6ded16b2010-05-10 14:33:55 +01001016 MoveResultsToRegisters(&name, &receiver, ecx, eax);
1017
1018 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
Steve Blocka7e24c12009-10-30 11:49:00 +00001019 return RawCallCodeObject(ic, mode);
1020}
1021
1022
1023Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
Andrei Popescu402d9372010-02-26 13:31:12 +00001024 // Key and receiver are on top of the frame. Put them in eax and edx.
1025 Result key = Pop();
1026 Result receiver = Pop();
1027 PrepareForCall(0, 0);
Steve Block6ded16b2010-05-10 14:33:55 +01001028 MoveResultsToRegisters(&key, &receiver, eax, edx);
Andrei Popescu402d9372010-02-26 13:31:12 +00001029
Steve Blocka7e24c12009-10-30 11:49:00 +00001030 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
Steve Blocka7e24c12009-10-30 11:49:00 +00001031 return RawCallCodeObject(ic, mode);
1032}
1033
1034
// Call the inline-cache store stub for the given property name.
// Value and (if not contextual) receiver are on top of the frame.
// The IC expects name in ecx, value in eax, and receiver in edx.
Result VirtualFrame::CallStoreIC(Handle<String> name, bool is_contextual) {
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
  Result value = Pop();
  if (is_contextual) {
    // A contextual store has no receiver on the frame; the global
    // object (loaded from the context) stands in as the receiver.
    PrepareForCall(0, 0);
    value.ToRegister(eax);
    __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
    value.Unuse();
  } else {
    Result receiver = Pop();
    PrepareForCall(0, 0);
    MoveResultsToRegisters(&value, &receiver, eax, edx);
  }
  __ mov(ecx, name);
  return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
}
1053
1054
Result VirtualFrame::CallKeyedStoreIC() {
  // Value, key, and receiver are on the top of the frame.  The IC
  // expects value in eax, key in ecx, and receiver in edx.
  Result value = Pop();
  Result key = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);
  // Try to pin one of the three results in its target register without
  // disturbing the others, then let MoveResultsToRegisters place the
  // remaining two.  A target is available for a result when it is
  // either unused or already holds that result.
  if (!cgen()->allocator()->is_used(eax) ||
      (value.is_register() && value.reg().is(eax))) {
    if (!cgen()->allocator()->is_used(eax)) {
      value.ToRegister(eax);
    }
    MoveResultsToRegisters(&key, &receiver, ecx, edx);
    value.Unuse();
  } else if (!cgen()->allocator()->is_used(ecx) ||
             (key.is_register() && key.reg().is(ecx))) {
    if (!cgen()->allocator()->is_used(ecx)) {
      key.ToRegister(ecx);
    }
    MoveResultsToRegisters(&value, &receiver, eax, edx);
    key.Unuse();
  } else if (!cgen()->allocator()->is_used(edx) ||
             (receiver.is_register() && receiver.reg().is(edx))) {
    if (!cgen()->allocator()->is_used(edx)) {
      receiver.ToRegister(edx);
    }
    MoveResultsToRegisters(&key, &value, ecx, eax);
    receiver.Unuse();
  } else {
    // All three registers are used, and no value is in the correct place.
    // We have one of the two circular permutations of eax, ecx, edx.
    ASSERT(value.is_register());
    if (value.reg().is(ecx)) {
      // (value, key, receiver) sit in (ecx, edx, eax): two exchanges
      // through eax rotate them into (eax, ecx, edx).
      __ xchg(eax, edx);
      __ xchg(eax, ecx);
    } else {
      // (value, key, receiver) sit in (edx, eax, ecx): the opposite
      // rotation puts them into (eax, ecx, edx).
      __ xchg(eax, ecx);
      __ xchg(eax, edx);
    }
    value.Unuse();
    key.Unuse();
    receiver.Unuse();
  }

  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
}
1102
1103
1104Result VirtualFrame::CallCallIC(RelocInfo::Mode mode,
1105 int arg_count,
1106 int loop_nesting) {
Leon Clarkee46be812010-01-19 14:06:41 +00001107 // Function name, arguments, and receiver are on top of the frame.
1108 // The IC expects the name in ecx and the rest on the stack and
1109 // drops them all.
Steve Blocka7e24c12009-10-30 11:49:00 +00001110 InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
1111 Handle<Code> ic = cgen()->ComputeCallInitialize(arg_count, in_loop);
1112 // Spill args, receiver, and function. The call will drop args and
1113 // receiver.
Leon Clarkee46be812010-01-19 14:06:41 +00001114 Result name = Pop();
1115 PrepareForCall(arg_count + 1, arg_count + 1); // Arguments + receiver.
1116 name.ToRegister(ecx);
1117 name.Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00001118 return RawCallCodeObject(ic, mode);
1119}
1120
1121
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001122Result VirtualFrame::CallKeyedCallIC(RelocInfo::Mode mode,
1123 int arg_count,
1124 int loop_nesting) {
1125 // Function name, arguments, and receiver are on top of the frame.
1126 // The IC expects the name in ecx and the rest on the stack and
1127 // drops them all.
1128 InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
1129 Handle<Code> ic = cgen()->ComputeKeyedCallInitialize(arg_count, in_loop);
1130 // Spill args, receiver, and function. The call will drop args and
1131 // receiver.
1132 Result name = Pop();
1133 PrepareForCall(arg_count + 1, arg_count + 1); // Arguments + receiver.
1134 name.ToRegister(ecx);
1135 name.Unuse();
1136 return RawCallCodeObject(ic, mode);
1137}
1138
1139
Steve Blocka7e24c12009-10-30 11:49:00 +00001140Result VirtualFrame::CallConstructor(int arg_count) {
1141 // Arguments, receiver, and function are on top of the frame. The
1142 // IC expects arg count in eax, function in edi, and the arguments
1143 // and receiver on the stack.
1144 Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
1145 // Duplicate the function before preparing the frame.
1146 PushElementAt(arg_count + 1);
1147 Result function = Pop();
1148 PrepareForCall(arg_count + 1, arg_count + 1); // Spill args and receiver.
1149 function.ToRegister(edi);
1150
1151 // Constructors are called with the number of arguments in register
1152 // eax for now. Another option would be to have separate construct
1153 // call trampolines per different arguments counts encountered.
1154 Result num_args = cgen()->allocator()->Allocate(eax);
1155 ASSERT(num_args.is_valid());
1156 __ Set(num_args.reg(), Immediate(arg_count));
1157
1158 function.Unuse();
1159 num_args.Unuse();
1160 return RawCallCodeObject(ic, RelocInfo::CONSTRUCT_CALL);
1161}
1162
1163
1164void VirtualFrame::Drop(int count) {
1165 ASSERT(count >= 0);
1166 ASSERT(height() >= count);
1167 int num_virtual_elements = (element_count() - 1) - stack_pointer_;
1168
1169 // Emit code to lower the stack pointer if necessary.
1170 if (num_virtual_elements < count) {
1171 int num_dropped = count - num_virtual_elements;
1172 stack_pointer_ -= num_dropped;
1173 __ add(Operand(esp), Immediate(num_dropped * kPointerSize));
1174 }
1175
1176 // Discard elements from the virtual frame and free any registers.
1177 for (int i = 0; i < count; i++) {
1178 FrameElement dropped = elements_.RemoveLast();
1179 if (dropped.is_register()) {
1180 Unuse(dropped.reg());
1181 }
1182 }
1183}
1184
1185
// Pop the top element of the frame and return it as a Result.  Emits a
// pop (or stack-pointer adjustment) when the element occupies an actual
// stack slot, and may allocate a register to re-home a memory element
// that backs copies lower in the frame.
Result VirtualFrame::Pop() {
  FrameElement element = elements_.RemoveLast();
  int index = element_count();
  ASSERT(element.is_valid());
  ASSERT(element.is_untagged_int32() == cgen()->in_safe_int32_mode());

  // Get number type information of the result.
  TypeInfo info;
  if (!element.is_copy()) {
    info = element.type_info();
  } else {
    // Copies carry the type info of their backing element.
    info = elements_[element.index()].type_info();
  }

  // If the element was at the actual top of stack, esp must move up by
  // one slot.
  bool pop_needed = (stack_pointer_ == index);
  if (pop_needed) {
    stack_pointer_--;
    if (element.is_memory()) {
      Result temp = cgen()->allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ pop(temp.reg());
      temp.set_type_info(info);
      temp.set_untagged_int32(element.is_untagged_int32());
      return temp;
    }

    // Non-memory element: the stack slot's contents are dead; just
    // discard the slot.
    __ add(Operand(esp), Immediate(kPointerSize));
  }
  ASSERT(!element.is_memory());

  // The top element is a register, constant, or a copy.  Unuse
  // registers and follow copies to their backing store.
  if (element.is_register()) {
    Unuse(element.reg());
  } else if (element.is_copy()) {
    ASSERT(!element.is_untagged_int32());
    ASSERT(element.index() < index);
    index = element.index();
    element = elements_[index];
  }
  ASSERT(!element.is_copy());

  // The element is memory, a register, or a constant.
  if (element.is_memory()) {
    // Memory elements could only be the backing store of a copy.
    // Allocate the original to a register.
    ASSERT(index <= stack_pointer_);
    ASSERT(!element.is_untagged_int32());
    Result temp = cgen()->allocator()->Allocate();
    ASSERT(temp.is_valid());
    Use(temp.reg(), index);
    FrameElement new_element =
        FrameElement::RegisterElement(temp.reg(),
                                      FrameElement::SYNCED,
                                      element.type_info());
    // Preserve the copy flag on the element.
    if (element.is_copied()) new_element.set_copied();
    elements_[index] = new_element;
    __ mov(temp.reg(), Operand(ebp, fp_relative(index)));
    return Result(temp.reg(), info);
  } else if (element.is_register()) {
    Result return_value(element.reg(), info);
    return_value.set_untagged_int32(element.is_untagged_int32());
    return return_value;
  } else {
    ASSERT(element.is_constant());
    Result return_value(element.handle());
    return_value.set_untagged_int32(element.is_untagged_int32());
    return return_value;
  }
}
1257
1258
// Pop the top of the actual stack into the given register.  Requires
// that the top frame element coincide with the hardware stack top.
void VirtualFrame::EmitPop(Register reg) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(reg);
}
1265
1266
// Pop the top of the actual stack into the given operand.  Requires
// that the top frame element coincide with the hardware stack top.
void VirtualFrame::EmitPop(Operand operand) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(operand);
}
1273
1274
// Push a register on the actual stack and record a matching memory
// element (carrying the given type info) on the virtual frame.
void VirtualFrame::EmitPush(Register reg, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(reg);
}
1281
1282
// Push a memory operand on the actual stack and record a matching
// memory element (carrying the given type info) on the virtual frame.
void VirtualFrame::EmitPush(Operand operand, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(operand);
}
1289
1290
// Push an immediate on the actual stack and record a matching memory
// element (carrying the given type info) on the virtual frame.
void VirtualFrame::EmitPush(Immediate immediate, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(immediate);
}
1297
1298
// Push a constant on the virtual frame, marked as an untagged int32
// value.  The element is NOT_SYNCED, so no code is emitted here.
void VirtualFrame::PushUntaggedElement(Handle<Object> value) {
  elements_.Add(FrameElement::ConstantElement(value, FrameElement::NOT_SYNCED));
  elements_[element_count() - 1].set_untagged_int32(true);
}
1303
1304
Andrei Popescu402d9372010-02-26 13:31:12 +00001305void VirtualFrame::Push(Expression* expr) {
1306 ASSERT(expr->IsTrivial());
1307
1308 Literal* lit = expr->AsLiteral();
1309 if (lit != NULL) {
1310 Push(lit->handle());
1311 return;
1312 }
1313
1314 VariableProxy* proxy = expr->AsVariableProxy();
Steve Block6ded16b2010-05-10 14:33:55 +01001315 if (proxy != NULL) {
1316 Slot* slot = proxy->var()->slot();
1317 if (slot->type() == Slot::LOCAL) {
1318 PushLocalAt(slot->index());
1319 return;
1320 }
1321 if (slot->type() == Slot::PARAMETER) {
1322 PushParameterAt(slot->index());
1323 return;
1324 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001325 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001326 UNREACHABLE();
1327}
1328
1329
Steve Blocka7e24c12009-10-30 11:49:00 +00001330#undef __
1331
1332} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001333
1334#endif // V8_TARGET_ARCH_IA32