// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "register-allocator-inl.h"
#include "scopes.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ movq(Operand(rbp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void DeferredCode::RestoreRegisters() {
  // Restore registers in reverse order due to the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ movq(RegisterAllocator::ToRegister(i), Operand(rbp, action));
    }
  }
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


// -------------------------------------------------------------------------
// Deferred code objects
//
// These subclasses of DeferredCode add pieces of code to the end of generated
// code. They are branched to from the generated code, and
// keep some slower code out of the main body of the generated code.
// Many of them call a code stub or a runtime function.

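// The result of src + value is in dst. It either overflowed or was not
// smi tagged. Undo the speculative addition and call the appropriate
// specialized stub for add. The result is left in dst.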
class DeferredInlineSmiAdd: public DeferredCode {
 public:
  DeferredInlineSmiAdd(Register dst,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiAdd");
  }

  virtual void Generate();

 private:
  Register dst_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


// The result of value + src is in dst. It either overflowed or was not
// smi tagged. Undo the speculative addition and call the appropriate
// specialized stub for add. The result is left in dst.
class DeferredInlineSmiAddReversed: public DeferredCode {
 public:
  DeferredInlineSmiAddReversed(Register dst,
                               Smi* value,
                               OverwriteMode overwrite_mode)
      : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiAddReversed");
  }

  virtual void Generate();

 private:
  Register dst_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};

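// The result of src - value is in dst. It either overflowed or was not
// smi tagged. Undo the speculative subtraction and call the appropriate
// specialized stub for subtract. The result is left in dst.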
class DeferredInlineSmiSub: public DeferredCode {
 public:
  DeferredInlineSmiSub(Register dst,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiSub");
  }

  virtual void Generate();

 private:
  Register dst_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


// Call the appropriate binary operation stub to compute src op value
// and leave the result in dst.
class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             Register dst,
                             Register src,
                             Smi* value,
                             OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        src_(src),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register src_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};

class FloatingPointHelper : public AllStatic {
 public:
  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand on TOS+1. Returns operand as floating point number on FPU
  // stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register scratch);

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in src register. Returns operand as floating point number
  // in XMM register.
  static void LoadFloatOperand(MacroAssembler* masm,
                               Register src,
                               XMMRegister dst);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1, operand_2 on TOS+2; Returns operands as
  // floating point numbers in XMM registers.
  static void LoadFloatOperands(MacroAssembler* masm,
                                XMMRegister dst1,
                                XMMRegister dst2);

  // Code pattern for loading floating point values onto the fp stack.
  // Input values must be either smi or heap number objects (fp values).
  // Requirements:
  // Register version: operands in registers lhs and rhs.
  // Stack version: operands on TOS+1 and TOS+2.
  // Returns operands as floating point numbers on fp stack.
  static void LoadFloatOperands(MacroAssembler* masm);
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register lhs,
                                Register rhs);

  // Code pattern for loading a floating point value and converting it
  // to a 32 bit integer. Input value must be either a smi or a heap number
  // object.
  // Returns the operand as a 32-bit sign-extended integer in a general
  // purpose register.
  static void LoadInt32Operand(MacroAssembler* masm,
                               const Operand& src,
                               Register dst);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in rax, operand_2 in rdx; falls through on float or smi
  // operands, jumps to the non_float label otherwise.
  static void CheckNumberOperands(MacroAssembler* masm,
                                  Label* non_float);
};


// -----------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(int buffer_size,
                             Handle<Script> script,
                             bool is_eval)
    : is_eval_(is_eval),
      script_(script),
      deferred_(8),
      masm_(new MacroAssembler(NULL, buffer_size)),
      scope_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      function_return_is_shadowed_(false),
      in_spilled_code_(false) {
}


void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals. The inevitable call
  // will sync frame elements to memory anyway, so we do it eagerly to
  // allow us to push the arguments directly into place.
  frame_->SyncRange(0, frame_->element_count() - 1);

  __ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT);
  frame_->EmitPush(rsi);  // The context is the first argument.
  frame_->EmitPush(kScratchRegister);
  frame_->EmitPush(Smi::FromInt(is_eval() ? 1 : 0));
  Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void CodeGenerator::GenCode(FunctionLiteral* function) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(function);
  ZoneList<Statement*>* body = function->body();

  // Initialize state.
  ASSERT(scope_ == NULL);
  scope_ = function->scope();
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  loop_nesting_ += function->loop_nesting();

  JumpTarget::set_compiling_deferred_code(false);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      function->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    frame_->SpillAll();
    __ int3();
  }
#endif

  // New scope to get automatic timing calculation.
  {  // NOLINT
    HistogramTimerScope codegen_timer(&Counters::code_generation);
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // rbp: caller's frame pointer
    // rsp: stack pointer
    // rdi: called JS function
    // rsi: callee's context
    allocator_->Initialize();
    frame_->Enter();

    // Allocate space for locals and initialize them.
    frame_->AllocateStackSlots();
    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Allocate the local context if needed.
    int heap_slots = scope_->num_heap_slots();
    if (heap_slots > 0) {
      Comment cmnt(masm_, "[ allocate local context");
      // Allocate local context.
      // Get outer context and create a new context based on it.
      frame_->PushFunction();
      Result context;
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        context = frame_->CallStub(&stub, 1);
      } else {
        context = frame_->CallRuntime(Runtime::kNewContext, 1);
      }

      // Update context local.
      frame_->SaveContextRegister();

      // Verify that the runtime call result and rsi agree.
      if (FLAG_debug_code) {
        __ cmpq(context.reg(), rsi);
        __ Assert(equal, "Runtime::NewContext should end up in rsi");
      }
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");

      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
      for (int i = 0; i < scope_->num_parameters(); i++) {
        Variable* par = scope_->parameter(i);
        Slot* slot = par->slot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          // The use of SlotOperand below is safe in unspilled code
          // because the slot is guaranteed to be a context slot.
          //
          // There are no parameters in the global scope.
          ASSERT(!scope_->is_global_scope());
          frame_->PushParameterAt(i);
          Result value = frame_->Pop();
          value.ToRegister();

          // SlotOperand loads context.reg() with the context object
          // stored to, used below in RecordWrite.
          Result context = allocator_->Allocate();
          ASSERT(context.is_valid());
          __ movq(SlotOperand(slot, context.reg()), value.reg());
          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
          Result scratch = allocator_->Allocate();
          ASSERT(scratch.is_valid());
          frame_->Spill(context.reg());
          frame_->Spill(value.reg());
          __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
        }
      }
    }

    // Store the arguments object. This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope_->is_function_scope() && scope_->function() != NULL) {
      frame_->Push(Factory::the_hole_value());
      StoreToSlot(scope_->function()->slot(), NOT_CONST_INIT);
    }

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope_->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope_->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope_->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope_->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(body);

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body. In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(function);
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence. This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  loop_nesting_ -= function->loop_nesting();

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(loop_nesting() == 0);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    HistogramTimerScope deferred_timer(&Counters::deferred_code_generation);
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
  scope_ = NULL;
}

void CodeGenerator::GenerateReturnSequence(Result* return_value) {
  // The return value is a live (but not currently reference counted)
  // reference to rax. This is safe because the current frame does not
  // contain a reference to rax (it is prepared for the return by spilling
  // all registers).
  if (FLAG_trace) {
    frame_->Push(return_value);
    *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
  }
  return_value->ToRegister(rax);

  // Add a label for checking the size of the code used for returning.
#ifdef DEBUG
  Label check_exit_codesize;
  masm_->bind(&check_exit_codesize);
#endif

  // Leave the frame and return popping the arguments and the
  // receiver.
  frame_->Exit();
  masm_->ret((scope_->num_parameters() + 1) * kPointerSize);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Add padding that will be overwritten by a debugger breakpoint.
  // frame_->Exit() generates "movq rsp, rbp; pop rbp; ret k"
  // with length 7 (3 + 1 + 3).
  const int kPadding = Assembler::kJSReturnSequenceLength - 7;
  for (int i = 0; i < kPadding; ++i) {
    masm_->int3();
  }
  // Check that the size of the code used for returning matches what is
  // expected by the debugger.
  ASSERT_EQ(Assembler::kJSReturnSequenceLength,
            masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
  DeleteFrame();
}


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() {
  return (allocator()->count(rax) == (frame()->is_used(rax) ? 1 : 0))
      && (allocator()->count(rbx) == (frame()->is_used(rbx) ? 1 : 0))
      && (allocator()->count(rcx) == (frame()->is_used(rcx) ? 1 : 0))
      && (allocator()->count(rdx) == (frame()->is_used(rdx) ? 1 : 0))
      && (allocator()->count(rdi) == (frame()->is_used(rdi) ? 1 : 0))
      && (allocator()->count(r8) == (frame()->is_used(r8) ? 1 : 0))
      && (allocator()->count(r9) == (frame()->is_used(r9) ? 1 : 0))
      && (allocator()->count(r11) == (frame()->is_used(r11) ? 1 : 0))
      && (allocator()->count(r14) == (frame()->is_used(r14) ? 1 : 0))
      && (allocator()->count(r15) == (frame()->is_used(r15) ? 1 : 0))
      && (allocator()->count(r12) == (frame()->is_used(r12) ? 1 : 0));
}
#endif


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetKeyedValue(Register dst,
                                          Register receiver,
                                          Register key,
                                          bool is_global)
      : dst_(dst), receiver_(receiver), key_(key), is_global_(is_global) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Register key_;
  bool is_global_;
};


void DeferredReferenceGetKeyedValue::Generate() {
  __ push(receiver_);  // First IC argument.
  __ push(key_);       // Second IC argument.

  // Calculate the delta from the IC call instruction to the map check
  // movq instruction in the inlined version. This delta is stored in
  // a test(rax, delta) instruction after the call so that we can find
  // it in the IC initialization code and patch the movq instruction.
  // This means that we cannot allow test instructions after calls to
  // KeyedLoadIC stubs in other places.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  RelocInfo::Mode mode = is_global_
                         ? RelocInfo::CODE_TARGET_CONTEXT
                         : RelocInfo::CODE_TARGET;
  __ Call(ic, mode);
  // The delta from the start of the map-compare instruction to the
  // test instruction. We use masm_-> directly here instead of the __
  // macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  // TODO(X64): Consider whether it's worth switching the test to a
  // 7-byte NOP with non-zero immediate (0f 1f 80 xxxxxxxx) which won't
  // be generated normally.
  masm_->testl(rax, Immediate(-delta_to_patch_site));
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);

  if (!dst_.is(rax)) __ movq(dst_, rax);
  __ pop(key_);
  __ pop(receiver_);
}


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver)
      : value_(value), key_(key), receiver_(receiver) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Register value_;
  Register key_;
  Register receiver_;
  Label patch_site_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
  // Push receiver and key arguments on the stack.
  __ push(receiver_);
  __ push(key_);
  // Move value argument to rax as expected by the IC stub.
  if (!value_.is(rax)) __ movq(rax, value_);
  // Call the IC stub.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instructions (initial movq)
  // to the test instruction. We use masm_-> directly here instead of the
  // __ macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->testl(rax, Immediate(-delta_to_patch_site));
  // Restore value (returned from store IC), key and receiver
  // registers.
  if (!value_.is(rax)) __ movq(value_, rax);
  __ pop(key_);
  __ pop(receiver_);
}


void CodeGenerator::CallApplyLazy(Property* apply,
                                  Expression* receiver,
                                  VariableProxy* arguments,
                                  int position) {
  ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
  ASSERT(arguments->IsArguments());

  JumpTarget slow, done;

  // Load the apply function onto the stack. This will usually
  // give us a megamorphic load site. Not super, but it works.
  Reference ref(this, apply);
  ref.GetValue();
  ASSERT(ref.type() == Reference::NAMED);

  // Load the receiver and the existing arguments object onto the
  // expression stack. Avoid allocating the arguments object here.
  Load(receiver);
  LoadFromSlot(scope_->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);

  // Emit the source position information after having loaded the
  // receiver and the arguments.
  CodeForSourcePosition(position);

  // Check if the arguments object has been lazily allocated
  // already. If so, just use that instead of copying the arguments
  // from the stack. This also deals with cases where a local variable
  // named 'arguments' has been introduced.
  frame_->Dup();
  Result probe = frame_->Pop();
  bool try_lazy = true;
  if (probe.is_constant()) {
    try_lazy = probe.handle()->IsTheHole();
  } else {
    __ Cmp(probe.reg(), Factory::the_hole_value());
    probe.Unuse();
    slow.Branch(not_equal);
  }

  if (try_lazy) {
    JumpTarget build_args;

    // Get rid of the arguments object probe.
    frame_->Drop();

    // Before messing with the execution stack, we sync all
    // elements. This is bound to happen anyway because we're
    // about to call a function.
    frame_->SyncRange(0, frame_->element_count() - 1);

    // Check that the receiver really is a JavaScript object.
    {
      frame_->PushElementAt(0);
      Result receiver = frame_->Pop();
      receiver.ToRegister();
      Condition is_smi = masm_->CheckSmi(receiver.reg());
      build_args.Branch(is_smi);
      // We allow all JSObjects including JSFunctions. As long as
      // JS_FUNCTION_TYPE is the last instance type and it is right
      // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
      // bound.
      ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
      ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
      __ CmpObjectType(receiver.reg(), FIRST_JS_OBJECT_TYPE, kScratchRegister);
      build_args.Branch(below);
    }

    // Verify that we're invoking Function.prototype.apply.
    {
      frame_->PushElementAt(1);
      Result apply = frame_->Pop();
      apply.ToRegister();
      Condition is_smi = masm_->CheckSmi(apply.reg());
      build_args.Branch(is_smi);
      Result tmp = allocator_->Allocate();
      __ CmpObjectType(apply.reg(), JS_FUNCTION_TYPE, tmp.reg());
      build_args.Branch(not_equal);
      __ movq(tmp.reg(),
              FieldOperand(apply.reg(), JSFunction::kSharedFunctionInfoOffset));
      Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
      __ Cmp(FieldOperand(tmp.reg(), SharedFunctionInfo::kCodeOffset),
             apply_code);
      build_args.Branch(not_equal);
    }

    // Get the function receiver from the stack. Check that it
    // really is a function.
    __ movq(rdi, Operand(rsp, 2 * kPointerSize));
    Condition is_smi = masm_->CheckSmi(rdi);
    build_args.Branch(is_smi);
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    build_args.Branch(not_equal);

    // Copy the arguments to this function possibly from the
    // adaptor frame below it.
    Label invoke, adapted;
    __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
                  Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
    __ j(equal, &adapted);

    // No arguments adaptor frame. Copy fixed number of arguments.
    __ movq(rax, Immediate(scope_->num_parameters()));
    for (int i = 0; i < scope_->num_parameters(); i++) {
      __ push(frame_->ParameterAt(i));
    }
    __ jmp(&invoke);

    // Arguments adaptor frame present. Copy arguments from there, but
    // avoid copying too many arguments to avoid stack overflows.
    __ bind(&adapted);
    static const uint32_t kArgumentsLimit = 1 * KB;
    __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiToInteger32(rax, rax);
    __ movq(rcx, rax);
    __ cmpq(rax, Immediate(kArgumentsLimit));
    build_args.Branch(above);

    // Loop through the arguments pushing them onto the execution
    // stack. We don't inform the virtual frame of the push, so we don't
    // have to worry about getting rid of the elements from the virtual
    // frame.
    Label loop;
    __ testl(rcx, rcx);
    __ j(zero, &invoke);
    __ bind(&loop);
    __ push(Operand(rdx, rcx, times_pointer_size, 1 * kPointerSize));
    __ decl(rcx);
    __ j(not_zero, &loop);

    // Invoke the function. The virtual frame knows about the receiver
    // so make sure to forget that explicitly.
    __ bind(&invoke);
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
    frame_->Forget(1);
    Result result = allocator()->Allocate(rax);
    frame_->SetElementAt(0, &result);
    done.Jump();

    // Slow-case: Allocate the arguments object since we know it isn't
    // there, and fall-through to the slow-case where we call
    // Function.prototype.apply.
    build_args.Bind();
    Result arguments_object = StoreArgumentsObject(false);
    frame_->Push(&arguments_object);
    slow.Bind();
  }

  // Flip the apply function and the function to call on the stack, so
  // the function looks like the receiver of the apply call. This way,
  // the generic Function.prototype.apply implementation can deal with
  // the call like it usually does.
  Result a2 = frame_->Pop();
  Result a1 = frame_->Pop();
  Result ap = frame_->Pop();
  Result fn = frame_->Pop();
  frame_->Push(&ap);
  frame_->Push(&fn);
  frame_->Push(&a1);
  frame_->Push(&a2);
  CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
  Result res = frame_->CallStub(&call_function, 3);
  frame_->Push(&res);

  // All done. Restore context register after call.
  if (try_lazy) done.Bind();
  frame_->RestoreContextRegister();
}


class DeferredStackCheck: public DeferredCode {
 public:
  DeferredStackCheck() {
    set_comment("[ DeferredStackCheck");
  }

  virtual void Generate();
};


void DeferredStackCheck::Generate() {
  StackCheckStub stub;
  __ CallStub(&stub);
}


void CodeGenerator::CheckStack() {
  DeferredStackCheck* deferred = new DeferredStackCheck;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  deferred->Branch(below);
  deferred->BindExit();
}


void CodeGenerator::VisitAndSpill(Statement* statement) {
  // TODO(X64): No architecture specific code. Move to shared location.
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Visit(statement);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  VisitStatements(statements);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
  ASSERT(!in_spilled_code());
  for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
    Visit(statements->at(i));
  }
}


void CodeGenerator::VisitBlock(Block* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ Block");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  VisitStatements(node->statements());
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
}


void CodeGenerator::VisitDeclaration(Declaration* node) {
  Comment cmnt(masm_, "[ Declaration");
  Variable* var = node->proxy()->var();
  ASSERT(var != NULL);  // must have been resolved
  Slot* slot = var->slot();

  // If it was not possible to allocate the variable at compile time,
  // we need to "declare" it at runtime to make sure it actually
  // exists in the local context.
  if (slot != NULL && slot->type() == Slot::LOOKUP) {
    // Variables with a "LOOKUP" slot were introduced as non-locals
    // during variable resolution and must have mode DYNAMIC.
    ASSERT(var->is_dynamic());
    // For now, just do a runtime call. Sync the virtual frame eagerly
    // so we can simply push the arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(rsi);
    __ movq(kScratchRegister, var->name(), RelocInfo::EMBEDDED_OBJECT);
    frame_->EmitPush(kScratchRegister);
    // Declaration nodes are always introduced in one of two modes.
    ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
    PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
    frame_->EmitPush(Smi::FromInt(attr));
    // Push initial value, if any.
    // Note: For variables we must not push an initial value (such as
    // 'undefined') because we may have a (legal) redeclaration and we
    // must not destroy the current value.
    if (node->mode() == Variable::CONST) {
      frame_->EmitPush(Heap::kTheHoleValueRootIndex);
    } else if (node->fun() != NULL) {
      Load(node->fun());
    } else {
      frame_->EmitPush(Smi::FromInt(0));  // no initial value!
    }
    Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
    // Ignore the return value (declarations are statements).
    return;
  }

  ASSERT(!var->is_global());

  // If we have a function or a constant, we need to initialize the variable.
  Expression* val = NULL;
  if (node->mode() == Variable::CONST) {
    val = new Literal(Factory::the_hole_value());
  } else {
    val = node->fun();  // NULL if we don't have a function
  }

  if (val != NULL) {
    {
      // Set the initial value.
      Reference target(this, node->proxy());
      Load(val);
      target.SetValue(NOT_CONST_INIT);
      // The reference is removed from the stack (preserving TOS) when
      // it goes out of scope.
    }
    // Get rid of the assigned value (declarations are statements).
    frame_->Drop();
  }
}


void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ExpressionStatement");
  CodeForStatementPosition(node);
  Expression* expression = node->expression();
  expression->MarkAsStatement();
  Load(expression);
  // Remove the lingering expression result from the top of stack.
  frame_->Drop();
}


void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "// EmptyStatement");
  CodeForStatementPosition(node);
  // nothing to do
}


void CodeGenerator::VisitIfStatement(IfStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ IfStatement");
  // Generate different code depending on which parts of the if statement
  // are present or not.
  bool has_then_stm = node->HasThenStatement();
  bool has_else_stm = node->HasElseStatement();

  CodeForStatementPosition(node);
  JumpTarget exit;
  if (has_then_stm && has_else_stm) {
    JumpTarget then;
    JumpTarget else_;
    ControlDestination dest(&then, &else_, true);
    LoadCondition(node->condition(), &dest, true);

    if (dest.false_was_fall_through()) {
      // The else target was bound, so we compile the else part first.
      Visit(node->else_statement());

      // We may have dangling jumps to the then part.
      if (then.is_linked()) {
        if (has_valid_frame()) exit.Jump();
        then.Bind();
        Visit(node->then_statement());
      }
    } else {
      // The then target was bound, so we compile the then part first.
      Visit(node->then_statement());

      if (else_.is_linked()) {
        if (has_valid_frame()) exit.Jump();
        else_.Bind();
        Visit(node->else_statement());
      }
    }

  } else if (has_then_stm) {
    ASSERT(!has_else_stm);
    JumpTarget then;
    ControlDestination dest(&then, &exit, true);
    LoadCondition(node->condition(), &dest, true);

    if (dest.false_was_fall_through()) {
      // The exit label was bound. We may have dangling jumps to the
      // then part.
      if (then.is_linked()) {
        exit.Unuse();
        exit.Jump();
        then.Bind();
        Visit(node->then_statement());
      }
    } else {
      // The then label was bound.
      Visit(node->then_statement());
    }

  } else if (has_else_stm) {
    ASSERT(!has_then_stm);
    JumpTarget else_;
    ControlDestination dest(&exit, &else_, false);
    LoadCondition(node->condition(), &dest, true);

    if (dest.true_was_fall_through()) {
      // The exit label was bound. We may have dangling jumps to the
      // else part.
      if (else_.is_linked()) {
        exit.Unuse();
        exit.Jump();
        else_.Bind();
        Visit(node->else_statement());
      }
    } else {
      // The else label was bound.
      Visit(node->else_statement());
    }

  } else {
    ASSERT(!has_then_stm && !has_else_stm);
    // We only care about the condition's side effects (not its value
    // or control flow effect). LoadCondition is called without
    // forcing control flow.
    ControlDestination dest(&exit, &exit, true);
    LoadCondition(node->condition(), &dest, false);
    if (!dest.is_used()) {
      // We got a value on the frame rather than (or in addition to)
      // control flow.
      frame_->Drop();
    }
  }

  if (exit.is_linked()) {
    exit.Bind();
  }
}


void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ContinueStatement");
  CodeForStatementPosition(node);
  node->target()->continue_target()->Jump();
}


void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ BreakStatement");
  CodeForStatementPosition(node);
  node->target()->break_target()->Jump();
}


void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ReturnStatement");

  CodeForStatementPosition(node);
  Load(node->expression());
  Result return_value = frame_->Pop();
  if (function_return_is_shadowed_) {
    function_return_.Jump(&return_value);
  } else {
    frame_->PrepareForReturn();
    if (function_return_.is_bound()) {
      // If the function return label is already bound we reuse the
      // code by jumping to the return site.
      function_return_.Jump(&return_value);
    } else {
      function_return_.Bind(&return_value);
      GenerateReturnSequence(&return_value);
    }
  }
}


void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WithEnterStatement");
  CodeForStatementPosition(node);
  Load(node->expression());
  Result context;
  if (node->is_catch_block()) {
    context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
  } else {
    context = frame_->CallRuntime(Runtime::kPushContext, 1);
  }

  // Update context local.
  frame_->SaveContextRegister();

  // Verify that the runtime call result and rsi agree.
  if (FLAG_debug_code) {
    __ cmpq(context.reg(), rsi);
    __ Assert(equal, "Runtime::NewContext should end up in rsi");
  }
}


void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WithExitStatement");
  CodeForStatementPosition(node);
  // Pop context.
  __ movq(rsi, ContextOperand(rsi, Context::PREVIOUS_INDEX));
  // Update context local.
  frame_->SaveContextRegister();
}


void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
  // TODO(X64): This code is completely generic and should be moved somewhere
  // where it can be shared between architectures.
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ SwitchStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);

  // Compile the switch value.
  Load(node->tag());

  ZoneList<CaseClause*>* cases = node->cases();
  int length = cases->length();
  CaseClause* default_clause = NULL;

  JumpTarget next_test;
  // Compile the case label expressions and comparisons. Exit early
  // if a comparison is unconditionally true. The target next_test is
  // bound before the loop in order to indicate control flow to the
  // first comparison.
  next_test.Bind();
  for (int i = 0; i < length && !next_test.is_unused(); i++) {
    CaseClause* clause = cases->at(i);
    // The default is not a test, but remember it for later.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    // We recycle the same target next_test for each test. Bind it if
    // the previous test has not done so and then unuse it for the
    // loop.
    if (next_test.is_linked()) {
      next_test.Bind();
    }
    next_test.Unuse();

    // Duplicate the switch value.
    frame_->Dup();

    // Compile the label expression.
    Load(clause->label());

    // Compare and branch to the body if true or the next test if
    // false. Prefer the next test as a fall through.
    ControlDestination dest(clause->body_target(), &next_test, false);
    Comparison(equal, true, &dest);

    // If the comparison fell through to the true target, jump to the
    // actual body.
    if (dest.true_was_fall_through()) {
      clause->body_target()->Unuse();
      clause->body_target()->Jump();
    }
  }

  // If there was control flow to a next test from the last one
  // compiled, compile a jump to the default or break target.
  if (!next_test.is_unused()) {
    if (next_test.is_linked()) {
      next_test.Bind();
    }
    // Drop the switch value.
    frame_->Drop();
    if (default_clause != NULL) {
      default_clause->body_target()->Jump();
    } else {
      node->break_target()->Jump();
    }
  }

  // The last instruction emitted was a jump, either to the default
  // clause or the break target, or else to a case body from the loop
  // that compiles the tests.
  ASSERT(!has_valid_frame());
  // Compile case bodies as needed.
  for (int i = 0; i < length; i++) {
    CaseClause* clause = cases->at(i);

    // There are two ways to reach the body: from the corresponding
    // test or as the fall through of the previous body.
    if (clause->body_target()->is_linked() || has_valid_frame()) {
      if (clause->body_target()->is_linked()) {
        if (has_valid_frame()) {
          // If we have both a jump to the test and a fall through, put
          // a jump on the fall through path to avoid the dropping of
          // the switch value on the test path. The exception is the
          // default which has already had the switch value dropped.
          if (clause->is_default()) {
            clause->body_target()->Bind();
          } else {
            JumpTarget body;
            body.Jump();
            clause->body_target()->Bind();
            frame_->Drop();
            body.Bind();
          }
        } else {
          // No fall through to worry about.
          clause->body_target()->Bind();
          if (!clause->is_default()) {
            frame_->Drop();
          }
        }
      } else {
        // Otherwise, we have only fall through.
        ASSERT(has_valid_frame());
      }

      // We are now prepared to compile the body.
      Comment cmnt(masm_, "[ Case body");
      VisitStatements(clause->statements());
    }
    clause->body_target()->Unuse();
  }

  // We may not have a valid frame here so bind the break target only
  // if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
}


void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DoWhileStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  JumpTarget body(JumpTarget::BIDIRECTIONAL);
  IncrementLoopNesting();

  ConditionAnalysis info = AnalyzeCondition(node->cond());
  // Label the top of the loop for the backward jump if necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // Use the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case ALWAYS_FALSE:
      // No need to label it.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      break;
    case DONT_KNOW:
      // Continue is the test, so use the backward body target.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      body.Bind();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Compile the test.
  switch (info) {
    case ALWAYS_TRUE:
      // If control flow can fall off the end of the body, jump back
      // to the top and bind the break target at the exit.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case ALWAYS_FALSE:
      // We may have had continues or breaks in the body.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case DONT_KNOW:
      // We have to compile the test expression if it can be reached by
      // control flow falling out of the body or via continue.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (has_valid_frame()) {
        Comment cmnt(masm_, "[ DoWhileCondition");
        CodeForDoWhileConditionPosition(node);
        ControlDestination dest(&body, node->break_target(), false);
        LoadCondition(node->cond(), &dest, true);
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
  }

  DecrementLoopNesting();
  node->continue_target()->Unuse();
  node->break_target()->Unuse();
}


void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WhileStatement");
  CodeForStatementPosition(node);

  // If the condition is always false and has no side effects, we do not
  // need to compile anything.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  // Do not duplicate conditions that may have function literal
  // subexpressions. This can cause us to compile the function literal
  // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression. Label the top of the
      // loop with the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is the test at the bottom, no need to label the test
        // at the top. The body is a backward target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else {
        // Label the test at the top as the continue target. The body
        // is a forward-only target.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      }
      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // The loop body has been labeled with the continue target.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        // If we have chosen to recompile the test at the bottom,
        // then it is the continue target.
        if (node->continue_target()->is_linked()) {
          node->continue_target()->Bind();
        }
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here and thus an invalid fall-through).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // If we have chosen not to recompile the test at the
        // bottom, jump back to the one at the top.
        if (has_valid_frame()) {
          node->continue_target()->Jump();
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or there
  // may not be a valid frame. Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


1465void CodeGenerator::VisitForStatement(ForStatement* node) {
1466 ASSERT(!in_spilled_code());
1467 Comment cmnt(masm_, "[ ForStatement");
1468 CodeForStatementPosition(node);
1469
1470 // Compile the init expression if present.
1471 if (node->init() != NULL) {
1472 Visit(node->init());
1473 }
1474
1475 // If the condition is always false and has no side effects, we do not
1476 // need to compile anything else.
1477 ConditionAnalysis info = AnalyzeCondition(node->cond());
1478 if (info == ALWAYS_FALSE) return;
1479
1480 // Do not duplicate conditions that may have function literal
1481 // subexpressions. This can cause us to compile the function literal
1482 // twice.
1483 bool test_at_bottom = !node->may_have_function_literal();
1484 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
1485 IncrementLoopNesting();
1486
1487 // Target for backward edge if no test at the bottom, otherwise
1488 // unused.
1489 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
1490
1491 // Target for backward edge if there is a test at the bottom,
1492 // otherwise used as target for test at the top.
1493 JumpTarget body;
1494 if (test_at_bottom) {
1495 body.set_direction(JumpTarget::BIDIRECTIONAL);
1496 }
1497
1498 // Based on the condition analysis, compile the test as necessary.
1499 switch (info) {
1500 case ALWAYS_TRUE:
1501 // We will not compile the test expression. Label the top of the
1502 // loop.
1503 if (node->next() == NULL) {
1504 // Use the continue target if there is no update expression.
1505 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1506 node->continue_target()->Bind();
1507 } else {
1508 // Otherwise use the backward loop target.
1509 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1510 loop.Bind();
1511 }
1512 break;
1513 case DONT_KNOW: {
1514 if (test_at_bottom) {
1515 // Continue is either the update expression or the test at the
1516 // bottom, no need to label the test at the top.
1517 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1518 } else if (node->next() == NULL) {
1519 // We are not recompiling the test at the bottom and there is no
1520 // update expression.
1521 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1522 node->continue_target()->Bind();
1523 } else {
1524 // We are not recompiling the test at the bottom and there is an
1525 // update expression.
1526 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1527 loop.Bind();
1528 }
1529
1530 // Compile the test with the body as the true target and preferred
1531 // fall-through and with the break target as the false target.
1532 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00001533 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00001534
1535 if (dest.false_was_fall_through()) {
1536 // If we got the break target as fall-through, the test may have
1537 // been unconditionally false (if there are no jumps to the
1538 // body).
1539 if (!body.is_linked()) {
1540 DecrementLoopNesting();
1541 return;
1542 }
1543
1544 // Otherwise, jump around the body on the fall through and then
1545 // bind the body target.
1546 node->break_target()->Unuse();
1547 node->break_target()->Jump();
1548 body.Bind();
1549 }
1550 break;
1551 }
1552 case ALWAYS_FALSE:
1553 UNREACHABLE();
1554 break;
1555 }
1556
1557 CheckStack(); // TODO(1222600): ignore if body contains calls.
1558 Visit(node->body());
1559
1560 // If there is an update expression, compile it if necessary.
1561 if (node->next() != NULL) {
1562 if (node->continue_target()->is_linked()) {
1563 node->continue_target()->Bind();
1564 }
1565
1566 // Control can reach the update by falling out of the body or by a
1567 // continue.
1568 if (has_valid_frame()) {
1569 // Record the source position of the statement: this code, which
1570 // comes after the code for the body, actually belongs to the loop
1571 // statement and not to the body.
1572 CodeForStatementPosition(node);
1573 Visit(node->next());
1574 }
1575 }
1576
1577 // Based on the condition analysis, compile the backward jump as
1578 // necessary.
1579 switch (info) {
1580 case ALWAYS_TRUE:
1581 if (has_valid_frame()) {
1582 if (node->next() == NULL) {
1583 node->continue_target()->Jump();
1584 } else {
1585 loop.Jump();
1586 }
1587 }
1588 break;
1589 case DONT_KNOW:
1590 if (test_at_bottom) {
1591 if (node->continue_target()->is_linked()) {
1592 // We can have dangling jumps to the continue target if there
1593 // was no update expression.
1594 node->continue_target()->Bind();
1595 }
1596 // Control can reach the test at the bottom by falling out of
1597 // the body, by a continue in the body, or from the update
1598 // expression.
1599 if (has_valid_frame()) {
1600 // The break target is the fall-through (body is a backward
1601 // jump from here).
1602 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00001603 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00001604 }
1605 } else {
1606 // Otherwise, jump back to the test at the top.
Steve Blocka7e24c12009-10-30 11:49:00 +00001607 if (has_valid_frame()) {
1608 if (node->next() == NULL) {
1609 node->continue_target()->Jump();
1610 } else {
1611 loop.Jump();
1612 }
1613 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001614 }
1615 break;
Steve Block3ce2e202009-11-05 08:53:23 +00001616 case ALWAYS_FALSE:
1617 UNREACHABLE();
1618 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00001619 }
1620
Steve Block3ce2e202009-11-05 08:53:23 +00001621 // The break target may be already bound (by the condition), or there
1622 // may not be a valid frame. Bind it only if needed.
1623 if (node->break_target()->is_linked()) {
1624 node->break_target()->Bind();
1625 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001626 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00001627}
1628
1629
1630void CodeGenerator::VisitForInStatement(ForInStatement* node) {
1631 ASSERT(!in_spilled_code());
1632 VirtualFrame::SpilledScope spilled_scope;
1633 Comment cmnt(masm_, "[ ForInStatement");
1634 CodeForStatementPosition(node);
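  // JavaScript example: 'for (key in enumerable) body'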
1635
1636 JumpTarget primitive;
1637 JumpTarget jsobject;
1638 JumpTarget fixed_array;
1639 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
1640 JumpTarget end_del_check;
1641 JumpTarget exit;
1642
1643 // Get the object to enumerate over (converted to JSObject).
1644 LoadAndSpill(node->enumerable());
1645
1646 // Both SpiderMonkey and kjs ignore null and undefined in contrast
1647 // to the specification; section 12.6.4 mandates a call to ToObject.
1648 frame_->EmitPop(rax);
1649
1650 // rax: value to be iterated over
1651 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1652 exit.Branch(equal);
1653 __ CompareRoot(rax, Heap::kNullValueRootIndex);
1654 exit.Branch(equal);
1655
1656 // Stack layout in body:
1657 // [iteration counter (smi)] <- slot 0
1658 // [length of array] <- slot 1
1659 // [FixedArray] <- slot 2
1660 // [Map or 0] <- slot 3
1661 // [Object] <- slot 4
1662
1663 // Check if enumerable is already a JSObject
1664 // rax: value to be iterated over
1665 Condition is_smi = masm_->CheckSmi(rax);
1666 primitive.Branch(is_smi);
1667 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
1668 jsobject.Branch(above_equal);
1669
1670 primitive.Bind();
1671 frame_->EmitPush(rax);
1672 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
1673 // The function call returns the value in rax, which is where we want it below.
1674
1675 jsobject.Bind();
1676 // Get the set of properties (as a FixedArray or Map).
1677 // rax: value to be iterated over
Steve Blockd0582a62009-12-15 09:54:21 +00001678 frame_->EmitPush(rax); // Push the object being iterated over.
Steve Blocka7e24c12009-10-30 11:49:00 +00001679
Steve Blockd0582a62009-12-15 09:54:21 +00001680
1681 // Check cache validity in generated code. This is a fast case for
1682 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1683 // guarantee cache validity, call the runtime system to check cache
1684 // validity or get the property names in a fixed array.
1685 JumpTarget call_runtime;
1686 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
1687 JumpTarget check_prototype;
1688 JumpTarget use_cache;
1689 __ movq(rcx, rax);
1690 loop.Bind();
1691 // Check that there are no elements.
1692 __ movq(rdx, FieldOperand(rcx, JSObject::kElementsOffset));
1693 __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex);
1694 call_runtime.Branch(not_equal);
1695 // Check that instance descriptors are not empty so that we can
1696 // check for an enum cache. Leave the map in rbx for the subsequent
1697 // prototype load.
1698 __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
1699 __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOffset));
1700 __ CompareRoot(rdx, Heap::kEmptyDescriptorArrayRootIndex);
1701 call_runtime.Branch(equal);
1702 // Check that there is an enum cache in the non-empty instance
1703 // descriptors. This is the case if the next enumeration index
1704 // field does not contain a smi.
1705 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
1706 is_smi = masm_->CheckSmi(rdx);
1707 call_runtime.Branch(is_smi);
1708 // For all objects but the receiver, check that the cache is empty.
1709 __ cmpq(rcx, rax);
1710 check_prototype.Branch(equal);
1711 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1712 __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex);
1713 call_runtime.Branch(not_equal);
1714 check_prototype.Bind();
1715 // Load the prototype from the map and loop if non-null.
1716 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
1717 __ CompareRoot(rcx, Heap::kNullValueRootIndex);
1718 loop.Branch(not_equal);
1719 // The enum cache is valid. Load the map of the object being
1720 // iterated over and use the cache for the iteration.
1721 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
1722 use_cache.Jump();
1723
1724 call_runtime.Bind();
1725 // Call the runtime to get the property names for the object.
Steve Blocka7e24c12009-10-30 11:49:00 +00001726 frame_->EmitPush(rax); // push the Object (slot 4) for the runtime call
1727 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1728
1729 // If we got a Map, we can do a fast modification check.
1730 // Otherwise, we got a FixedArray, and we have to do a slow check.
1731 // rax: map or fixed array (result from call to
1732 // Runtime::kGetPropertyNamesFast)
1733 __ movq(rdx, rax);
1734 __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1735 __ CompareRoot(rcx, Heap::kMetaMapRootIndex);
1736 fixed_array.Branch(not_equal);
1737
Steve Blockd0582a62009-12-15 09:54:21 +00001738 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00001739 // Get enum cache
Steve Blockd0582a62009-12-15 09:54:21 +00001740 // rax: map (either the result from a call to
1741 // Runtime::kGetPropertyNamesFast or has been fetched directly from
1742 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00001743 __ movq(rcx, rax);
1744 __ movq(rcx, FieldOperand(rcx, Map::kInstanceDescriptorsOffset));
1745 // Get the bridge array held in the enumeration index field.
1746 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
1747 // Get the cache from the bridge array.
1748 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1749
1750 frame_->EmitPush(rax); // <- slot 3
1751 frame_->EmitPush(rdx); // <- slot 2
1752 __ movl(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
1753 __ Integer32ToSmi(rax, rax);
1754 frame_->EmitPush(rax); // <- slot 1
Steve Block3ce2e202009-11-05 08:53:23 +00001755 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0
Steve Blocka7e24c12009-10-30 11:49:00 +00001756 entry.Jump();
1757
1758 fixed_array.Bind();
1759 // rax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
Steve Block3ce2e202009-11-05 08:53:23 +00001760 frame_->EmitPush(Smi::FromInt(0)); // <- slot 3
Steve Blocka7e24c12009-10-30 11:49:00 +00001761 frame_->EmitPush(rax); // <- slot 2
1762
1763 // Push the length of the array and the initial index onto the stack.
1764 __ movl(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1765 __ Integer32ToSmi(rax, rax);
1766 frame_->EmitPush(rax); // <- slot 1
Steve Block3ce2e202009-11-05 08:53:23 +00001767 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0
Steve Blocka7e24c12009-10-30 11:49:00 +00001768
1769 // Condition.
1770 entry.Bind();
1771 // Grab the current frame's height for the break and continue
1772 // targets only after all the state is pushed on the frame.
1773 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
1774 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1775
1776 __ movq(rax, frame_->ElementAt(0)); // load the current count
Steve Block3ce2e202009-11-05 08:53:23 +00001777 __ SmiCompare(frame_->ElementAt(1), rax); // compare to the array length
1778 node->break_target()->Branch(below_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00001779
1780 // Get the i'th entry of the array.
1781 __ movq(rdx, frame_->ElementAt(2));
1782 SmiIndex index = masm_->SmiToIndex(rbx, rax, kPointerSizeLog2);
1783 __ movq(rbx,
1784 FieldOperand(rdx, index.reg, index.scale, FixedArray::kHeaderSize));
1785
1786 // Get the expected map from the stack or a zero map in the permanent slow case.
1787 // rax: current iteration count
1788 // rbx: i'th entry of the enum cache
1789 __ movq(rdx, frame_->ElementAt(3));
1790 // Check if the expected map still matches that of the enumerable.
1791 // If not, we have to filter the key.
1792 // rax: current iteration count
1793 // rbx: i'th entry of the enum cache
1794 // rdx: expected map value
1795 __ movq(rcx, frame_->ElementAt(4));
1796 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
1797 __ cmpq(rcx, rdx);
1798 end_del_check.Branch(equal);
1799
1800 // Convert the entry to a string (or null if it isn't a property anymore).
1801 frame_->EmitPush(frame_->ElementAt(4)); // push enumerable
1802 frame_->EmitPush(rbx); // push entry
1803 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
1804 __ movq(rbx, rax);
1805
1806 // If the property has been removed while iterating, we just skip it.
1807 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
1808 node->continue_target()->Branch(equal);
1809
1810 end_del_check.Bind();
1811 // Store the entry in the 'each' expression and take another spin in the
1812 // loop. rbx: i'th entry of the enum cache (or a string thereof)
1813 frame_->EmitPush(rbx);
1814 { Reference each(this, node->each());
1815 // Loading a reference may leave the frame in an unspilled state.
1816 frame_->SpillAll();
1817 if (!each.is_illegal()) {
1818 if (each.size() > 0) {
1819 frame_->EmitPush(frame_->ElementAt(each.size()));
Leon Clarkeeab96aa2010-01-27 16:31:12 +00001820 }
1821 // If the reference was to a slot we rely on the convenient property
1822 // that it doesn't matter whether a value (e.g., rbx pushed above) is
1823 // right on top of or right underneath a zero-sized reference.
1824 each.SetValue(NOT_CONST_INIT);
1825 if (each.size() > 0) {
1826 // It's safe to pop the value lying on top of the reference before
1827 // unloading the reference itself (which preserves the top of stack,
1828 // i.e., now the topmost value of the non-zero-sized reference), since
1829 // we will discard the top of stack after unloading the reference
1830 // anyway.
1831 frame_->Drop();
Steve Blocka7e24c12009-10-30 11:49:00 +00001832 }
1833 }
1834 }
1835 // Unloading a reference may leave the frame in an unspilled state.
1836 frame_->SpillAll();
1837
Leon Clarkeeab96aa2010-01-27 16:31:12 +00001838 // Discard the i'th entry pushed above or else the remainder of the
1839 // reference, whichever is currently on top of the stack.
1840 frame_->Drop();
1841
Steve Blocka7e24c12009-10-30 11:49:00 +00001842 // Body.
1843 CheckStack(); // TODO(1222600): ignore if body contains calls.
1844 VisitAndSpill(node->body());
1845
1846 // Next. Reestablish a spilled frame in case we are coming here via
1847 // a continue in the body.
1848 node->continue_target()->Bind();
1849 frame_->SpillAll();
1850 frame_->EmitPop(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00001851 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
Steve Blocka7e24c12009-10-30 11:49:00 +00001852 frame_->EmitPush(rax);
1853 entry.Jump();
1854
1855 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
1856 // any frame.
1857 node->break_target()->Bind();
1858 frame_->Drop(5);
1859
1860 // Exit.
1861 exit.Bind();
1862
1863 node->continue_target()->Unuse();
1864 node->break_target()->Unuse();
1865}
1866
Steve Block3ce2e202009-11-05 08:53:23 +00001867void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001868 ASSERT(!in_spilled_code());
1869 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00001870 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00001871 CodeForStatementPosition(node);
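  // JavaScript example: 'try { ... } catch (e) { ... }'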
1872
1873 JumpTarget try_block;
1874 JumpTarget exit;
1875
1876 try_block.Call();
1877 // --- Catch block ---
1878 frame_->EmitPush(rax);
1879
1880 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00001881 Variable* catch_var = node->catch_var()->var();
1882 ASSERT(catch_var != NULL && catch_var->slot() != NULL);
1883 StoreToSlot(catch_var->slot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00001884
1885 // Remove the exception from the stack.
1886 frame_->Drop();
1887
1888 VisitStatementsAndSpill(node->catch_block()->statements());
1889 if (has_valid_frame()) {
1890 exit.Jump();
1891 }
1892
1893
1894 // --- Try block ---
1895 try_block.Bind();
1896
1897 frame_->PushTryHandler(TRY_CATCH_HANDLER);
1898 int handler_height = frame_->height();
1899
1900 // Shadow the jump targets for all escapes from the try block, including
1901 // returns. During shadowing, the original target is hidden as the
1902 // ShadowTarget and operations on the original actually affect the
1903 // shadowing target.
1904 //
1905 // We should probably try to unify the escaping targets and the return
1906 // target.
1907 int nof_escapes = node->escaping_targets()->length();
1908 List<ShadowTarget*> shadows(1 + nof_escapes);
1909
1910 // Add the shadow target for the function return.
1911 static const int kReturnShadowIndex = 0;
1912 shadows.Add(new ShadowTarget(&function_return_));
1913 bool function_return_was_shadowed = function_return_is_shadowed_;
1914 function_return_is_shadowed_ = true;
1915 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
1916
1917 // Add the remaining shadow targets.
1918 for (int i = 0; i < nof_escapes; i++) {
1919 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
1920 }
1921
1922 // Generate code for the statements in the try block.
1923 VisitStatementsAndSpill(node->try_block()->statements());
1924
1925 // Stop the introduced shadowing and count the number of required unlinks.
1926 // After shadowing stops, the original targets are unshadowed and the
1927 // ShadowTargets represent the formerly shadowing targets.
1928 bool has_unlinks = false;
1929 for (int i = 0; i < shadows.length(); i++) {
1930 shadows[i]->StopShadowing();
1931 has_unlinks = has_unlinks || shadows[i]->is_linked();
1932 }
1933 function_return_is_shadowed_ = function_return_was_shadowed;
1934
1935 // Get an external reference to the handler address.
1936 ExternalReference handler_address(Top::k_handler_address);
1937
1938 // Make sure that there's nothing left on the stack above the
1939 // handler structure.
1940 if (FLAG_debug_code) {
1941 __ movq(kScratchRegister, handler_address);
1942 __ cmpq(rsp, Operand(kScratchRegister, 0));
1943 __ Assert(equal, "stack pointer should point to top handler");
1944 }
1945
1946 // If we can fall off the end of the try block, unlink from try chain.
1947 if (has_valid_frame()) {
1948 // The next handler address is on top of the frame. Unlink from
1949 // the handler list and drop the rest of this handler from the
1950 // frame.
1951 ASSERT(StackHandlerConstants::kNextOffset == 0);
1952 __ movq(kScratchRegister, handler_address);
1953 frame_->EmitPop(Operand(kScratchRegister, 0));
1954 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
1955 if (has_unlinks) {
1956 exit.Jump();
1957 }
1958 }
1959
1960 // Generate unlink code for the (formerly) shadowing targets that
1961 // have been jumped to. Deallocate each shadow target.
1962 Result return_value;
1963 for (int i = 0; i < shadows.length(); i++) {
1964 if (shadows[i]->is_linked()) {
1965 // Unlink from try chain; be careful not to destroy the TOS if
1966 // there is one.
1967 if (i == kReturnShadowIndex) {
1968 shadows[i]->Bind(&return_value);
1969 return_value.ToRegister(rax);
1970 } else {
1971 shadows[i]->Bind();
1972 }
1973 // Because we can be jumping here (to spilled code) from
1974 // unspilled code, we need to reestablish a spilled frame at
1975 // this block.
1976 frame_->SpillAll();
1977
1978 // Reload sp from the top handler, because some statements that we
1979 // break from (eg, for...in) may have left stuff on the stack.
1980 __ movq(kScratchRegister, handler_address);
1981 __ movq(rsp, Operand(kScratchRegister, 0));
1982 frame_->Forget(frame_->height() - handler_height);
1983
1984 ASSERT(StackHandlerConstants::kNextOffset == 0);
1985 __ movq(kScratchRegister, handler_address);
1986 frame_->EmitPop(Operand(kScratchRegister, 0));
1987 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
1988
1989 if (i == kReturnShadowIndex) {
1990 if (!function_return_is_shadowed_) frame_->PrepareForReturn();
1991 shadows[i]->other_target()->Jump(&return_value);
1992 } else {
1993 shadows[i]->other_target()->Jump();
1994 }
1995 }
1996 }
1997
1998 exit.Bind();
1999}
2000
2001
Steve Block3ce2e202009-11-05 08:53:23 +00002002void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002003 ASSERT(!in_spilled_code());
2004 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00002005 Comment cmnt(masm_, "[ TryFinallyStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002006 CodeForStatementPosition(node);
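  // JavaScript example: 'try { ... } finally { ... }'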
2007
2008 // State: Used to keep track of reason for entering the finally
2009 // block. Should probably be extended to hold information for
2010 // break/continue from within the try block.
2011 enum { FALLING, THROWING, JUMPING };
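  // (FALLING: control fell off the end of the try block; THROWING: an
  // exception was thrown in the try block; JUMPING + i: control left the
  // try block through shadow target i.)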
2012
2013 JumpTarget try_block;
2014 JumpTarget finally_block;
2015
2016 try_block.Call();
2017
2018 frame_->EmitPush(rax);
2019 // In case of thrown exceptions, this is where we continue.
Steve Block3ce2e202009-11-05 08:53:23 +00002020 __ Move(rcx, Smi::FromInt(THROWING));
Steve Blocka7e24c12009-10-30 11:49:00 +00002021 finally_block.Jump();
2022
2023 // --- Try block ---
2024 try_block.Bind();
2025
2026 frame_->PushTryHandler(TRY_FINALLY_HANDLER);
2027 int handler_height = frame_->height();
2028
2029 // Shadow the jump targets for all escapes from the try block, including
2030 // returns. During shadowing, the original target is hidden as the
2031 // ShadowTarget and operations on the original actually affect the
2032 // shadowing target.
2033 //
2034 // We should probably try to unify the escaping targets and the return
2035 // target.
2036 int nof_escapes = node->escaping_targets()->length();
2037 List<ShadowTarget*> shadows(1 + nof_escapes);
2038
2039 // Add the shadow target for the function return.
2040 static const int kReturnShadowIndex = 0;
2041 shadows.Add(new ShadowTarget(&function_return_));
2042 bool function_return_was_shadowed = function_return_is_shadowed_;
2043 function_return_is_shadowed_ = true;
2044 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2045
2046 // Add the remaining shadow targets.
2047 for (int i = 0; i < nof_escapes; i++) {
2048 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2049 }
2050
2051 // Generate code for the statements in the try block.
2052 VisitStatementsAndSpill(node->try_block()->statements());
2053
2054 // Stop the introduced shadowing and count the number of required unlinks.
2055 // After shadowing stops, the original targets are unshadowed and the
2056 // ShadowTargets represent the formerly shadowing targets.
2057 int nof_unlinks = 0;
2058 for (int i = 0; i < shadows.length(); i++) {
2059 shadows[i]->StopShadowing();
2060 if (shadows[i]->is_linked()) nof_unlinks++;
2061 }
2062 function_return_is_shadowed_ = function_return_was_shadowed;
2063
2064 // Get an external reference to the handler address.
2065 ExternalReference handler_address(Top::k_handler_address);
2066
2067 // If we can fall off the end of the try block, unlink from the try
2068 // chain and set the state on the frame to FALLING.
2069 if (has_valid_frame()) {
2070 // The next handler address is on top of the frame.
2071 ASSERT(StackHandlerConstants::kNextOffset == 0);
2072 __ movq(kScratchRegister, handler_address);
2073 frame_->EmitPop(Operand(kScratchRegister, 0));
2074 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2075
2076 // Fake a top of stack value (unneeded when FALLING) and set the
2077 // state in rcx, then jump around the unlink blocks if any.
2078 frame_->EmitPush(Heap::kUndefinedValueRootIndex);
Steve Block3ce2e202009-11-05 08:53:23 +00002079 __ Move(rcx, Smi::FromInt(FALLING));
Steve Blocka7e24c12009-10-30 11:49:00 +00002080 if (nof_unlinks > 0) {
2081 finally_block.Jump();
2082 }
2083 }
2084
2085 // Generate code to unlink and set the state for the (formerly)
2086 // shadowing targets that have been jumped to.
2087 for (int i = 0; i < shadows.length(); i++) {
2088 if (shadows[i]->is_linked()) {
2089 // If we have come from the shadowed return, the return value is
2090 // on the virtual frame. We must preserve it until it is
2091 // pushed.
2092 if (i == kReturnShadowIndex) {
2093 Result return_value;
2094 shadows[i]->Bind(&return_value);
2095 return_value.ToRegister(rax);
2096 } else {
2097 shadows[i]->Bind();
2098 }
2099 // Because we can be jumping here (to spilled code) from
2100 // unspilled code, we need to reestablish a spilled frame at
2101 // this block.
2102 frame_->SpillAll();
2103
2104 // Reload sp from the top handler, because some statements that
2105 // we break from (e.g., for...in) may have left stuff on the
2106 // stack.
2107 __ movq(kScratchRegister, handler_address);
2108 __ movq(rsp, Operand(kScratchRegister, 0));
2109 frame_->Forget(frame_->height() - handler_height);
2110
2111 // Unlink this handler and drop it from the frame.
2112 ASSERT(StackHandlerConstants::kNextOffset == 0);
2113 __ movq(kScratchRegister, handler_address);
2114 frame_->EmitPop(Operand(kScratchRegister, 0));
2115 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2116
2117 if (i == kReturnShadowIndex) {
2118 // If this target shadowed the function return, materialize
2119 // the return value on the stack.
2120 frame_->EmitPush(rax);
2121 } else {
2122 // Fake TOS for targets that shadowed breaks and continues.
2123 frame_->EmitPush(Heap::kUndefinedValueRootIndex);
2124 }
Steve Block3ce2e202009-11-05 08:53:23 +00002125 __ Move(rcx, Smi::FromInt(JUMPING + i));
Steve Blocka7e24c12009-10-30 11:49:00 +00002126 if (--nof_unlinks > 0) {
2127 // If this is not the last unlink block, jump around the next.
2128 finally_block.Jump();
2129 }
2130 }
2131 }
2132
2133 // --- Finally block ---
2134 finally_block.Bind();
2135
2136 // Push the state on the stack.
2137 frame_->EmitPush(rcx);
2138
2139 // We keep two elements on the stack - the (possibly faked) result
2140 // and the state - while evaluating the finally block.
2141 //
2142 // Generate code for the statements in the finally block.
2143 VisitStatementsAndSpill(node->finally_block()->statements());
2144
2145 if (has_valid_frame()) {
2146 // Restore state and return value or faked TOS.
2147 frame_->EmitPop(rcx);
2148 frame_->EmitPop(rax);
2149 }
2150
2151 // Generate code to jump to the right destination for all used
2152 // formerly shadowing targets. Deallocate each shadow target.
2153 for (int i = 0; i < shadows.length(); i++) {
2154 if (has_valid_frame() && shadows[i]->is_bound()) {
2155 BreakTarget* original = shadows[i]->other_target();
Steve Block3ce2e202009-11-05 08:53:23 +00002156 __ SmiCompare(rcx, Smi::FromInt(JUMPING + i));
Steve Blocka7e24c12009-10-30 11:49:00 +00002157 if (i == kReturnShadowIndex) {
2158 // The return value is (already) in rax.
2159 Result return_value = allocator_->Allocate(rax);
2160 ASSERT(return_value.is_valid());
2161 if (function_return_is_shadowed_) {
2162 original->Branch(equal, &return_value);
2163 } else {
2164 // Branch around the preparation for return which may emit
2165 // code.
2166 JumpTarget skip;
2167 skip.Branch(not_equal);
2168 frame_->PrepareForReturn();
2169 original->Jump(&return_value);
2170 skip.Bind();
2171 }
2172 } else {
2173 original->Branch(equal);
2174 }
2175 }
2176 }
2177
2178 if (has_valid_frame()) {
2179 // Check if we need to rethrow the exception.
2180 JumpTarget exit;
Steve Block3ce2e202009-11-05 08:53:23 +00002181 __ SmiCompare(rcx, Smi::FromInt(THROWING));
Steve Blocka7e24c12009-10-30 11:49:00 +00002182 exit.Branch(not_equal);
2183
2184 // Rethrow exception.
2185 frame_->EmitPush(rax); // undo pop from above
2186 frame_->CallRuntime(Runtime::kReThrow, 1);
2187
2188 // Done.
2189 exit.Bind();
2190 }
2191}
2192
2193
2194void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
2195 ASSERT(!in_spilled_code());
2196 Comment cmnt(masm_, "[ DebuggerStatement");
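  // JavaScript example: 'debugger'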
2197 CodeForStatementPosition(node);
2198#ifdef ENABLE_DEBUGGER_SUPPORT
2199 // Spill everything, even constants, to the frame.
2200 frame_->SpillAll();
2201 frame_->CallRuntime(Runtime::kDebugBreak, 0);
2202 // Ignore the return value.
2203#endif
2204}
2205
2206
2207void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) {
Leon Clarkee46be812010-01-19 14:06:41 +00002208 ASSERT(boilerplate->IsBoilerplate());
2209
Steve Blocka7e24c12009-10-30 11:49:00 +00002210 // The inevitable call will sync frame elements to memory anyway, so
2211 // we do it eagerly to allow us to push the arguments directly into
2212 // place.
Steve Blocka7e24c12009-10-30 11:49:00 +00002213 frame_->SyncRange(0, frame_->element_count() - 1);
2214
Leon Clarkee46be812010-01-19 14:06:41 +00002215 // Use the fast case closure allocation code that allocates in new
2216 // space for nested functions that don't need their literals cloned.
2217 if (scope()->is_function_scope() && boilerplate->NumberOfLiterals() == 0) {
2218 FastNewClosureStub stub;
2219 frame_->Push(boilerplate);
2220 Result answer = frame_->CallStub(&stub, 1);
2221 frame_->Push(&answer);
2222 } else {
2223 // Call the runtime to instantiate the function boilerplate
2224 // object.
2225 frame_->EmitPush(rsi);
2226 frame_->EmitPush(boilerplate);
2227 Result result = frame_->CallRuntime(Runtime::kNewClosure, 2);
2228 frame_->Push(&result);
2229 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002230}
2231
2232
2233void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
2234 Comment cmnt(masm_, "[ FunctionLiteral");
2235
2236 // Build the function boilerplate and instantiate it.
Steve Blockd0582a62009-12-15 09:54:21 +00002237 Handle<JSFunction> boilerplate =
2238 Compiler::BuildBoilerplate(node, script_, this);
Steve Blocka7e24c12009-10-30 11:49:00 +00002239 // Check for stack-overflow exception.
2240 if (HasStackOverflow()) return;
2241 InstantiateBoilerplate(boilerplate);
2242}
2243
2244
2245void CodeGenerator::VisitFunctionBoilerplateLiteral(
2246 FunctionBoilerplateLiteral* node) {
2247 Comment cmnt(masm_, "[ FunctionBoilerplateLiteral");
2248 InstantiateBoilerplate(node->boilerplate());
2249}
2250
2251
2252void CodeGenerator::VisitConditional(Conditional* node) {
2253 Comment cmnt(masm_, "[ Conditional");
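  // JavaScript example: 'condition ? then_expression : else_expression'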
2254 JumpTarget then;
2255 JumpTarget else_;
2256 JumpTarget exit;
2257 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00002258 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002259
2260 if (dest.false_was_fall_through()) {
2261 // The else target was bound, so we compile the else part first.
Steve Blockd0582a62009-12-15 09:54:21 +00002262 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002263
2264 if (then.is_linked()) {
2265 exit.Jump();
2266 then.Bind();
Steve Blockd0582a62009-12-15 09:54:21 +00002267 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002268 }
2269 } else {
2270 // The then target was bound, so we compile the then part first.
Steve Blockd0582a62009-12-15 09:54:21 +00002271 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002272
2273 if (else_.is_linked()) {
2274 exit.Jump();
2275 else_.Bind();
Steve Blockd0582a62009-12-15 09:54:21 +00002276 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002277 }
2278 }
2279
2280 exit.Bind();
2281}
2282
2283
2284void CodeGenerator::VisitSlot(Slot* node) {
2285 Comment cmnt(masm_, "[ Slot");
Steve Blockd0582a62009-12-15 09:54:21 +00002286 LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00002287}
2288
2289
2290void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
2291 Comment cmnt(masm_, "[ VariableProxy");
2292 Variable* var = node->var();
2293 Expression* expr = var->rewrite();
2294 if (expr != NULL) {
2295 Visit(expr);
2296 } else {
2297 ASSERT(var->is_global());
2298 Reference ref(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00002299 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002300 }
2301}
2302
2303
2304void CodeGenerator::VisitLiteral(Literal* node) {
2305 Comment cmnt(masm_, "[ Literal");
2306 frame_->Push(node->handle());
2307}
2308
2309
2310// Materialize the regexp literal 'node' in the literals array
2311// 'literals' of the function. Leave the regexp boilerplate in
2312// 'boilerplate'.
2313class DeferredRegExpLiteral: public DeferredCode {
2314 public:
2315 DeferredRegExpLiteral(Register boilerplate,
2316 Register literals,
2317 RegExpLiteral* node)
2318 : boilerplate_(boilerplate), literals_(literals), node_(node) {
2319 set_comment("[ DeferredRegExpLiteral");
2320 }
2321
2322 void Generate();
2323
2324 private:
2325 Register boilerplate_;
2326 Register literals_;
2327 RegExpLiteral* node_;
2328};
2329
2330
2331void DeferredRegExpLiteral::Generate() {
2332 // Since the entry is undefined we call the runtime system to
2333 // compute the literal.
2334 // Literal array (0).
2335 __ push(literals_);
2336 // Literal index (1).
Steve Block3ce2e202009-11-05 08:53:23 +00002337 __ Push(Smi::FromInt(node_->literal_index()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002338 // RegExp pattern (2).
2339 __ Push(node_->pattern());
2340 // RegExp flags (3).
2341 __ Push(node_->flags());
2342 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
2343 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax);
2344}
2345
2346
2347void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
2348 Comment cmnt(masm_, "[ RegExp Literal");
2349
2350 // Retrieve the literals array and check the allocated entry. Begin
2351 // with a writable copy of the function of this activation in a
2352 // register.
2353 frame_->PushFunction();
2354 Result literals = frame_->Pop();
2355 literals.ToRegister();
2356 frame_->Spill(literals.reg());
2357
2358 // Load the literals array of the function.
2359 __ movq(literals.reg(),
2360 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
2361
2362 // Load the literal at the index saved in the AST node.
2363 Result boilerplate = allocator_->Allocate();
2364 ASSERT(boilerplate.is_valid());
2365 int literal_offset =
2366 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
2367 __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
2368
2369 // Check whether we need to materialize the RegExp object. If so,
2370 // jump to the deferred code passing the literals array.
2371 DeferredRegExpLiteral* deferred =
2372 new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
2373 __ CompareRoot(boilerplate.reg(), Heap::kUndefinedValueRootIndex);
2374 deferred->Branch(equal);
2375 deferred->BindExit();
2376 literals.Unuse();
2377
2378 // Push the boilerplate object.
2379 frame_->Push(&boilerplate);
2380}
2381
2382
Steve Blocka7e24c12009-10-30 11:49:00 +00002383void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
2384 Comment cmnt(masm_, "[ ObjectLiteral");
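  // JavaScript example: '{ key: value, ... }'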
2385
Leon Clarkee46be812010-01-19 14:06:41 +00002386 // Load a writable copy of the function of this activation in a
Steve Blocka7e24c12009-10-30 11:49:00 +00002387 // register.
2388 frame_->PushFunction();
2389 Result literals = frame_->Pop();
2390 literals.ToRegister();
2391 frame_->Spill(literals.reg());
2392
2393 // Load the literals array of the function.
2394 __ movq(literals.reg(),
2395 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00002396 // Literal array.
2397 frame_->Push(&literals);
2398 // Literal index.
2399 frame_->Push(Smi::FromInt(node->literal_index()));
2400 // Constant properties.
2401 frame_->Push(node->constant_properties());
2402 Result clone;
2403 if (node->depth() > 1) {
2404 clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 3);
2405 } else {
2406 clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00002407 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002408 frame_->Push(&clone);
2409
2410 for (int i = 0; i < node->properties()->length(); i++) {
2411 ObjectLiteral::Property* property = node->properties()->at(i);
2412 switch (property->kind()) {
2413 case ObjectLiteral::Property::CONSTANT:
2414 break;
2415 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2416 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
2417 // else fall through.
2418 case ObjectLiteral::Property::COMPUTED: {
2419 Handle<Object> key(property->key()->handle());
2420 if (key->IsSymbol()) {
2421 // Duplicate the object as the IC receiver.
2422 frame_->Dup();
2423 Load(property->value());
2424 frame_->Push(key);
2425 Result ignored = frame_->CallStoreIC();
2426 // Drop the duplicated receiver and ignore the result.
2427 frame_->Drop();
2428 break;
2429 }
2430 // Fall through
2431 }
2432 case ObjectLiteral::Property::PROTOTYPE: {
2433 // Duplicate the object as an argument to the runtime call.
2434 frame_->Dup();
2435 Load(property->key());
2436 Load(property->value());
2437 Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
2438 // Ignore the result.
2439 break;
2440 }
2441 case ObjectLiteral::Property::SETTER: {
2442 // Duplicate the object as an argument to the runtime call.
2443 frame_->Dup();
2444 Load(property->key());
2445 frame_->Push(Smi::FromInt(1));
2446 Load(property->value());
2447 Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
2448 // Ignore the result.
2449 break;
2450 }
2451 case ObjectLiteral::Property::GETTER: {
2452 // Duplicate the object as an argument to the runtime call.
2453 frame_->Dup();
2454 Load(property->key());
2455 frame_->Push(Smi::FromInt(0));
2456 Load(property->value());
2457 Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
2458 // Ignore the result.
2459 break;
2460 }
2461 default: UNREACHABLE();
2462 }
2463 }
2464}
2465
2466
Steve Blocka7e24c12009-10-30 11:49:00 +00002467void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
2468 Comment cmnt(masm_, "[ ArrayLiteral");
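  // JavaScript example: '[a, b, c]'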
2469
Leon Clarkee46be812010-01-19 14:06:41 +00002470 // Load a writable copy of the function of this activation in a
Steve Blocka7e24c12009-10-30 11:49:00 +00002471 // register.
2472 frame_->PushFunction();
2473 Result literals = frame_->Pop();
2474 literals.ToRegister();
2475 frame_->Spill(literals.reg());
2476
2477 // Load the literals array of the function.
2478 __ movq(literals.reg(),
2479 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00002480 // Literal array.
2481 frame_->Push(&literals);
2482 // Literal index.
2483 frame_->Push(Smi::FromInt(node->literal_index()));
2484 // Constant elements.
2485 frame_->Push(node->constant_elements());
2486 Result clone;
2487 if (node->depth() > 1) {
2488 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
2489 } else {
2490 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00002491 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002492 frame_->Push(&clone);
2493
2494 // Generate code to set the elements in the array that are not
2495 // literals.
2496 for (int i = 0; i < node->values()->length(); i++) {
2497 Expression* value = node->values()->at(i);
2498
2499 // If value is a literal the property value is already set in the
2500 // boilerplate object.
2501 if (value->AsLiteral() != NULL) continue;
2502 // If value is a materialized literal the property value is already set
2503 // in the boilerplate object if it is simple.
2504 if (CompileTimeValue::IsCompileTimeValue(value)) continue;
2505
2506 // The property must be set by generated code.
2507 Load(value);
2508
2509 // Get the property value off the stack.
2510 Result prop_value = frame_->Pop();
2511 prop_value.ToRegister();
2512
2513 // Fetch the array literal while leaving a copy on the stack and
2514 // use it to get the elements array.
2515 frame_->Dup();
2516 Result elements = frame_->Pop();
2517 elements.ToRegister();
2518 frame_->Spill(elements.reg());
2519 // Get the elements FixedArray.
2520 __ movq(elements.reg(),
2521 FieldOperand(elements.reg(), JSObject::kElementsOffset));
2522
2523 // Write to the indexed properties array.
2524 int offset = i * kPointerSize + FixedArray::kHeaderSize;
2525 __ movq(FieldOperand(elements.reg(), offset), prop_value.reg());
2526
2527 // Update the write barrier for the array address.
2528 frame_->Spill(prop_value.reg()); // Overwritten by the write barrier.
2529 Result scratch = allocator_->Allocate();
2530 ASSERT(scratch.is_valid());
2531 __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
2532 }
2533}
2534
2535
2536void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
2537 ASSERT(!in_spilled_code());
2538 // Call runtime routine to allocate the catch extension object and
2539 // assign the exception value to the catch variable.
2540 Comment cmnt(masm_, "[ CatchExtensionObject");
2541 Load(node->key());
2542 Load(node->value());
2543 Result result =
2544 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
2545 frame_->Push(&result);
2546}
2547
2548
2549void CodeGenerator::VisitAssignment(Assignment* node) {
2550 Comment cmnt(masm_, "[ Assignment");
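  // JavaScript examples: 'x = y', 'obj.prop = value', 'x += 1'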
2551
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002552 { Reference target(this, node->target());
Steve Blocka7e24c12009-10-30 11:49:00 +00002553 if (target.is_illegal()) {
2554 // Fool the virtual frame into thinking that we left the assignment's
2555 // value on the frame.
2556 frame_->Push(Smi::FromInt(0));
2557 return;
2558 }
2559 Variable* var = node->target()->AsVariableProxy()->AsVariable();
2560
2561 if (node->starts_initialization_block()) {
2562 ASSERT(target.type() == Reference::NAMED ||
2563 target.type() == Reference::KEYED);
2564 // Change to slow case in the beginning of an initialization
2565 // block to avoid the quadratic behavior of repeatedly adding
2566 // fast properties.
2567
2568 // The receiver is the argument to the runtime call. It is the
2569 // first value pushed when the reference was loaded to the
2570 // frame.
2571 frame_->PushElementAt(target.size() - 1);
2572 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
2573 }
2574 if (node->op() == Token::ASSIGN ||
2575 node->op() == Token::INIT_VAR ||
2576 node->op() == Token::INIT_CONST) {
2577 Load(node->value());
2578
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002579 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +00002580 Literal* literal = node->value()->AsLiteral();
2581 bool overwrite_value =
2582 (node->value()->AsBinaryOperation() != NULL &&
2583 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
2584 Variable* right_var = node->value()->AsVariableProxy()->AsVariable();
2585 // There are two cases where the target is not read in the right hand
2586 // side that are easy to test for: the right hand side is a literal,
2587 // or the right hand side is a different variable. TakeValue invalidates
2588 // the target, with an implicit promise that it will be written to again
2589 // before it is read.
2590 if (literal != NULL || (right_var != NULL && right_var != var)) {
Steve Blockd0582a62009-12-15 09:54:21 +00002591 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002592 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00002593 target.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002594 }
2595 Load(node->value());
2596 GenericBinaryOperation(node->binary_op(),
2597 node->type(),
2598 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
2599 }
2600
2601 if (var != NULL &&
2602 var->mode() == Variable::CONST &&
2603 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
2604 // Assignment ignored - leave the value on the stack.
2605 } else {
2606 CodeForSourcePosition(node->position());
2607 if (node->op() == Token::INIT_CONST) {
2608 // Dynamic constant initializations must use the function context
2609 // and initialize the actual constant declared. Dynamic variable
2610 // initializations are simply assignments and use SetValue.
2611 target.SetValue(CONST_INIT);
2612 } else {
2613 target.SetValue(NOT_CONST_INIT);
2614 }
2615 if (node->ends_initialization_block()) {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002616 ASSERT(target.type() == Reference::NAMED ||
2617 target.type() == Reference::KEYED);
Steve Blocka7e24c12009-10-30 11:49:00 +00002618 // End of initialization block. Revert to fast case. The
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002619 // argument to the runtime call is the receiver, which is the
2620 // first value pushed as part of the reference, which is below
2621 // the lhs value.
2622 frame_->PushElementAt(target.size());
Steve Blocka7e24c12009-10-30 11:49:00 +00002623 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
2624 }
2625 }
2626 }
2627}
2628
2629
2630void CodeGenerator::VisitThrow(Throw* node) {
2631 Comment cmnt(masm_, "[ Throw");
2632 Load(node->exception());
2633 Result result = frame_->CallRuntime(Runtime::kThrow, 1);
2634 frame_->Push(&result);
2635}
2636
2637
2638void CodeGenerator::VisitProperty(Property* node) {
2639 Comment cmnt(masm_, "[ Property");
2640 Reference property(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00002641 property.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002642}
2643
2644
2645void CodeGenerator::VisitCall(Call* node) {
2646 Comment cmnt(masm_, "[ Call");
2647
2648 ZoneList<Expression*>* args = node->arguments();
2649
2650 // Check if the function is a variable or a property.
2651 Expression* function = node->expression();
2652 Variable* var = function->AsVariableProxy()->AsVariable();
2653 Property* property = function->AsProperty();
2654
2655 // ------------------------------------------------------------------------
2656 // Fast-case: Use inline caching.
2657 // ---
2658 // According to ECMA-262, section 11.2.3, page 44, the function to call
2659 // must be resolved after the arguments have been evaluated. The IC code
2660 // automatically handles this by loading the arguments before the function
2661 // is resolved in cache misses (this also holds for megamorphic calls).
2662 // ------------------------------------------------------------------------
2663
2664 if (var != NULL && var->is_possibly_eval()) {
2665 // ----------------------------------
2666 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
2667 // ----------------------------------
2668
2669 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2670 // resolve the function we need to call and the receiver of the
2671 // call. Then we call the resolved function using the given
2672 // arguments.
2673
2674 // Prepare the stack for the call to the resolved function.
2675 Load(function);
2676
2677 // Allocate a frame slot for the receiver.
2678 frame_->Push(Factory::undefined_value());
2679 int arg_count = args->length();
2680 for (int i = 0; i < arg_count; i++) {
2681 Load(args->at(i));
2682 }
2683
2684 // Prepare the stack for the call to ResolvePossiblyDirectEval.
2685 frame_->PushElementAt(arg_count + 1);
2686 if (arg_count > 0) {
2687 frame_->PushElementAt(arg_count);
2688 } else {
2689 frame_->Push(Factory::undefined_value());
2690 }
2691
Leon Clarkee46be812010-01-19 14:06:41 +00002692 // Push the receiver.
2693 frame_->PushParameterAt(-1);
2694
Steve Blocka7e24c12009-10-30 11:49:00 +00002695 // Resolve the call.
2696 Result result =
Leon Clarkee46be812010-01-19 14:06:41 +00002697 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00002698
Leon Clarkee46be812010-01-19 14:06:41 +00002699 // The runtime call returns a pair of values in rax (function) and
2700 // rdx (receiver). Touch up the stack with the right values.
2701 Result receiver = allocator_->Allocate(rdx);
2702 frame_->SetElementAt(arg_count + 1, &result);
2703 frame_->SetElementAt(arg_count, &receiver);
2704 receiver.Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00002705
2706 // Call the function.
2707 CodeForSourcePosition(node->position());
2708 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00002709 CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
Steve Blocka7e24c12009-10-30 11:49:00 +00002710 result = frame_->CallStub(&call_function, arg_count + 1);
2711
2712 // Restore the context and overwrite the function on the stack with
2713 // the result.
2714 frame_->RestoreContextRegister();
2715 frame_->SetElementAt(0, &result);
2716
2717 } else if (var != NULL && !var->is_this() && var->is_global()) {
2718 // ----------------------------------
2719 // JavaScript example: 'foo(1, 2, 3)' // foo is global
2720 // ----------------------------------
2721
2722 // Push the name of the function and the receiver onto the stack.
2723 frame_->Push(var->name());
2724
2725 // Pass the global object as the receiver and let the IC stub
2726 // patch the stack to use the global proxy as 'this' in the
2727 // invoked function.
2728 LoadGlobal();
2729
2730 // Load the arguments.
2731 int arg_count = args->length();
2732 for (int i = 0; i < arg_count; i++) {
2733 Load(args->at(i));
2734 }
2735
2736 // Call the IC initialization code.
2737 CodeForSourcePosition(node->position());
2738 Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
2739 arg_count,
2740 loop_nesting());
2741 frame_->RestoreContextRegister();
2742 // Replace the function on the stack with the result.
2743 frame_->SetElementAt(0, &result);
2744
2745 } else if (var != NULL && var->slot() != NULL &&
2746 var->slot()->type() == Slot::LOOKUP) {
2747 // ----------------------------------
2748 // JavaScript example: 'with (obj) foo(1, 2, 3)' // foo is in obj
2749 // ----------------------------------
2750
2751 // Load the function from the context. Sync the frame so we can
2752 // push the arguments directly into place.
2753 frame_->SyncRange(0, frame_->element_count() - 1);
2754 frame_->EmitPush(rsi);
2755 frame_->EmitPush(var->name());
2756 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
2757 // The runtime call returns a pair of values in rax and rdx. The
2758 // looked-up function is in rax and the receiver is in rdx. These
2759 // register references are not ref counted here. We spill them
2760 // eagerly since they are arguments to an inevitable call (and are
2761 // not sharable by the arguments).
2762 ASSERT(!allocator()->is_used(rax));
2763 frame_->EmitPush(rax);
2764
2765 // Load the receiver.
2766 ASSERT(!allocator()->is_used(rdx));
2767 frame_->EmitPush(rdx);
2768
2769 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00002770 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00002771
2772 } else if (property != NULL) {
2773 // Check if the key is a literal string.
2774 Literal* literal = property->key()->AsLiteral();
2775
2776 if (literal != NULL && literal->handle()->IsSymbol()) {
2777 // ------------------------------------------------------------------
2778 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
2779 // ------------------------------------------------------------------
2780
2781 Handle<String> name = Handle<String>::cast(literal->handle());
2782
2783 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
2784 name->IsEqualTo(CStrVector("apply")) &&
2785 args->length() == 2 &&
2786 args->at(1)->AsVariableProxy() != NULL &&
2787 args->at(1)->AsVariableProxy()->IsArguments()) {
2788 // Use the optimized Function.prototype.apply that avoids
2789 // allocating lazily allocated arguments objects.
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002790 CallApplyLazy(property,
Steve Blocka7e24c12009-10-30 11:49:00 +00002791 args->at(0),
2792 args->at(1)->AsVariableProxy(),
2793 node->position());
2794
2795 } else {
2796 // Push the name of the function and the receiver onto the stack.
2797 frame_->Push(name);
2798 Load(property->obj());
2799
2800 // Load the arguments.
2801 int arg_count = args->length();
2802 for (int i = 0; i < arg_count; i++) {
2803 Load(args->at(i));
2804 }
2805
2806 // Call the IC initialization code.
2807 CodeForSourcePosition(node->position());
2808 Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET,
2809 arg_count,
2810 loop_nesting());
2811 frame_->RestoreContextRegister();
2812 // Replace the function on the stack with the result.
2813 frame_->SetElementAt(0, &result);
2814 }
2815
2816 } else {
2817 // -------------------------------------------
2818 // JavaScript example: 'array[index](1, 2, 3)'
2819 // -------------------------------------------
2820
2821 // Load the function to call from the property through a reference.
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002822 Reference ref(this, property);
2823 ref.GetValue();
2824
2825 // Pass receiver to called function.
Steve Blocka7e24c12009-10-30 11:49:00 +00002826 if (property->is_synthetic()) {
2827 // Use global object as receiver.
2828 LoadGlobalReceiver();
2829 } else {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002830 // The reference's size is non-negative.
2831 frame_->PushElementAt(ref.size());
Steve Blocka7e24c12009-10-30 11:49:00 +00002832 }
2833
2834 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00002835 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00002836 }
2837
2838 } else {
2839 // ----------------------------------
2840 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
2841 // ----------------------------------
2842
2843 // Load the function.
2844 Load(function);
2845
2846 // Pass the global proxy as the receiver.
2847 LoadGlobalReceiver();
2848
2849 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00002850 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00002851 }
2852}
2853
2854
2855void CodeGenerator::VisitCallNew(CallNew* node) {
2856 Comment cmnt(masm_, "[ CallNew");
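  // JavaScript example: 'new Foo(1, 2, 3)'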
2857
2858 // According to ECMA-262, section 11.2.2, page 44, the function
2859 // expression in new calls must be evaluated before the
2860 // arguments. This is different from ordinary calls, where the
2861 // actual function to call is resolved after the arguments have been
2862 // evaluated.
2863
2864 // Compute function to call and use the global object as the
2865 // receiver. There is no need to use the global proxy here because
2866 // it will always be replaced with a newly allocated object.
2867 Load(node->expression());
2868 LoadGlobal();
2869
2870 // Push the arguments ("left-to-right") on the stack.
2871 ZoneList<Expression*>* args = node->arguments();
2872 int arg_count = args->length();
2873 for (int i = 0; i < arg_count; i++) {
2874 Load(args->at(i));
2875 }
2876
2877 // Call the construct call builtin that handles allocation and
2878 // constructor invocation.
2879 CodeForSourcePosition(node->position());
2880 Result result = frame_->CallConstructor(arg_count);
2881 // Replace the function on the stack with the result.
2882 frame_->SetElementAt(0, &result);
2883}
2884
2885
2886void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
2887 if (CheckForInlineRuntimeCall(node)) {
2888 return;
2889 }
2890
2891 ZoneList<Expression*>* args = node->arguments();
2892 Comment cmnt(masm_, "[ CallRuntime");
2893 Runtime::Function* function = node->function();
2894
2895 if (function == NULL) {
2896 // Prepare stack for calling JS runtime function.
2897 frame_->Push(node->name());
2898 // Push the builtins object found in the current global object.
2899 Result temp = allocator()->Allocate();
2900 ASSERT(temp.is_valid());
2901 __ movq(temp.reg(), GlobalObject());
2902 __ movq(temp.reg(),
2903 FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
2904 frame_->Push(&temp);
2905 }
2906
2907 // Push the arguments ("left-to-right").
2908 int arg_count = args->length();
2909 for (int i = 0; i < arg_count; i++) {
2910 Load(args->at(i));
2911 }
2912
2913 if (function == NULL) {
2914 // Call the JS runtime function.
2915 Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
2916 arg_count,
2917 loop_nesting_);
2918 frame_->RestoreContextRegister();
2919 frame_->SetElementAt(0, &answer);
2920 } else {
2921 // Call the C runtime function.
2922 Result answer = frame_->CallRuntime(function, arg_count);
2923 frame_->Push(&answer);
2924 }
2925}
2926
2927
2928void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002929 Comment cmnt(masm_, "[ UnaryOperation");
2930
2931 Token::Value op = node->op();
2932
2933 if (op == Token::NOT) {
2934 // Swap the true and false targets but keep the same actual label
2935 // as the fall through.
2936 destination()->Invert();
Steve Blockd0582a62009-12-15 09:54:21 +00002937 LoadCondition(node->expression(), destination(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002938 // Swap the labels back.
2939 destination()->Invert();
2940
2941 } else if (op == Token::DELETE) {
2942 Property* property = node->expression()->AsProperty();
2943 if (property != NULL) {
2944 Load(property->obj());
2945 Load(property->key());
2946 Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
2947 frame_->Push(&answer);
2948 return;
2949 }
2950
2951 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
2952 if (variable != NULL) {
2953 Slot* slot = variable->slot();
2954 if (variable->is_global()) {
2955 LoadGlobal();
2956 frame_->Push(variable->name());
2957 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
2958 CALL_FUNCTION, 2);
2959 frame_->Push(&answer);
2960 return;
2961
2962 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
2963 // Call the runtime to look up the context holding the named
2964 // variable. Sync the virtual frame eagerly so we can push the
2965 // arguments directly into place.
2966 frame_->SyncRange(0, frame_->element_count() - 1);
2967 frame_->EmitPush(rsi);
2968 frame_->EmitPush(variable->name());
2969 Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
2970 ASSERT(context.is_register());
2971 frame_->EmitPush(context.reg());
2972 context.Unuse();
2973 frame_->EmitPush(variable->name());
2974 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
2975 CALL_FUNCTION, 2);
2976 frame_->Push(&answer);
2977 return;
2978 }
2979
2980 // Default: Result of deleting non-global, not dynamically
2981 // introduced variables is false.
2982 frame_->Push(Factory::false_value());
2983
2984 } else {
2985 // Default: Result of deleting expressions is true.
2986 Load(node->expression()); // may have side-effects
2987 frame_->SetElementAt(0, Factory::true_value());
2988 }
2989
2990 } else if (op == Token::TYPEOF) {
2991 // Special case for loading the typeof expression; see comment on
2992 // LoadTypeofExpression().
2993 LoadTypeofExpression(node->expression());
2994 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
2995 frame_->Push(&answer);
2996
2997 } else if (op == Token::VOID) {
2998 Expression* expression = node->expression();
2999 if (expression && expression->AsLiteral() && (
3000 expression->AsLiteral()->IsTrue() ||
3001 expression->AsLiteral()->IsFalse() ||
3002 expression->AsLiteral()->handle()->IsNumber() ||
3003 expression->AsLiteral()->handle()->IsString() ||
3004 expression->AsLiteral()->handle()->IsJSRegExp() ||
3005 expression->AsLiteral()->IsNull())) {
3006 // Omit evaluating the value of the primitive literal.
3007 // It will be discarded anyway, and can have no side effect.
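3007      // (For example, 'void 0' compiles to just pushing the undefined value.)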
3008 frame_->Push(Factory::undefined_value());
3009 } else {
3010 Load(node->expression());
3011 frame_->SetElementAt(0, Factory::undefined_value());
3012 }
3013
3014 } else {
3015 Load(node->expression());
3016 switch (op) {
3017 case Token::NOT:
3018 case Token::DELETE:
3019 case Token::TYPEOF:
3020 UNREACHABLE(); // handled above
3021 break;
3022
3023 case Token::SUB: {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003024 bool overwrite =
3025 (node->expression()->AsBinaryOperation() != NULL &&
3026 node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
Leon Clarkee46be812010-01-19 14:06:41 +00003027 GenericUnaryOpStub stub(Token::SUB, overwrite);
Steve Blocka7e24c12009-10-30 11:49:00 +00003028 // TODO(1222589): remove dependency of TOS being cached inside stub
3029 Result operand = frame_->Pop();
3030 Result answer = frame_->CallStub(&stub, &operand);
3031 frame_->Push(&answer);
3032 break;
3033 }
3034
3035 case Token::BIT_NOT: {
3036 // Smi check.
3037 JumpTarget smi_label;
3038 JumpTarget continue_label;
3039 Result operand = frame_->Pop();
3040 operand.ToRegister();
3041
3042 Condition is_smi = masm_->CheckSmi(operand.reg());
3043 smi_label.Branch(is_smi, &operand);
3044
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003045 frame_->Push(&operand); // undo popping of TOS
3046 Result answer = frame_->InvokeBuiltin(Builtins::BIT_NOT,
3047 CALL_FUNCTION, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00003048 continue_label.Jump(&answer);
3049 smi_label.Bind(&answer);
3050 answer.ToRegister();
3051 frame_->Spill(answer.reg());
3052 __ SmiNot(answer.reg(), answer.reg());
3053 continue_label.Bind(&answer);
3054 frame_->Push(&answer);
3055 break;
3056 }
3057
3058 case Token::ADD: {
3059 // Smi check.
3060 JumpTarget continue_label;
3061 Result operand = frame_->Pop();
3062 operand.ToRegister();
3063 Condition is_smi = masm_->CheckSmi(operand.reg());
3064 continue_label.Branch(is_smi, &operand);
3065 frame_->Push(&operand);
3066 Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
3067 CALL_FUNCTION, 1);
3068
3069 continue_label.Bind(&answer);
3070 frame_->Push(&answer);
3071 break;
3072 }
3073
3074 default:
3075 UNREACHABLE();
3076 }
3077 }
3078}
3079
3080
3081// The value in dst was optimistically incremented or decremented. The
3082// result overflowed or was not smi tagged. Undo the operation, call
3083// into the runtime to convert the argument to a number, and call the
3084// specialized add or subtract stub. The result is left in dst.
3085class DeferredPrefixCountOperation: public DeferredCode {
3086 public:
3087 DeferredPrefixCountOperation(Register dst, bool is_increment)
3088 : dst_(dst), is_increment_(is_increment) {
3089 set_comment("[ DeferredCountOperation");
3090 }
3091
3092 virtual void Generate();
3093
3094 private:
3095 Register dst_;
3096 bool is_increment_;
3097};
3098
3099
3100void DeferredPrefixCountOperation::Generate() {
3101 __ push(dst_);
3102 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
3103 __ push(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00003104 __ Push(Smi::FromInt(1));
Steve Blocka7e24c12009-10-30 11:49:00 +00003105 if (is_increment_) {
3106 __ CallRuntime(Runtime::kNumberAdd, 2);
3107 } else {
3108 __ CallRuntime(Runtime::kNumberSub, 2);
3109 }
3110 if (!dst_.is(rax)) __ movq(dst_, rax);
3111}
3112
3113
3114// The value in dst was optimistically incremented or decremented. The
3115// result overflowed or was not smi tagged. Undo the operation and call
3116// into the runtime to convert the argument to a number. Update the
3117// original value in old. Call the specialized add or subtract stub.
3118// The result is left in dst.
3119class DeferredPostfixCountOperation: public DeferredCode {
3120 public:
3121 DeferredPostfixCountOperation(Register dst, Register old, bool is_increment)
3122 : dst_(dst), old_(old), is_increment_(is_increment) {
3123 set_comment("[ DeferredCountOperation");
3124 }
3125
3126 virtual void Generate();
3127
3128 private:
3129 Register dst_;
3130 Register old_;
3131 bool is_increment_;
3132};
3133
3134
3135void DeferredPostfixCountOperation::Generate() {
3136 __ push(dst_);
3137 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
3138
3139 // Save the result of ToNumber to use as the old value.
3140 __ push(rax);
3141
3142 // Call the runtime for the addition or subtraction.
3143 __ push(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00003144 __ Push(Smi::FromInt(1));
Steve Blocka7e24c12009-10-30 11:49:00 +00003145 if (is_increment_) {
3146 __ CallRuntime(Runtime::kNumberAdd, 2);
3147 } else {
3148 __ CallRuntime(Runtime::kNumberSub, 2);
3149 }
3150 if (!dst_.is(rax)) __ movq(dst_, rax);
3151 __ pop(old_);
3152}
3153
3154
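3154// Count operations (x++, x--, ++x, --x) are compiled with an inline smi
3154// fast path: the new value is computed with SmiAddConstant/SmiSubConstant,
3154// and only when the operand is not a smi or the operation overflows do we
3154// jump to the deferred code above, which falls back to the TO_NUMBER
3154// builtin and the NumberAdd/NumberSub runtime functions.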
3155void CodeGenerator::VisitCountOperation(CountOperation* node) {
3156 Comment cmnt(masm_, "[ CountOperation");
3157
3158 bool is_postfix = node->is_postfix();
3159 bool is_increment = node->op() == Token::INC;
3160
3161 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
3162 bool is_const = (var != NULL && var->mode() == Variable::CONST);
3163
3164 // Postfix operations need a stack slot under the reference to hold
3165  // the old value while the new value is being stored. That way, if
3166  // storing the new value requires a call, the old value is already on
3167  // the frame where it can be spilled.
3168 if (is_postfix) frame_->Push(Smi::FromInt(0));
3169
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003170 { Reference target(this, node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003171 if (target.is_illegal()) {
3172 // Spoof the virtual frame to have the expected height (one higher
3173 // than on entry).
3174 if (!is_postfix) frame_->Push(Smi::FromInt(0));
3175 return;
3176 }
Steve Blockd0582a62009-12-15 09:54:21 +00003177 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00003178
3179 Result new_value = frame_->Pop();
3180 new_value.ToRegister();
3181
3182 Result old_value; // Only allocated in the postfix case.
3183 if (is_postfix) {
3184 // Allocate a temporary to preserve the old value.
3185 old_value = allocator_->Allocate();
3186 ASSERT(old_value.is_valid());
3187 __ movq(old_value.reg(), new_value.reg());
3188 }
3189 // Ensure the new value is writable.
3190 frame_->Spill(new_value.reg());
3191
3192 DeferredCode* deferred = NULL;
3193 if (is_postfix) {
3194 deferred = new DeferredPostfixCountOperation(new_value.reg(),
3195 old_value.reg(),
3196 is_increment);
3197 } else {
3198 deferred = new DeferredPrefixCountOperation(new_value.reg(),
3199 is_increment);
3200 }
3201
Steve Block3ce2e202009-11-05 08:53:23 +00003202 __ JumpIfNotSmi(new_value.reg(), deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00003203 if (is_increment) {
Steve Block3ce2e202009-11-05 08:53:23 +00003204 __ SmiAddConstant(kScratchRegister,
3205 new_value.reg(),
3206 Smi::FromInt(1),
3207 deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00003208 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00003209 __ SmiSubConstant(kScratchRegister,
3210 new_value.reg(),
3211 Smi::FromInt(1),
3212 deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00003213 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003214 __ movq(new_value.reg(), kScratchRegister);
3215 deferred->BindExit();
3216
3217 // Postfix: store the old value in the allocated slot under the
3218 // reference.
3219 if (is_postfix) frame_->SetElementAt(target.size(), &old_value);
3220
3221 frame_->Push(&new_value);
3222 // Non-constant: update the reference.
3223 if (!is_const) target.SetValue(NOT_CONST_INIT);
3224 }
3225
3226 // Postfix: drop the new value and use the old.
3227 if (is_postfix) frame_->Drop();
3228}
3229
3230
3231void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
3232 // TODO(X64): This code was copied verbatim from codegen-ia32.
3233 // Either find a reason to change it or move it to a shared location.
3234
Steve Blocka7e24c12009-10-30 11:49:00 +00003235 Comment cmnt(masm_, "[ BinaryOperation");
3236 Token::Value op = node->op();
3237
3238 // According to ECMA-262 section 11.11, page 58, the binary logical
3239 // operators must yield the result of one of the two expressions
3240 // before any ToBoolean() conversions. This means that the value
3241 // produced by a && or || operator is not necessarily a boolean.
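3241  // For example, ("" || "fallback") evaluates to the string "fallback" and
3241  // (0 && f()) evaluates to the number 0, so the operand value itself must
3241  // be left on the frame when it is the result.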
3242
3243 // NOTE: If the left hand side produces a materialized value (not
3244 // control flow), we force the right hand side to do the same. This
3245 // is necessary because we assume that if we get control flow on the
3246 // last path out of an expression we got it on all paths.
3247 if (op == Token::AND) {
3248 JumpTarget is_true;
3249 ControlDestination dest(&is_true, destination()->false_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00003250 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003251
3252 if (dest.false_was_fall_through()) {
3253 // The current false target was used as the fall-through. If
3254 // there are no dangling jumps to is_true then the left
3255 // subexpression was unconditionally false. Otherwise we have
3256 // paths where we do have to evaluate the right subexpression.
3257 if (is_true.is_linked()) {
3258 // We need to compile the right subexpression. If the jump to
3259 // the current false target was a forward jump then we have a
3260 // valid frame, we have just bound the false target, and we
3261 // have to jump around the code for the right subexpression.
3262 if (has_valid_frame()) {
3263 destination()->false_target()->Unuse();
3264 destination()->false_target()->Jump();
3265 }
3266 is_true.Bind();
3267 // The left subexpression compiled to control flow, so the
3268 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003269 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003270 } else {
3271 // We have actually just jumped to or bound the current false
3272 // target but the current control destination is not marked as
3273 // used.
3274 destination()->Use(false);
3275 }
3276
3277 } else if (dest.is_used()) {
3278 // The left subexpression compiled to control flow (and is_true
3279 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003280 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003281
3282 } else {
3283 // We have a materialized value on the frame, so we exit with
3284 // one on all paths. There are possibly also jumps to is_true
3285 // from nested subexpressions.
3286 JumpTarget pop_and_continue;
3287 JumpTarget exit;
3288
3289 // Avoid popping the result if it converts to 'false' using the
3290 // standard ToBoolean() conversion as described in ECMA-262,
3291 // section 9.2, page 30.
3292 //
3293 // Duplicate the TOS value. The duplicate will be popped by
3294 // ToBoolean.
3295 frame_->Dup();
3296 ControlDestination dest(&pop_and_continue, &exit, true);
3297 ToBoolean(&dest);
3298
3299 // Pop the result of evaluating the first part.
3300 frame_->Drop();
3301
3302 // Compile right side expression.
3303 is_true.Bind();
3304 Load(node->right());
3305
3306 // Exit (always with a materialized value).
3307 exit.Bind();
3308 }
3309
3310 } else if (op == Token::OR) {
3311 JumpTarget is_false;
3312 ControlDestination dest(destination()->true_target(), &is_false, false);
Steve Blockd0582a62009-12-15 09:54:21 +00003313 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003314
3315 if (dest.true_was_fall_through()) {
3316 // The current true target was used as the fall-through. If
3317 // there are no dangling jumps to is_false then the left
3318 // subexpression was unconditionally true. Otherwise we have
3319 // paths where we do have to evaluate the right subexpression.
3320 if (is_false.is_linked()) {
3321 // We need to compile the right subexpression. If the jump to
3322 // the current true target was a forward jump then we have a
3323 // valid frame, we have just bound the true target, and we
3324 // have to jump around the code for the right subexpression.
3325 if (has_valid_frame()) {
3326 destination()->true_target()->Unuse();
3327 destination()->true_target()->Jump();
3328 }
3329 is_false.Bind();
3330 // The left subexpression compiled to control flow, so the
3331 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003332 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003333 } else {
3334 // We have just jumped to or bound the current true target but
3335 // the current control destination is not marked as used.
3336 destination()->Use(true);
3337 }
3338
3339 } else if (dest.is_used()) {
3340 // The left subexpression compiled to control flow (and is_false
3341 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003342 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003343
3344 } else {
3345 // We have a materialized value on the frame, so we exit with
3346 // one on all paths. There are possibly also jumps to is_false
3347 // from nested subexpressions.
3348 JumpTarget pop_and_continue;
3349 JumpTarget exit;
3350
3351 // Avoid popping the result if it converts to 'true' using the
3352 // standard ToBoolean() conversion as described in ECMA-262,
3353 // section 9.2, page 30.
3354 //
3355 // Duplicate the TOS value. The duplicate will be popped by
3356 // ToBoolean.
3357 frame_->Dup();
3358 ControlDestination dest(&exit, &pop_and_continue, false);
3359 ToBoolean(&dest);
3360
3361 // Pop the result of evaluating the first part.
3362 frame_->Drop();
3363
3364 // Compile right side expression.
3365 is_false.Bind();
3366 Load(node->right());
3367
3368 // Exit (always with a materialized value).
3369 exit.Bind();
3370 }
3371
3372 } else {
3373 // NOTE: The code below assumes that the slow cases (calls to runtime)
3374 // never return a constant/immutable object.
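3374    // For example, in (a + 1) + b the left operand is the freshly allocated
3374    // result of a nested binary operation, so the generic stub is allowed to
3374    // reuse (overwrite) that temporary for the outer result instead of
3374    // allocating a new heap number.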
3375 OverwriteMode overwrite_mode = NO_OVERWRITE;
3376 if (node->left()->AsBinaryOperation() != NULL &&
3377 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) {
3378 overwrite_mode = OVERWRITE_LEFT;
3379 } else if (node->right()->AsBinaryOperation() != NULL &&
3380 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) {
3381 overwrite_mode = OVERWRITE_RIGHT;
3382 }
3383
3384 Load(node->left());
3385 Load(node->right());
3386 GenericBinaryOperation(node->op(), node->type(), overwrite_mode);
3387 }
3388}
3389
3390
3391
3392void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
3393 Comment cmnt(masm_, "[ CompareOperation");
3394
3395 // Get the expressions from the node.
3396 Expression* left = node->left();
3397 Expression* right = node->right();
3398 Token::Value op = node->op();
3399 // To make typeof testing for natives implemented in JavaScript really
3400 // efficient, we generate special code for expressions of the form:
3401 // 'typeof <expression> == <string>'.
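3401  // For example, (typeof x == 'number') or (typeof x === 'undefined'); note
3401  // that the string literal must appear on the right-hand side for this
3401  // fast path to apply.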
3402 UnaryOperation* operation = left->AsUnaryOperation();
3403 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
3404 (operation != NULL && operation->op() == Token::TYPEOF) &&
3405 (right->AsLiteral() != NULL &&
3406 right->AsLiteral()->handle()->IsString())) {
3407 Handle<String> check(Handle<String>::cast(right->AsLiteral()->handle()));
3408
3409 // Load the operand and move it to a register.
3410 LoadTypeofExpression(operation->expression());
3411 Result answer = frame_->Pop();
3412 answer.ToRegister();
3413
3414 if (check->Equals(Heap::number_symbol())) {
3415 Condition is_smi = masm_->CheckSmi(answer.reg());
3416 destination()->true_target()->Branch(is_smi);
3417 frame_->Spill(answer.reg());
3418 __ movq(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
3419 __ CompareRoot(answer.reg(), Heap::kHeapNumberMapRootIndex);
3420 answer.Unuse();
3421 destination()->Split(equal);
3422
3423 } else if (check->Equals(Heap::string_symbol())) {
3424 Condition is_smi = masm_->CheckSmi(answer.reg());
3425 destination()->false_target()->Branch(is_smi);
3426
3427 // It can be an undetectable string object.
3428 __ movq(kScratchRegister,
3429 FieldOperand(answer.reg(), HeapObject::kMapOffset));
3430 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3431 Immediate(1 << Map::kIsUndetectable));
3432 destination()->false_target()->Branch(not_zero);
3433 __ CmpInstanceType(kScratchRegister, FIRST_NONSTRING_TYPE);
3434 answer.Unuse();
3435 destination()->Split(below); // Unsigned byte comparison needed.
3436
3437 } else if (check->Equals(Heap::boolean_symbol())) {
3438 __ CompareRoot(answer.reg(), Heap::kTrueValueRootIndex);
3439 destination()->true_target()->Branch(equal);
3440 __ CompareRoot(answer.reg(), Heap::kFalseValueRootIndex);
3441 answer.Unuse();
3442 destination()->Split(equal);
3443
3444 } else if (check->Equals(Heap::undefined_symbol())) {
3445 __ CompareRoot(answer.reg(), Heap::kUndefinedValueRootIndex);
3446 destination()->true_target()->Branch(equal);
3447
3448 Condition is_smi = masm_->CheckSmi(answer.reg());
3449 destination()->false_target()->Branch(is_smi);
3450
3451 // It can be an undetectable object.
3452 __ movq(kScratchRegister,
3453 FieldOperand(answer.reg(), HeapObject::kMapOffset));
3454 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3455 Immediate(1 << Map::kIsUndetectable));
3456 answer.Unuse();
3457 destination()->Split(not_zero);
3458
3459 } else if (check->Equals(Heap::function_symbol())) {
3460 Condition is_smi = masm_->CheckSmi(answer.reg());
3461 destination()->false_target()->Branch(is_smi);
3462 frame_->Spill(answer.reg());
3463 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
Steve Blockd0582a62009-12-15 09:54:21 +00003464 destination()->true_target()->Branch(equal);
3465 // Regular expressions are callable so typeof == 'function'.
3466 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00003467 answer.Unuse();
3468 destination()->Split(equal);
3469
3470 } else if (check->Equals(Heap::object_symbol())) {
3471 Condition is_smi = masm_->CheckSmi(answer.reg());
3472 destination()->false_target()->Branch(is_smi);
3473 __ CompareRoot(answer.reg(), Heap::kNullValueRootIndex);
3474 destination()->true_target()->Branch(equal);
3475
Steve Blockd0582a62009-12-15 09:54:21 +00003476 // Regular expressions are typeof == 'function', not 'object'.
3477 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, kScratchRegister);
3478 destination()->false_target()->Branch(equal);
3479
Steve Blocka7e24c12009-10-30 11:49:00 +00003480 // It can be an undetectable object.
Steve Blocka7e24c12009-10-30 11:49:00 +00003481 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3482 Immediate(1 << Map::kIsUndetectable));
3483 destination()->false_target()->Branch(not_zero);
3484 __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
3485 destination()->false_target()->Branch(below);
3486 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
3487 answer.Unuse();
3488 destination()->Split(below_equal);
3489 } else {
3490 // Uncommon case: typeof testing against a string literal that is
3491 // never returned from the typeof operator.
3492 answer.Unuse();
3493 destination()->Goto(false);
3494 }
3495 return;
3496 }
3497
3498 Condition cc = no_condition;
3499 bool strict = false;
3500 switch (op) {
3501 case Token::EQ_STRICT:
3502 strict = true;
3503 // Fall through
3504 case Token::EQ:
3505 cc = equal;
3506 break;
3507 case Token::LT:
3508 cc = less;
3509 break;
3510 case Token::GT:
3511 cc = greater;
3512 break;
3513 case Token::LTE:
3514 cc = less_equal;
3515 break;
3516 case Token::GTE:
3517 cc = greater_equal;
3518 break;
3519 case Token::IN: {
3520 Load(left);
3521 Load(right);
3522 Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
3523 frame_->Push(&answer); // push the result
3524 return;
3525 }
3526 case Token::INSTANCEOF: {
3527 Load(left);
3528 Load(right);
3529 InstanceofStub stub;
3530 Result answer = frame_->CallStub(&stub, 2);
3531 answer.ToRegister();
3532 __ testq(answer.reg(), answer.reg());
3533 answer.Unuse();
3534 destination()->Split(zero);
3535 return;
3536 }
3537 default:
3538 UNREACHABLE();
3539 }
3540 Load(left);
3541 Load(right);
3542 Comparison(cc, strict, destination());
3543}
3544
3545
3546void CodeGenerator::VisitThisFunction(ThisFunction* node) {
3547 frame_->PushFunction();
3548}
3549
3550
3551void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) {
3552 ASSERT(args->length() == 1);
3553
3554 // ArgumentsAccessStub expects the key in rdx and the formal
3555 // parameter count in rax.
3556 Load(args->at(0));
3557 Result key = frame_->Pop();
3558 // Explicitly create a constant result.
3559 Result count(Handle<Smi>(Smi::FromInt(scope_->num_parameters())));
3560 // Call the shared stub to get to arguments[key].
3561 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3562 Result result = frame_->CallStub(&stub, &key, &count);
3563 frame_->Push(&result);
3564}
3565
3566
3567void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
3568 ASSERT(args->length() == 1);
3569 Load(args->at(0));
3570 Result value = frame_->Pop();
3571 value.ToRegister();
3572 ASSERT(value.is_valid());
3573 Condition is_smi = masm_->CheckSmi(value.reg());
3574 destination()->false_target()->Branch(is_smi);
3575 // It is a heap object - get map.
3576 // Check if the object is a JS array or not.
3577 __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, kScratchRegister);
3578 value.Unuse();
3579 destination()->Split(equal);
3580}
3581
3582
Steve Blockd0582a62009-12-15 09:54:21 +00003583void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
3584 // This generates a fast version of:
3585 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
3586 ASSERT(args->length() == 1);
3587 Load(args->at(0));
3588 Result obj = frame_->Pop();
3589 obj.ToRegister();
3590 Condition is_smi = masm_->CheckSmi(obj.reg());
3591 destination()->false_target()->Branch(is_smi);
3592
3593 __ Move(kScratchRegister, Factory::null_value());
3594 __ cmpq(obj.reg(), kScratchRegister);
3595 destination()->true_target()->Branch(equal);
3596
3597 __ movq(kScratchRegister, FieldOperand(obj.reg(), HeapObject::kMapOffset));
3598 // Undetectable objects behave like undefined when tested with typeof.
3599 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3600 Immediate(1 << Map::kIsUndetectable));
3601 destination()->false_target()->Branch(not_zero);
3602 __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
3603 destination()->false_target()->Branch(less);
3604 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
3605 obj.Unuse();
3606 destination()->Split(less_equal);
3607}
3608
3609
3610void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
3611 // This generates a fast version of:
3612 // (%_ClassOf(arg) === 'Function')
3613 ASSERT(args->length() == 1);
3614 Load(args->at(0));
3615 Result obj = frame_->Pop();
3616 obj.ToRegister();
3617 Condition is_smi = masm_->CheckSmi(obj.reg());
3618 destination()->false_target()->Branch(is_smi);
3619 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, kScratchRegister);
3620 obj.Unuse();
3621 destination()->Split(equal);
3622}
3623
3624
Steve Blocka7e24c12009-10-30 11:49:00 +00003625void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
3626 ASSERT(args->length() == 0);
3627
3628 // Get the frame pointer for the calling frame.
3629 Result fp = allocator()->Allocate();
3630 __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3631
3632 // Skip the arguments adaptor frame if it exists.
3633 Label check_frame_marker;
Steve Block3ce2e202009-11-05 08:53:23 +00003634 __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
3635 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Steve Blocka7e24c12009-10-30 11:49:00 +00003636 __ j(not_equal, &check_frame_marker);
3637 __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
3638
3639 // Check the marker in the calling frame.
3640 __ bind(&check_frame_marker);
Steve Block3ce2e202009-11-05 08:53:23 +00003641 __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
3642 Smi::FromInt(StackFrame::CONSTRUCT));
Steve Blocka7e24c12009-10-30 11:49:00 +00003643 fp.Unuse();
3644 destination()->Split(equal);
3645}
3646
3647
3648void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
3649 ASSERT(args->length() == 0);
3650 // ArgumentsAccessStub takes the parameter count as an input argument
3651  // in register rax. Create a constant result for it.
3652 Result count(Handle<Smi>(Smi::FromInt(scope_->num_parameters())));
3653 // Call the shared stub to get to the arguments.length.
3654 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_LENGTH);
3655 Result result = frame_->CallStub(&stub, &count);
3656 frame_->Push(&result);
3657}
3658
3659
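3659// GenerateFastCharCodeAt takes two arguments, the receiver string and the
3659// index. The fast path below only handles flat strings (sequential strings,
3659// or cons strings whose second half is the empty string) with a positive smi
3659// index that is in range; every other case produces the undefined value,
3659// which triggers the general slow case in the calling code.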
3660void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
3661  Comment cmnt(masm_, "[ GenerateFastCharCodeAt");
3662 ASSERT(args->length() == 2);
3663
3664 Label slow_case;
3665 Label end;
3666 Label not_a_flat_string;
Steve Blocka7e24c12009-10-30 11:49:00 +00003667 Label try_again_with_new_string;
3668 Label ascii_string;
3669 Label got_char_code;
3670
3671 Load(args->at(0));
3672 Load(args->at(1));
3673 Result index = frame_->Pop();
3674 Result object = frame_->Pop();
3675
3676 // Get register rcx to use as shift amount later.
3677 Result shift_amount;
3678 if (object.is_register() && object.reg().is(rcx)) {
3679 Result fresh = allocator_->Allocate();
3680 shift_amount = object;
3681 object = fresh;
3682 __ movq(object.reg(), rcx);
3683 }
3684 if (index.is_register() && index.reg().is(rcx)) {
3685 Result fresh = allocator_->Allocate();
3686 shift_amount = index;
3687 index = fresh;
3688 __ movq(index.reg(), rcx);
3689 }
3690  // There could be references to rcx in the frame. Allocating will
3691 // spill them, otherwise spill explicitly.
3692 if (shift_amount.is_valid()) {
3693 frame_->Spill(rcx);
3694 } else {
3695 shift_amount = allocator()->Allocate(rcx);
3696 }
3697 ASSERT(shift_amount.is_register());
3698 ASSERT(shift_amount.reg().is(rcx));
3699 ASSERT(allocator_->count(rcx) == 1);
3700
3701 // We will mutate the index register and possibly the object register.
3702 // The case where they are somehow the same register is handled
3703 // because we only mutate them in the case where the receiver is a
3704 // heap object and the index is not.
3705 object.ToRegister();
3706 index.ToRegister();
3707 frame_->Spill(object.reg());
3708 frame_->Spill(index.reg());
3709
3710 // We need a single extra temporary register.
3711 Result temp = allocator()->Allocate();
3712 ASSERT(temp.is_valid());
3713
3714 // There is no virtual frame effect from here up to the final result
3715 // push.
3716
3717 // If the receiver is a smi trigger the slow case.
3718 __ JumpIfSmi(object.reg(), &slow_case);
3719
3720 // If the index is negative or non-smi trigger the slow case.
3721 __ JumpIfNotPositiveSmi(index.reg(), &slow_case);
3722
3723 // Untag the index.
3724 __ SmiToInteger32(index.reg(), index.reg());
3725
3726 __ bind(&try_again_with_new_string);
3727 // Fetch the instance type of the receiver into rcx.
3728 __ movq(rcx, FieldOperand(object.reg(), HeapObject::kMapOffset));
3729 __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
3730 // If the receiver is not a string trigger the slow case.
3731 __ testb(rcx, Immediate(kIsNotStringMask));
3732 __ j(not_zero, &slow_case);
3733
Steve Blocka7e24c12009-10-30 11:49:00 +00003734 // Check for index out of range.
Steve Blockd0582a62009-12-15 09:54:21 +00003735 __ cmpl(index.reg(), FieldOperand(object.reg(), String::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003736 __ j(greater_equal, &slow_case);
3737  // Reload the instance type (into the temp register this time).
3738 __ movq(temp.reg(), FieldOperand(object.reg(), HeapObject::kMapOffset));
3739 __ movzxbl(temp.reg(), FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
3740
3741 // We need special handling for non-flat strings.
Steve Blockd0582a62009-12-15 09:54:21 +00003742 ASSERT_EQ(0, kSeqStringTag);
Steve Blocka7e24c12009-10-30 11:49:00 +00003743 __ testb(temp.reg(), Immediate(kStringRepresentationMask));
3744 __ j(not_zero, &not_a_flat_string);
3745 // Check for 1-byte or 2-byte string.
Steve Blockd0582a62009-12-15 09:54:21 +00003746 ASSERT_EQ(0, kTwoByteStringTag);
Steve Blocka7e24c12009-10-30 11:49:00 +00003747 __ testb(temp.reg(), Immediate(kStringEncodingMask));
3748 __ j(not_zero, &ascii_string);
3749
3750 // 2-byte string.
3751 // Load the 2-byte character code into the temp register.
3752 __ movzxwl(temp.reg(), FieldOperand(object.reg(),
3753 index.reg(),
3754 times_2,
3755 SeqTwoByteString::kHeaderSize));
3756 __ jmp(&got_char_code);
3757
3758 // ASCII string.
3759 __ bind(&ascii_string);
3760 // Load the byte into the temp register.
3761 __ movzxbl(temp.reg(), FieldOperand(object.reg(),
3762 index.reg(),
3763 times_1,
3764 SeqAsciiString::kHeaderSize));
3765 __ bind(&got_char_code);
3766 __ Integer32ToSmi(temp.reg(), temp.reg());
3767 __ jmp(&end);
3768
3769 // Handle non-flat strings.
3770 __ bind(&not_a_flat_string);
3771 __ and_(temp.reg(), Immediate(kStringRepresentationMask));
3772 __ cmpb(temp.reg(), Immediate(kConsStringTag));
Steve Blocka7e24c12009-10-30 11:49:00 +00003773 __ j(not_equal, &slow_case);
3774
Steve Blocka7e24c12009-10-30 11:49:00 +00003775 // ConsString.
Steve Blockd0582a62009-12-15 09:54:21 +00003776  // Check that the right hand side is the empty string (i.e. that this is
3777  // really a flat string in a cons string). If that is not the case, we would
3778  // rather go to the runtime system now to flatten the string.
3779 __ movq(temp.reg(), FieldOperand(object.reg(), ConsString::kSecondOffset));
3780 __ CompareRoot(temp.reg(), Heap::kEmptyStringRootIndex);
3781 __ j(not_equal, &slow_case);
3782 // Get the first of the two strings.
Steve Blocka7e24c12009-10-30 11:49:00 +00003783 __ movq(object.reg(), FieldOperand(object.reg(), ConsString::kFirstOffset));
3784 __ jmp(&try_again_with_new_string);
3785
3786 __ bind(&slow_case);
3787 // Move the undefined value into the result register, which will
3788 // trigger the slow case.
3789 __ LoadRoot(temp.reg(), Heap::kUndefinedValueRootIndex);
3790
3791 __ bind(&end);
3792 frame_->Push(&temp);
3793}
3794
3795
3796void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
3797 ASSERT(args->length() == 1);
3798 Load(args->at(0));
3799 Result value = frame_->Pop();
3800 value.ToRegister();
3801 ASSERT(value.is_valid());
3802 Condition positive_smi = masm_->CheckPositiveSmi(value.reg());
3803 value.Unuse();
3804 destination()->Split(positive_smi);
3805}
3806
3807
3808void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
3809 ASSERT(args->length() == 1);
3810 Load(args->at(0));
3811 Result value = frame_->Pop();
3812 value.ToRegister();
3813 ASSERT(value.is_valid());
3814 Condition is_smi = masm_->CheckSmi(value.reg());
3815 value.Unuse();
3816 destination()->Split(is_smi);
3817}
3818
3819
3820void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
3821 // Conditionally generate a log call.
3822 // Args:
3823 // 0 (literal string): The type of logging (corresponds to the flags).
3824 // This is used to determine whether or not to generate the log call.
3825 // 1 (string): Format string. Access the string at argument index 2
3826 // with '%2s' (see Logger::LogRuntime for all the formats).
3827 // 2 (array): Arguments to the format string.
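3827  // A call from the JS natives might look like this (hypothetical values):
3827  //   %_Log('regexp', 'compile,%2s', [flags, index, pattern]);
3827  // where '%2s' formats element 2 of the argument array as a string.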
3828 ASSERT_EQ(args->length(), 3);
3829#ifdef ENABLE_LOGGING_AND_PROFILING
3830 if (ShouldGenerateLog(args->at(0))) {
3831 Load(args->at(1));
3832 Load(args->at(2));
3833 frame_->CallRuntime(Runtime::kLog, 2);
3834 }
3835#endif
3836 // Finally, we're expected to leave a value on the top of the stack.
3837 frame_->Push(Factory::undefined_value());
3838}
3839
3840
3841void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
3842 ASSERT(args->length() == 2);
3843
3844 // Load the two objects into registers and perform the comparison.
3845 Load(args->at(0));
3846 Load(args->at(1));
3847 Result right = frame_->Pop();
3848 Result left = frame_->Pop();
3849 right.ToRegister();
3850 left.ToRegister();
3851 __ cmpq(right.reg(), left.reg());
3852 right.Unuse();
3853 left.Unuse();
3854 destination()->Split(equal);
3855}
3856
3857
3858void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
3859 ASSERT(args->length() == 0);
3860  // The rbp value is aligned, so it carries a smi tag without necessarily
Steve Block3ce2e202009-11-05 08:53:23 +00003861  // being padded like a real smi, so it must not be treated as a smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00003862 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3863 Result rbp_as_smi = allocator_->Allocate();
3864 ASSERT(rbp_as_smi.is_valid());
3865 __ movq(rbp_as_smi.reg(), rbp);
3866 frame_->Push(&rbp_as_smi);
3867}
3868
3869
3870void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) {
3871 ASSERT(args->length() == 0);
3872 frame_->SpillAll();
3873 __ push(rsi);
3874
3875 // Make sure the frame is aligned like the OS expects.
3876 static const int kFrameAlignment = OS::ActivationFrameAlignment();
3877 if (kFrameAlignment > 0) {
3878 ASSERT(IsPowerOf2(kFrameAlignment));
3879    __ movq(rbx, rsp);  // Save in AMD-64 ABI callee-saved register.
3880 __ and_(rsp, Immediate(-kFrameAlignment));
3881 }
3882
3883 // Call V8::RandomPositiveSmi().
3884 __ Call(FUNCTION_ADDR(V8::RandomPositiveSmi), RelocInfo::RUNTIME_ENTRY);
3885
3886 // Restore stack pointer from callee-saved register.
3887 if (kFrameAlignment > 0) {
3888 __ movq(rsp, rbx);
3889 }
3890
3891 __ pop(rsi);
3892 Result result = allocator_->Allocate(rax);
3893 frame_->Push(&result);
3894}
3895
3896
Leon Clarkee46be812010-01-19 14:06:41 +00003897void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
3898 ASSERT_EQ(args->length(), 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003899
Leon Clarkee46be812010-01-19 14:06:41 +00003900 // Load the arguments on the stack and call the runtime system.
Steve Blocka7e24c12009-10-30 11:49:00 +00003901 Load(args->at(0));
Leon Clarkee46be812010-01-19 14:06:41 +00003902 Load(args->at(1));
3903 Load(args->at(2));
3904 Load(args->at(3));
3905 Result result = frame_->CallRuntime(Runtime::kRegExpExec, 4);
3906 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00003907}
3908
3909
Steve Blockd0582a62009-12-15 09:54:21 +00003910void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
3911 ASSERT_EQ(2, args->length());
3912
3913 Load(args->at(0));
3914 Load(args->at(1));
3915
Leon Clarkee46be812010-01-19 14:06:41 +00003916 StringAddStub stub(NO_STRING_ADD_FLAGS);
3917 Result answer = frame_->CallStub(&stub, 2);
3918 frame_->Push(&answer);
3919}
3920
3921
3922void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
3923 ASSERT_EQ(3, args->length());
3924
3925 Load(args->at(0));
3926 Load(args->at(1));
3927 Load(args->at(2));
3928
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003929 Result answer = frame_->CallRuntime(Runtime::kSubString, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00003930 frame_->Push(&answer);
3931}
3932
3933
3934void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
3935 ASSERT_EQ(2, args->length());
3936
3937 Load(args->at(0));
3938 Load(args->at(1));
3939
3940 StringCompareStub stub;
3941 Result answer = frame_->CallStub(&stub, 2);
Steve Blockd0582a62009-12-15 09:54:21 +00003942 frame_->Push(&answer);
3943}
3944
3945
Steve Blocka7e24c12009-10-30 11:49:00 +00003946void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
3947 ASSERT(args->length() == 1);
3948 JumpTarget leave, null, function, non_function_constructor;
3949 Load(args->at(0)); // Load the object.
3950 Result obj = frame_->Pop();
3951 obj.ToRegister();
3952 frame_->Spill(obj.reg());
3953
3954 // If the object is a smi, we return null.
3955 Condition is_smi = masm_->CheckSmi(obj.reg());
3956 null.Branch(is_smi);
3957
3958 // Check that the object is a JS object but take special care of JS
3959 // functions to make sure they have 'Function' as their class.
3960
3961 __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
3962 null.Branch(below);
3963
3964 // As long as JS_FUNCTION_TYPE is the last instance type and it is
3965 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
3966 // LAST_JS_OBJECT_TYPE.
3967 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
3968 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
3969 __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE);
3970 function.Branch(equal);
3971
3972 // Check if the constructor in the map is a function.
3973 __ movq(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
3974 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, kScratchRegister);
3975 non_function_constructor.Branch(not_equal);
3976
3977 // The obj register now contains the constructor function. Grab the
3978 // instance class name from there.
3979 __ movq(obj.reg(),
3980 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
3981 __ movq(obj.reg(),
3982 FieldOperand(obj.reg(),
3983 SharedFunctionInfo::kInstanceClassNameOffset));
3984 frame_->Push(&obj);
3985 leave.Jump();
3986
3987 // Functions have class 'Function'.
3988 function.Bind();
3989 frame_->Push(Factory::function_class_symbol());
3990 leave.Jump();
3991
3992 // Objects with a non-function constructor have class 'Object'.
3993 non_function_constructor.Bind();
3994 frame_->Push(Factory::Object_symbol());
3995 leave.Jump();
3996
3997 // Non-JS objects have class null.
3998 null.Bind();
3999 frame_->Push(Factory::null_value());
4000
4001 // All done.
4002 leave.Bind();
4003}
4004
4005
4006void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
4007 ASSERT(args->length() == 2);
4008 JumpTarget leave;
4009 Load(args->at(0)); // Load the object.
4010 Load(args->at(1)); // Load the value.
4011 Result value = frame_->Pop();
4012 Result object = frame_->Pop();
4013 value.ToRegister();
4014 object.ToRegister();
4015
4016 // if (object->IsSmi()) return value.
4017 Condition is_smi = masm_->CheckSmi(object.reg());
4018 leave.Branch(is_smi, &value);
4019
4020 // It is a heap object - get its map.
4021 Result scratch = allocator_->Allocate();
4022 ASSERT(scratch.is_valid());
4023 // if (!object->IsJSValue()) return value.
4024 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
4025 leave.Branch(not_equal, &value);
4026
4027 // Store the value.
4028 __ movq(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
4029 // Update the write barrier. Save the value as it will be
4030 // overwritten by the write barrier code and is needed afterward.
4031 Result duplicate_value = allocator_->Allocate();
4032 ASSERT(duplicate_value.is_valid());
4033 __ movq(duplicate_value.reg(), value.reg());
4034 // The object register is also overwritten by the write barrier and
4035 // possibly aliased in the frame.
4036 frame_->Spill(object.reg());
4037 __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
4038 scratch.reg());
4039 object.Unuse();
4040 scratch.Unuse();
4041 duplicate_value.Unuse();
4042
4043 // Leave.
4044 leave.Bind(&value);
4045 frame_->Push(&value);
4046}
4047
4048
4049void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
4050 ASSERT(args->length() == 1);
4051 JumpTarget leave;
4052 Load(args->at(0)); // Load the object.
4053 frame_->Dup();
4054 Result object = frame_->Pop();
4055 object.ToRegister();
4056 ASSERT(object.is_valid());
4057 // if (object->IsSmi()) return object.
4058 Condition is_smi = masm_->CheckSmi(object.reg());
4059 leave.Branch(is_smi);
4060 // It is a heap object - get map.
4061 Result temp = allocator()->Allocate();
4062 ASSERT(temp.is_valid());
4063 // if (!object->IsJSValue()) return object.
4064 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
4065 leave.Branch(not_equal);
4066 __ movq(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
4067 object.Unuse();
4068 frame_->SetElementAt(0, &temp);
4069 leave.Bind();
4070}
4071
4072
4073// -----------------------------------------------------------------------------
4074// CodeGenerator implementation of Expressions
4075
Steve Blockd0582a62009-12-15 09:54:21 +00004076void CodeGenerator::LoadAndSpill(Expression* expression) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004077 // TODO(x64): No architecture specific code. Move to shared location.
4078 ASSERT(in_spilled_code());
4079 set_in_spilled_code(false);
Steve Blockd0582a62009-12-15 09:54:21 +00004080 Load(expression);
Steve Blocka7e24c12009-10-30 11:49:00 +00004081 frame_->SpillAll();
4082 set_in_spilled_code(true);
4083}
4084
4085
Steve Blockd0582a62009-12-15 09:54:21 +00004086void CodeGenerator::Load(Expression* expr) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004087#ifdef DEBUG
4088 int original_height = frame_->height();
4089#endif
4090 ASSERT(!in_spilled_code());
4091 JumpTarget true_target;
4092 JumpTarget false_target;
4093 ControlDestination dest(&true_target, &false_target, true);
Steve Blockd0582a62009-12-15 09:54:21 +00004094 LoadCondition(expr, &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00004095
4096 if (dest.false_was_fall_through()) {
4097 // The false target was just bound.
4098 JumpTarget loaded;
4099 frame_->Push(Factory::false_value());
4100 // There may be dangling jumps to the true target.
4101 if (true_target.is_linked()) {
4102 loaded.Jump();
4103 true_target.Bind();
4104 frame_->Push(Factory::true_value());
4105 loaded.Bind();
4106 }
4107
4108 } else if (dest.is_used()) {
4109 // There is true, and possibly false, control flow (with true as
4110 // the fall through).
4111 JumpTarget loaded;
4112 frame_->Push(Factory::true_value());
4113 if (false_target.is_linked()) {
4114 loaded.Jump();
4115 false_target.Bind();
4116 frame_->Push(Factory::false_value());
4117 loaded.Bind();
4118 }
4119
4120 } else {
4121 // We have a valid value on top of the frame, but we still may
4122 // have dangling jumps to the true and false targets from nested
4123 // subexpressions (eg, the left subexpressions of the
4124 // short-circuited boolean operators).
4125 ASSERT(has_valid_frame());
4126 if (true_target.is_linked() || false_target.is_linked()) {
4127 JumpTarget loaded;
4128 loaded.Jump(); // Don't lose the current TOS.
4129 if (true_target.is_linked()) {
4130 true_target.Bind();
4131 frame_->Push(Factory::true_value());
4132 if (false_target.is_linked()) {
4133 loaded.Jump();
4134 }
4135 }
4136 if (false_target.is_linked()) {
4137 false_target.Bind();
4138 frame_->Push(Factory::false_value());
4139 }
4140 loaded.Bind();
4141 }
4142 }
4143
4144 ASSERT(has_valid_frame());
4145 ASSERT(frame_->height() == original_height + 1);
4146}
4147
4148
4149// Emit code to load the value of an expression to the top of the
4150// frame. If the expression is boolean-valued it may be compiled (or
4151// partially compiled) into control flow to the control destination.
4152// If force_control is true, control flow is forced.
4153void CodeGenerator::LoadCondition(Expression* x,
Steve Blocka7e24c12009-10-30 11:49:00 +00004154 ControlDestination* dest,
4155 bool force_control) {
4156 ASSERT(!in_spilled_code());
4157 int original_height = frame_->height();
4158
Steve Blockd0582a62009-12-15 09:54:21 +00004159 { CodeGenState new_state(this, dest);
Steve Blocka7e24c12009-10-30 11:49:00 +00004160 Visit(x);
4161
4162 // If we hit a stack overflow, we may not have actually visited
4163 // the expression. In that case, we ensure that we have a
4164 // valid-looking frame state because we will continue to generate
4165 // code as we unwind the C++ stack.
4166 //
4167 // It's possible to have both a stack overflow and a valid frame
4168 // state (eg, a subexpression overflowed, visiting it returned
4169 // with a dummied frame state, and visiting this expression
4170 // returned with a normal-looking state).
4171 if (HasStackOverflow() &&
4172 !dest->is_used() &&
4173 frame_->height() == original_height) {
4174 dest->Goto(true);
4175 }
4176 }
4177
4178 if (force_control && !dest->is_used()) {
4179 // Convert the TOS value into flow to the control destination.
4180 // TODO(X64): Make control flow to control destinations work.
4181 ToBoolean(dest);
4182 }
4183
4184 ASSERT(!(force_control && !dest->is_used()));
4185 ASSERT(dest->is_used() || frame_->height() == original_height + 1);
4186}
4187
4188
Steve Blocka7e24c12009-10-30 11:49:00 +00004189// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
4190// convert it to a boolean in the condition code register or jump to
4191// 'false_target'/'true_target' as appropriate.
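4191// The fast checks below handle false, true, undefined and smis inline (the
4191// smi zero is false, any other smi is true); all remaining values, e.g.
4191// null, heap numbers and strings, are passed to ToBooleanStub, which returns
4191// a non-zero value exactly when the input converts to true.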
4192void CodeGenerator::ToBoolean(ControlDestination* dest) {
4193 Comment cmnt(masm_, "[ ToBoolean");
4194
4195 // The value to convert should be popped from the frame.
4196 Result value = frame_->Pop();
4197 value.ToRegister();
4198 // Fast case checks.
4199
4200 // 'false' => false.
4201 __ CompareRoot(value.reg(), Heap::kFalseValueRootIndex);
4202 dest->false_target()->Branch(equal);
4203
4204 // 'true' => true.
4205 __ CompareRoot(value.reg(), Heap::kTrueValueRootIndex);
4206 dest->true_target()->Branch(equal);
4207
4208 // 'undefined' => false.
4209 __ CompareRoot(value.reg(), Heap::kUndefinedValueRootIndex);
4210 dest->false_target()->Branch(equal);
4211
4212 // Smi => false iff zero.
Steve Block3ce2e202009-11-05 08:53:23 +00004213 __ SmiCompare(value.reg(), Smi::FromInt(0));
4214 dest->false_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00004215 Condition is_smi = masm_->CheckSmi(value.reg());
4216 dest->true_target()->Branch(is_smi);
4217
4218 // Call the stub for all other cases.
4219 frame_->Push(&value); // Undo the Pop() from above.
4220 ToBooleanStub stub;
4221 Result temp = frame_->CallStub(&stub, 1);
4222 // Convert the result to a condition code.
4223 __ testq(temp.reg(), temp.reg());
4224 temp.Unuse();
4225 dest->Split(not_equal);
4226}
4227
4228
4229void CodeGenerator::LoadUnsafeSmi(Register target, Handle<Object> value) {
4230 UNIMPLEMENTED();
4231 // TODO(X64): Implement security policy for loads of smis.
4232}
4233
4234
4235bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
4236 return false;
4237}
4238
4239//------------------------------------------------------------------------------
4240// CodeGenerator implementation of variables, lookups, and stores.
4241
Leon Clarkeeab96aa2010-01-27 16:31:12 +00004242Reference::Reference(CodeGenerator* cgen, Expression* expression)
4243 : cgen_(cgen), expression_(expression), type_(ILLEGAL) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004244 cgen->LoadReference(this);
4245}
4246
4247
4248Reference::~Reference() {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00004249 cgen_->UnloadReference(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00004250}
4251
4252
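4252// LoadReference classifies the target of an assignment or count operation:
4252// named properties ('o.x') and global variables become NAMED references,
4252// computed properties ('o[key]') become KEYED references, and local or
4252// context variables become SLOT references; anything else is a reference
4252// error at runtime.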
4253void CodeGenerator::LoadReference(Reference* ref) {
4254 // References are loaded from both spilled and unspilled code. Set the
4255 // state to unspilled to allow that (and explicitly spill after
4256 // construction at the construction sites).
4257 bool was_in_spilled_code = in_spilled_code_;
4258 in_spilled_code_ = false;
4259
4260 Comment cmnt(masm_, "[ LoadReference");
4261 Expression* e = ref->expression();
4262 Property* property = e->AsProperty();
4263 Variable* var = e->AsVariableProxy()->AsVariable();
4264
4265 if (property != NULL) {
4266 // The expression is either a property or a variable proxy that rewrites
4267 // to a property.
4268 Load(property->obj());
Leon Clarkee46be812010-01-19 14:06:41 +00004269 if (property->key()->IsPropertyName()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004270 ref->set_type(Reference::NAMED);
4271 } else {
4272 Load(property->key());
4273 ref->set_type(Reference::KEYED);
4274 }
4275 } else if (var != NULL) {
4276 // The expression is a variable proxy that does not rewrite to a
4277 // property. Global variables are treated as named property references.
4278 if (var->is_global()) {
4279 LoadGlobal();
4280 ref->set_type(Reference::NAMED);
4281 } else {
4282 ASSERT(var->slot() != NULL);
4283 ref->set_type(Reference::SLOT);
4284 }
4285 } else {
4286 // Anything else is a runtime error.
4287 Load(e);
4288 frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
4289 }
4290
4291 in_spilled_code_ = was_in_spilled_code;
4292}
4293
4294
4295void CodeGenerator::UnloadReference(Reference* ref) {
4296 // Pop a reference from the stack while preserving TOS.
4297 Comment cmnt(masm_, "[ UnloadReference");
4298 frame_->Nip(ref->size());
4299}
4300
4301
4302Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
4303 // Currently, this assertion will fail if we try to assign to
4304 // a constant variable that is constant because it is read-only
4305 // (such as the variable referring to a named function expression).
4306 // We need to implement assignments to read-only variables.
4307 // Ideally, we should do this during AST generation (by converting
4308 // such assignments into expression statements); however, in general
4309 // we may not be able to make the decision until past AST generation,
4310 // that is when the entire program is known.
4311 ASSERT(slot != NULL);
4312 int index = slot->index();
4313 switch (slot->type()) {
4314 case Slot::PARAMETER:
4315 return frame_->ParameterAt(index);
4316
4317 case Slot::LOCAL:
4318 return frame_->LocalAt(index);
4319
4320 case Slot::CONTEXT: {
4321 // Follow the context chain if necessary.
4322 ASSERT(!tmp.is(rsi)); // do not overwrite context register
4323 Register context = rsi;
4324 int chain_length = scope()->ContextChainLength(slot->var()->scope());
4325 for (int i = 0; i < chain_length; i++) {
4326 // Load the closure.
4327 // (All contexts, even 'with' contexts, have a closure,
4328 // and it is the same for all contexts inside a function.
4329 // There is no need to go to the function context first.)
4330 __ movq(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
4331 // Load the function context (which is the incoming, outer context).
4332 __ movq(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
4333 context = tmp;
4334 }
4335 // We may have a 'with' context now. Get the function context.
4336      // (In fact this mov may never be needed, since the scope analysis
4337      // may not permit a direct context access in this case and thus we are
4338      // always at a function context. However, it is safe to dereference
4339      // because the function context of a function context is itself. Before
4340 // deleting this mov we should try to create a counter-example first,
4341 // though...)
4342 __ movq(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
4343 return ContextOperand(tmp, index);
4344 }
4345
4346 default:
4347 UNREACHABLE();
4348 return Operand(rsp, 0);
4349 }
4350}
4351
4352
4353Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
4354 Result tmp,
4355 JumpTarget* slow) {
4356 ASSERT(slot->type() == Slot::CONTEXT);
4357 ASSERT(tmp.is_register());
4358 Register context = rsi;
4359
4360 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
4361 if (s->num_heap_slots() > 0) {
4362 if (s->calls_eval()) {
4363 // Check that extension is NULL.
4364 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
4365 Immediate(0));
4366 slow->Branch(not_equal, not_taken);
4367 }
4368 __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
4369 __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
4370 context = tmp.reg();
4371 }
4372 }
4373 // Check that last extension is NULL.
4374 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
4375 slow->Branch(not_equal, not_taken);
4376 __ movq(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
4377 return ContextOperand(tmp.reg(), slot->index());
4378}
4379
4380
4381void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
4382 if (slot->type() == Slot::LOOKUP) {
4383 ASSERT(slot->var()->is_dynamic());
4384
4385 JumpTarget slow;
4386 JumpTarget done;
4387 Result value;
4388
4389 // Generate fast-case code for variables that might be shadowed by
4390 // eval-introduced variables. Eval is used a lot without
4391 // introducing variables. In those cases, we do not want to
4392 // perform a runtime call for all variables in the scope
4393 // containing the eval.
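4393    // For example, in
4393    //   function f() { eval(src); return x; }
4393    // 'x' may be shadowed by a variable that the eval introduces. The fast
4393    // case below checks that no enclosing context has acquired an extension
4393    // object and, only if that holds, loads 'x' through the normal global
4393    // (or local) path; otherwise it falls through to the runtime lookup.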
4394 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
4395 value = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, &slow);
4396 // If there was no control flow to slow, we can exit early.
4397 if (!slow.is_linked()) {
4398 frame_->Push(&value);
4399 return;
4400 }
4401
4402 done.Jump(&value);
4403
4404 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
4405 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
4406 // Only generate the fast case for locals that rewrite to slots.
4407 // This rules out argument loads.
4408 if (potential_slot != NULL) {
4409 // Allocate a fresh register to use as a temp in
4410 // ContextSlotOperandCheckExtensions and to hold the result
4411 // value.
4412 value = allocator_->Allocate();
4413 ASSERT(value.is_valid());
4414 __ movq(value.reg(),
4415 ContextSlotOperandCheckExtensions(potential_slot,
4416 value,
4417 &slow));
4418 if (potential_slot->var()->mode() == Variable::CONST) {
4419 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
4420 done.Branch(not_equal, &value);
4421 __ LoadRoot(value.reg(), Heap::kUndefinedValueRootIndex);
4422 }
4423 // There is always control flow to slow from
4424 // ContextSlotOperandCheckExtensions so we have to jump around
4425 // it.
4426 done.Jump(&value);
4427 }
4428 }
4429
4430 slow.Bind();
4431 // A runtime call is inevitable. We eagerly sync frame elements
4432 // to memory so that we can push the arguments directly into place
4433 // on top of the frame.
4434 frame_->SyncRange(0, frame_->element_count() - 1);
4435 frame_->EmitPush(rsi);
4436 __ movq(kScratchRegister, slot->var()->name(), RelocInfo::EMBEDDED_OBJECT);
4437 frame_->EmitPush(kScratchRegister);
4438 if (typeof_state == INSIDE_TYPEOF) {
4439 value =
4440 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4441 } else {
4442 value = frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
4443 }
4444
4445 done.Bind(&value);
4446 frame_->Push(&value);
4447
4448 } else if (slot->var()->mode() == Variable::CONST) {
4449 // Const slots may contain 'the hole' value (the constant hasn't been
4450 // initialized yet) which needs to be converted into the 'undefined'
4451 // value.
4452 //
4453 // We currently spill the virtual frame because constants use the
4454 // potentially unsafe direct-frame access of SlotOperand.
4455 VirtualFrame::SpilledScope spilled_scope;
4456 Comment cmnt(masm_, "[ Load const");
4457 JumpTarget exit;
4458 __ movq(rcx, SlotOperand(slot, rcx));
4459 __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex);
4460 exit.Branch(not_equal);
4461 __ LoadRoot(rcx, Heap::kUndefinedValueRootIndex);
4462 exit.Bind();
4463 frame_->EmitPush(rcx);
4464
4465 } else if (slot->type() == Slot::PARAMETER) {
4466 frame_->PushParameterAt(slot->index());
4467
4468 } else if (slot->type() == Slot::LOCAL) {
4469 frame_->PushLocalAt(slot->index());
4470
4471 } else {
4472 // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
4473 // here.
4474 //
4475 // The use of SlotOperand below is safe for an unspilled frame
4476 // because it will always be a context slot.
4477 ASSERT(slot->type() == Slot::CONTEXT);
4478 Result temp = allocator_->Allocate();
4479 ASSERT(temp.is_valid());
4480 __ movq(temp.reg(), SlotOperand(slot, temp.reg()));
4481 frame_->Push(&temp);
4482 }
4483}
4484
4485
4486void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
4487 TypeofState state) {
4488 LoadFromSlot(slot, state);
4489
4490 // Bail out quickly if we're not using lazy arguments allocation.
4491 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
4492
4493 // ... or if the slot isn't a non-parameter arguments slot.
4494 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
4495
4496 // Pop the loaded value from the stack.
4497 Result value = frame_->Pop();
4498
4499 // If the loaded value is a constant, we know whether the arguments
4500 // object has been lazily allocated yet.
4501 if (value.is_constant()) {
4502 if (value.handle()->IsTheHole()) {
4503 Result arguments = StoreArgumentsObject(false);
4504 frame_->Push(&arguments);
4505 } else {
4506 frame_->Push(&value);
4507 }
4508 return;
4509 }
4510
4511 // The loaded value is in a register. If it is the sentinel that
4512 // indicates that we haven't loaded the arguments object yet, we
4513 // need to do it now.
4514 JumpTarget exit;
4515 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
4516 frame_->Push(&value);
4517 exit.Branch(not_equal);
4518 Result arguments = StoreArgumentsObject(false);
4519 frame_->SetElementAt(0, &arguments);
4520 exit.Bind();
4521}
4522
4523
4524void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
4525 if (slot->type() == Slot::LOOKUP) {
4526 ASSERT(slot->var()->is_dynamic());
4527
4528 // For now, just do a runtime call. Since the call is inevitable,
4529 // we eagerly sync the virtual frame so we can directly push the
4530 // arguments into place.
4531 frame_->SyncRange(0, frame_->element_count() - 1);
4532
4533 frame_->EmitPush(rsi);
4534 frame_->EmitPush(slot->var()->name());
4535
4536 Result value;
4537 if (init_state == CONST_INIT) {
4538 // Same as the case for a normal store, but ignores attribute
4539 // (e.g. READ_ONLY) of context slot so that we can initialize const
4540 // properties (introduced via eval("const foo = (some expr);")). Also,
4541 // uses the current function context instead of the top context.
4542 //
4543 // Note that we must declare 'foo' upon entry into eval(), via a
4544 // context slot declaration, but we cannot initialize it at the same
4545 // time, because the const declaration may be at the end of the eval
4546 // code (sigh...) and the const variable may have been used before
4547 // (where its value is 'undefined'). Thus, we can only do the
4548 // initialization when we actually encounter the expression and when
4549 // the expression operands are defined and valid, which is why we
4550 // split it into two operations: declaration of the context slot followed
4551 // by initialization.
4552 value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
4553 } else {
4554 value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
4555 }
4556 // Storing a variable must keep the (new) value on the expression
4557 // stack. This is necessary for compiling chained assignment
4558 // expressions.
4559 frame_->Push(&value);
4560 } else {
4561 ASSERT(!slot->var()->is_dynamic());
4562
4563 JumpTarget exit;
4564 if (init_state == CONST_INIT) {
4565 ASSERT(slot->var()->mode() == Variable::CONST);
4566 // Only the first const initialization must be executed (the slot
4567 // still contains 'the hole' value). When the assignment is executed,
4568 // the code is identical to a normal store (see below).
4569 //
4570 // We spill the frame in the code below because the direct-frame
4571 // access of SlotOperand is potentially unsafe with an unspilled
4572 // frame.
4573 VirtualFrame::SpilledScope spilled_scope;
4574 Comment cmnt(masm_, "[ Init const");
4575 __ movq(rcx, SlotOperand(slot, rcx));
4576 __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex);
4577 exit.Branch(not_equal);
4578 }
4579
4580 // We must execute the store. Storing a variable must keep the (new)
4581 // value on the stack. This is necessary for compiling assignment
4582 // expressions.
4583 //
4584 // Note: We will reach here even with slot->var()->mode() ==
4585 // Variable::CONST because of const declarations which will initialize
4586 // consts to 'the hole' value and by doing so, end up calling this code.
4587 if (slot->type() == Slot::PARAMETER) {
4588 frame_->StoreToParameterAt(slot->index());
4589 } else if (slot->type() == Slot::LOCAL) {
4590 frame_->StoreToLocalAt(slot->index());
4591 } else {
4592 // The other slot types (LOOKUP and GLOBAL) cannot reach here.
4593 //
4594 // The use of SlotOperand below is safe for an unspilled frame
4595 // because the slot is a context slot.
4596 ASSERT(slot->type() == Slot::CONTEXT);
4597 frame_->Dup();
4598 Result value = frame_->Pop();
4599 value.ToRegister();
4600 Result start = allocator_->Allocate();
4601 ASSERT(start.is_valid());
4602 __ movq(SlotOperand(slot, start.reg()), value.reg());
4603 // RecordWrite may destroy the value registers.
4604 //
4605 // TODO(204): Avoid actually spilling when the value is not
4606 // needed (probably the common case).
4607 frame_->Spill(value.reg());
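// A context is laid out like a FixedArray, so the slot lives at the array
// header plus index * kPointerSize; RecordWrite needs this byte offset.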
4608 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
4609 Result temp = allocator_->Allocate();
4610 ASSERT(temp.is_valid());
4611 __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
4612 // The results start, value, and temp are unused by going out of
4613 // scope.
4614 }
4615
4616 exit.Bind();
4617 }
4618}
4619
4620
4621Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
4622 Slot* slot,
4623 TypeofState typeof_state,
4624 JumpTarget* slow) {
4625 // Check that no extension objects have been created by calls to
4626 // eval from the current scope to the global scope.
4627 Register context = rsi;
4628 Result tmp = allocator_->Allocate();
4629 ASSERT(tmp.is_valid()); // All non-reserved registers were available.
4630
4631 Scope* s = scope();
4632 while (s != NULL) {
4633 if (s->num_heap_slots() > 0) {
4634 if (s->calls_eval()) {
4635 // Check that extension is NULL.
4636 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
4637 Immediate(0));
4638 slow->Branch(not_equal, not_taken);
4639 }
4640 // Load next context in chain.
4641 __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
4642 __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
4643 context = tmp.reg();
4644 }
4645 // If no outer scope calls eval, we do not need to check more
4646 // context extensions. If we have reached an eval scope, we check
4647 // all extensions from this point.
4648 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
4649 s = s->outer_scope();
4650 }
4651
4652 if (s->is_eval_scope()) {
4653 // Loop up the context chain. There is no frame effect so it is
4654 // safe to use raw labels here.
4655 Label next, fast;
4656 if (!context.is(tmp.reg())) {
4657 __ movq(tmp.reg(), context);
4658 }
4659 // Load map for comparison into register, outside loop.
4660 __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
4661 __ bind(&next);
4662 // Terminate at global context.
4663 __ cmpq(kScratchRegister, FieldOperand(tmp.reg(), HeapObject::kMapOffset));
4664 __ j(equal, &fast);
4665 // Check that extension is NULL.
4666 __ cmpq(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
4667 slow->Branch(not_equal);
4668 // Load next context in chain.
4669 __ movq(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
4670 __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
4671 __ jmp(&next);
4672 __ bind(&fast);
4673 }
4674 tmp.Unuse();
4675
4676 // All extension objects were empty and it is safe to use a global
4677 // load IC call.
4678 LoadGlobal();
4679 frame_->Push(slot->var()->name());
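// Inside typeof, use a plain CODE_TARGET load so that an undeclared
// global does not cause a reference error (see LoadTypeofExpression).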
4680 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
4681 ? RelocInfo::CODE_TARGET
4682 : RelocInfo::CODE_TARGET_CONTEXT;
4683 Result answer = frame_->CallLoadIC(mode);
4684 // A test rax instruction following the call signals that the inobject
4685 // property case was inlined. Ensure that there is not a test rax
4686 // instruction here.
4687 masm_->nop();
4688 // Discard the global object. The result is in answer.
4689 frame_->Drop();
4690 return answer;
4691}
4692
4693
4694void CodeGenerator::LoadGlobal() {
4695 if (in_spilled_code()) {
4696 frame_->EmitPush(GlobalObject());
4697 } else {
4698 Result temp = allocator_->Allocate();
4699 __ movq(temp.reg(), GlobalObject());
4700 frame_->Push(&temp);
4701 }
4702}
4703
4704
4705void CodeGenerator::LoadGlobalReceiver() {
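// Push the global object's "global receiver", the object used as the
// receiver for calls that do not provide an explicit one.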
4706 Result temp = allocator_->Allocate();
4707 Register reg = temp.reg();
4708 __ movq(reg, GlobalObject());
4709 __ movq(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
4710 frame_->Push(&temp);
4711}
4712
4713
4714ArgumentsAllocationMode CodeGenerator::ArgumentsMode() const {
4715 if (scope_->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
4716 ASSERT(scope_->arguments_shadow() != NULL);
4717 // We don't want to do lazy arguments allocation for functions that
4718 // have heap-allocated contexts, because it interferes with the
4719 // uninitialized const tracking in the context objects.
4720 return (scope_->num_heap_slots() > 0)
4721 ? EAGER_ARGUMENTS_ALLOCATION
4722 : LAZY_ARGUMENTS_ALLOCATION;
4723}
4724
4725
4726Result CodeGenerator::StoreArgumentsObject(bool initial) {
4727 ArgumentsAllocationMode mode = ArgumentsMode();
4728 ASSERT(mode != NO_ARGUMENTS_ALLOCATION);
4729
4730 Comment cmnt(masm_, "[ store arguments object");
4731 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
4732 // When using lazy arguments allocation, we store the hole value
4733 // as a sentinel indicating that the arguments object hasn't been
4734 // allocated yet.
4735 frame_->Push(Factory::the_hole_value());
4736 } else {
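// Allocate the arguments object eagerly. The stub expects the function,
// the address of the receiver slot, and the number of parameters.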
4737 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
4738 frame_->PushFunction();
4739 frame_->PushReceiverSlotAddress();
4740 frame_->Push(Smi::FromInt(scope_->num_parameters()));
4741 Result result = frame_->CallStub(&stub, 3);
4742 frame_->Push(&result);
4743 }
4744
4745
4746 Variable* arguments = scope_->arguments()->var();
4747 Variable* shadow = scope_->arguments_shadow()->var();
4748 ASSERT(arguments != NULL && arguments->slot() != NULL);
4749 ASSERT(shadow != NULL && shadow->slot() != NULL);
4750 JumpTarget done;
4751 bool skip_arguments = false;
4752 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
4753 // We have to skip storing into the arguments slot if it has
4754 // already been written to. This can happen if a function
4755 // has a local variable named 'arguments'.
4756 LoadFromSlot(scope_->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
4757 Result probe = frame_->Pop();
4758 if (probe.is_constant()) {
4759 // We have to skip updating the arguments object if it has been
4760 // assigned a proper value.
4761 skip_arguments = !probe.handle()->IsTheHole();
4762 } else {
4763 __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex);
4764 probe.Unuse();
4765 done.Branch(not_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00004766 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004767 }
Leon Clarkee46be812010-01-19 14:06:41 +00004768 if (!skip_arguments) {
4769 StoreToSlot(arguments->slot(), NOT_CONST_INIT);
4770 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
4771 }
4772 StoreToSlot(shadow->slot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00004773 return frame_->Pop();
4774}
4775
4776
Steve Blockd0582a62009-12-15 09:54:21 +00004777void CodeGenerator::LoadTypeofExpression(Expression* expr) {
4778 // Special handling of identifiers as subexpressions of typeof.
4779 Variable* variable = expr->AsVariableProxy()->AsVariable();
Steve Blocka7e24c12009-10-30 11:49:00 +00004780 if (variable != NULL && !variable->is_this() && variable->is_global()) {
Steve Blockd0582a62009-12-15 09:54:21 +00004781 // For a global variable we build the property reference
4782 // <global>.<variable> and perform a (regular non-contextual) property
4783 // load to make sure we do not get reference errors.
Steve Blocka7e24c12009-10-30 11:49:00 +00004784 Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
4785 Literal key(variable->name());
Steve Blocka7e24c12009-10-30 11:49:00 +00004786 Property property(&global, &key, RelocInfo::kNoPosition);
Steve Blockd0582a62009-12-15 09:54:21 +00004787 Reference ref(this, &property);
4788 ref.GetValue();
4789 } else if (variable != NULL && variable->slot() != NULL) {
4790 // For a variable that rewrites to a slot, we signal it is the immediate
4791 // subexpression of a typeof.
4792 LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00004793 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00004794 // Anything else can be handled normally.
4795 Load(expr);
Steve Blocka7e24c12009-10-30 11:49:00 +00004796 }
4797}
4798
4799
4800void CodeGenerator::Comparison(Condition cc,
4801 bool strict,
4802 ControlDestination* dest) {
4803 // Strict only makes sense for equality comparisons.
4804 ASSERT(!strict || cc == equal);
4805
4806 Result left_side;
4807 Result right_side;
4808 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
4809 if (cc == greater || cc == less_equal) {
4810 cc = ReverseCondition(cc);
4811 left_side = frame_->Pop();
4812 right_side = frame_->Pop();
4813 } else {
4814 right_side = frame_->Pop();
4815 left_side = frame_->Pop();
4816 }
4817 ASSERT(cc == less || cc == equal || cc == greater_equal);
4818
4819 // If either side is a constant smi, optimize the comparison.
4820 bool left_side_constant_smi =
4821 left_side.is_constant() && left_side.handle()->IsSmi();
4822 bool right_side_constant_smi =
4823 right_side.is_constant() && right_side.handle()->IsSmi();
4824 bool left_side_constant_null =
4825 left_side.is_constant() && left_side.handle()->IsNull();
4826 bool right_side_constant_null =
4827 right_side.is_constant() && right_side.handle()->IsNull();
4828
4829 if (left_side_constant_smi || right_side_constant_smi) {
4830 if (left_side_constant_smi && right_side_constant_smi) {
4831 // Trivial case, comparing two constants.
4832 int left_value = Smi::cast(*left_side.handle())->value();
4833 int right_value = Smi::cast(*right_side.handle())->value();
4834 switch (cc) {
4835 case less:
4836 dest->Goto(left_value < right_value);
4837 break;
4838 case equal:
4839 dest->Goto(left_value == right_value);
4840 break;
4841 case greater_equal:
4842 dest->Goto(left_value >= right_value);
4843 break;
4844 default:
4845 UNREACHABLE();
4846 }
4847 } else { // Only one side is a constant Smi.
4848 // If left side is a constant Smi, reverse the operands.
4849 // Since one side is a constant Smi, conversion order does not matter.
4850 if (left_side_constant_smi) {
4851 Result temp = left_side;
4852 left_side = right_side;
4853 right_side = temp;
4854 cc = ReverseCondition(cc);
4855 // This may reintroduce greater or less_equal as the value of cc.
4856 // CompareStub and the inline code both support all values of cc.
4857 }
4858 // Implement comparison against a constant Smi, inlining the case
4859 // where both sides are Smis.
4860 left_side.ToRegister();
4861
4862 // Here we split control flow to the stub call and inlined cases
4863 // before finally splitting it to the control destination. We use
4864 // a jump target and branching to duplicate the virtual frame at
4865 // the first split. We manually handle the off-frame references
4866 // by reconstituting them on the non-fall-through path.
4867 JumpTarget is_smi;
4868 Register left_reg = left_side.reg();
4869 Handle<Object> right_val = right_side.handle();
4870
4871 Condition left_is_smi = masm_->CheckSmi(left_side.reg());
4872 is_smi.Branch(left_is_smi);
4873
4874 // Set up and call the compare stub.
4875 CompareStub stub(cc, strict);
4876 Result result = frame_->CallStub(&stub, &left_side, &right_side);
4877 result.ToRegister();
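// The stub result is a smi that is negative, zero, or positive, so
// testing it sets the flags needed to branch on cc.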
4878 __ testq(result.reg(), result.reg());
4879 result.Unuse();
4880 dest->true_target()->Branch(cc);
4881 dest->false_target()->Jump();
4882
4883 is_smi.Bind();
4884 left_side = Result(left_reg);
4885 right_side = Result(right_val);
4886 // Test smi equality and comparison by signed int comparison.
4887 // Both sides are smis, so SmiCompare can be used with the constant.
Steve Block3ce2e202009-11-05 08:53:23 +00004888 __ SmiCompare(left_side.reg(), Smi::cast(*right_side.handle()));
Steve Blocka7e24c12009-10-30 11:49:00 +00004889 left_side.Unuse();
4890 right_side.Unuse();
4891 dest->Split(cc);
4892 }
4893 } else if (cc == equal &&
4894 (left_side_constant_null || right_side_constant_null)) {
4895 // To make null checks efficient, we check if either the left side or
4896 // the right side is the constant 'null'.
4897 // If so, we optimize the code by inlining a null check instead of
4898 // calling the (very) general runtime routine for checking equality.
4899 Result operand = left_side_constant_null ? right_side : left_side;
4900 right_side.Unuse();
4901 left_side.Unuse();
4902 operand.ToRegister();
4903 __ CompareRoot(operand.reg(), Heap::kNullValueRootIndex);
4904 if (strict) {
4905 operand.Unuse();
4906 dest->Split(equal);
4907 } else {
4908 // The 'null' value is only equal to 'undefined' if using non-strict
4909 // comparisons.
4910 dest->true_target()->Branch(equal);
4911 __ CompareRoot(operand.reg(), Heap::kUndefinedValueRootIndex);
4912 dest->true_target()->Branch(equal);
4913 Condition is_smi = masm_->CheckSmi(operand.reg());
4914 dest->false_target()->Branch(is_smi);
4915
4916 // It can be an undetectable object.
4917 // Use a scratch register in preference to spilling operand.reg().
4918 Result temp = allocator()->Allocate();
4919 ASSERT(temp.is_valid());
4920 __ movq(temp.reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00004921 FieldOperand(operand.reg(), HeapObject::kMapOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004922 __ testb(FieldOperand(temp.reg(), Map::kBitFieldOffset),
4923 Immediate(1 << Map::kIsUndetectable));
4924 temp.Unuse();
4925 operand.Unuse();
4926 dest->Split(not_zero);
4927 }
4928 } else { // Neither side is a constant Smi or null.
4929 // If either side is a non-smi constant, skip the smi check.
4930 bool known_non_smi =
4931 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
4932 (right_side.is_constant() && !right_side.handle()->IsSmi());
4933 left_side.ToRegister();
4934 right_side.ToRegister();
4935
4936 if (known_non_smi) {
4937 // When non-smi, call out to the compare stub.
4938 CompareStub stub(cc, strict);
4939 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
4940 // The result is a Smi, which is negative, zero, or positive.
Steve Block3ce2e202009-11-05 08:53:23 +00004941 __ SmiTest(answer.reg()); // Sets both zero and sign flag.
Steve Blocka7e24c12009-10-30 11:49:00 +00004942 answer.Unuse();
4943 dest->Split(cc);
4944 } else {
4945 // Here we split control flow to the stub call and inlined cases
4946 // before finally splitting it to the control destination. We use
4947 // a jump target and branching to duplicate the virtual frame at
4948 // the first split. We manually handle the off-frame references
4949 // by reconstituting them on the non-fall-through path.
4950 JumpTarget is_smi;
4951 Register left_reg = left_side.reg();
4952 Register right_reg = right_side.reg();
4953
4954 Condition both_smi = masm_->CheckBothSmi(left_reg, right_reg);
4955 is_smi.Branch(both_smi);
4956 // When non-smi, call out to the compare stub.
4957 CompareStub stub(cc, strict);
4958 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
Steve Block3ce2e202009-11-05 08:53:23 +00004959 __ SmiTest(answer.reg()); // Sets both zero and sign flags.
Steve Blocka7e24c12009-10-30 11:49:00 +00004960 answer.Unuse();
4961 dest->true_target()->Branch(cc);
4962 dest->false_target()->Jump();
4963
4964 is_smi.Bind();
4965 left_side = Result(left_reg);
4966 right_side = Result(right_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00004967 __ SmiCompare(left_side.reg(), right_side.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00004968 right_side.Unuse();
4969 left_side.Unuse();
4970 dest->Split(cc);
4971 }
4972 }
4973}
4974
4975
4976class DeferredInlineBinaryOperation: public DeferredCode {
4977 public:
4978 DeferredInlineBinaryOperation(Token::Value op,
4979 Register dst,
4980 Register left,
4981 Register right,
4982 OverwriteMode mode)
4983 : op_(op), dst_(dst), left_(left), right_(right), mode_(mode) {
4984 set_comment("[ DeferredInlineBinaryOperation");
4985 }
4986
4987 virtual void Generate();
4988
4989 private:
4990 Token::Value op_;
4991 Register dst_;
4992 Register left_;
4993 Register right_;
4994 OverwriteMode mode_;
4995};
4996
4997
4998void DeferredInlineBinaryOperation::Generate() {
Steve Blockd0582a62009-12-15 09:54:21 +00004999 GenericBinaryOpStub stub(op_, mode_, NO_SMI_CODE_IN_STUB);
5000 stub.GenerateCall(masm_, left_, right_);
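// The stub leaves its result in rax; copy it to the destination register
// if they differ.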
Steve Blocka7e24c12009-10-30 11:49:00 +00005001 if (!dst_.is(rax)) __ movq(dst_, rax);
5002}
5003
5004
5005void CodeGenerator::GenericBinaryOperation(Token::Value op,
Leon Clarkee46be812010-01-19 14:06:41 +00005006 StaticType* type,
Steve Blocka7e24c12009-10-30 11:49:00 +00005007 OverwriteMode overwrite_mode) {
5008 Comment cmnt(masm_, "[ BinaryOperation");
5009 Comment cmnt_token(masm_, Token::String(op));
5010
5011 if (op == Token::COMMA) {
5012 // Simply discard left value.
5013 frame_->Nip(1);
5014 return;
5015 }
5016
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005017 // Set the flags based on the operation, type and loop nesting level.
5018 GenericBinaryFlags flags;
5019 switch (op) {
5020 case Token::BIT_OR:
5021 case Token::BIT_AND:
5022 case Token::BIT_XOR:
5023 case Token::SHL:
5024 case Token::SHR:
5025 case Token::SAR:
5026 // Bit operations always assume they likely operate on Smis. Still only
5027 // generate the inline Smi check code if this operation is part of a loop.
5028 flags = (loop_nesting() > 0)
5029 ? NO_SMI_CODE_IN_STUB
5030 : NO_GENERIC_BINARY_FLAGS;
5031 break;
5032
5033 default:
5034 // By default only inline the Smi check code for likely smis if this
5035 // operation is part of a loop.
5036 flags = ((loop_nesting() > 0) && type->IsLikelySmi())
5037 ? NO_SMI_CODE_IN_STUB
5038 : NO_GENERIC_BINARY_FLAGS;
5039 break;
5040 }
5041
Steve Blocka7e24c12009-10-30 11:49:00 +00005042 Result right = frame_->Pop();
5043 Result left = frame_->Pop();
5044
5045 if (op == Token::ADD) {
5046 bool left_is_string = left.is_constant() && left.handle()->IsString();
5047 bool right_is_string = right.is_constant() && right.handle()->IsString();
5048 if (left_is_string || right_is_string) {
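// At least one operand is a constant string, so ADD is a string
// concatenation; dispatch on which side is known to be a string.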
5049 frame_->Push(&left);
5050 frame_->Push(&right);
5051 Result answer;
5052 if (left_is_string) {
5053 if (right_is_string) {
5054 // TODO(lrn): if both are constant strings
5055 // -- do a compile time cons, if allocation during codegen is allowed.
5056 answer = frame_->CallRuntime(Runtime::kStringAdd, 2);
5057 } else {
5058 answer =
5059 frame_->InvokeBuiltin(Builtins::STRING_ADD_LEFT, CALL_FUNCTION, 2);
5060 }
5061 } else if (right_is_string) {
5062 answer =
5063 frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2);
5064 }
5065 frame_->Push(&answer);
5066 return;
5067 }
5068 // Neither operand is known to be a string.
5069 }
5070
5071 bool left_is_smi = left.is_constant() && left.handle()->IsSmi();
5072 bool left_is_non_smi = left.is_constant() && !left.handle()->IsSmi();
5073 bool right_is_smi = right.is_constant() && right.handle()->IsSmi();
5074 bool right_is_non_smi = right.is_constant() && !right.handle()->IsSmi();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005075 bool generate_no_smi_code = false; // No smi code at all, inline or in stub.
Steve Blocka7e24c12009-10-30 11:49:00 +00005076
5077 if (left_is_smi && right_is_smi) {
5078 // Compute the constant result at compile time, and leave it on the frame.
5079 int left_int = Smi::cast(*left.handle())->value();
5080 int right_int = Smi::cast(*right.handle())->value();
5081 if (FoldConstantSmis(op, left_int, right_int)) return;
5082 }
5083
5084 if (left_is_non_smi || right_is_non_smi) {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005085 // Set flag so that we go straight to the slow case, with no smi code.
5086 generate_no_smi_code = true;
5087 } else if (right_is_smi) {
5088 ConstantSmiBinaryOperation(op, &left, right.handle(),
5089 type, false, overwrite_mode);
5090 return;
5091 } else if (left_is_smi) {
5092 ConstantSmiBinaryOperation(op, &right, left.handle(),
5093 type, true, overwrite_mode);
5094 return;
5095 }
5096
5097 if ((flags & NO_SMI_CODE_IN_STUB) != 0 && !generate_no_smi_code) {
5098 LikelySmiBinaryOperation(op, &left, &right, overwrite_mode);
5099 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +00005100 frame_->Push(&left);
5101 frame_->Push(&right);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005102 // If we know the arguments aren't smis, use the binary operation stub
5103 // that does not check for the fast smi case.
5104 // The same stub is used for NO_SMI_CODE and SMI_CODE_INLINED.
5105 if (generate_no_smi_code) {
5106 flags = NO_SMI_CODE_IN_STUB;
Steve Blocka7e24c12009-10-30 11:49:00 +00005107 }
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005108 GenericBinaryOpStub stub(op, overwrite_mode, flags);
5109 Result answer = frame_->CallStub(&stub, 2);
5110 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00005111 }
5112}
5113
5114
5115// Emit a LoadIC call to get the value from receiver and leave it in
5116// dst. The receiver register is restored after the call.
5117class DeferredReferenceGetNamedValue: public DeferredCode {
5118 public:
5119 DeferredReferenceGetNamedValue(Register dst,
5120 Register receiver,
5121 Handle<String> name)
5122 : dst_(dst), receiver_(receiver), name_(name) {
5123 set_comment("[ DeferredReferenceGetNamedValue");
5124 }
5125
5126 virtual void Generate();
5127
5128 Label* patch_site() { return &patch_site_; }
5129
5130 private:
5131 Label patch_site_;
5132 Register dst_;
5133 Register receiver_;
5134 Handle<String> name_;
5135};
5136
5137
5138void DeferredReferenceGetNamedValue::Generate() {
5139 __ push(receiver_);
5140 __ Move(rcx, name_);
5141 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
5142 __ Call(ic, RelocInfo::CODE_TARGET);
5143 // The call must be followed by a test rax instruction to indicate
5144 // that the inobject property case was inlined.
5145 //
5146 // Store the delta to the map check instruction here in the test
5147 // instruction. Use masm_-> instead of the __ macro since the
5148 // latter can't return a value.
5149 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
5150 // Here we use masm_-> instead of the __ macro because this is the
5151 // instruction that gets patched and coverage code gets in the way.
5152 masm_->testl(rax, Immediate(-delta_to_patch_site));
5153 __ IncrementCounter(&Counters::named_load_inline_miss, 1);
5154
5155 if (!dst_.is(rax)) __ movq(dst_, rax);
5156 __ pop(receiver_);
5157}
5158
5159
5160void DeferredInlineSmiAdd::Generate() {
Steve Blockd0582a62009-12-15 09:54:21 +00005161 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, NO_SMI_CODE_IN_STUB);
5162 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00005163 if (!dst_.is(rax)) __ movq(dst_, rax);
5164}
5165
5166
5167void DeferredInlineSmiAddReversed::Generate() {
Steve Blockd0582a62009-12-15 09:54:21 +00005168 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, NO_SMI_CODE_IN_STUB);
5169 igostub.GenerateCall(masm_, value_, dst_);
Steve Blocka7e24c12009-10-30 11:49:00 +00005170 if (!dst_.is(rax)) __ movq(dst_, rax);
5171}
5172
5173
5174void DeferredInlineSmiSub::Generate() {
Steve Blockd0582a62009-12-15 09:54:21 +00005175 GenericBinaryOpStub igostub(Token::SUB, overwrite_mode_, NO_SMI_CODE_IN_STUB);
5176 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00005177 if (!dst_.is(rax)) __ movq(dst_, rax);
5178}
5179
5180
5181void DeferredInlineSmiOperation::Generate() {
Steve Blocka7e24c12009-10-30 11:49:00 +00005182 // For mod we don't generate all the Smi code inline.
5183 GenericBinaryOpStub stub(
5184 op_,
5185 overwrite_mode_,
Steve Blockd0582a62009-12-15 09:54:21 +00005186 (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB);
5187 stub.GenerateCall(masm_, src_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00005188 if (!dst_.is(rax)) __ movq(dst_, rax);
5189}
5190
5191
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005192void CodeGenerator::ConstantSmiBinaryOperation(Token::Value op,
5193 Result* operand,
5194 Handle<Object> value,
5195 StaticType* type,
5196 bool reversed,
5197 OverwriteMode overwrite_mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005198 // NOTE: This is an attempt to inline (a bit) more of the code for
5199 // some possible smi operations (like + and -) when (at least) one
5200 // of the operands is a constant smi.
5201 // Consumes the argument "operand".
5202
5203 // TODO(199): Optimize some special cases of operations involving a
5204 // smi literal (multiply by 2, shift by 0, etc.).
5205 if (IsUnsafeSmi(value)) {
5206 Result unsafe_operand(value);
5207 if (reversed) {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005208 LikelySmiBinaryOperation(op, &unsafe_operand, operand,
Steve Blocka7e24c12009-10-30 11:49:00 +00005209 overwrite_mode);
5210 } else {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005211 LikelySmiBinaryOperation(op, operand, &unsafe_operand,
Steve Blocka7e24c12009-10-30 11:49:00 +00005212 overwrite_mode);
5213 }
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005214 ASSERT(!operand->is_valid());
5215 return;
Steve Blocka7e24c12009-10-30 11:49:00 +00005216 }
5217
5218 // Get the literal value.
5219 Smi* smi_value = Smi::cast(*value);
5220 int int_value = smi_value->value();
5221
5222 switch (op) {
5223 case Token::ADD: {
5224 operand->ToRegister();
5225 frame_->Spill(operand->reg());
5226 DeferredCode* deferred = NULL;
5227 if (reversed) {
5228 deferred = new DeferredInlineSmiAddReversed(operand->reg(),
5229 smi_value,
5230 overwrite_mode);
5231 } else {
5232 deferred = new DeferredInlineSmiAdd(operand->reg(),
5233 smi_value,
5234 overwrite_mode);
5235 }
5236 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
5237 __ SmiAddConstant(operand->reg(),
5238 operand->reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00005239 smi_value,
Steve Blocka7e24c12009-10-30 11:49:00 +00005240 deferred->entry_label());
5241 deferred->BindExit();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005242 frame_->Push(operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00005243 break;
5244 }
5245
5246 case Token::SUB: {
5247 if (reversed) {
5248 Result constant_operand(value);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005249 LikelySmiBinaryOperation(op, &constant_operand, operand,
5250 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00005251 } else {
5252 operand->ToRegister();
5253 frame_->Spill(operand->reg());
5254 DeferredCode* deferred = new DeferredInlineSmiSub(operand->reg(),
5255 smi_value,
5256 overwrite_mode);
5257 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
5258 // A smi currently fits in a 32-bit Immediate.
5259 __ SmiSubConstant(operand->reg(),
5260 operand->reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00005261 smi_value,
Steve Blocka7e24c12009-10-30 11:49:00 +00005262 deferred->entry_label());
5263 deferred->BindExit();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005264 frame_->Push(operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00005265 }
5266 break;
5267 }
5268
5269 case Token::SAR:
5270 if (reversed) {
5271 Result constant_operand(value);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005272 LikelySmiBinaryOperation(op, &constant_operand, operand,
5273 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00005274 } else {
5275 // Only the least significant 5 bits of the shift value are used.
5276 // In the slow case, this masking is done inside the runtime call.
5277 int shift_value = int_value & 0x1f;
5278 operand->ToRegister();
5279 frame_->Spill(operand->reg());
5280 DeferredInlineSmiOperation* deferred =
5281 new DeferredInlineSmiOperation(op,
5282 operand->reg(),
5283 operand->reg(),
5284 smi_value,
5285 overwrite_mode);
5286 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
5287 __ SmiShiftArithmeticRightConstant(operand->reg(),
5288 operand->reg(),
5289 shift_value);
5290 deferred->BindExit();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005291 frame_->Push(operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00005292 }
5293 break;
5294
5295 case Token::SHR:
5296 if (reversed) {
5297 Result constant_operand(value);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005298 LikelySmiBinaryOperation(op, &constant_operand, operand,
5299 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00005300 } else {
5301 // Only the least significant 5 bits of the shift value are used.
5302 // In the slow case, this masking is done inside the runtime call.
5303 int shift_value = int_value & 0x1f;
5304 operand->ToRegister();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005305 Result answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00005306 ASSERT(answer.is_valid());
5307 DeferredInlineSmiOperation* deferred =
5308 new DeferredInlineSmiOperation(op,
5309 answer.reg(),
5310 operand->reg(),
5311 smi_value,
5312 overwrite_mode);
5313 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
5314 __ SmiShiftLogicalRightConstant(answer.reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00005315 operand->reg(),
5316 shift_value,
5317 deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00005318 deferred->BindExit();
5319 operand->Unuse();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005320 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00005321 }
5322 break;
5323
5324 case Token::SHL:
5325 if (reversed) {
5326 Result constant_operand(value);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005327 LikelySmiBinaryOperation(op, &constant_operand, operand,
5328 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00005329 } else {
5330 // Only the least significant 5 bits of the shift value are used.
5331 // In the slow case, this masking is done inside the runtime call.
5332 int shift_value = int_value & 0x1f;
5333 operand->ToRegister();
5334 if (shift_value == 0) {
5335 // Spill operand so it can be overwritten in the slow case.
5336 frame_->Spill(operand->reg());
5337 DeferredInlineSmiOperation* deferred =
5338 new DeferredInlineSmiOperation(op,
5339 operand->reg(),
5340 operand->reg(),
5341 smi_value,
5342 overwrite_mode);
5343 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
5344 deferred->BindExit();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005345 frame_->Push(operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00005346 } else {
5347 // Use a fresh temporary for nonzero shift values.
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005348 Result answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00005349 ASSERT(answer.is_valid());
5350 DeferredInlineSmiOperation* deferred =
5351 new DeferredInlineSmiOperation(op,
5352 answer.reg(),
5353 operand->reg(),
5354 smi_value,
5355 overwrite_mode);
5356 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
5357 __ SmiShiftLeftConstant(answer.reg(),
5358 operand->reg(),
5359 shift_value,
5360 deferred->entry_label());
5361 deferred->BindExit();
5362 operand->Unuse();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005363 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00005364 }
5365 }
5366 break;
5367
5368 case Token::BIT_OR:
5369 case Token::BIT_XOR:
5370 case Token::BIT_AND: {
5371 operand->ToRegister();
5372 frame_->Spill(operand->reg());
5373 if (reversed) {
5374 // Bit operations with a constant smi are commutative.
5375 // We can swap left and right operands with no problem.
5376 // Swap left and right overwrite modes. 0->0, 1->2, 2->1.
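// The formula below realizes this mapping: (2 * 0) % 3 == 0,
// (2 * 1) % 3 == 2, and (2 * 2) % 3 == 1.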
5377 overwrite_mode = static_cast<OverwriteMode>((2 * overwrite_mode) % 3);
5378 }
5379 DeferredCode* deferred = new DeferredInlineSmiOperation(op,
5380 operand->reg(),
5381 operand->reg(),
5382 smi_value,
5383 overwrite_mode);
5384 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
5385 if (op == Token::BIT_AND) {
Steve Block3ce2e202009-11-05 08:53:23 +00005386 __ SmiAndConstant(operand->reg(), operand->reg(), smi_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00005387 } else if (op == Token::BIT_XOR) {
5388 if (int_value != 0) {
Steve Block3ce2e202009-11-05 08:53:23 +00005389 __ SmiXorConstant(operand->reg(), operand->reg(), smi_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00005390 }
5391 } else {
5392 ASSERT(op == Token::BIT_OR);
5393 if (int_value != 0) {
Steve Block3ce2e202009-11-05 08:53:23 +00005394 __ SmiOrConstant(operand->reg(), operand->reg(), smi_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00005395 }
5396 }
5397 deferred->BindExit();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005398 frame_->Push(operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00005399 break;
5400 }
5401
5402 // Generate inline code for mod of powers of 2 and negative powers of 2.
5403 case Token::MOD:
5404 if (!reversed &&
5405 int_value != 0 &&
5406 (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
5407 operand->ToRegister();
5408 frame_->Spill(operand->reg());
Steve Block3ce2e202009-11-05 08:53:23 +00005409 DeferredCode* deferred =
5410 new DeferredInlineSmiOperation(op,
5411 operand->reg(),
5412 operand->reg(),
5413 smi_value,
5414 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00005415 // Check for negative or non-Smi left hand side.
5416 __ JumpIfNotPositiveSmi(operand->reg(), deferred->entry_label());
5417 if (int_value < 0) int_value = -int_value;
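// For a non-negative smi x, x % d depends only on |d|; x % 1 is 0,
// and for a power of two the remainder is x & (|d| - 1).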
5418 if (int_value == 1) {
Steve Block3ce2e202009-11-05 08:53:23 +00005419 __ Move(operand->reg(), Smi::FromInt(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00005420 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00005421 __ SmiAndConstant(operand->reg(),
5422 operand->reg(),
5423 Smi::FromInt(int_value - 1));
Steve Blocka7e24c12009-10-30 11:49:00 +00005424 }
5425 deferred->BindExit();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005426 frame_->Push(operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00005427 break; // This break only applies if we generated code for MOD.
5428 }
5429 // Fall through if we did not find a power of 2 on the right hand side!
5430 // The next case must be the default.
5431
5432 default: {
5433 Result constant_operand(value);
5434 if (reversed) {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005435 LikelySmiBinaryOperation(op, &constant_operand, operand,
5436 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00005437 } else {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005438 LikelySmiBinaryOperation(op, operand, &constant_operand,
5439 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00005440 }
5441 break;
5442 }
5443 }
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005444 ASSERT(!operand->is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00005445}
5446
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005447void CodeGenerator::LikelySmiBinaryOperation(Token::Value op,
5448 Result* left,
5449 Result* right,
5450 OverwriteMode overwrite_mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005451 // Special handling of div and mod because they use fixed registers.
5452 if (op == Token::DIV || op == Token::MOD) {
5453 // We need rax as the quotient register, rdx as the remainder
5454 // register, neither left nor right in rax or rdx, and left copied
5455 // to rax.
5456 Result quotient;
5457 Result remainder;
5458 bool left_is_in_rax = false;
5459 // Step 1: get rax for quotient.
5460 if ((left->is_register() && left->reg().is(rax)) ||
5461 (right->is_register() && right->reg().is(rax))) {
5462 // One or both is in rax. Use a fresh non-rdx register for
5463 // them.
5464 Result fresh = allocator_->Allocate();
5465 ASSERT(fresh.is_valid());
5466 if (fresh.reg().is(rdx)) {
5467 remainder = fresh;
5468 fresh = allocator_->Allocate();
5469 ASSERT(fresh.is_valid());
5470 }
5471 if (left->is_register() && left->reg().is(rax)) {
5472 quotient = *left;
5473 *left = fresh;
5474 left_is_in_rax = true;
5475 }
5476 if (right->is_register() && right->reg().is(rax)) {
5477 quotient = *right;
5478 *right = fresh;
5479 }
5480 __ movq(fresh.reg(), rax);
5481 } else {
5482 // Neither left nor right is in rax.
5483 quotient = allocator_->Allocate(rax);
5484 }
5485 ASSERT(quotient.is_register() && quotient.reg().is(rax));
5486 ASSERT(!(left->is_register() && left->reg().is(rax)));
5487 ASSERT(!(right->is_register() && right->reg().is(rax)));
5488
5489 // Step 2: get rdx for remainder if necessary.
5490 if (!remainder.is_valid()) {
5491 if ((left->is_register() && left->reg().is(rdx)) ||
5492 (right->is_register() && right->reg().is(rdx))) {
5493 Result fresh = allocator_->Allocate();
5494 ASSERT(fresh.is_valid());
5495 if (left->is_register() && left->reg().is(rdx)) {
5496 remainder = *left;
5497 *left = fresh;
5498 }
5499 if (right->is_register() && right->reg().is(rdx)) {
5500 remainder = *right;
5501 *right = fresh;
5502 }
5503 __ movq(fresh.reg(), rdx);
5504 } else {
5505 // Neither left nor right is in rdx.
5506 remainder = allocator_->Allocate(rdx);
5507 }
5508 }
5509 ASSERT(remainder.is_register() && remainder.reg().is(rdx));
5510 ASSERT(!(left->is_register() && left->reg().is(rdx)));
5511 ASSERT(!(right->is_register() && right->reg().is(rdx)));
5512
5513 left->ToRegister();
5514 right->ToRegister();
5515 frame_->Spill(rax);
5516 frame_->Spill(rdx);
5517
5518 // Check that left and right are smi tagged.
5519 DeferredInlineBinaryOperation* deferred =
5520 new DeferredInlineBinaryOperation(op,
5521 (op == Token::DIV) ? rax : rdx,
5522 left->reg(),
5523 right->reg(),
5524 overwrite_mode);
5525 __ JumpIfNotBothSmi(left->reg(), right->reg(), deferred->entry_label());
5526
5527 if (op == Token::DIV) {
5528 __ SmiDiv(rax, left->reg(), right->reg(), deferred->entry_label());
5529 deferred->BindExit();
5530 left->Unuse();
5531 right->Unuse();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005532 frame_->Push(&quotient);
Steve Blocka7e24c12009-10-30 11:49:00 +00005533 } else {
5534 ASSERT(op == Token::MOD);
5535 __ SmiMod(rdx, left->reg(), right->reg(), deferred->entry_label());
5536 deferred->BindExit();
5537 left->Unuse();
5538 right->Unuse();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005539 frame_->Push(&remainder);
Steve Blocka7e24c12009-10-30 11:49:00 +00005540 }
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005541 return;
Steve Blocka7e24c12009-10-30 11:49:00 +00005542 }
5543
5544 // Special handling of shift operations because they use fixed
5545 // registers.
5546 if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
5547 // Move left out of rcx if necessary.
5548 if (left->is_register() && left->reg().is(rcx)) {
5549 *left = allocator_->Allocate();
5550 ASSERT(left->is_valid());
5551 __ movq(left->reg(), rcx);
5552 }
5553 right->ToRegister(rcx);
5554 left->ToRegister();
5555 ASSERT(left->is_register() && !left->reg().is(rcx));
5556 ASSERT(right->is_register() && right->reg().is(rcx));
5557
5558 // We will modify right, so it must be spilled.
5559 frame_->Spill(rcx);
5560
5561 // Use a fresh answer register to avoid spilling the left operand.
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005562 Result answer = allocator_->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00005563 ASSERT(answer.is_valid());
5564 // Check that both operands are smis using the answer register as a
5565 // temporary.
5566 DeferredInlineBinaryOperation* deferred =
5567 new DeferredInlineBinaryOperation(op,
5568 answer.reg(),
5569 left->reg(),
5570 rcx,
5571 overwrite_mode);
5572 __ movq(answer.reg(), left->reg());
5573 __ or_(answer.reg(), rcx);
5574 __ JumpIfNotSmi(answer.reg(), deferred->entry_label());
5575
5576 // Perform the operation.
5577 switch (op) {
5578 case Token::SAR:
5579 __ SmiShiftArithmeticRight(answer.reg(), left->reg(), rcx);
5580 break;
5581 case Token::SHR: {
5582 __ SmiShiftLogicalRight(answer.reg(),
5583 left->reg(),
5584 rcx,
5585 deferred->entry_label());
5586 break;
5587 }
5588 case Token::SHL: {
5589 __ SmiShiftLeft(answer.reg(),
5590 left->reg(),
5591 rcx,
5592 deferred->entry_label());
5593 break;
5594 }
5595 default:
5596 UNREACHABLE();
5597 }
5598 deferred->BindExit();
5599 left->Unuse();
5600 right->Unuse();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005601 frame_->Push(&answer);
5602 return;
Steve Blocka7e24c12009-10-30 11:49:00 +00005603 }
5604
5605 // Handle the other binary operations.
5606 left->ToRegister();
5607 right->ToRegister();
5608 // A newly allocated register answer is used to hold the answer. The
5609 // registers containing left and right are not modified so they don't
5610 // need to be spilled in the fast case.
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005611 Result answer = allocator_->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00005612 ASSERT(answer.is_valid());
5613
5614 // Perform the smi tag check.
5615 DeferredInlineBinaryOperation* deferred =
5616 new DeferredInlineBinaryOperation(op,
5617 answer.reg(),
5618 left->reg(),
5619 right->reg(),
5620 overwrite_mode);
5621 __ JumpIfNotBothSmi(left->reg(), right->reg(), deferred->entry_label());
5622
5623 switch (op) {
5624 case Token::ADD:
5625 __ SmiAdd(answer.reg(),
5626 left->reg(),
5627 right->reg(),
5628 deferred->entry_label());
5629 break;
5630
5631 case Token::SUB:
5632 __ SmiSub(answer.reg(),
5633 left->reg(),
5634 right->reg(),
5635 deferred->entry_label());
5636 break;
5637
5638 case Token::MUL: {
5639 __ SmiMul(answer.reg(),
5640 left->reg(),
5641 right->reg(),
5642 deferred->entry_label());
5643 break;
5644 }
5645
5646 case Token::BIT_OR:
5647 __ SmiOr(answer.reg(), left->reg(), right->reg());
5648 break;
5649
5650 case Token::BIT_AND:
5651 __ SmiAnd(answer.reg(), left->reg(), right->reg());
5652 break;
5653
5654 case Token::BIT_XOR:
5655 __ SmiXor(answer.reg(), left->reg(), right->reg());
5656 break;
5657
5658 default:
5659 UNREACHABLE();
5660 break;
5661 }
5662 deferred->BindExit();
5663 left->Unuse();
5664 right->Unuse();
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005665 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00005666}
5667
5668
5669#undef __
5670#define __ ACCESS_MASM(masm)
5671
5672
5673Handle<String> Reference::GetName() {
5674 ASSERT(type_ == NAMED);
5675 Property* property = expression_->AsProperty();
5676 if (property == NULL) {
5677 // Global variable reference treated as a named property reference.
5678 VariableProxy* proxy = expression_->AsVariableProxy();
5679 ASSERT(proxy->AsVariable() != NULL);
5680 ASSERT(proxy->AsVariable()->is_global());
5681 return proxy->name();
5682 } else {
5683 Literal* raw_name = property->key()->AsLiteral();
5684 ASSERT(raw_name != NULL);
5685 return Handle<String>(String::cast(*raw_name->handle()));
5686 }
5687}
5688
5689
Steve Blockd0582a62009-12-15 09:54:21 +00005690void Reference::GetValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00005691 ASSERT(!cgen_->in_spilled_code());
5692 ASSERT(cgen_->HasValidEntryRegisters());
5693 ASSERT(!is_illegal());
5694 MacroAssembler* masm = cgen_->masm();
5695
5696 // Record the source position for the property load.
5697 Property* property = expression_->AsProperty();
5698 if (property != NULL) {
5699 cgen_->CodeForSourcePosition(property->position());
5700 }
5701
5702 switch (type_) {
5703 case SLOT: {
5704 Comment cmnt(masm, "[ Load from Slot");
5705 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
5706 ASSERT(slot != NULL);
Steve Blockd0582a62009-12-15 09:54:21 +00005707 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00005708 break;
5709 }
5710
5711 case NAMED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00005712 Variable* var = expression_->AsVariableProxy()->AsVariable();
5713 bool is_global = var != NULL;
5714 ASSERT(!is_global || var->is_global());
5715
5716 // Do not inline the inobject property case for loads from the global
5717 // object. Also do not inline for unoptimized code. This saves time
5718 // in the code generator. Unoptimized code is toplevel code or code
5719 // that is not in a loop.
5720 if (is_global ||
5721 cgen_->scope()->is_global_scope() ||
5722 cgen_->loop_nesting() == 0) {
5723 Comment cmnt(masm, "[ Load from named Property");
5724 cgen_->frame()->Push(GetName());
5725
5726 RelocInfo::Mode mode = is_global
5727 ? RelocInfo::CODE_TARGET_CONTEXT
5728 : RelocInfo::CODE_TARGET;
5729 Result answer = cgen_->frame()->CallLoadIC(mode);
5730 // A test rax instruction following the call signals that the
5731 // inobject property case was inlined. Ensure that there is not
5732 // a test rax instruction here.
5733 __ nop();
5734 cgen_->frame()->Push(&answer);
5735 } else {
5736 // Inline the inobject property case.
5737 Comment cmnt(masm, "[ Inlined named property load");
5738 Result receiver = cgen_->frame()->Pop();
5739 receiver.ToRegister();
5740 Result value = cgen_->allocator()->Allocate();
5741 ASSERT(value.is_valid());
5742 // Cannot use r12 for receiver, because that changes
5743 // the distance between a call and a fixup location,
5744 // due to a special encoding of r12 as r/m in a ModR/M byte.
5745 if (receiver.reg().is(r12)) {
5746 // Swap receiver and value.
5747 __ movq(value.reg(), receiver.reg());
5748 Result temp = receiver;
5749 receiver = value;
5750 value = temp;
5751 cgen_->frame()->Spill(value.reg()); // r12 may have been shared.
5752 }
5753
5754 DeferredReferenceGetNamedValue* deferred =
5755 new DeferredReferenceGetNamedValue(value.reg(),
5756 receiver.reg(),
5757 GetName());
5758
5759 // Check that the receiver is a heap object.
5760 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
5761
5762 __ bind(deferred->patch_site());
5763 // This is the map check instruction that will be patched (so we can't
5764 // use the double underscore macro that may insert instructions).
5765 // Initially use an invalid map to force a failure.
5766 masm->Move(kScratchRegister, Factory::null_value());
5767 masm->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
5768 kScratchRegister);
5769 // This branch is always a forwards branch so it's always a fixed
5770 // size which allows the assert below to succeed and patching to work.
5771 // Don't use deferred->Branch(...), since that might add coverage code.
5772 masm->j(not_equal, deferred->entry_label());
5773
5774 // The delta from the patch label to the load offset must be
5775 // statically known.
5776 ASSERT(masm->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
5777 LoadIC::kOffsetToLoadInstruction);
5778 // The initial (invalid) offset has to be large enough to force
5779 // a 32-bit instruction encoding to allow patching with an
5780 // arbitrary offset. Use kMaxInt (minus kHeapObjectTag).
5781 int offset = kMaxInt;
5782 masm->movq(value.reg(), FieldOperand(receiver.reg(), offset));
5783
5784 __ IncrementCounter(&Counters::named_load_inline, 1);
5785 deferred->BindExit();
5786 cgen_->frame()->Push(&receiver);
5787 cgen_->frame()->Push(&value);
5788 }
5789 break;
5790 }
5791
5792 case KEYED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00005793 Comment cmnt(masm, "[ Load from keyed Property");
5794 Variable* var = expression_->AsVariableProxy()->AsVariable();
5795 bool is_global = var != NULL;
5796 ASSERT(!is_global || var->is_global());
5797
Leon Clarkeeab96aa2010-01-27 16:31:12 +00005798 // Inline array load code if inside of a loop. We do not know
5799 // the receiver map yet, so we initially generate the code with
5800 // a check against an invalid map. In the inline cache code, we
5801 // patch the map check if appropriate.
5802 if (cgen_->loop_nesting() > 0) {
5803 Comment cmnt(masm, "[ Inlined load from keyed Property");
5804
5805 Result key = cgen_->frame()->Pop();
5806 Result receiver = cgen_->frame()->Pop();
5807 key.ToRegister();
5808 receiver.ToRegister();
5809
5810 // Use a fresh temporary to load the elements without destroying
5811 // the receiver which is needed for the deferred slow case.
5812 Result elements = cgen_->allocator()->Allocate();
5813 ASSERT(elements.is_valid());
5814
5815 // Use a fresh temporary for the index and later the loaded
5816 // value.
5817 Result index = cgen_->allocator()->Allocate();
5818 ASSERT(index.is_valid());
5819
5820 DeferredReferenceGetKeyedValue* deferred =
5821 new DeferredReferenceGetKeyedValue(index.reg(),
5822 receiver.reg(),
5823 key.reg(),
5824 is_global);
5825
5826 // Check that the receiver is not a smi (only needed if this
5827 // is not a load from the global context) and that it has the
5828 // expected map.
5829 if (!is_global) {
5830 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
5831 }
5832
5833 // Initially, use an invalid map. The map is patched in the IC
5834 // initialization code.
5835 __ bind(deferred->patch_site());
5836 // Use masm-> here instead of the double underscore macro since extra
5837 // coverage code can interfere with the patching.
5838 masm->movq(kScratchRegister, Factory::null_value(),
5839 RelocInfo::EMBEDDED_OBJECT);
5840 masm->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
5841 kScratchRegister);
5842 deferred->Branch(not_equal);
5843
5844 // Check that the key is a non-negative smi.
5845 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label());
5846
5847 // Get the elements array from the receiver and check that it
5848 // is not a dictionary.
5849 __ movq(elements.reg(),
5850 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
5851 __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
5852 Factory::fixed_array_map());
5853 deferred->Branch(not_equal);
5854
5855 // Shift the key to get the actual index value and check that
5856 // it is within bounds.
5857 __ SmiToInteger32(index.reg(), key.reg());
5858 __ cmpl(index.reg(),
5859 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
5860 deferred->Branch(above_equal);
5861
5862 // The index register holds the un-smi-tagged key. It has been
5863 // zero-extended to 64 bits, so it can be used directly as an index in the
5864 // operand below.
5865 // Load and check that the result is not the hole. We could
5866 // reuse the index or elements register for the value.
5867 //
5868 // TODO(206): Consider whether it makes sense to try some
5869 // heuristic about which register to reuse. For example, if
5870 // one is rax, then we can reuse that one because the value
5871 // coming from the deferred code will be in rax.
5872 Result value = index;
5873 __ movq(value.reg(),
5874 Operand(elements.reg(),
5875 index.reg(),
5876 times_pointer_size,
5877 FixedArray::kHeaderSize - kHeapObjectTag));
5878 elements.Unuse();
5879 index.Unuse();
5880 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
5881 deferred->Branch(equal);
5882 __ IncrementCounter(&Counters::keyed_load_inline, 1);
5883
5884 deferred->BindExit();
5885 // Restore the receiver and key to the frame and push the
5886 // result on top of it.
5887 cgen_->frame()->Push(&receiver);
5888 cgen_->frame()->Push(&key);
5889 cgen_->frame()->Push(&value);
5890
5891 } else {
5892 Comment cmnt(masm, "[ Load from keyed Property");
5893 RelocInfo::Mode mode = is_global
5894 ? RelocInfo::CODE_TARGET_CONTEXT
5895 : RelocInfo::CODE_TARGET;
5896 Result answer = cgen_->frame()->CallKeyedLoadIC(mode);
5897 // Make sure that we do not have a test instruction after the
5898 // call. A test instruction after the call is used to
5899 // indicate that we have generated an inline version of the
5900 // keyed load. The explicit nop instruction is here because
5901 // the push that follows might be peep-hole optimized away.
5902 __ nop();
5903 cgen_->frame()->Push(&answer);
5904 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005905 break;
5906 }
5907
5908 default:
5909 UNREACHABLE();
5910 }
5911}
5912
5913
Steve Blockd0582a62009-12-15 09:54:21 +00005914void Reference::TakeValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00005915 // TODO(X64): This function is completely architecture independent. Move
5916 // it somewhere shared.
5917
5918 // For non-constant frame-allocated slots, we invalidate the value in the
5919 // slot. For all others, we fall back on GetValue.
5920 ASSERT(!cgen_->in_spilled_code());
5921 ASSERT(!is_illegal());
5922 if (type_ != SLOT) {
Steve Blockd0582a62009-12-15 09:54:21 +00005923 GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00005924 return;
5925 }
5926
5927 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
5928 ASSERT(slot != NULL);
5929 if (slot->type() == Slot::LOOKUP ||
5930 slot->type() == Slot::CONTEXT ||
5931 slot->var()->mode() == Variable::CONST ||
5932 slot->is_arguments()) {
Steve Blockd0582a62009-12-15 09:54:21 +00005933 GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00005934 return;
5935 }
5936
5937 // Only non-constant, frame-allocated parameters and locals can reach
5938 // here. Be careful not to use the optimizations for arguments
5939 // object access since it may not have been initialized yet.
5940 ASSERT(!slot->is_arguments());
5941 if (slot->type() == Slot::PARAMETER) {
5942 cgen_->frame()->TakeParameterAt(slot->index());
5943 } else {
5944 ASSERT(slot->type() == Slot::LOCAL);
5945 cgen_->frame()->TakeLocalAt(slot->index());
5946 }
5947}
5948
5949
5950void Reference::SetValue(InitState init_state) {
5951 ASSERT(cgen_->HasValidEntryRegisters());
5952 ASSERT(!is_illegal());
5953 MacroAssembler* masm = cgen_->masm();
5954 switch (type_) {
5955 case SLOT: {
5956 Comment cmnt(masm, "[ Store to Slot");
5957 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
5958 ASSERT(slot != NULL);
5959 cgen_->StoreToSlot(slot, init_state);
5960 break;
5961 }
5962
5963 case NAMED: {
5964 Comment cmnt(masm, "[ Store to named Property");
5965 cgen_->frame()->Push(GetName());
5966 Result answer = cgen_->frame()->CallStoreIC();
5967 cgen_->frame()->Push(&answer);
5968 break;
5969 }
5970
5971 case KEYED: {
5972 Comment cmnt(masm, "[ Store to keyed Property");
5973
5974 // Generate inlined version of the keyed store if the code is in
5975 // a loop and the key is likely to be a smi.
5976 Property* property = expression()->AsProperty();
5977 ASSERT(property != NULL);
Leon Clarkee46be812010-01-19 14:06:41 +00005978 StaticType* key_smi_analysis = property->key()->type();
Steve Blocka7e24c12009-10-30 11:49:00 +00005979
5980 if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) {
5981 Comment cmnt(masm, "[ Inlined store to keyed Property");
5982
5983 // Get the receiver, key and value into registers.
5984 Result value = cgen_->frame()->Pop();
5985 Result key = cgen_->frame()->Pop();
5986 Result receiver = cgen_->frame()->Pop();
5987
5988 Result tmp = cgen_->allocator_->Allocate();
5989 ASSERT(tmp.is_valid());
5990
5991 // Determine whether the value is a constant before putting it
5992 // in a register.
5993 bool value_is_constant = value.is_constant();
5994
5995 // Make sure that value, key and receiver are in registers.
5996 value.ToRegister();
5997 key.ToRegister();
5998 receiver.ToRegister();
5999
6000 DeferredReferenceSetKeyedValue* deferred =
6001 new DeferredReferenceSetKeyedValue(value.reg(),
6002 key.reg(),
6003 receiver.reg());
6004
6005 // Check that the value is a smi if it is not a constant.
6006 // We can skip the write barrier for smis and constants.
6007 if (!value_is_constant) {
6008 __ JumpIfNotSmi(value.reg(), deferred->entry_label());
6009 }
6010
6011 // Check that the key is a non-negative smi.
6012 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00006013
6014 // Check that the receiver is not a smi.
6015 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
6016
6017 // Check that the receiver is a JSArray.
6018 __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister);
6019 deferred->Branch(not_equal);
6020
6021 // Check that the key is within bounds. Both the key and the
Steve Block3ce2e202009-11-05 08:53:23 +00006022 // length of the JSArray are smis.
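 // The branch below goes to the deferred code when length <= key, i.e. when
 // the zero-based index is outside the array.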
6023 __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
6024 key.reg());
6025 deferred->Branch(less_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00006026
6027 // Get the elements array from the receiver and check that it
6028 // is a flat array (not a dictionary).
6029 __ movq(tmp.reg(),
6030 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
6031 // Bind the deferred code patch site to be able to locate the
6032 // fixed array map comparison. When debugging, we patch this
6033 // comparison to always fail so that we will hit the IC call
6034 // in the deferred code which will allow the debugger to
6035 // break for fast case stores.
6036 __ bind(deferred->patch_site());
6037 // Avoid using __ to ensure the distance from patch_site
6038 // to the map address is always the same.
6039 masm->movq(kScratchRegister, Factory::fixed_array_map(),
6040 RelocInfo::EMBEDDED_OBJECT);
6041 __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
6042 kScratchRegister);
6043 deferred->Branch(not_equal);
6044
6045 // Store the value.
6046 SmiIndex index =
6047 masm->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
6048 __ movq(Operand(tmp.reg(),
6049 index.reg,
6050 index.scale,
6051 FixedArray::kHeaderSize - kHeapObjectTag),
6052 value.reg());
6053 __ IncrementCounter(&Counters::keyed_store_inline, 1);
6054
6055 deferred->BindExit();
6056
6057 cgen_->frame()->Push(&receiver);
6058 cgen_->frame()->Push(&key);
6059 cgen_->frame()->Push(&value);
6060 } else {
6061 Result answer = cgen_->frame()->CallKeyedStoreIC();
6062 // Make sure that we do not have a test instruction after the
6063 // call. A test instruction after the call is used to
6064 // indicate that we have generated an inline version of the
6065 // keyed store.
6066 masm->nop();
6067 cgen_->frame()->Push(&answer);
6068 }
6069 break;
6070 }
6071
6072 default:
6073 UNREACHABLE();
6074 }
6075}
6076
6077
Leon Clarkee46be812010-01-19 14:06:41 +00006078void FastNewClosureStub::Generate(MacroAssembler* masm) {
6079 // Clone the boilerplate in new space. Set the context to the
6080 // current context in rsi.
6081 Label gc;
6082 __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
6083
6084 // Get the boilerplate function from the stack.
6085 __ movq(rdx, Operand(rsp, 1 * kPointerSize));
6086
6087 // Compute the function map in the current global context and set that
6088 // as the map of the allocated object.
6089 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
6090 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
6091 __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
6092 __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);
6093
6094 // Clone the rest of the boilerplate fields. We don't have to update
6095 // the write barrier because the allocated object is in new space.
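 // The context field is special-cased below: the new closure must capture
 // the current context in rsi rather than the boilerplate's context.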
6096 for (int offset = kPointerSize;
6097 offset < JSFunction::kSize;
6098 offset += kPointerSize) {
6099 if (offset == JSFunction::kContextOffset) {
6100 __ movq(FieldOperand(rax, offset), rsi);
6101 } else {
6102 __ movq(rbx, FieldOperand(rdx, offset));
6103 __ movq(FieldOperand(rax, offset), rbx);
6104 }
6105 }
6106
6107 // Return and remove the on-stack parameter.
6108 __ ret(1 * kPointerSize);
6109
6110 // Create a new closure through the slower runtime call.
6111 __ bind(&gc);
6112 __ pop(rcx); // Temporarily remove return address.
6113 __ pop(rdx);
6114 __ push(rsi);
6115 __ push(rdx);
6116 __ push(rcx); // Restore return address.
6117 __ TailCallRuntime(ExternalReference(Runtime::kNewClosure), 2, 1);
6118}
6119
6120
6121void FastNewContextStub::Generate(MacroAssembler* masm) {
6122 // Try to allocate the context in new space.
6123 Label gc;
6124 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
6125 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
6126 rax, rbx, rcx, &gc, TAG_OBJECT);
6127
6128 // Get the function from the stack.
6129 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
6130
6131 // Setup the object header.
6132 __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
6133 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
6134 __ movl(FieldOperand(rax, Array::kLengthOffset), Immediate(length));
6135
6136 // Setup the fixed slots.
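 // A function context is its own 'fcontext', and it starts out with neither
 // a previous context nor an extension object, so those slots get the
 // context itself and NULL respectively.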
6137 __ xor_(rbx, rbx); // Set to NULL.
6138 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
6139 __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
6140 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
6141 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);
6142
6143 // Copy the global object from the surrounding context.
6144 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
6145 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
6146
6147 // Initialize the rest of the slots to undefined.
6148 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
6149 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
6150 __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
6151 }
6152
6153 // Return and remove the on-stack parameter.
6154 __ movq(rsi, rax);
6155 __ ret(1 * kPointerSize);
6156
6157 // Need to collect. Call into runtime system.
6158 __ bind(&gc);
6159 __ TailCallRuntime(ExternalReference(Runtime::kNewContext), 1, 1);
6160}
6161
6162
Steve Blocka7e24c12009-10-30 11:49:00 +00006163void ToBooleanStub::Generate(MacroAssembler* masm) {
6164 Label false_result, true_result, not_string;
6165 __ movq(rax, Operand(rsp, 1 * kPointerSize));
6166
6167 // 'null' => false.
6168 __ CompareRoot(rax, Heap::kNullValueRootIndex);
6169 __ j(equal, &false_result);
6170
6171 // Get the map and type of the heap object.
6172 // We don't use CmpObjectType because we manipulate the type field.
6173 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
6174 __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));
6175
6176 // Undetectable => false.
6177 __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
6178 __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
6179 __ j(not_zero, &false_result);
6180
6181 // JavaScript object => true.
6182 __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
6183 __ j(above_equal, &true_result);
6184
6185 // String value => false iff empty.
6186 __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE));
6187 __ j(above_equal, &not_string);
Steve Blocka7e24c12009-10-30 11:49:00 +00006188 __ movl(rdx, FieldOperand(rax, String::kLengthOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00006189 __ testl(rdx, rdx);
Steve Blocka7e24c12009-10-30 11:49:00 +00006190 __ j(zero, &false_result);
6191 __ jmp(&true_result);
6192
6193 __ bind(&not_string);
6194 // HeapNumber => false iff +0, -0, or NaN.
6195 // These three cases set C3 when compared to zero in the FPU.
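 // After FCmp below, C3 is reflected in the zero flag, so the single
 // j(zero) covers all three cases.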
6196 __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
6197 __ j(not_equal, &true_result);
Steve Blocka7e24c12009-10-30 11:49:00 +00006198 __ fldz(); // Load zero onto fp stack
6199 // Load heap-number double value onto fp stack
6200 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00006201 __ FCmp();
6202 __ j(zero, &false_result);
Steve Blocka7e24c12009-10-30 11:49:00 +00006203 // Fall through to |true_result|.
6204
6205 // Return 1/0 for true/false in rax.
6206 __ bind(&true_result);
6207 __ movq(rax, Immediate(1));
6208 __ ret(1 * kPointerSize);
6209 __ bind(&false_result);
6210 __ xor_(rax, rax);
6211 __ ret(1 * kPointerSize);
6212}
6213
6214
6215bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00006216 // TODO(X64): This method is identical to the ia32 version.
6217 // Either find a reason to change it, or move it somewhere where it can be
6218 // shared. (Notice: It assumes that a Smi can fit in an int).
6219
Steve Blocka7e24c12009-10-30 11:49:00 +00006220 Object* answer_object = Heap::undefined_value();
6221 switch (op) {
6222 case Token::ADD:
Leon Clarkeeab96aa2010-01-27 16:31:12 +00006223 if (Smi::IsValid(left + right)) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006224 answer_object = Smi::FromInt(left + right);
6225 }
6226 break;
6227 case Token::SUB:
Leon Clarkeeab96aa2010-01-27 16:31:12 +00006228 if (Smi::IsValid(left - right)) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006229 answer_object = Smi::FromInt(left - right);
6230 }
6231 break;
6232 case Token::MUL: {
6233 double answer = static_cast<double>(left) * right;
6234 if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
6235 // If the product is zero and the non-zero factor is negative,
6236 // the spec requires us to return floating point negative zero.
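 // When the product is zero, left + right is just the other (possibly zero)
 // factor, so a negative sum identifies exactly the cases that must yield -0.0.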
6237 if (answer != 0 || (left + right) >= 0) {
6238 answer_object = Smi::FromInt(static_cast<int>(answer));
6239 }
6240 }
6241 }
6242 break;
6243 case Token::DIV:
6244 case Token::MOD:
6245 break;
6246 case Token::BIT_OR:
6247 answer_object = Smi::FromInt(left | right);
6248 break;
6249 case Token::BIT_AND:
6250 answer_object = Smi::FromInt(left & right);
6251 break;
6252 case Token::BIT_XOR:
6253 answer_object = Smi::FromInt(left ^ right);
6254 break;
6255
6256 case Token::SHL: {
6257 int shift_amount = right & 0x1F;
6258 if (Smi::IsValid(left << shift_amount)) {
6259 answer_object = Smi::FromInt(left << shift_amount);
6260 }
6261 break;
6262 }
6263 case Token::SHR: {
6264 int shift_amount = right & 0x1F;
6265 unsigned int unsigned_left = left;
6266 unsigned_left >>= shift_amount;
6267 if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
6268 answer_object = Smi::FromInt(unsigned_left);
6269 }
6270 break;
6271 }
6272 case Token::SAR: {
6273 int shift_amount = right & 0x1F;
6274 unsigned int unsigned_left = left;
6275 if (left < 0) {
6276 // Perform arithmetic shift of a negative number by
6277 // complementing number, logical shifting, complementing again.
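 // For example, with left == -5 and shift_amount == 1: ~(-5) == 4,
 // 4 >> 1 == 2, and ~2 == -3, which is -5 shifted right arithmetically by one.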
6278 unsigned_left = ~unsigned_left;
6279 unsigned_left >>= shift_amount;
6280 unsigned_left = ~unsigned_left;
6281 } else {
6282 unsigned_left >>= shift_amount;
6283 }
6284 ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
6285 answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
6286 break;
6287 }
6288 default:
6289 UNREACHABLE();
6290 break;
6291 }
6292 if (answer_object == Heap::undefined_value()) {
6293 return false;
6294 }
6295 frame_->Push(Handle<Object>(answer_object));
6296 return true;
6297}
6298
6299
6300// End of CodeGenerator implementation.
6301
Leon Clarkee46be812010-01-19 14:06:41 +00006302void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00006303 ASSERT(op_ == Token::SUB);
Leon Clarkee46be812010-01-19 14:06:41 +00006304
Leon Clarkeeab96aa2010-01-27 16:31:12 +00006305 Label slow;
6306 Label done;
6307 Label try_float;
6308 // Check whether the value is a smi.
6309 __ JumpIfNotSmi(rax, &try_float);
Steve Blocka7e24c12009-10-30 11:49:00 +00006310
Leon Clarkeeab96aa2010-01-27 16:31:12 +00006311 // The smi negation below falls through if the value is zero, so that we
6312 // can return -0 instead of 0, and if it is Smi::kMinValue, whose negation
6313 // does not fit in a smi.
6314 __ SmiNeg(rax, rax, &done);
Steve Blocka7e24c12009-10-30 11:49:00 +00006315
Leon Clarkeeab96aa2010-01-27 16:31:12 +00006316 // Either zero or Smi::kMinValue, neither of which become a smi when negated.
6317 __ SmiCompare(rax, Smi::FromInt(0));
6318 __ j(not_equal, &slow);
6319 __ Move(rax, Factory::minus_zero_value());
6320 __ jmp(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00006321
Leon Clarkeeab96aa2010-01-27 16:31:12 +00006322 // Enter runtime system.
Steve Blocka7e24c12009-10-30 11:49:00 +00006323 __ bind(&slow);
6324 __ pop(rcx); // pop return address
6325 __ push(rax);
6326 __ push(rcx); // push return address
Leon Clarkeeab96aa2010-01-27 16:31:12 +00006327 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
6328 __ jmp(&done);
6329
6330 // Try floating point case.
6331 __ bind(&try_float);
6332 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
6333 __ Cmp(rdx, Factory::heap_number_map());
6334 __ j(not_equal, &slow);
6335 // Operand is a float, negate its value by flipping sign bit.
6336 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
6337 __ movq(kScratchRegister, Immediate(0x01));
6338 __ shl(kScratchRegister, Immediate(63));
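 // kScratchRegister now holds 0x8000000000000000, the sign bit of an
 // IEEE 754 double.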
6339 __ xor_(rdx, kScratchRegister); // Flip sign.
6340 // rdx is value to store.
6341 if (overwrite_) {
6342 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx);
6343 } else {
6344 __ AllocateHeapNumber(rcx, rbx, &slow);
6345 // rcx: allocated 'empty' number
6346 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
6347 __ movq(rax, rcx);
Steve Blocka7e24c12009-10-30 11:49:00 +00006348 }
Leon Clarkeeab96aa2010-01-27 16:31:12 +00006349
6350 __ bind(&done);
6351 __ StubReturn(1);
Steve Blocka7e24c12009-10-30 11:49:00 +00006352}
6353
6354
6355void CompareStub::Generate(MacroAssembler* masm) {
6356 Label call_builtin, done;
6357
6358 // NOTICE! This code is only reached after a smi-fast-case check, so
6359 // it is certain that at least one operand isn't a smi.
6360
6361 if (cc_ == equal) { // Both strict and non-strict.
6362 Label slow; // Fallthrough label.
6363 // Equality is almost reflexive (everything but NaN), so start by testing
6364 // for "identity and not NaN".
6365 {
6366 Label not_identical;
6367 __ cmpq(rax, rdx);
6368 __ j(not_equal, &not_identical);
6369 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
6370 // so we do the second best thing - test it ourselves.
6371
Leon Clarkee46be812010-01-19 14:06:41 +00006372 if (never_nan_nan_) {
6373 __ xor_(rax, rax);
6374 __ ret(0);
6375 } else {
6376 Label return_equal;
6377 Label heap_number;
6378 // If it's not a heap number, then return equal.
6379 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
6380 Factory::heap_number_map());
6381 __ j(equal, &heap_number);
6382 __ bind(&return_equal);
6383 __ xor_(rax, rax);
6384 __ ret(0);
Steve Blocka7e24c12009-10-30 11:49:00 +00006385
Leon Clarkee46be812010-01-19 14:06:41 +00006386 __ bind(&heap_number);
6387 // It is a heap number, so return non-equal if it's NaN and equal if
6388 // it's not NaN.
6389 // The representation of NaN values has all exponent bits (52..62) set,
6390 // and not all mantissa bits (0..51) clear.
6391 // We only allow QNaNs, which have bit 51 set (which also rules out
6392 // the value being Infinity).
Steve Blockd0582a62009-12-15 09:54:21 +00006393
Leon Clarkee46be812010-01-19 14:06:41 +00006394 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
6395 // all bits in the mask are set. We only need to check the word
6396 // that contains the exponent and high bit of the mantissa.
6397 ASSERT_NE(0, (kQuietNaNHighBitsMask << 1) & 0x80000000u);
6398 __ movl(rdx, FieldOperand(rdx, HeapNumber::kExponentOffset));
6399 __ xorl(rax, rax);
6400 __ addl(rdx, rdx); // Shift value and mask so mask applies to top bits.
6401 __ cmpl(rdx, Immediate(kQuietNaNHighBitsMask << 1));
6402 __ setcc(above_equal, rax);
6403 __ ret(0);
6404 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006405
6406 __ bind(&not_identical);
6407 }
6408
6409 // If we're doing a strict equality comparison, we don't have to do
6410 // type conversion, so we generate code to do fast comparison for objects
6411 // and oddballs. Non-smi numbers and strings still go through the usual
6412 // slow-case code.
6413 if (strict_) {
6414 // If either is a Smi (we know that not both are), then they can only
6415 // be equal if the other is a HeapNumber. If so, use the slow case.
6416 {
6417 Label not_smis;
6418 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
6419
6420 // Check if the non-smi operand is a heap number.
6421 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
6422 Factory::heap_number_map());
6423 // If heap number, handle it in the slow case.
6424 __ j(equal, &slow);
6425 // Return non-equal. ebx (the lower half of rbx) is not zero.
6426 __ movq(rax, rbx);
6427 __ ret(0);
6428
6429 __ bind(&not_smis);
6430 }
6431
6432 // If either operand is a JSObject or an oddball value, then they are not
6433 // equal since their pointers are different.
6434 // There is no test for undetectability in strict equality.
6435
6436 // If the first object is a JS object, we have done pointer comparison.
6437 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
6438 Label first_non_object;
6439 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
6440 __ j(below, &first_non_object);
6441 // Return non-zero (eax (not rax) is not zero)
6442 Label return_not_equal;
6443 ASSERT(kHeapObjectTag != 0);
6444 __ bind(&return_not_equal);
6445 __ ret(0);
6446
6447 __ bind(&first_non_object);
6448 // Check for oddballs: true, false, null, undefined.
6449 __ CmpInstanceType(rcx, ODDBALL_TYPE);
6450 __ j(equal, &return_not_equal);
6451
6452 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
6453 __ j(above_equal, &return_not_equal);
6454
6455 // Check for oddballs: true, false, null, undefined.
6456 __ CmpInstanceType(rcx, ODDBALL_TYPE);
6457 __ j(equal, &return_not_equal);
6458
6459 // Fall through to the general case.
6460 }
6461 __ bind(&slow);
6462 }
6463
6464 // Push arguments below the return address to prepare jump to builtin.
6465 __ pop(rcx);
6466 __ push(rax);
6467 __ push(rdx);
6468 __ push(rcx);
6469
6470 // Inlined floating point compare.
6471 // Call builtin if operands are not floating point or smi.
6472 Label check_for_symbols;
6473 // Push arguments on stack, for helper functions.
Steve Block3ce2e202009-11-05 08:53:23 +00006474 FloatingPointHelper::CheckNumberOperands(masm, &check_for_symbols);
Steve Blocka7e24c12009-10-30 11:49:00 +00006475 FloatingPointHelper::LoadFloatOperands(masm, rax, rdx);
6476 __ FCmp();
6477
6478 // Jump to builtin for NaN.
6479 __ j(parity_even, &call_builtin);
6480
6481 // TODO(1243847): Use cmov below once CpuFeatures are properly hooked up.
6482 Label below_lbl, above_lbl;
6483 // use rdx, rax to convert unsigned to signed comparison
6484 __ j(below, &below_lbl);
6485 __ j(above, &above_lbl);
6486
6487 __ xor_(rax, rax); // equal
6488 __ ret(2 * kPointerSize);
6489
6490 __ bind(&below_lbl);
6491 __ movq(rax, Immediate(-1));
6492 __ ret(2 * kPointerSize);
6493
6494 __ bind(&above_lbl);
6495 __ movq(rax, Immediate(1));
6496 __ ret(2 * kPointerSize); // rax, rdx were pushed
6497
6498 // Fast negative check for symbol-to-symbol equality.
6499 __ bind(&check_for_symbols);
Leon Clarkee46be812010-01-19 14:06:41 +00006500 Label check_for_strings;
Steve Blocka7e24c12009-10-30 11:49:00 +00006501 if (cc_ == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +00006502 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister);
6503 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00006504
6505 // We've already checked for object identity, so if both operands
6506 // are symbols they aren't equal. Register eax (not rax) already holds a
6507 // non-zero value, which indicates not equal, so just return.
6508 __ ret(2 * kPointerSize);
6509 }
6510
Leon Clarkee46be812010-01-19 14:06:41 +00006511 __ bind(&check_for_strings);
6512
6513 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &call_builtin);
6514
6515 // Inline comparison of ascii strings.
6516 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
6517 rdx,
6518 rax,
6519 rcx,
6520 rbx,
6521 rdi,
6522 r8);
6523
6524#ifdef DEBUG
6525 __ Abort("Unexpected fall-through from string comparison");
6526#endif
6527
Steve Blocka7e24c12009-10-30 11:49:00 +00006528 __ bind(&call_builtin);
6529 // must swap argument order
6530 __ pop(rcx);
6531 __ pop(rdx);
6532 __ pop(rax);
6533 __ push(rdx);
6534 __ push(rax);
6535
6536 // Figure out which native to call and setup the arguments.
6537 Builtins::JavaScript builtin;
6538 if (cc_ == equal) {
6539 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
6540 } else {
6541 builtin = Builtins::COMPARE;
6542 int ncr; // NaN compare result
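 // For < and <=, a NaN operand must make the comparison fail, so the builtin
 // is told to treat NaN as GREATER; for > and >= it is treated as LESS.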
6543 if (cc_ == less || cc_ == less_equal) {
6544 ncr = GREATER;
6545 } else {
6546 ASSERT(cc_ == greater || cc_ == greater_equal); // remaining cases
6547 ncr = LESS;
6548 }
Steve Block3ce2e202009-11-05 08:53:23 +00006549 __ Push(Smi::FromInt(ncr));
Steve Blocka7e24c12009-10-30 11:49:00 +00006550 }
6551
6552 // Restore return address on the stack.
6553 __ push(rcx);
6554
6555 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
6556 // tagged as a small integer.
6557 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
6558}
6559
6560
6561void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
6562 Label* label,
6563 Register object,
6564 Register scratch) {
6565 __ JumpIfSmi(object, label);
6566 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
6567 __ movzxbq(scratch,
6568 FieldOperand(scratch, Map::kInstanceTypeOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00006569 // Ensure that no non-strings have the symbol bit set.
6570 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
6571 ASSERT(kSymbolTag != 0);
6572 __ testb(scratch, Immediate(kIsSymbolMask));
6573 __ j(zero, label);
Steve Blocka7e24c12009-10-30 11:49:00 +00006574}
6575
6576
6577// Call the function just below TOS on the stack with the given
6578// arguments. The receiver is the TOS.
6579void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00006580 CallFunctionFlags flags,
Steve Blocka7e24c12009-10-30 11:49:00 +00006581 int position) {
6582 // Push the arguments ("left-to-right") on the stack.
6583 int arg_count = args->length();
6584 for (int i = 0; i < arg_count; i++) {
6585 Load(args->at(i));
6586 }
6587
6588 // Record the position for debugging purposes.
6589 CodeForSourcePosition(position);
6590
6591 // Use the shared code stub to call the function.
6592 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00006593 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00006594 Result answer = frame_->CallStub(&call_function, arg_count + 1);
6595 // Restore context and replace function on the stack with the
6596 // result of the stub invocation.
6597 frame_->RestoreContextRegister();
6598 frame_->SetElementAt(0, &answer);
6599}
6600
6601
6602void InstanceofStub::Generate(MacroAssembler* masm) {
6603 // Implements "value instanceof function" operator.
6604 // Expected input state:
6605 // rsp[0] : return address
6606 // rsp[1] : function pointer
6607 // rsp[2] : value
6608
6609 // Get the object - go slow case if it's a smi.
6610 Label slow;
6611 __ movq(rax, Operand(rsp, 2 * kPointerSize));
6612 __ JumpIfSmi(rax, &slow);
6613
6614 // Check that the left hand is a JS object. Leave its map in rax.
6615 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
6616 __ j(below, &slow);
6617 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE);
6618 __ j(above, &slow);
6619
6620 // Get the prototype of the function.
6621 __ movq(rdx, Operand(rsp, 1 * kPointerSize));
6622 __ TryGetFunctionPrototype(rdx, rbx, &slow);
6623
6624 // Check that the function prototype is a JS object.
6625 __ JumpIfSmi(rbx, &slow);
6626 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
6627 __ j(below, &slow);
6628 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
6629 __ j(above, &slow);
6630
6631 // Register mapping: rax is object map and rbx is function prototype.
6632 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
6633
6634 // Loop through the prototype chain looking for the function prototype.
6635 Label loop, is_instance, is_not_instance;
6636 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
6637 __ bind(&loop);
6638 __ cmpq(rcx, rbx);
6639 __ j(equal, &is_instance);
6640 __ cmpq(rcx, kScratchRegister);
6641 __ j(equal, &is_not_instance);
6642 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
6643 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
6644 __ jmp(&loop);
6645
6646 __ bind(&is_instance);
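 // rax == 0 signals that the value is an instance; the non-zero value stored
 // below signals that it is not.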
Steve Blockd0582a62009-12-15 09:54:21 +00006647 __ xorl(rax, rax);
Steve Blocka7e24c12009-10-30 11:49:00 +00006648 __ ret(2 * kPointerSize);
6649
6650 __ bind(&is_not_instance);
Steve Blockd0582a62009-12-15 09:54:21 +00006651 __ movl(rax, Immediate(1));
Steve Blocka7e24c12009-10-30 11:49:00 +00006652 __ ret(2 * kPointerSize);
6653
6654 // Slow-case: Go through the JavaScript implementation.
6655 __ bind(&slow);
6656 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
6657}
6658
6659
6660void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
6661 // The displacement is used for skipping the return address and the
6662 // frame pointer on the stack. It is the offset of the last
6663 // parameter (if any) relative to the frame pointer.
6664 static const int kDisplacement = 2 * kPointerSize;
6665
6666 // Check if the calling frame is an arguments adaptor frame.
6667 Label runtime;
6668 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00006669 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
6670 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Steve Blocka7e24c12009-10-30 11:49:00 +00006671 __ j(not_equal, &runtime);
6672 // Value in rcx is Smi encoded.
6673
6674 // Patch the arguments.length and the parameters pointer.
6675 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
6676 __ movq(Operand(rsp, 1 * kPointerSize), rcx);
6677 SmiIndex index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2);
6678 __ lea(rdx, Operand(rdx, index.reg, index.scale, kDisplacement));
6679 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
6680
6681 // Do the runtime call to allocate the arguments object.
6682 __ bind(&runtime);
6683 Runtime::Function* f = Runtime::FunctionForId(Runtime::kNewArgumentsFast);
6684 __ TailCallRuntime(ExternalReference(f), 3, f->result_size);
6685}
6686
6687
6688void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
6689 // The key is in rdx and the parameter count is in rax.
6690
6691 // The displacement is used for skipping the frame pointer on the
6692 // stack. It is the offset of the last parameter (if any) relative
6693 // to the frame pointer.
6694 static const int kDisplacement = 1 * kPointerSize;
6695
6696 // Check that the key is a smi.
6697 Label slow;
6698 __ JumpIfNotSmi(rdx, &slow);
6699
6700 // Check if the calling frame is an arguments adaptor frame.
6701 Label adaptor;
6702 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00006703 __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
6704 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Steve Blocka7e24c12009-10-30 11:49:00 +00006705 __ j(equal, &adaptor);
6706
6707 // Check index against formal parameters count limit passed in
6708 // through register rax. Use unsigned comparison to get negative
6709 // check for free.
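 // (A negative key has its sign bit set, so viewed as an unsigned value it is
 // larger than any valid count and also takes the slow path.)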
6710 __ cmpq(rdx, rax);
6711 __ j(above_equal, &slow);
6712
6713 // Read the argument from the stack and return it.
6714 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
6715 __ lea(rbx, Operand(rbp, index.reg, index.scale, 0));
6716 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
6717 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
6718 __ Ret();
6719
6720 // Arguments adaptor case: Check index against actual arguments
6721 // limit found in the arguments adaptor frame. Use unsigned
6722 // comparison to get negative check for free.
6723 __ bind(&adaptor);
6724 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
6725 __ cmpq(rdx, rcx);
6726 __ j(above_equal, &slow);
6727
6728 // Read the argument from the stack and return it.
6729 index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2);
6730 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0));
6731 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
6732 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
6733 __ Ret();
6734
6735 // Slow-case: Handle non-smi or out-of-bounds access to arguments
6736 // by calling the runtime system.
6737 __ bind(&slow);
6738 __ pop(rbx); // Return address.
6739 __ push(rdx);
6740 __ push(rbx);
6741 Runtime::Function* f =
6742 Runtime::FunctionForId(Runtime::kGetArgumentsProperty);
6743 __ TailCallRuntime(ExternalReference(f), 1, f->result_size);
6744}
6745
6746
6747void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) {
6748 // Check if the calling frame is an arguments adaptor frame.
6749 Label adaptor;
6750 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00006751 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
6752 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Steve Blocka7e24c12009-10-30 11:49:00 +00006753
6754 // Arguments adaptor case: Read the arguments length from the
6755 // adaptor frame and return it.
Leon Clarkee46be812010-01-19 14:06:41 +00006756 // Otherwise nothing to do: The number of formal parameters has already been
6757 // passed in register rax by the calling function. Just return it.
6758 __ cmovq(equal, rax,
6759 Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00006760 __ ret(0);
6761}
6762
6763
6764int CEntryStub::MinorKey() {
6765 ASSERT(result_size_ <= 2);
6766#ifdef _WIN64
6767 // Simple results returned in rax (using default code).
6768 // Complex results must be written to address passed as first argument.
6769 // Use even numbers for minor keys, reserving the odd numbers for
6770 // CEntryDebugBreakStub.
6771 return (result_size_ < 2) ? 0 : result_size_ * 2;
6772#else
6773 // Single results returned in rax (both AMD64 and Win64 calling conventions)
6774 // and a struct of two pointers in rax+rdx (AMD64 calling convention only)
6775 // by default.
6776 return 0;
6777#endif
6778}
6779
6780
6781void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
6782 // Check that stack should contain next handler, frame pointer, state and
6783 // return address in that order.
6784 ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize,
6785 StackHandlerConstants::kStateOffset);
6786 ASSERT_EQ(StackHandlerConstants::kStateOffset + kPointerSize,
6787 StackHandlerConstants::kPCOffset);
6788
6789 ExternalReference handler_address(Top::k_handler_address);
6790 __ movq(kScratchRegister, handler_address);
6791 __ movq(rsp, Operand(kScratchRegister, 0));
6792 // get next in chain
6793 __ pop(rcx);
6794 __ movq(Operand(kScratchRegister, 0), rcx);
6795 __ pop(rbp); // pop frame pointer
6796 __ pop(rdx); // remove state
6797
6798 // Before returning we restore the context from the frame pointer if not NULL.
6799 // The frame pointer is NULL in the exception handler of a JS entry frame.
6800 __ xor_(rsi, rsi); // tentatively set context pointer to NULL
6801 Label skip;
6802 __ cmpq(rbp, Immediate(0));
6803 __ j(equal, &skip);
6804 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
6805 __ bind(&skip);
6806 __ ret(0);
6807}
6808
6809
6810void CEntryStub::GenerateCore(MacroAssembler* masm,
6811 Label* throw_normal_exception,
6812 Label* throw_termination_exception,
6813 Label* throw_out_of_memory_exception,
Steve Blockd0582a62009-12-15 09:54:21 +00006814 ExitFrame::Mode mode,
Steve Blocka7e24c12009-10-30 11:49:00 +00006815 bool do_gc,
6816 bool always_allocate_scope) {
6817 // rax: result parameter for PerformGC, if any.
6818 // rbx: pointer to C function (C callee-saved).
6819 // rbp: frame pointer (restored after C call).
6820 // rsp: stack pointer (restored after C call).
6821 // r14: number of arguments including receiver (C callee-saved).
6822 // r15: pointer to the first argument (C callee-saved).
6823 // This pointer is reused in LeaveExitFrame(), so it is stored in a
6824 // callee-saved register.
6825
6826 if (do_gc) {
6827 // Pass failure code returned from last attempt as first argument to GC.
6828#ifdef _WIN64
6829 __ movq(rcx, rax);
6830#else // ! defined(_WIN64)
6831 __ movq(rdi, rax);
6832#endif
6833 __ movq(kScratchRegister,
6834 FUNCTION_ADDR(Runtime::PerformGC),
6835 RelocInfo::RUNTIME_ENTRY);
6836 __ call(kScratchRegister);
6837 }
6838
6839 ExternalReference scope_depth =
6840 ExternalReference::heap_always_allocate_scope_depth();
6841 if (always_allocate_scope) {
6842 __ movq(kScratchRegister, scope_depth);
6843 __ incl(Operand(kScratchRegister, 0));
6844 }
6845
6846 // Call C function.
6847#ifdef _WIN64
6848 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
6849 // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
6850 __ movq(Operand(rsp, 4 * kPointerSize), r14); // argc.
6851 __ movq(Operand(rsp, 5 * kPointerSize), r15); // argv.
6852 if (result_size_ < 2) {
6853 // Pass a pointer to the Arguments object as the first argument.
6854 // Return result in single register (rax).
6855 __ lea(rcx, Operand(rsp, 4 * kPointerSize));
6856 } else {
6857 ASSERT_EQ(2, result_size_);
6858 // Pass a pointer to the result location as the first argument.
6859 __ lea(rcx, Operand(rsp, 6 * kPointerSize));
6860 // Pass a pointer to the Arguments object as the second argument.
6861 __ lea(rdx, Operand(rsp, 4 * kPointerSize));
6862 }
6863
6864#else // ! defined(_WIN64)
6865 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
6866 __ movq(rdi, r14); // argc.
6867 __ movq(rsi, r15); // argv.
6868#endif
6869 __ call(rbx);
6870 // Result is in rax - do not destroy this register!
6871
6872 if (always_allocate_scope) {
6873 __ movq(kScratchRegister, scope_depth);
6874 __ decl(Operand(kScratchRegister, 0));
6875 }
6876
6877 // Check for failure result.
6878 Label failure_returned;
6879 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
Steve Block3ce2e202009-11-05 08:53:23 +00006880#ifdef _WIN64
6881 // If return value is on the stack, pop it to registers.
6882 if (result_size_ > 1) {
6883 ASSERT_EQ(2, result_size_);
Steve Blockd0582a62009-12-15 09:54:21 +00006884 // Read result values stored on stack. Result is stored
6885 // above the four argument mirror slots and the two
6886 // Arguments object slots.
Steve Block3ce2e202009-11-05 08:53:23 +00006887 __ movq(rax, Operand(rsp, 6 * kPointerSize));
6888 __ movq(rdx, Operand(rsp, 7 * kPointerSize));
6889 }
6890#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00006891 __ lea(rcx, Operand(rax, 1));
6892 // Lower 2 bits of rcx are 0 iff rax has failure tag.
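 // Failure objects are tagged with both low bits set (kFailureTag), so adding
 // one clears exactly those bits for failures and leaves them non-zero for
 // smis and heap objects.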
6893 __ testl(rcx, Immediate(kFailureTagMask));
6894 __ j(zero, &failure_returned);
6895
6896 // Exit the JavaScript to C++ exit frame.
Steve Blockd0582a62009-12-15 09:54:21 +00006897 __ LeaveExitFrame(mode, result_size_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006898 __ ret(0);
6899
6900 // Handling of failure.
6901 __ bind(&failure_returned);
6902
6903 Label retry;
6904 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
6905 ASSERT(Failure::RETRY_AFTER_GC == 0);
6906 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
6907 __ j(zero, &retry);
6908
6909 // Special handling of out of memory exceptions.
6910 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE);
6911 __ cmpq(rax, kScratchRegister);
6912 __ j(equal, throw_out_of_memory_exception);
6913
6914 // Retrieve the pending exception and clear the variable.
6915 ExternalReference pending_exception_address(Top::k_pending_exception_address);
6916 __ movq(kScratchRegister, pending_exception_address);
6917 __ movq(rax, Operand(kScratchRegister, 0));
6918 __ movq(rdx, ExternalReference::the_hole_value_location());
6919 __ movq(rdx, Operand(rdx, 0));
6920 __ movq(Operand(kScratchRegister, 0), rdx);
6921
6922 // Special handling of termination exceptions which are uncatchable
6923 // by javascript code.
6924 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
6925 __ j(equal, throw_termination_exception);
6926
6927 // Handle normal exception.
6928 __ jmp(throw_normal_exception);
6929
6930 // Retry.
6931 __ bind(&retry);
6932}
6933
6934
6935void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
6936 UncatchableExceptionType type) {
6937 // Fetch top stack handler.
6938 ExternalReference handler_address(Top::k_handler_address);
6939 __ movq(kScratchRegister, handler_address);
6940 __ movq(rsp, Operand(kScratchRegister, 0));
6941
6942 // Unwind the handlers until the ENTRY handler is found.
6943 Label loop, done;
6944 __ bind(&loop);
6945 // Load the type of the current stack handler.
6946 const int kStateOffset = StackHandlerConstants::kStateOffset;
6947 __ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
6948 __ j(equal, &done);
6949 // Fetch the next handler in the list.
6950 const int kNextOffset = StackHandlerConstants::kNextOffset;
6951 __ movq(rsp, Operand(rsp, kNextOffset));
6952 __ jmp(&loop);
6953 __ bind(&done);
6954
6955 // Set the top handler address to next handler past the current ENTRY handler.
6956 __ movq(kScratchRegister, handler_address);
6957 __ pop(Operand(kScratchRegister, 0));
6958
6959 if (type == OUT_OF_MEMORY) {
6960 // Set external caught exception to false.
6961 ExternalReference external_caught(Top::k_external_caught_exception_address);
6962 __ movq(rax, Immediate(false));
6963 __ store_rax(external_caught);
6964
6965 // Set pending exception and rax to out of memory exception.
6966 ExternalReference pending_exception(Top::k_pending_exception_address);
6967 __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
6968 __ store_rax(pending_exception);
6969 }
6970
6971 // Clear the context pointer.
6972 __ xor_(rsi, rsi);
6973
6974 // Restore registers from handler.
6975 ASSERT_EQ(StackHandlerConstants::kNextOffset + kPointerSize,
6976 StackHandlerConstants::kFPOffset);
6977 __ pop(rbp); // FP
6978 ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize,
6979 StackHandlerConstants::kStateOffset);
6980 __ pop(rdx); // State
6981
6982 ASSERT_EQ(StackHandlerConstants::kStateOffset + kPointerSize,
6983 StackHandlerConstants::kPCOffset);
6984 __ ret(0);
6985}
6986
6987
6988void CallFunctionStub::Generate(MacroAssembler* masm) {
6989 Label slow;
6990
Leon Clarkee46be812010-01-19 14:06:41 +00006991 // If the receiver might be a value (string, number or boolean), check for this
6992 // and box it if it is.
6993 if (ReceiverMightBeValue()) {
6994 // Get the receiver from the stack.
6995 // +1 ~ return address
6996 Label receiver_is_value, receiver_is_js_object;
6997 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
6998
6999 // Check if receiver is a smi (which is a number value).
7000 __ JumpIfSmi(rax, &receiver_is_value);
7001
7002 // Check if the receiver is a valid JS object.
7003 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdi);
7004 __ j(above_equal, &receiver_is_js_object);
7005
7006 // Call the runtime to box the value.
7007 __ bind(&receiver_is_value);
7008 __ EnterInternalFrame();
7009 __ push(rax);
7010 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
7011 __ LeaveInternalFrame();
7012 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rax);
7013
7014 __ bind(&receiver_is_js_object);
7015 }
7016
Steve Blocka7e24c12009-10-30 11:49:00 +00007017 // Get the function to call from the stack.
7018 // +2 ~ receiver, return address
7019 __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));
7020
7021 // Check that the function really is a JavaScript function.
7022 __ JumpIfSmi(rdi, &slow);
7023 // Goto slow case if we do not have a function.
7024 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
7025 __ j(not_equal, &slow);
7026
7027 // Fast-case: Just invoke the function.
7028 ParameterCount actual(argc_);
7029 __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
7030
7031 // Slow-case: Non-function called.
7032 __ bind(&slow);
7033 __ Set(rax, argc_);
7034 __ Set(rbx, 0);
7035 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
7036 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
7037 __ Jump(adaptor, RelocInfo::CODE_TARGET);
7038}
7039
7040
7041void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
7042 // rax: number of arguments including receiver
7043 // rbx: pointer to C function (C callee-saved)
7044 // rbp: frame pointer of calling JS frame (restored after C call)
7045 // rsp: stack pointer (restored after C call)
7046 // rsi: current context (restored)
7047
7048 // NOTE: Invocations of builtins may return failure objects
7049 // instead of a proper result. The builtin entry handles
7050 // this by performing a garbage collection and retrying the
7051 // builtin once.
7052
Steve Blockd0582a62009-12-15 09:54:21 +00007053 ExitFrame::Mode mode = is_debug_break ?
7054 ExitFrame::MODE_DEBUG :
7055 ExitFrame::MODE_NORMAL;
Steve Blocka7e24c12009-10-30 11:49:00 +00007056
7057 // Enter the exit frame that transitions from JavaScript to C++.
Steve Blockd0582a62009-12-15 09:54:21 +00007058 __ EnterExitFrame(mode, result_size_);
Steve Blocka7e24c12009-10-30 11:49:00 +00007059
7060 // rax: Holds the context at this point, but should not be used.
7061 // On entry to code generated by GenerateCore, it must hold
7062 // a failure result if the collect_garbage argument to GenerateCore
7063 // is true. This failure result can be the result of code
7064 // generated by a previous call to GenerateCore. The value
7065 // of rax is then passed to Runtime::PerformGC.
7066 // rbx: pointer to builtin function (C callee-saved).
7067 // rbp: frame pointer of exit frame (restored after C call).
7068 // rsp: stack pointer (restored after C call).
7069 // r14: number of arguments including receiver (C callee-saved).
7070 // r15: argv pointer (C callee-saved).
7071
7072 Label throw_normal_exception;
7073 Label throw_termination_exception;
7074 Label throw_out_of_memory_exception;
7075
7076 // Call into the runtime system.
7077 GenerateCore(masm,
7078 &throw_normal_exception,
7079 &throw_termination_exception,
7080 &throw_out_of_memory_exception,
Steve Blockd0582a62009-12-15 09:54:21 +00007081 mode,
Steve Blocka7e24c12009-10-30 11:49:00 +00007082 false,
7083 false);
7084
7085 // Do space-specific GC and retry runtime call.
7086 GenerateCore(masm,
7087 &throw_normal_exception,
7088 &throw_termination_exception,
7089 &throw_out_of_memory_exception,
Steve Blockd0582a62009-12-15 09:54:21 +00007090 mode,
Steve Blocka7e24c12009-10-30 11:49:00 +00007091 true,
7092 false);
7093
7094 // Do full GC and retry runtime call one final time.
7095 Failure* failure = Failure::InternalError();
7096 __ movq(rax, failure, RelocInfo::NONE);
7097 GenerateCore(masm,
7098 &throw_normal_exception,
7099 &throw_termination_exception,
7100 &throw_out_of_memory_exception,
Steve Blockd0582a62009-12-15 09:54:21 +00007101 mode,
Steve Blocka7e24c12009-10-30 11:49:00 +00007102 true,
7103 true);
7104
7105 __ bind(&throw_out_of_memory_exception);
7106 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
7107
7108 __ bind(&throw_termination_exception);
7109 GenerateThrowUncatchable(masm, TERMINATION);
7110
7111 __ bind(&throw_normal_exception);
7112 GenerateThrowTOS(masm);
7113}
7114
7115
Steve Blockd0582a62009-12-15 09:54:21 +00007116void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
7117 UNREACHABLE();
7118}
7119
7120
Steve Blocka7e24c12009-10-30 11:49:00 +00007121void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
7122 Label invoke, exit;
7123#ifdef ENABLE_LOGGING_AND_PROFILING
7124 Label not_outermost_js, not_outermost_js_2;
7125#endif
7126
7127 // Setup frame.
7128 __ push(rbp);
7129 __ movq(rbp, rsp);
7130
7131 // Push the stack frame type marker twice.
7132 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
Steve Block3ce2e202009-11-05 08:53:23 +00007133 __ Push(Smi::FromInt(marker)); // context slot
7134 __ Push(Smi::FromInt(marker)); // function slot
Steve Blocka7e24c12009-10-30 11:49:00 +00007135 // Save callee-saved registers (X64 calling conventions).
7136 __ push(r12);
7137 __ push(r13);
7138 __ push(r14);
7139 __ push(r15);
7140 __ push(rdi);
7141 __ push(rsi);
7142 __ push(rbx);
7143 // TODO(X64): Push XMM6-XMM15 (low 64 bits) as well, or make them
7144 // callee-save in JS code as well.
7145
7146 // Save copies of the top frame descriptor on the stack.
7147 ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
7148 __ load_rax(c_entry_fp);
7149 __ push(rax);
7150
7151#ifdef ENABLE_LOGGING_AND_PROFILING
7152 // If this is the outermost JS call, set js_entry_sp value.
7153 ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
7154 __ load_rax(js_entry_sp);
7155 __ testq(rax, rax);
7156 __ j(not_zero, &not_outermost_js);
7157 __ movq(rax, rbp);
7158 __ store_rax(js_entry_sp);
7159 __ bind(&not_outermost_js);
7160#endif
7161
7162 // Call a faked try-block that does the invoke.
7163 __ call(&invoke);
7164
7165 // Caught exception: Store result (exception) in the pending
7166 // exception field in the JSEnv and return a failure sentinel.
7167 ExternalReference pending_exception(Top::k_pending_exception_address);
7168 __ store_rax(pending_exception);
7169 __ movq(rax, Failure::Exception(), RelocInfo::NONE);
7170 __ jmp(&exit);
7171
7172 // Invoke: Link this frame into the handler chain.
7173 __ bind(&invoke);
7174 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
7175
7176 // Clear any pending exceptions.
7177 __ load_rax(ExternalReference::the_hole_value_location());
7178 __ store_rax(pending_exception);
7179
7180 // Fake a receiver (NULL).
7181 __ push(Immediate(0)); // receiver
7182
7183 // Invoke the function by calling through JS entry trampoline
7184 // builtin and pop the faked function when we return. We load the address
7185 // from an external reference instead of inlining the call target address
7186 // directly in the code, because the builtin stubs may not have been
7187 // generated yet at the time this code is generated.
7188 if (is_construct) {
7189 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
7190 __ load_rax(construct_entry);
7191 } else {
7192 ExternalReference entry(Builtins::JSEntryTrampoline);
7193 __ load_rax(entry);
7194 }
7195 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
7196 __ call(kScratchRegister);
7197
7198 // Unlink this frame from the handler chain.
7199 __ movq(kScratchRegister, ExternalReference(Top::k_handler_address));
7200 __ pop(Operand(kScratchRegister, 0));
7201 // Pop next_sp.
7202 __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
7203
7204#ifdef ENABLE_LOGGING_AND_PROFILING
7205 // If the current RBP value is the same as the js_entry_sp value, it means
7206 // that the current call is the outermost JS call.
7207 __ movq(kScratchRegister, js_entry_sp);
7208 __ cmpq(rbp, Operand(kScratchRegister, 0));
7209 __ j(not_equal, &not_outermost_js_2);
7210 __ movq(Operand(kScratchRegister, 0), Immediate(0));
7211 __ bind(&not_outermost_js_2);
7212#endif
7213
7214 // Restore the top frame descriptor from the stack.
7215 __ bind(&exit);
7216 __ movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address));
7217 __ pop(Operand(kScratchRegister, 0));
7218
7219 // Restore callee-saved registers (X64 conventions).
7220 __ pop(rbx);
7221 __ pop(rsi);
7222 __ pop(rdi);
7223 __ pop(r15);
7224 __ pop(r14);
7225 __ pop(r13);
7226 __ pop(r12);
7227 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers
7228
7229 // Restore frame pointer and return.
7230 __ pop(rbp);
7231 __ ret(0);
7232}
7233
7234
7235// -----------------------------------------------------------------------------
7236// Implementation of stubs.
7237
7238// Stub classes have public member named masm, not masm_.
7239
7240void StackCheckStub::Generate(MacroAssembler* masm) {
7241 // Because builtins always remove the receiver from the stack, we
7242 // have to fake one to avoid underflowing the stack. The receiver
7243 // must be inserted below the return address on the stack so we
7244 // temporarily store that in a register.
7245 __ pop(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00007246 __ Push(Smi::FromInt(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00007247 __ push(rax);
7248
7249 // Do tail-call to runtime routine.
7250 Runtime::Function* f = Runtime::FunctionForId(Runtime::kStackGuard);
7251 __ TailCallRuntime(ExternalReference(f), 1, f->result_size);
7252}
7253
7254
Steve Blocka7e24c12009-10-30 11:49:00 +00007255void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
7256 Register number) {
7257 Label load_smi, done;
7258
7259 __ JumpIfSmi(number, &load_smi);
7260 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
7261 __ jmp(&done);
7262
7263 __ bind(&load_smi);
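 // fild_s only accepts a memory operand, so the untagged 32-bit value is
 // pushed onto the stack, loaded from there, and popped again.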
7264 __ SmiToInteger32(number, number);
7265 __ push(number);
7266 __ fild_s(Operand(rsp, 0));
7267 __ pop(number);
7268
7269 __ bind(&done);
7270}
7271
7272
7273void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
7274 Register src,
7275 XMMRegister dst) {
7276 Label load_smi, done;
7277
7278 __ JumpIfSmi(src, &load_smi);
7279 __ movsd(dst, FieldOperand(src, HeapNumber::kValueOffset));
7280 __ jmp(&done);
7281
7282 __ bind(&load_smi);
7283 __ SmiToInteger32(src, src);
7284 __ cvtlsi2sd(dst, src);
7285
7286 __ bind(&done);
7287}
7288
7289
7290void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
7291 XMMRegister dst1,
7292 XMMRegister dst2) {
7293 __ movq(kScratchRegister, Operand(rsp, 2 * kPointerSize));
7294 LoadFloatOperand(masm, kScratchRegister, dst1);
7295 __ movq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
7296 LoadFloatOperand(masm, kScratchRegister, dst2);
7297}
7298
7299
Leon Clarkeeab96aa2010-01-27 16:31:12 +00007300void FloatingPointHelper::LoadInt32Operand(MacroAssembler* masm,
7301 const Operand& src,
7302 Register dst) {
7303 // TODO(X64): Convert number operands to int32 values.
7304 // Don't convert a Smi to a double first.
7305 UNIMPLEMENTED();
7306}
7307
7308
Steve Blocka7e24c12009-10-30 11:49:00 +00007309void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm) {
7310 Label load_smi_1, load_smi_2, done_load_1, done;
7311 __ movq(kScratchRegister, Operand(rsp, 2 * kPointerSize));
7312 __ JumpIfSmi(kScratchRegister, &load_smi_1);
7313 __ fld_d(FieldOperand(kScratchRegister, HeapNumber::kValueOffset));
7314 __ bind(&done_load_1);
7315
7316 __ movq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
7317 __ JumpIfSmi(kScratchRegister, &load_smi_2);
7318 __ fld_d(FieldOperand(kScratchRegister, HeapNumber::kValueOffset));
7319 __ jmp(&done);
7320
7321 __ bind(&load_smi_1);
7322 __ SmiToInteger32(kScratchRegister, kScratchRegister);
7323 __ push(kScratchRegister);
7324 __ fild_s(Operand(rsp, 0));
7325 __ pop(kScratchRegister);
7326 __ jmp(&done_load_1);
7327
7328 __ bind(&load_smi_2);
7329 __ SmiToInteger32(kScratchRegister, kScratchRegister);
7330 __ push(kScratchRegister);
7331 __ fild_s(Operand(rsp, 0));
7332 __ pop(kScratchRegister);
7333
7334 __ bind(&done);
7335}
7336
7337
7338void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
7339 Register lhs,
7340 Register rhs) {
7341 Label load_smi_lhs, load_smi_rhs, done_load_lhs, done;
7342 __ JumpIfSmi(lhs, &load_smi_lhs);
7343 __ fld_d(FieldOperand(lhs, HeapNumber::kValueOffset));
7344 __ bind(&done_load_lhs);
7345
7346 __ JumpIfSmi(rhs, &load_smi_rhs);
7347 __ fld_d(FieldOperand(rhs, HeapNumber::kValueOffset));
7348 __ jmp(&done);
7349
7350 __ bind(&load_smi_lhs);
7351 __ SmiToInteger64(kScratchRegister, lhs);
7352 __ push(kScratchRegister);
7353 __ fild_d(Operand(rsp, 0));
7354 __ pop(kScratchRegister);
7355 __ jmp(&done_load_lhs);
7356
7357 __ bind(&load_smi_rhs);
7358 __ SmiToInteger64(kScratchRegister, rhs);
7359 __ push(kScratchRegister);
7360 __ fild_d(Operand(rsp, 0));
7361 __ pop(kScratchRegister);
7362
7363 __ bind(&done);
7364}
7365
7366
Steve Block3ce2e202009-11-05 08:53:23 +00007367void FloatingPointHelper::CheckNumberOperands(MacroAssembler* masm,
7368 Label* non_float) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007369 Label test_other, done;
7370 // Test if both operands are numbers (heap_numbers or smis).
7371 // If not, jump to label non_float.
7372 __ JumpIfSmi(rdx, &test_other); // argument in rdx is OK
7373 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), Factory::heap_number_map());
7374 __ j(not_equal, non_float); // The argument in rdx is not a number.
7375
7376 __ bind(&test_other);
7377 __ JumpIfSmi(rax, &done); // argument in rax is OK
7378 __ Cmp(FieldOperand(rax, HeapObject::kMapOffset), Factory::heap_number_map());
7379 __ j(not_equal, non_float); // The argument in rax is not a number.
7380
7381 // Fall-through: Both operands are numbers.
7382 __ bind(&done);
7383}
7384
7385
7386const char* GenericBinaryOpStub::GetName() {
Leon Clarkee46be812010-01-19 14:06:41 +00007387 if (name_ != NULL) return name_;
7388 const int len = 100;
7389 name_ = Bootstrapper::AllocateAutoDeletedArray(len);
7390 if (name_ == NULL) return "OOM";
7391 const char* op_name = Token::Name(op_);
7392 const char* overwrite_name;
7393 switch (mode_) {
7394 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
7395 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
7396 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
7397 default: overwrite_name = "UnknownOverwrite"; break;
Steve Blocka7e24c12009-10-30 11:49:00 +00007398 }
Leon Clarkee46be812010-01-19 14:06:41 +00007399
7400 OS::SNPrintF(Vector<char>(name_, len),
7401 "GenericBinaryOpStub_%s_%s%s_%s%s_%s",
7402 op_name,
7403 overwrite_name,
7404 (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
7405 args_in_registers_ ? "RegArgs" : "StackArgs",
7406 args_reversed_ ? "_R" : "",
7407 use_sse3_ ? "SSE3" : "SSE2");
7408 return name_;
Steve Blocka7e24c12009-10-30 11:49:00 +00007409}
7410
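// The generated name encodes every parameter that affects code generation,
// so distinct stub variants get distinct names. Assuming Token::Name(ADD)
// yields "ADD", an ADD stub with no overwrite, stack arguments and SSE2
// would presumably be named "GenericBinaryOpStub_ADD_Alloc_StackArgs_SSE2".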
7411
Steve Blockd0582a62009-12-15 09:54:21 +00007412void GenericBinaryOpStub::GenerateCall(
7413 MacroAssembler* masm,
7414 Register left,
7415 Register right) {
7416 if (!ArgsInRegistersSupported()) {
7417 // Pass arguments on the stack.
7418 __ push(left);
7419 __ push(right);
7420 } else {
7421    // The register calling convention passes the left operand in rdx and the right in rax.
7422 Register left_arg = rdx;
7423 Register right_arg = rax;
7424 if (!(left.is(left_arg) && right.is(right_arg))) {
7425 if (left.is(right_arg) && right.is(left_arg)) {
7426 if (IsOperationCommutative()) {
7427 SetArgsReversed();
7428 } else {
7429 __ xchg(left, right);
7430 }
7431 } else if (left.is(left_arg)) {
7432 __ movq(right_arg, right);
7433 } else if (left.is(right_arg)) {
7434 if (IsOperationCommutative()) {
7435 __ movq(left_arg, right);
7436 SetArgsReversed();
7437 } else {
7438          // Order of moves is important to avoid destroying the left argument.
7439 __ movq(left_arg, left);
7440 __ movq(right_arg, right);
7441 }
7442 } else if (right.is(left_arg)) {
7443 if (IsOperationCommutative()) {
7444 __ movq(right_arg, left);
7445 SetArgsReversed();
7446 } else {
7447          // Order of moves is important to avoid destroying the right argument.
7448 __ movq(right_arg, right);
7449 __ movq(left_arg, left);
7450 }
7451 } else if (right.is(right_arg)) {
7452 __ movq(left_arg, left);
7453 } else {
7454 // Order of moves is not important.
7455 __ movq(left_arg, left);
7456 __ movq(right_arg, right);
7457 }
7458 }
7459
7460 // Update flags to indicate that arguments are in registers.
7461 SetArgsInRegisters();
7462 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
7463 }
7464
7465 // Call the stub.
7466 __ CallStub(this);
7467}
7468
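// The GenerateCall overloads (this one and the two Smi variants below) funnel
// the operands into the register calling convention (left in rdx, right in
// rax) whenever register arguments are supported; for commutative operators a
// swapped register assignment is recorded with SetArgsReversed() instead of
// emitting an extra move or xchg.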
7469
7470void GenericBinaryOpStub::GenerateCall(
7471 MacroAssembler* masm,
7472 Register left,
7473 Smi* right) {
7474 if (!ArgsInRegistersSupported()) {
7475 // Pass arguments on the stack.
7476 __ push(left);
7477 __ Push(right);
7478 } else {
7479    // The register calling convention passes the left operand in rdx and the right in rax.
7480 Register left_arg = rdx;
7481 Register right_arg = rax;
7482 if (left.is(left_arg)) {
7483 __ Move(right_arg, right);
7484 } else if (left.is(right_arg) && IsOperationCommutative()) {
7485 __ Move(left_arg, right);
7486 SetArgsReversed();
7487 } else {
7488 __ movq(left_arg, left);
7489 __ Move(right_arg, right);
7490 }
7491
7492 // Update flags to indicate that arguments are in registers.
7493 SetArgsInRegisters();
7494 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
7495 }
7496
7497 // Call the stub.
7498 __ CallStub(this);
7499}
7500
7501
7502void GenericBinaryOpStub::GenerateCall(
7503 MacroAssembler* masm,
7504 Smi* left,
7505 Register right) {
7506 if (!ArgsInRegistersSupported()) {
7507 // Pass arguments on the stack.
7508 __ Push(left);
7509 __ push(right);
7510 } else {
7511    // The register calling convention passes the left operand in rdx and the right in rax.
7512 Register left_arg = rdx;
7513 Register right_arg = rax;
7514 if (right.is(right_arg)) {
7515 __ Move(left_arg, left);
7516 } else if (right.is(left_arg) && IsOperationCommutative()) {
7517 __ Move(right_arg, left);
7518 SetArgsReversed();
7519 } else {
7520 __ Move(left_arg, left);
7521 __ movq(right_arg, right);
7522 }
7523 // Update flags to indicate that arguments are in registers.
7524 SetArgsInRegisters();
7525 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
7526 }
7527
7528 // Call the stub.
7529 __ CallStub(this);
7530}
7531
7532
Steve Blocka7e24c12009-10-30 11:49:00 +00007533void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
7534 // Perform fast-case smi code for the operation (rax <op> rbx) and
7535 // leave result in register rax.
7536
7537 // Smi check both operands.
7538 __ JumpIfNotBothSmi(rax, rbx, slow);
7539
7540 switch (op_) {
7541 case Token::ADD: {
7542 __ SmiAdd(rax, rax, rbx, slow);
7543 break;
7544 }
7545
7546 case Token::SUB: {
7547 __ SmiSub(rax, rax, rbx, slow);
7548 break;
7549 }
7550
7551 case Token::MUL:
7552 __ SmiMul(rax, rax, rbx, slow);
7553 break;
7554
7555 case Token::DIV:
7556 __ SmiDiv(rax, rax, rbx, slow);
7557 break;
7558
7559 case Token::MOD:
7560 __ SmiMod(rax, rax, rbx, slow);
7561 break;
7562
7563 case Token::BIT_OR:
7564 __ SmiOr(rax, rax, rbx);
7565 break;
7566
7567 case Token::BIT_AND:
7568 __ SmiAnd(rax, rax, rbx);
7569 break;
7570
7571 case Token::BIT_XOR:
7572 __ SmiXor(rax, rax, rbx);
7573 break;
7574
7575 case Token::SHL:
7576 case Token::SHR:
7577 case Token::SAR:
Leon Clarkeeab96aa2010-01-27 16:31:12 +00007578      // Move the second operand into register rcx.
Steve Block3ce2e202009-11-05 08:53:23 +00007579 __ movq(rcx, rbx);
Steve Blocka7e24c12009-10-30 11:49:00 +00007580 // Perform the operation.
7581 switch (op_) {
7582 case Token::SAR:
Steve Block3ce2e202009-11-05 08:53:23 +00007583 __ SmiShiftArithmeticRight(rax, rax, rcx);
Steve Blocka7e24c12009-10-30 11:49:00 +00007584 break;
7585 case Token::SHR:
Steve Block3ce2e202009-11-05 08:53:23 +00007586 __ SmiShiftLogicalRight(rax, rax, rcx, slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00007587 break;
7588 case Token::SHL:
Steve Block3ce2e202009-11-05 08:53:23 +00007589 __ SmiShiftLeft(rax, rax, rcx, slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00007590 break;
7591 default:
7592 UNREACHABLE();
7593 }
7594 break;
7595
7596 default:
7597 UNREACHABLE();
7598 break;
7599 }
7600}
7601
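// GenerateSmiCode assumes both operands are already in rax and rbx and bails
// out to the slow label on any non-smi input or on overflow inside the Smi*
// macro-assembler helpers. The shift count is staged in rcx, presumably
// because the underlying x86 shift instructions take a variable count in cl.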
7602
7603void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
7604 Label call_runtime;
Steve Blockd0582a62009-12-15 09:54:21 +00007605 if (HasSmiCodeInStub()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007606 // The fast case smi code wasn't inlined in the stub caller
7607 // code. Generate it here to speed up common operations.
7608 Label slow;
7609 __ movq(rbx, Operand(rsp, 1 * kPointerSize)); // get y
7610 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // get x
7611 GenerateSmiCode(masm, &slow);
Steve Blockd0582a62009-12-15 09:54:21 +00007612 GenerateReturn(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +00007613
7614 // Too bad. The fast case smi code didn't succeed.
7615 __ bind(&slow);
7616 }
7617
Steve Blockd0582a62009-12-15 09:54:21 +00007618 // Make sure the arguments are in rdx and rax.
7619 GenerateLoadArguments(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +00007620
7621 // Floating point case.
7622 switch (op_) {
7623 case Token::ADD:
7624 case Token::SUB:
7625 case Token::MUL:
7626 case Token::DIV: {
7627 // rax: y
7628 // rdx: x
Steve Block3ce2e202009-11-05 08:53:23 +00007629 FloatingPointHelper::CheckNumberOperands(masm, &call_runtime);
Steve Blocka7e24c12009-10-30 11:49:00 +00007630 // Fast-case: Both operands are numbers.
7631 // Allocate a heap number, if needed.
7632 Label skip_allocation;
7633 switch (mode_) {
7634 case OVERWRITE_LEFT:
7635 __ movq(rax, rdx);
7636 // Fall through!
7637 case OVERWRITE_RIGHT:
7638 // If the argument in rax is already an object, we skip the
7639 // allocation of a heap number.
7640 __ JumpIfNotSmi(rax, &skip_allocation);
7641 // Fall through!
7642 case NO_OVERWRITE:
Steve Blockd0582a62009-12-15 09:54:21 +00007643 // Allocate a heap number for the result. Keep rax and rdx intact
7644 // for the possible runtime call.
7645 __ AllocateHeapNumber(rbx, rcx, &call_runtime);
7646 __ movq(rax, rbx);
Steve Blocka7e24c12009-10-30 11:49:00 +00007647 __ bind(&skip_allocation);
7648 break;
7649 default: UNREACHABLE();
7650 }
7651 // xmm4 and xmm5 are volatile XMM registers.
7652 FloatingPointHelper::LoadFloatOperands(masm, xmm4, xmm5);
7653
7654 switch (op_) {
7655 case Token::ADD: __ addsd(xmm4, xmm5); break;
7656 case Token::SUB: __ subsd(xmm4, xmm5); break;
7657 case Token::MUL: __ mulsd(xmm4, xmm5); break;
7658 case Token::DIV: __ divsd(xmm4, xmm5); break;
7659 default: UNREACHABLE();
7660 }
7661 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm4);
Steve Blockd0582a62009-12-15 09:54:21 +00007662 GenerateReturn(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +00007663 }
7664 case Token::MOD: {
7665 // For MOD we go directly to runtime in the non-smi case.
7666 break;
7667 }
7668 case Token::BIT_OR:
7669 case Token::BIT_AND:
7670 case Token::BIT_XOR:
7671 case Token::SAR:
7672 case Token::SHL:
7673 case Token::SHR: {
Leon Clarkeeab96aa2010-01-27 16:31:12 +00007674 FloatingPointHelper::CheckNumberOperands(masm, &call_runtime);
7675 // TODO(X64): Don't convert a Smi to float and then back to int32
7676 // afterwards.
7677 FloatingPointHelper::LoadFloatOperands(masm);
7678
7679 Label skip_allocation, non_smi_result, operand_conversion_failure;
7680
7681 // Reserve space for converted numbers.
7682 __ subq(rsp, Immediate(2 * kPointerSize));
7683
7684 if (use_sse3_) {
7685 // Truncate the operands to 32-bit integers and check for
7686 // exceptions in doing so.
7687 CpuFeatures::Scope scope(SSE3);
7688 __ fisttp_s(Operand(rsp, 0 * kPointerSize));
7689 __ fisttp_s(Operand(rsp, 1 * kPointerSize));
7690 __ fnstsw_ax();
7691 __ testl(rax, Immediate(1));
7692 __ j(not_zero, &operand_conversion_failure);
7693 } else {
7694 // Check if right operand is int32.
7695 __ fist_s(Operand(rsp, 0 * kPointerSize));
7696 __ fild_s(Operand(rsp, 0 * kPointerSize));
7697 __ FCmp();
7698 __ j(not_zero, &operand_conversion_failure);
7699 __ j(parity_even, &operand_conversion_failure);
7700
7701 // Check if left operand is int32.
7702 __ fist_s(Operand(rsp, 1 * kPointerSize));
7703 __ fild_s(Operand(rsp, 1 * kPointerSize));
7704 __ FCmp();
7705 __ j(not_zero, &operand_conversion_failure);
7706 __ j(parity_even, &operand_conversion_failure);
7707 }
7708
7709 // Get int32 operands and perform bitop.
7710 __ pop(rcx);
7711 __ pop(rax);
Steve Blocka7e24c12009-10-30 11:49:00 +00007712 switch (op_) {
Steve Block3ce2e202009-11-05 08:53:23 +00007713 case Token::BIT_OR: __ orl(rax, rcx); break;
7714 case Token::BIT_AND: __ andl(rax, rcx); break;
7715 case Token::BIT_XOR: __ xorl(rax, rcx); break;
Steve Blockd0582a62009-12-15 09:54:21 +00007716 case Token::SAR: __ sarl_cl(rax); break;
7717 case Token::SHL: __ shll_cl(rax); break;
7718 case Token::SHR: __ shrl_cl(rax); break;
Steve Blocka7e24c12009-10-30 11:49:00 +00007719 default: UNREACHABLE();
7720 }
7721 if (op_ == Token::SHR) {
Steve Block3ce2e202009-11-05 08:53:23 +00007722        // Check if the result is negative. This can only happen for a shift
7723        // by zero, which also doesn't update the sign flag, so test explicitly.
7724 __ testl(rax, rax);
Steve Blocka7e24c12009-10-30 11:49:00 +00007725 __ j(negative, &non_smi_result);
7726 }
Steve Block3ce2e202009-11-05 08:53:23 +00007727 __ JumpIfNotValidSmiValue(rax, &non_smi_result);
7728 // Tag smi result, if possible, and return.
Steve Blocka7e24c12009-10-30 11:49:00 +00007729 __ Integer32ToSmi(rax, rax);
Steve Blockd0582a62009-12-15 09:54:21 +00007730 GenerateReturn(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +00007731
7732 // All ops except SHR return a signed int32 that we load in a HeapNumber.
Steve Block3ce2e202009-11-05 08:53:23 +00007733 if (op_ != Token::SHR && non_smi_result.is_linked()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007734 __ bind(&non_smi_result);
7735 // Allocate a heap number if needed.
7736 __ movsxlq(rbx, rax); // rbx: sign extended 32-bit result
7737 switch (mode_) {
7738 case OVERWRITE_LEFT:
7739 case OVERWRITE_RIGHT:
7740 // If the operand was an object, we skip the
7741 // allocation of a heap number.
7742 __ movq(rax, Operand(rsp, mode_ == OVERWRITE_RIGHT ?
7743 1 * kPointerSize : 2 * kPointerSize));
7744 __ JumpIfNotSmi(rax, &skip_allocation);
7745 // Fall through!
7746 case NO_OVERWRITE:
Steve Block3ce2e202009-11-05 08:53:23 +00007747 __ AllocateHeapNumber(rax, rcx, &call_runtime);
Steve Blocka7e24c12009-10-30 11:49:00 +00007748 __ bind(&skip_allocation);
7749 break;
7750 default: UNREACHABLE();
7751 }
7752 // Store the result in the HeapNumber and return.
7753 __ movq(Operand(rsp, 1 * kPointerSize), rbx);
7754 __ fild_s(Operand(rsp, 1 * kPointerSize));
7755 __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00007756 GenerateReturn(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +00007757 }
7758
Leon Clarkeeab96aa2010-01-27 16:31:12 +00007759 // Clear the FPU exception flag and reset the stack before calling
7760 // the runtime system.
7761 __ bind(&operand_conversion_failure);
7762 __ addq(rsp, Immediate(2 * kPointerSize));
7763 if (use_sse3_) {
7764 // If we've used the SSE3 instructions for truncating the
7765 // floating point values to integers and it failed, we have a
7766 // pending #IA exception. Clear it.
7767 __ fnclex();
7768 } else {
7769 // The non-SSE3 variant does early bailout if the right
7770 // operand isn't a 32-bit integer, so we may have a single
7771 // value on the FPU stack we need to get rid of.
7772 __ ffree(0);
7773 }
7774
Steve Blocka7e24c12009-10-30 11:49:00 +00007775 // SHR should return uint32 - go to runtime for non-smi/negative result.
7776 if (op_ == Token::SHR) {
7777 __ bind(&non_smi_result);
7778 }
7779 __ movq(rax, Operand(rsp, 1 * kPointerSize));
7780 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
7781 break;
7782 }
7783 default: UNREACHABLE(); break;
7784 }
7785
7786 // If all else fails, use the runtime system to get the correct
Steve Blockd0582a62009-12-15 09:54:21 +00007787  // result. If the arguments were passed in registers, place them back on
7788  // the stack in the correct order below the return address.
Steve Blocka7e24c12009-10-30 11:49:00 +00007789 __ bind(&call_runtime);
Steve Blockd0582a62009-12-15 09:54:21 +00007790 if (HasArgumentsInRegisters()) {
7791 __ pop(rcx);
7792 if (HasArgumentsReversed()) {
7793 __ push(rax);
7794 __ push(rdx);
7795 } else {
7796 __ push(rdx);
7797 __ push(rax);
7798 }
7799 __ push(rcx);
7800 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007801 switch (op_) {
Steve Blockd0582a62009-12-15 09:54:21 +00007802 case Token::ADD: {
7803 // Test for string arguments before calling runtime.
7804 Label not_strings, both_strings, not_string1, string1;
7805 Condition is_smi;
7806 Result answer;
7807 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // First argument.
7808 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // Second argument.
7809 is_smi = masm->CheckSmi(rdx);
7810 __ j(is_smi, &not_string1);
7811 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rdx);
7812 __ j(above_equal, &not_string1);
7813
7814      // First argument is a string, test second.
7815 is_smi = masm->CheckSmi(rax);
7816 __ j(is_smi, &string1);
7817 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rax);
7818 __ j(above_equal, &string1);
7819
7820 // First and second argument are strings.
Leon Clarkee46be812010-01-19 14:06:41 +00007821 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
7822 __ TailCallStub(&stub);
Steve Blockd0582a62009-12-15 09:54:21 +00007823
7824 // Only first argument is a string.
7825 __ bind(&string1);
7826 __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);
7827
7828 // First argument was not a string, test second.
7829 __ bind(&not_string1);
7830 is_smi = masm->CheckSmi(rax);
7831 __ j(is_smi, &not_strings);
7832 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rax);
7833 __ j(above_equal, &not_strings);
7834
7835 // Only second argument is a string.
7836 __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);
7837
7838 __ bind(&not_strings);
7839 // Neither argument is a string.
Steve Blocka7e24c12009-10-30 11:49:00 +00007840 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
7841 break;
Steve Blockd0582a62009-12-15 09:54:21 +00007842 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007843 case Token::SUB:
7844 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
7845 break;
7846 case Token::MUL:
7847 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
7848 break;
7849 case Token::DIV:
7850 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
7851 break;
7852 case Token::MOD:
7853 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
7854 break;
7855 case Token::BIT_OR:
7856 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
7857 break;
7858 case Token::BIT_AND:
7859 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
7860 break;
7861 case Token::BIT_XOR:
7862 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
7863 break;
7864 case Token::SAR:
7865 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
7866 break;
7867 case Token::SHL:
7868 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
7869 break;
7870 case Token::SHR:
7871 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
7872 break;
7873 default:
7874 UNREACHABLE();
7875 }
7876}
7877
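// Generate() therefore has three tiers: an optional inlined smi fast path, an
// SSE2/x87 heap-number path, and finally the runtime/builtin call. When the
// arguments were passed in registers they are pushed back onto the stack
// (below the return address) before the builtin is invoked, and ADD
// additionally dispatches string operands to StringAddStub or the
// STRING_ADD builtins.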
7878
Steve Blockd0582a62009-12-15 09:54:21 +00007879void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
7880 // If arguments are not passed in registers read them from the stack.
7881 if (!HasArgumentsInRegisters()) {
7882 __ movq(rax, Operand(rsp, 1 * kPointerSize));
7883 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
7884 }
7885}
7886
7887
7888void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
7889 // If arguments are not passed in registers remove them from the stack before
7890 // returning.
7891 if (!HasArgumentsInRegisters()) {
7892 __ ret(2 * kPointerSize); // Remove both operands
7893 } else {
7894 __ ret(0);
7895 }
7896}
7897
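// With stack arguments, ret(2 * kPointerSize) pops the two operand slots
// together with the return address; with register arguments there is nothing
// on the stack to remove, hence ret(0).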
7898
Steve Blocka7e24c12009-10-30 11:49:00 +00007899int CompareStub::MinorKey() {
Leon Clarkee46be812010-01-19 14:06:41 +00007900 // Encode the three parameters in a unique 16 bit value.
7901 ASSERT(static_cast<unsigned>(cc_) < (1 << 14));
7902 int nnn_value = (never_nan_nan_ ? 2 : 0);
7903 if (cc_ != equal) nnn_value = 0; // Avoid duplicate stubs.
7904 return (static_cast<unsigned>(cc_) << 2) | nnn_value | (strict_ ? 1 : 0);
7905}
7906
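// Key layout: bit 0 holds strict_, bit 1 holds never_nan_nan_ (only kept for
// equality comparisons), and the condition code occupies the bits above,
// i.e. key == (cc_ << 2) | nnn_value | strict_.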
7907
7908const char* CompareStub::GetName() {
7909 switch (cc_) {
7910 case less: return "CompareStub_LT";
7911 case greater: return "CompareStub_GT";
7912 case less_equal: return "CompareStub_LE";
7913 case greater_equal: return "CompareStub_GE";
7914 case not_equal: {
7915 if (strict_) {
7916 if (never_nan_nan_) {
7917 return "CompareStub_NE_STRICT_NO_NAN";
7918 } else {
7919 return "CompareStub_NE_STRICT";
7920 }
7921 } else {
7922 if (never_nan_nan_) {
7923 return "CompareStub_NE_NO_NAN";
7924 } else {
7925 return "CompareStub_NE";
7926 }
7927 }
7928 }
7929 case equal: {
7930 if (strict_) {
7931 if (never_nan_nan_) {
7932 return "CompareStub_EQ_STRICT_NO_NAN";
7933 } else {
7934 return "CompareStub_EQ_STRICT";
7935 }
7936 } else {
7937 if (never_nan_nan_) {
7938 return "CompareStub_EQ_NO_NAN";
7939 } else {
7940 return "CompareStub_EQ";
7941 }
7942 }
7943 }
7944 default: return "CompareStub";
7945 }
7946}
7947
7948
7949void StringAddStub::Generate(MacroAssembler* masm) {
7950 Label string_add_runtime;
7951
7952 // Load the two arguments.
7953 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument.
7954 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument.
7955
7956 // Make sure that both arguments are strings if not known in advance.
7957 if (string_check_) {
7958 Condition is_smi;
7959 is_smi = masm->CheckSmi(rax);
7960 __ j(is_smi, &string_add_runtime);
7961 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
7962 __ j(above_equal, &string_add_runtime);
7963
7964    // First argument is a string, test second.
7965 is_smi = masm->CheckSmi(rdx);
7966 __ j(is_smi, &string_add_runtime);
7967 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
7968 __ j(above_equal, &string_add_runtime);
7969 }
7970
7971 // Both arguments are strings.
7972 // rax: first string
7973 // rdx: second string
7974 // Check if either of the strings are empty. In that case return the other.
7975 Label second_not_zero_length, both_not_zero_length;
7976 __ movl(rcx, FieldOperand(rdx, String::kLengthOffset));
7977 __ testl(rcx, rcx);
7978 __ j(not_zero, &second_not_zero_length);
7979 // Second string is empty, result is first string which is already in rax.
7980 __ IncrementCounter(&Counters::string_add_native, 1);
7981 __ ret(2 * kPointerSize);
7982 __ bind(&second_not_zero_length);
7983 __ movl(rbx, FieldOperand(rax, String::kLengthOffset));
7984 __ testl(rbx, rbx);
7985 __ j(not_zero, &both_not_zero_length);
7986 // First string is empty, result is second string which is in rdx.
7987 __ movq(rax, rdx);
7988 __ IncrementCounter(&Counters::string_add_native, 1);
7989 __ ret(2 * kPointerSize);
7990
7991 // Both strings are non-empty.
7992 // rax: first string
7993 // rbx: length of first string
Leon Clarkeeab96aa2010-01-27 16:31:12 +00007994  // rcx: length of second string
7995  // rdx: second string
Leon Clarkee46be812010-01-19 14:06:41 +00007996  // r8: map of first string if the string check was performed above
7997  // r9: map of second string if the string check was performed above
7998 Label string_add_flat_result;
7999 __ bind(&both_not_zero_length);
8000 // Look at the length of the result of adding the two strings.
8001 __ addl(rbx, rcx);
8002 // Use the runtime system when adding two one character strings, as it
8003 // contains optimizations for this specific case using the symbol table.
8004 __ cmpl(rbx, Immediate(2));
8005 __ j(equal, &string_add_runtime);
8006 // If arguments where known to be strings, maps are not loaded to r8 and r9
8007  // If the arguments were known to be strings, their maps were not loaded into
8008  // r8 and r9 by the code above.
8009 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
8010 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
8011 }
8012 // Get the instance types of the two strings as they will be needed soon.
8013 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
8014 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));
8015 // Check if resulting string will be flat.
8016 __ cmpl(rbx, Immediate(String::kMinNonFlatLength));
8017 __ j(below, &string_add_flat_result);
8018 // Handle exceptionally long strings in the runtime system.
8019 ASSERT((String::kMaxLength & 0x80000000) == 0);
8020 __ cmpl(rbx, Immediate(String::kMaxLength));
8021 __ j(above, &string_add_runtime);
8022
8023 // If result is not supposed to be flat, allocate a cons string object. If
8024 // both strings are ascii the result is an ascii cons string.
8025 // rax: first string
8026  // rbx: length of resulting flat string
8027 // rdx: second string
8028 // r8: instance type of first string
8029 // r9: instance type of second string
8030 Label non_ascii, allocated;
8031 __ movl(rcx, r8);
8032 __ and_(rcx, r9);
8033 ASSERT(kStringEncodingMask == kAsciiStringTag);
8034 __ testl(rcx, Immediate(kAsciiStringTag));
8035 __ j(zero, &non_ascii);
8036  // Allocate an ascii cons string.
8037 __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime);
8038 __ bind(&allocated);
8039 // Fill the fields of the cons string.
8040 __ movl(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
8041 __ movl(FieldOperand(rcx, ConsString::kHashFieldOffset),
8042 Immediate(String::kEmptyHashField));
8043 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
8044 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
8045 __ movq(rax, rcx);
8046 __ IncrementCounter(&Counters::string_add_native, 1);
8047 __ ret(2 * kPointerSize);
8048 __ bind(&non_ascii);
8049 // Allocate a two byte cons string.
8050 __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime);
8051 __ jmp(&allocated);
8052
8053 // Handle creating a flat result. First check that both strings are not
8054 // external strings.
8055 // rax: first string
8056  // rbx: length of resulting flat string
8057 // rdx: second string
8058 // r8: instance type of first string
8059  // r9: instance type of second string
8060 __ bind(&string_add_flat_result);
8061 __ movl(rcx, r8);
8062 __ and_(rcx, Immediate(kStringRepresentationMask));
8063 __ cmpl(rcx, Immediate(kExternalStringTag));
8064 __ j(equal, &string_add_runtime);
8065 __ movl(rcx, r9);
8066 __ and_(rcx, Immediate(kStringRepresentationMask));
8067 __ cmpl(rcx, Immediate(kExternalStringTag));
8068 __ j(equal, &string_add_runtime);
8069 // Now check if both strings are ascii strings.
8070 // rax: first string
8071  // rbx: length of resulting flat string
8072 // rdx: second string
8073 // r8: instance type of first string
8074 // r9: instance type of second string
8075 Label non_ascii_string_add_flat_result;
8076 ASSERT(kStringEncodingMask == kAsciiStringTag);
8077 __ testl(r8, Immediate(kAsciiStringTag));
8078 __ j(zero, &non_ascii_string_add_flat_result);
8079 __ testl(r9, Immediate(kAsciiStringTag));
8080 __ j(zero, &string_add_runtime);
8081 // Both strings are ascii strings. As they are short they are both flat.
8082 __ AllocateAsciiString(rcx, rbx, rdi, r14, r15, &string_add_runtime);
8083 // rcx: result string
8084 __ movq(rbx, rcx);
8085 // Locate first character of result.
8086 __ addq(rcx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
8087 // Locate first character of first argument
8088 __ movl(rdi, FieldOperand(rax, String::kLengthOffset));
8089 __ addq(rax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
8090 // rax: first char of first argument
8091 // rbx: result string
8092 // rcx: first character of result
8093 // rdx: second string
8094 // rdi: length of first argument
8095 GenerateCopyCharacters(masm, rcx, rax, rdi, true);
8096 // Locate first character of second argument.
8097 __ movl(rdi, FieldOperand(rdx, String::kLengthOffset));
8098 __ addq(rdx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
8099 // rbx: result string
8100 // rcx: next character of result
8101 // rdx: first char of second argument
8102 // rdi: length of second argument
8103 GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
8104 __ movq(rax, rbx);
8105 __ IncrementCounter(&Counters::string_add_native, 1);
8106 __ ret(2 * kPointerSize);
8107
8108 // Handle creating a flat two byte result.
8109 // rax: first string - known to be two byte
8110 // rbx: length of resulting flat string
8111 // rdx: second string
8112 // r8: instance type of first string
8113  // r9: instance type of second string
8114 __ bind(&non_ascii_string_add_flat_result);
8115 __ and_(r9, Immediate(kAsciiStringTag));
8116 __ j(not_zero, &string_add_runtime);
8117 // Both strings are two byte strings. As they are short they are both
8118 // flat.
8119 __ AllocateTwoByteString(rcx, rbx, rdi, r14, r15, &string_add_runtime);
8120 // rcx: result string
8121 __ movq(rbx, rcx);
8122 // Locate first character of result.
8123 __ addq(rcx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
8124 // Locate first character of first argument.
8125 __ movl(rdi, FieldOperand(rax, String::kLengthOffset));
8126 __ addq(rax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
8127 // rax: first char of first argument
8128 // rbx: result string
8129 // rcx: first character of result
8130 // rdx: second argument
8131 // rdi: length of first argument
8132 GenerateCopyCharacters(masm, rcx, rax, rdi, false);
8133 // Locate first character of second argument.
8134 __ movl(rdi, FieldOperand(rdx, String::kLengthOffset));
8135 __ addq(rdx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
8136 // rbx: result string
8137 // rcx: next character of result
8138 // rdx: first char of second argument
8139 // rdi: length of second argument
8140 GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
8141 __ movq(rax, rbx);
8142 __ IncrementCounter(&Counters::string_add_native, 1);
8143 __ ret(2 * kPointerSize);
8144
8145 // Just jump to runtime to add the two strings.
8146 __ bind(&string_add_runtime);
8147 __ TailCallRuntime(ExternalReference(Runtime::kStringAdd), 2, 1);
8148}
8149
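// Strategy of the stub above: return the other operand if either string is
// empty, let the runtime handle two-character results (it can reuse the
// symbol table), build a cons string once the combined length reaches
// String::kMinNonFlatLength, and otherwise allocate a flat sequential string
// and copy both halves. External strings and mixed ascii/two-byte flat
// results fall back to the runtime.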
8150
Leon Clarkeeab96aa2010-01-27 16:31:12 +00008151void StringAddStub::GenerateCopyCharacters(MacroAssembler* masm,
8152 Register dest,
8153 Register src,
8154 Register count,
8155 bool ascii) {
Leon Clarkee46be812010-01-19 14:06:41 +00008156 Label loop;
8157 __ bind(&loop);
8158 // This loop just copies one character at a time, as it is only used for very
8159 // short strings.
8160 if (ascii) {
8161 __ movb(kScratchRegister, Operand(src, 0));
8162 __ movb(Operand(dest, 0), kScratchRegister);
8163 __ addq(src, Immediate(1));
8164 __ addq(dest, Immediate(1));
8165 } else {
8166 __ movzxwl(kScratchRegister, Operand(src, 0));
8167 __ movw(Operand(dest, 0), kScratchRegister);
8168 __ addq(src, Immediate(2));
8169 __ addq(dest, Immediate(2));
8170 }
8171 __ subl(count, Immediate(1));
8172 __ j(not_zero, &loop);
8173}
8174
8175
8176
8177void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
8178 Register left,
8179 Register right,
8180 Register scratch1,
8181 Register scratch2,
8182 Register scratch3,
8183 Register scratch4) {
8184 // Ensure that you can always subtract a string length from a non-negative
8185 // number (e.g. another length).
8186 ASSERT(String::kMaxLength < 0x7fffffff);
8187
8188 // Find minimum length and length difference.
8189 __ movl(scratch1, FieldOperand(left, String::kLengthOffset));
8190 __ movl(scratch4, scratch1);
8191 __ subl(scratch4, FieldOperand(right, String::kLengthOffset));
8192 // Register scratch4 now holds left.length - right.length.
8193 const Register length_difference = scratch4;
8194 Label left_shorter;
8195 __ j(less, &left_shorter);
8196  // The right string isn't longer than the left one.
8197 // Get the right string's length by subtracting the (non-negative) difference
8198 // from the left string's length.
8199 __ subl(scratch1, length_difference);
8200 __ bind(&left_shorter);
8201 // Register scratch1 now holds Min(left.length, right.length).
8202 const Register min_length = scratch1;
8203
8204 Label compare_lengths;
8205 // If min-length is zero, go directly to comparing lengths.
8206 __ testl(min_length, min_length);
8207 __ j(zero, &compare_lengths);
8208
8209 // Registers scratch2 and scratch3 are free.
8210 Label result_not_equal;
8211 Label loop;
8212 {
8213 // Check characters 0 .. min_length - 1 in a loop.
8214 // Use scratch3 as loop index, min_length as limit and scratch2
8215 // for computation.
8216 const Register index = scratch3;
8217 __ movl(index, Immediate(0)); // Index into strings.
8218 __ bind(&loop);
8219 // Compare characters.
8220 // TODO(lrn): Could we load more than one character at a time?
8221 __ movb(scratch2, FieldOperand(left,
8222 index,
8223 times_1,
8224 SeqAsciiString::kHeaderSize));
8225 // Increment index and use -1 modifier on next load to give
8226 // the previous load extra time to complete.
8227 __ addl(index, Immediate(1));
8228 __ cmpb(scratch2, FieldOperand(right,
8229 index,
8230 times_1,
8231 SeqAsciiString::kHeaderSize - 1));
8232 __ j(not_equal, &result_not_equal);
8233 __ cmpl(index, min_length);
8234 __ j(not_equal, &loop);
8235 }
8236 // Completed loop without finding different characters.
8237 // Compare lengths (precomputed).
8238 __ bind(&compare_lengths);
8239 __ testl(length_difference, length_difference);
8240 __ j(not_zero, &result_not_equal);
8241
8242 // Result is EQUAL.
8243 __ Move(rax, Smi::FromInt(EQUAL));
Leon Clarkeeab96aa2010-01-27 16:31:12 +00008244 __ IncrementCounter(&Counters::string_compare_native, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00008245 __ ret(2 * kPointerSize);
8246
8247 Label result_greater;
8248 __ bind(&result_not_equal);
8249 // Unequal comparison of left to right, either character or length.
8250 __ j(greater, &result_greater);
8251
8252 // Result is LESS.
8253 __ Move(rax, Smi::FromInt(LESS));
Leon Clarkeeab96aa2010-01-27 16:31:12 +00008254 __ IncrementCounter(&Counters::string_compare_native, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00008255 __ ret(2 * kPointerSize);
8256
8257 // Result is GREATER.
8258 __ bind(&result_greater);
8259 __ Move(rax, Smi::FromInt(GREATER));
Leon Clarkeeab96aa2010-01-27 16:31:12 +00008260 __ IncrementCounter(&Counters::string_compare_native, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00008261 __ ret(2 * kPointerSize);
8262}
8263
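// The flat-ascii comparison above compares min(left.length, right.length)
// bytes one at a time and, only if that prefix is equal, decides the result
// from the precomputed length difference, returning the smi-tagged
// LESS/EQUAL/GREATER constants directly to the caller.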
8264
8265void StringCompareStub::Generate(MacroAssembler* masm) {
8266 Label runtime;
8267
8268 // Stack frame on entry.
8269 // rsp[0]: return address
8270 // rsp[8]: right string
8271 // rsp[16]: left string
8272
8273 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left
8274 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right
8275
8276 // Check for identity.
8277 Label not_same;
8278 __ cmpq(rdx, rax);
8279 __ j(not_equal, &not_same);
8280 __ Move(rax, Smi::FromInt(EQUAL));
8281 __ IncrementCounter(&Counters::string_compare_native, 1);
8282 __ ret(2 * kPointerSize);
8283
8284 __ bind(&not_same);
8285
8286 // Check that both are sequential ASCII strings.
8287 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
8288
8289 // Inline comparison of ascii strings.
8290 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
8291
8292 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
8293 // tagged as a small integer.
8294 __ bind(&runtime);
8295 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00008296}
8297
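// StringCompareStub thus only handles identical pointers and flat ascii
// strings inline; everything else is delegated to Runtime::kStringCompare.
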
Steve Block3ce2e202009-11-05 08:53:23 +00008298#undef __
8299
8300#define __ masm.
8301
8302#ifdef _WIN64
8303typedef double (*ModuloFunction)(double, double);
8304// Define custom fmod implementation.
8305ModuloFunction CreateModuloFunction() {
8306 size_t actual_size;
8307 byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
8308 &actual_size,
8309 true));
8310 CHECK(buffer);
Steve Blockd0582a62009-12-15 09:54:21 +00008311 Assembler masm(buffer, static_cast<int>(actual_size));
Steve Block3ce2e202009-11-05 08:53:23 +00008312 // Generated code is put into a fixed, unmovable, buffer, and not into
8313 // the V8 heap. We can't, and don't, refer to any relocatable addresses
8314 // (e.g. the JavaScript nan-object).
8315
8316 // Windows 64 ABI passes double arguments in xmm0, xmm1 and
8317 // returns result in xmm0.
8318 // Argument backing space is allocated on the stack above
8319 // the return address.
8320
8321 // Compute x mod y.
8322 // Load y and x (use argument backing store as temporary storage).
8323 __ movsd(Operand(rsp, kPointerSize * 2), xmm1);
8324 __ movsd(Operand(rsp, kPointerSize), xmm0);
8325 __ fld_d(Operand(rsp, kPointerSize * 2));
8326 __ fld_d(Operand(rsp, kPointerSize));
8327
8328 // Clear exception flags before operation.
8329 {
8330 Label no_exceptions;
8331 __ fwait();
8332 __ fnstsw_ax();
8333    // Clear if Invalid Operation or Zero Division exceptions are set.
8334 __ testb(rax, Immediate(5));
8335 __ j(zero, &no_exceptions);
8336 __ fnclex();
8337 __ bind(&no_exceptions);
8338 }
8339
8340 // Compute st(0) % st(1)
8341 {
8342 Label partial_remainder_loop;
8343 __ bind(&partial_remainder_loop);
8344 __ fprem();
8345 __ fwait();
8346 __ fnstsw_ax();
8347 __ testl(rax, Immediate(0x400 /* C2 */));
8348 // If C2 is set, computation only has partial result. Loop to
8349 // continue computation.
8350 __ j(not_zero, &partial_remainder_loop);
8351 }
8352
8353 Label valid_result;
8354 Label return_result;
8355  // If Invalid Operation or Zero Division exceptions are set,
8356  // return NaN.
8357 __ testb(rax, Immediate(5));
8358 __ j(zero, &valid_result);
8359 __ fstp(0); // Drop result in st(0).
8360 int64_t kNaNValue = V8_INT64_C(0x7ff8000000000000);
8361 __ movq(rcx, kNaNValue, RelocInfo::NONE);
8362 __ movq(Operand(rsp, kPointerSize), rcx);
8363 __ movsd(xmm0, Operand(rsp, kPointerSize));
8364 __ jmp(&return_result);
8365
8366 // If result is valid, return that.
8367 __ bind(&valid_result);
8368 __ fstp_d(Operand(rsp, kPointerSize));
8369 __ movsd(xmm0, Operand(rsp, kPointerSize));
8370
8371 // Clean up FPU stack and exceptions and return xmm0
8372 __ bind(&return_result);
8373 __ fstp(0); // Unload y.
8374
8375 Label clear_exceptions;
8376 __ testb(rax, Immediate(0x3f /* Any Exception*/));
8377 __ j(not_zero, &clear_exceptions);
8378 __ ret(0);
8379 __ bind(&clear_exceptions);
8380 __ fnclex();
8381 __ ret(0);
8382
8383 CodeDesc desc;
8384 masm.GetCode(&desc);
8385 // Call the function from C++.
8386 return FUNCTION_CAST<ModuloFunction>(buffer);
8387}
8388
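// The routine above moves the SSE argument registers to the x87 stack via the
// argument backing store, loops on fprem until the partial-remainder bit (C2)
// in the FPU status word clears, returns NaN when the invalid-operation or
// zero-divide bits are set, and clears any pending exceptions before
// returning the result in xmm0.
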
8389#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00008390
Leon Clarkee46be812010-01-19 14:06:41 +00008391
Steve Blocka7e24c12009-10-30 11:49:00 +00008392#undef __
8393
8394} } // namespace v8::internal