// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#if defined(V8_TARGET_ARCH_X64)
#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm_)
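// Returns the AST id used for type feedback recording, or
// AstNode::kNoNumber for synthetic (compiler-introduced) properties,
// which have no usable id.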
static unsigned GetPropertyId(Property* property) {
if (property->is_synthetic()) return AstNode::kNoNumber;
return property->id();
}
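// A JumpPatchSite records the location of an inline smi check so that the
// inline cache machinery can later patch it. The jump emitted at the site
// is keyed off the carry flag, which the preceding testb always clears, so
// before patching jnc is always taken and jc never is. Patching rewrites
// jc to jz and jnc to jnz, making the branch depend on the smi tag bit
// instead (testb with kSmiTagMask sets ZF iff the value is a smi, since
// kSmiTag is 0). For example:
//
//   before patching:               after patching:
//     testb reg, kSmiTagMask         testb reg, kSmiTagMask
//     jnc target   ; always taken    jnz target   ; taken iff not a smi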
class JumpPatchSite BASE_EMBEDDED {
public:
explicit JumpPatchSite(MacroAssembler* masm)
: masm_(masm) {
#ifdef DEBUG
info_emitted_ = false;
#endif
}
~JumpPatchSite() {
ASSERT(patch_site_.is_bound() == info_emitted_);
}
void EmitJumpIfNotSmi(Register reg,
Label* target,
Label::Distance near_jump = Label::kFar) {
__ testb(reg, Immediate(kSmiTagMask));
EmitJump(not_carry, target, near_jump); // Always taken before patched.
}
void EmitJumpIfSmi(Register reg,
Label* target,
Label::Distance near_jump = Label::kFar) {
__ testb(reg, Immediate(kSmiTagMask));
EmitJump(carry, target, near_jump); // Never taken before patched.
}
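// Emits a testl on rax whose 8-bit immediate encodes the distance back to
// the patch site. The instruction has no useful runtime effect; it only
// serves as a marker that lets the inline cache locate the smi check.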
void EmitPatchInfo() {
int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
ASSERT(is_int8(delta_to_patch_site));
__ testl(rax, Immediate(delta_to_patch_site));
#ifdef DEBUG
info_emitted_ = true;
#endif
}
bool is_bound() const { return patch_site_.is_bound(); }
private:
// jc will be patched with jz, jnc will become jnz.
void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
ASSERT(!patch_site_.is_bound() && !info_emitted_);
ASSERT(cc == carry || cc == not_carry);
__ bind(&patch_site_);
__ j(cc, target, near_jump);
}
MacroAssembler* masm_;
Label patch_site_;
#ifdef DEBUG
bool info_emitted_;
#endif
};
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
// o rdi: the JS function object being called (i.e. ourselves)
// o rsi: our context
// o rbp: our caller's frame pointer
// o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FullCodeGenerator::Generate(CompilationInfo* info) {
ASSERT(info_ == NULL);
info_ = info;
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
__ int3();
}
#endif
// Strict mode functions need to replace the receiver with undefined
// when called as functions (without an explicit receiver
// object). rcx is zero for method calls and non-zero for function
// calls.
if (info->is_strict_mode()) {
Label ok;
__ testq(rcx, rcx);
__ j(zero, &ok, Label::kNear);
// +1 for return address.
int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
__ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
__ movq(Operand(rsp, receiver_offset), kScratchRegister);
__ bind(&ok);
}
__ push(rbp); // Caller's frame pointer.
__ movq(rbp, rsp);
__ push(rsi); // Callee's context.
__ push(rdi); // Callee's JS Function.
{ Comment cmnt(masm_, "[ Allocate locals");
int locals_count = scope()->num_stack_slots();
if (locals_count == 1) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
} else if (locals_count > 1) {
__ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
for (int i = 0; i < locals_count; i++) {
__ push(rdx);
}
}
}
bool function_in_register = true;
// Possibly allocate a local context.
int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
if (heap_slots > 0) {
Comment cmnt(masm_, "[ Allocate local context");
// Argument to NewContext is the function, which is still in rdi.
__ push(rdi);
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
FastNewContextStub stub(heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewContext, 1);
}
function_in_register = false;
// Context is returned in both rax and rsi. It replaces the context
// passed to us. It's saved in the stack and kept live in rsi.
__ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
// Copy any necessary parameters into the context.
int num_parameters = scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
Slot* slot = scope()->parameter(i)->AsSlot();
if (slot != NULL && slot->type() == Slot::CONTEXT) {
int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(num_parameters - 1 - i) * kPointerSize;
// Load parameter from stack.
__ movq(rax, Operand(rbp, parameter_offset));
// Store it in the context.
int context_offset = Context::SlotOffset(slot->index());
__ movq(Operand(rsi, context_offset), rax);
// Update the write barrier. This clobbers all involved
// registers, so we have to use a third register to avoid
// clobbering rsi.
__ movq(rcx, rsi);
__ RecordWrite(rcx, context_offset, rax, rbx);
}
}
}
// Possibly allocate an arguments object.
Variable* arguments = scope()->arguments();
if (arguments != NULL) {
// Arguments object must be allocated after the context object, in
// case the "arguments" or ".arguments" variables are in the context.
Comment cmnt(masm_, "[ Allocate arguments object");
if (function_in_register) {
__ push(rdi);
} else {
__ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
}
// The receiver is just before the parameters on the caller's stack.
int offset = scope()->num_parameters() * kPointerSize;
__ lea(rdx,
Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
__ push(rdx);
__ Push(Smi::FromInt(scope()->num_parameters()));
// Arguments to ArgumentsAccessStub:
// function, receiver address, parameter count.
// The stub will rewrite receiver and parameter count if the previous
// stack frame was an arguments adapter frame.
ArgumentsAccessStub stub(
is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
: ArgumentsAccessStub::NEW_NON_STRICT);
__ CallStub(&stub);
Variable* arguments_shadow = scope()->arguments_shadow();
if (arguments_shadow != NULL) {
// Store new arguments object in both "arguments" and ".arguments" slots.
__ movq(rcx, rax);
Move(arguments_shadow->AsSlot(), rcx, rbx, rdx);
}
Move(arguments->AsSlot(), rax, rbx, rdx);
}
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceEnter, 0);
}
// Visit the declarations and body unless there is an illegal
// redeclaration.
if (scope()->HasIllegalRedeclaration()) {
Comment cmnt(masm_, "[ Declarations");
scope()->VisitIllegalRedeclaration(this);
} else {
{ Comment cmnt(masm_, "[ Declarations");
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
EmitDeclaration(scope()->function(), Variable::CONST, NULL);
}
VisitDeclarations(scope()->declarations());
}
{ Comment cmnt(masm_, "[ Stack check");
PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
Label ok;
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &ok, Label::kNear);
StackCheckStub stub;
__ CallStub(&stub);
__ bind(&ok);
}
{ Comment cmnt(masm_, "[ Body");
ASSERT(loop_depth() == 0);
VisitStatements(function()->body());
ASSERT(loop_depth() == 0);
}
}
// Always emit a 'return undefined' in case control fell off the end of
// the body.
{ Comment cmnt(masm_, "[ return <undefined>;");
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
EmitReturnSequence();
}
}
void FullCodeGenerator::ClearAccumulator() {
__ Set(rax, 0);
}
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
Comment cmnt(masm_, "[ Stack check");
Label ok;
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &ok, Label::kNear);
StackCheckStub stub;
__ CallStub(&stub);
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
// the deoptimization input data found in the optimized code.
RecordStackCheck(stmt->OsrEntryId());
// Loop stack checks can be patched to perform on-stack replacement. In
// order to decide whether or not to perform OSR we embed the loop depth
// in a test instruction after the call so we can extract it from the OSR
// builtin.
ASSERT(loop_depth() > 0);
__ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
// Record a mapping of the OSR id to this PC. This is used if the OSR
// entry becomes the target of a bailout. We don't expect it to be, but
// we want it to work if it is.
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence");
if (return_label_.is_bound()) {
__ jmp(&return_label_);
} else {
__ bind(&return_label_);
if (FLAG_trace) {
__ push(rax);
__ CallRuntime(Runtime::kTraceExit, 1);
}
#ifdef DEBUG
// Add a label for checking the size of the code used for returning.
Label check_exit_codesize;
masm_->bind(&check_exit_codesize);
#endif
CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
__ RecordJSReturn();
// Do not use the leave instruction here because it is too short to
// patch with the code required by the debugger.
__ movq(rsp, rbp);
__ pop(rbp);
int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
__ Ret(arguments_bytes, rcx);
#ifdef ENABLE_DEBUGGER_SUPPORT
// Add padding that will be overwritten by a debugger breakpoint. We
// have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
// (3 + 1 + 3).
const int kPadding = Assembler::kJSReturnSequenceLength - 7;
for (int i = 0; i < kPadding; ++i) {
masm_->int3();
}
// Check that the size of the code used for returning is large enough
// for the debugger's requirements.
ASSERT(Assembler::kJSReturnSequenceLength <=
masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
}
}
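// In an effect context the value is not needed, so there is nothing to
// do; the same holds for the other empty Plug methods below.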
void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}
void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
__ movq(result_register(), slot_operand);
}
void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
__ push(slot_operand);
}
void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
codegen()->Move(result_register(), slot);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
}
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}
void FullCodeGenerator::AccumulatorValueContext::Plug(
Heap::RootListIndex index) const {
__ LoadRoot(result_register(), index);
}
void FullCodeGenerator::StackValueContext::Plug(
Heap::RootListIndex index) const {
__ PushRoot(index);
}
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
true,
true_label_,
false_label_);
if (index == Heap::kUndefinedValueRootIndex ||
index == Heap::kNullValueRootIndex ||
index == Heap::kFalseValueRootIndex) {
if (false_label_ != fall_through_) __ jmp(false_label_);
} else if (index == Heap::kTrueValueRootIndex) {
if (true_label_ != fall_through_) __ jmp(true_label_);
} else {
__ LoadRoot(result_register(), index);
codegen()->DoTest(true_label_, false_label_, fall_through_);
}
}
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
void FullCodeGenerator::AccumulatorValueContext::Plug(
Handle<Object> lit) const {
__ Move(result_register(), lit);
}
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
__ Push(lit);
}
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
true,
true_label_,
false_label_);
ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
if (false_label_ != fall_through_) __ jmp(false_label_);
} else if (lit->IsTrue() || lit->IsJSObject()) {
if (true_label_ != fall_through_) __ jmp(true_label_);
} else if (lit->IsString()) {
if (String::cast(*lit)->length() == 0) {
if (false_label_ != fall_through_) __ jmp(false_label_);
} else {
if (true_label_ != fall_through_) __ jmp(true_label_);
}
} else if (lit->IsSmi()) {
if (Smi::cast(*lit)->value() == 0) {
if (false_label_ != fall_through_) __ jmp(false_label_);
} else {
if (true_label_ != fall_through_) __ jmp(true_label_);
}
} else {
// For simplicity we always test the accumulator register.
__ Move(result_register(), lit);
codegen()->DoTest(true_label_, false_label_, fall_through_);
}
}
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
Register reg) const {
ASSERT(count > 0);
__ Drop(count);
}
void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
int count,
Register reg) const {
ASSERT(count > 0);
__ Drop(count);
__ Move(result_register(), reg);
}
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
Register reg) const {
ASSERT(count > 0);
if (count > 1) __ Drop(count - 1);
__ movq(Operand(rsp, 0), reg);
}
void FullCodeGenerator::TestContext::DropAndPlug(int count,
Register reg) const {
ASSERT(count > 0);
// For simplicity we always test the accumulator register.
__ Drop(count);
__ Move(result_register(), reg);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
}
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
Label* materialize_false) const {
ASSERT(materialize_true == materialize_false);
__ bind(materialize_true);
}
void FullCodeGenerator::AccumulatorValueContext::Plug(
Label* materialize_true,
Label* materialize_false) const {
Label done;
__ bind(materialize_true);
__ Move(result_register(), isolate()->factory()->true_value());
__ jmp(&done, Label::kNear);
__ bind(materialize_false);
__ Move(result_register(), isolate()->factory()->false_value());
__ bind(&done);
}
void FullCodeGenerator::StackValueContext::Plug(
Label* materialize_true,
Label* materialize_false) const {
Label done;
__ bind(materialize_true);
__ Push(isolate()->factory()->true_value());
__ jmp(&done, Label::kNear);
__ bind(materialize_false);
__ Push(isolate()->factory()->false_value());
__ bind(&done);
}
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
Label* materialize_false) const {
ASSERT(materialize_true == true_label_);
ASSERT(materialize_false == false_label_);
}
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}
void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
Heap::RootListIndex value_root_index =
flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
__ LoadRoot(result_register(), value_root_index);
}
void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
Heap::RootListIndex value_root_index =
flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
__ PushRoot(value_root_index);
}
void FullCodeGenerator::TestContext::Plug(bool flag) const {
codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
true,
true_label_,
false_label_);
if (flag) {
if (true_label_ != fall_through_) __ jmp(true_label_);
} else {
if (false_label_ != fall_through_) __ jmp(false_label_);
}
}
void FullCodeGenerator::DoTest(Label* if_true,
Label* if_false,
Label* fall_through) {
ToBooleanStub stub;
__ push(result_register());
__ CallStub(&stub);
__ testq(rax, rax);
// The stub returns nonzero for true.
Split(not_zero, if_true, if_false, fall_through);
}
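// Emits a branch on cc to if_true and a jump to if_false, omitting
// whichever jump would land on the fall-through label.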
void FullCodeGenerator::Split(Condition cc,
Label* if_true,
Label* if_false,
Label* fall_through) {
if (if_false == fall_through) {
__ j(cc, if_true);
} else if (if_true == fall_through) {
__ j(NegateCondition(cc), if_false);
} else {
__ j(cc, if_true);
__ jmp(if_false);
}
}
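// Returns a memory operand addressing the given slot. Parameter and local
// slots are rbp-relative; context slots require loading the enclosing
// context into the scratch register first. Lookup slots never reach this
// code.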
MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
return Operand(rbp, SlotOffset(slot));
case Slot::CONTEXT: {
int context_chain_length =
scope()->ContextChainLength(slot->var()->scope());
__ LoadContext(scratch, context_chain_length);
return ContextOperand(scratch, slot->index());
}
case Slot::LOOKUP:
UNREACHABLE();
}
UNREACHABLE();
return Operand(rax, 0);
}
void FullCodeGenerator::Move(Register destination, Slot* source) {
MemOperand location = EmitSlotSearch(source, destination);
__ movq(destination, location);
}
void FullCodeGenerator::Move(Slot* dst,
Register src,
Register scratch1,
Register scratch2) {
ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
ASSERT(!scratch1.is(src) && !scratch2.is(src));
MemOperand location = EmitSlotSearch(dst, scratch1);
__ movq(location, src);
// Emit the write barrier code if the location is in the heap.
if (dst->type() == Slot::CONTEXT) {
int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
__ RecordWrite(scratch1, offset, src, scratch2);
}
}
void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
bool should_normalize,
Label* if_true,
Label* if_false) {
// Only prepare for bailouts before splits if we're in a test
// context. Otherwise, we let the Visit function deal with the
// preparation to avoid preparing with the same AST id twice.
if (!context()->IsTest() || !info_->IsOptimizable()) return;
Label skip;
if (should_normalize) __ jmp(&skip, Label::kNear);
ForwardBailoutStack* current = forward_bailout_stack_;
while (current != NULL) {
PrepareForBailout(current->expr(), state);
current = current->parent();
}
if (should_normalize) {
__ CompareRoot(rax, Heap::kTrueValueRootIndex);
Split(equal, if_true, if_false, NULL);
__ bind(&skip);
}
}
void FullCodeGenerator::EmitDeclaration(Variable* variable,
Variable::Mode mode,
FunctionLiteral* function) {
Comment cmnt(masm_, "[ Declaration");
ASSERT(variable != NULL); // Must have been resolved.
Slot* slot = variable->AsSlot();
Property* prop = variable->AsProperty();
if (slot != NULL) {
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
if (mode == Variable::CONST) {
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
__ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
} else if (function != NULL) {
VisitForAccumulatorValue(function);
__ movq(Operand(rbp, SlotOffset(slot)), result_register());
}
break;
case Slot::CONTEXT:
// We bypass the general EmitSlotSearch because we know more about
// this specific context.
// The variable in the decl always resides in the current context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
if (FLAG_debug_code) {
// Check if we have the correct context pointer.
__ movq(rbx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
__ cmpq(rbx, rsi);
__ Check(equal, "Unexpected declaration in current context.");
}
if (mode == Variable::CONST) {
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
__ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
// No write barrier since the hole value is in old space.
} else if (function != NULL) {
VisitForAccumulatorValue(function);
__ movq(ContextOperand(rsi, slot->index()), result_register());
int offset = Context::SlotOffset(slot->index());
__ movq(rbx, rsi);
__ RecordWrite(rbx, offset, result_register(), rcx);
}
break;
case Slot::LOOKUP: {
__ push(rsi);
__ Push(variable->name());
// Declaration nodes are always introduced in one of two modes.
ASSERT(mode == Variable::VAR || mode == Variable::CONST);
PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
__ Push(Smi::FromInt(attr));
// Push initial value, if any.
// Note: For variables we must not push an initial value (such as
// 'undefined') because we may have a (legal) redeclaration and we
// must not destroy the current value.
if (mode == Variable::CONST) {
__ PushRoot(Heap::kTheHoleValueRootIndex);
} else if (function != NULL) {
VisitForStackValue(function);
} else {
__ Push(Smi::FromInt(0)); // no initial value!
}
__ CallRuntime(Runtime::kDeclareContextSlot, 4);
break;
}
}
} else if (prop != NULL) {
// A const declaration aliasing a parameter is an illegal redeclaration.
ASSERT(mode != Variable::CONST);
if (function != NULL) {
// We are declaring a function that rewrites to a property.
// Use (keyed) IC to set the initial value. We cannot visit the
// rewrite because it's shared and we risk recording duplicate AST
// IDs for bailouts from optimized code.
ASSERT(prop->obj()->AsVariableProxy() != NULL);
{ AccumulatorValueContext for_object(this);
EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
}
__ push(rax);
VisitForAccumulatorValue(function);
__ pop(rdx);
ASSERT(prop->key()->AsLiteral() != NULL &&
prop->key()->AsLiteral()->handle()->IsSmi());
__ Move(rcx, prop->key()->AsLiteral()->handle());
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
}
}
}
void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
}
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
__ push(rsi); // The context is the first argument.
__ Push(pairs);
__ Push(Smi::FromInt(is_eval() ? 1 : 0));
__ Push(Smi::FromInt(strict_mode_flag()));
__ CallRuntime(Runtime::kDeclareGlobals, 4);
// Return value is ignored.
}
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
Comment cmnt(masm_, "[ SwitchStatement");
Breakable nested_statement(this, stmt);
SetStatementPosition(stmt);
// Keep the switch value on the stack until a case matches.
VisitForStackValue(stmt->tag());
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
ZoneList<CaseClause*>* clauses = stmt->cases();
CaseClause* default_clause = NULL; // Can occur anywhere in the list.
Label next_test; // Recycled for each test.
// Compile all the tests with branches to their bodies.
for (int i = 0; i < clauses->length(); i++) {
CaseClause* clause = clauses->at(i);
clause->body_target()->Unuse();
// The default is not a test, but remember it as final fall through.
if (clause->is_default()) {
default_clause = clause;
continue;
}
Comment cmnt(masm_, "[ Case comparison");
__ bind(&next_test);
next_test.Unuse();
// Compile the label expression.
VisitForAccumulatorValue(clause->label());
// Perform the comparison as if via '==='.
__ movq(rdx, Operand(rsp, 0)); // Switch value.
bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
Label slow_case;
__ movq(rcx, rdx);
__ or_(rcx, rax);
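// rcx = rdx | rax has a clear smi tag bit only if both the switch value
// and the label value are smis, so a single check covers both operands.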
patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
__ cmpq(rdx, rax);
__ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed.
__ jmp(clause->body_target());
__ bind(&slow_case);
}
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
EmitCallIC(ic, &patch_site, clause->CompareId());
__ testq(rax, rax);
__ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed.
__ jmp(clause->body_target());
}
// Discard the test value and jump to the default if present, otherwise to
// the end of the statement.
__ bind(&next_test);
__ Drop(1); // Switch value is no longer needed.
if (default_clause == NULL) {
__ jmp(nested_statement.break_target());
} else {
__ jmp(default_clause->body_target());
}
// Compile all the case bodies.
for (int i = 0; i < clauses->length(); i++) {
Comment cmnt(masm_, "[ Case body");
CaseClause* clause = clauses->at(i);
__ bind(clause->body_target());
PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
VisitStatements(clause->statements());
}
__ bind(nested_statement.break_target());
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Comment cmnt(masm_, "[ ForInStatement");
SetStatementPosition(stmt);
Label loop, exit;
ForIn loop_statement(this, stmt);
increment_loop_depth();
// Get the object to enumerate over. Both SpiderMonkey and JSC
// ignore null and undefined in contrast to the specification; see
// ECMA-262 section 12.6.4.
VisitForAccumulatorValue(stmt->enumerable());
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
__ j(equal, &exit);
Register null_value = rdi;
__ LoadRoot(null_value, Heap::kNullValueRootIndex);
__ cmpq(rax, null_value);
__ j(equal, &exit);
// Convert the object to a JS object.
Label convert, done_convert;
__ JumpIfSmi(rax, &convert);
__ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
__ j(above_equal, &done_convert);
__ bind(&convert);
__ push(rax);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ bind(&done_convert);
__ push(rax);
// Check cache validity in generated code. This is a fast case for
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
// guarantee cache validity, call the runtime system to check cache
// validity or get the property names in a fixed array.
Label next, call_runtime;
Register empty_fixed_array_value = r8;
__ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
Register empty_descriptor_array_value = r9;
__ LoadRoot(empty_descriptor_array_value,
Heap::kEmptyDescriptorArrayRootIndex);
__ movq(rcx, rax);
__ bind(&next);
// Check that there are no elements. Register rcx contains the
// current JS object we've reached through the prototype chain.
__ cmpq(empty_fixed_array_value,
FieldOperand(rcx, JSObject::kElementsOffset));
__ j(not_equal, &call_runtime);
// Check that instance descriptors are not empty so that we can
// check for an enum cache. Leave the map in rbx for the subsequent
// prototype load.
__ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
__ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset));
__ JumpIfSmi(rdx, &call_runtime);
// Check that there is an enum cache in the non-empty instance
// descriptors (rdx). This is the case if the next enumeration
// index field does not contain a smi.
__ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
__ JumpIfSmi(rdx, &call_runtime);
// For all objects but the receiver, check that the cache is empty.
Label check_prototype;
__ cmpq(rcx, rax);
__ j(equal, &check_prototype, Label::kNear);
__ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
__ cmpq(rdx, empty_fixed_array_value);
__ j(not_equal, &call_runtime);
// Load the prototype from the map and loop if non-null.
__ bind(&check_prototype);
__ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
__ cmpq(rcx, null_value);
__ j(not_equal, &next);
// The enum cache is valid. Load the map of the object being
// iterated over and use the cache for the iteration.
Label use_cache;
__ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
__ jmp(&use_cache, Label::kNear);
// Get the set of properties to enumerate.
__ bind(&call_runtime);
__ push(rax); // Duplicate the enumerable object on the stack.
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
// If we got a map from the runtime call, we can do a fast
// modification check. Otherwise, we got a fixed array, and we have
// to do a slow check.
Label fixed_array;
__ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
Heap::kMetaMapRootIndex);
__ j(not_equal, &fixed_array, Label::kNear);
// We got a map in register rax. Get the enumeration cache from it.
__ bind(&use_cache);
__ LoadInstanceDescriptors(rax, rcx);
__ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
__ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
// Set up the four remaining stack slots.
__ push(rax); // Map.
__ push(rdx); // Enumeration cache.
__ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
__ push(rax); // Enumeration cache length (as smi).
__ Push(Smi::FromInt(0)); // Initial index.
__ jmp(&loop);
// We got a fixed array in register rax. Iterate through that.
__ bind(&fixed_array);
__ Push(Smi::FromInt(0)); // Map (0) - force slow check.
__ push(rax);
__ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
__ push(rax); // Fixed array length (as smi).
__ Push(Smi::FromInt(0)); // Initial index.
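// Both paths above leave the same five-slot layout on the stack, from
// the top: index (smi), length (smi), enum cache or fixed array, map (or
// smi zero to force the slow check), and the enumerable object.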
// Generate code for doing the condition check.
__ bind(&loop);
__ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
__ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
__ j(above_equal, loop_statement.break_target());
// Get the current entry of the array into register rbx.
__ movq(rbx, Operand(rsp, 2 * kPointerSize));
SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
__ movq(rbx, FieldOperand(rbx,
index.reg,
index.scale,
FixedArray::kHeaderSize));
// Load the expected map, or the smi zero that marks the permanent
// slow case, from the stack into register rdx.
__ movq(rdx, Operand(rsp, 3 * kPointerSize));
// Check if the expected map still matches that of the enumerable.
// If not, we have to filter the key.
Label update_each;
__ movq(rcx, Operand(rsp, 4 * kPointerSize));
__ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
__ j(equal, &update_each, Label::kNear);
// Convert the entry to a string or null if it isn't a property
// anymore. If the property has been removed while iterating, we
// just skip it.
__ push(rcx); // Enumerable.
__ push(rbx); // Current entry.
__ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
__ Cmp(rax, Smi::FromInt(0));
__ j(equal, loop_statement.continue_target());
__ movq(rbx, rax);
// Update the 'each' property or variable from the possibly filtered
// entry in register rbx.
__ bind(&update_each);
__ movq(result_register(), rbx);
// Perform the assignment as if via '='.
{ EffectContext context(this);
EmitAssignment(stmt->each(), stmt->AssignmentId());
}
// Generate code for the body of the loop.
Visit(stmt->body());
// Generate code for going to the next element by incrementing the
// index (smi) stored on top of the stack.
__ bind(loop_statement.continue_target());
__ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
EmitStackCheck(stmt);
__ jmp(&loop);
// Remove the pointers stored on the stack.
__ bind(loop_statement.break_target());
__ addq(rsp, Immediate(5 * kPointerSize));
// Exit and decrement the loop depth.
__ bind(&exit);
decrement_loop_depth();
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
__ Push(info);
__ CallStub(&stub);
} else {
__ push(rsi);
__ Push(info);
__ Push(pretenure
? isolate()->factory()->true_value()
: isolate()->factory()->false_value());
__ CallRuntime(Runtime::kNewClosure, 3);
}
context()->Plug(rax);
}
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
Comment cmnt(masm_, "[ VariableProxy");
EmitVariableLoad(expr->var());
}
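// Emits code that checks each context between the current one and the
// global context for an extension object (introduced, for example, by an
// eval that declares variables). If one is found the load jumps to the
// slow case; otherwise the global is loaded with a contextual load IC.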
void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
Slot* slot,
TypeofState typeof_state,
Label* slow) {
Register context = rsi;
Register temp = rdx;
Scope* s = scope();
while (s != NULL) {
if (s->num_heap_slots() > 0) {
if (s->calls_eval()) {
// Check that extension is NULL.
__ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
Immediate(0));
__ j(not_equal, slow);
}
// Load next context in chain.
__ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
__ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
// Walk the rest of the chain without clobbering rsi.
context = temp;
}
// If no outer scope calls eval, we do not need to check more
// context extensions. If we have reached an eval scope, we check
// all extensions from this point.
if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
s = s->outer_scope();
}
if (s != NULL && s->is_eval_scope()) {
// Loop up the context chain. There is no frame effect so it is
// safe to use raw labels here.
Label next, fast;
if (!context.is(temp)) {
__ movq(temp, context);
}
// Load map for comparison into register, outside loop.
__ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
__ bind(&next);
// Terminate at global context.
__ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
__ j(equal, &fast, Label::kNear);
// Check that extension is NULL.
__ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
__ j(not_equal, slow);
// Load next context in chain.
__ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
__ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
__ jmp(&next);
__ bind(&fast);
}
// All extension objects were empty and it is safe to use a global
// load IC call.
__ movq(rax, GlobalObjectOperand());
__ Move(rcx, slot->var()->name());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
EmitCallIC(ic, mode, AstNode::kNoNumber);
}
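// Like EmitLoadGlobalSlotCheckExtensions, but for a context slot whose
// home scope is known statically: walks the context chain to that scope,
// jumping to the slow case if any intervening context has an extension
// object, and returns an operand addressing the slot.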
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
Slot* slot,
Label* slow) {
ASSERT(slot->type() == Slot::CONTEXT);
Register context = rsi;
Register temp = rbx;
for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
if (s->num_heap_slots() > 0) {
if (s->calls_eval()) {
// Check that extension is NULL.
__ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
Immediate(0));
__ j(not_equal, slow);
}
__ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
__ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
// Walk the rest of the chain without clobbering rsi.
context = temp;
}
}
// Check that last extension is NULL.
__ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
__ j(not_equal, slow);
// This function is used only for loads, not stores, so it's safe to
// return an rsi-based operand (the write barrier cannot be allowed to
// destroy the rsi register).
return ContextOperand(context, slot->index());
}
void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
Slot* slot,
TypeofState typeof_state,
Label* slow,
Label* done) {
// Generate fast-case code for variables that might be shadowed by
// eval-introduced variables. Eval is used a lot without
// introducing variables. In those cases, we do not want to
// perform a runtime call for all variables in the scope
// containing the eval.
if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
__ jmp(done);
} else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
if (potential_slot != NULL) {
// Generate fast case for locals that rewrite to slots.
__ movq(rax,
ContextSlotOperandCheckExtensions(potential_slot, slow));
if (potential_slot->var()->mode() == Variable::CONST) {
__ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
__ j(not_equal, done);
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}
__ jmp(done);
} else if (rewrite != NULL) {
// Generate fast case for calls of an argument function.
Property* property = rewrite->AsProperty();
if (property != NULL) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
Literal* key_literal = property->key()->AsLiteral();
if (obj_proxy != NULL &&
key_literal != NULL &&
obj_proxy->IsArguments() &&
key_literal->handle()->IsSmi()) {
// Load arguments object if there are no eval-introduced
// variables. Then load the argument from the arguments
// object using keyed load.
__ movq(rdx,
ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
slow));
__ Move(rax, key_literal->handle());
Handle<Code> ic =
isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
__ jmp(done);
}
}
}
}
}
void FullCodeGenerator::EmitVariableLoad(Variable* var) {
// Four cases: non-this global variables, lookup slots, all other
// types of slots, and parameters that rewrite to explicit property
// accesses on the arguments object.
Slot* slot = var->AsSlot();
Property* property = var->AsProperty();
if (var->is_global() && !var->is_this()) {
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in rcx and the global
// object in rax.
__ Move(rcx, var->name());
__ movq(rax, GlobalObjectOperand());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
context()->Plug(rax);
} else if (slot != NULL && slot->type() == Slot::LOOKUP) {
Label done, slow;
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
Comment cmnt(masm_, "Lookup slot");
__ push(rsi); // Context.
__ Push(var->name());
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ bind(&done);
context()->Plug(rax);
} else if (slot != NULL) {
Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
? "Context slot"
: "Stack slot");
if (var->mode() == Variable::CONST) {
// Constants may be the hole value if they have not been initialized.
// Unhole them.
Label done;
MemOperand slot_operand = EmitSlotSearch(slot, rax);
__ movq(rax, slot_operand);
__ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
__ j(not_equal, &done, Label::kNear);
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
__ bind(&done);
context()->Plug(rax);
} else {
context()->Plug(slot);
}
} else {
Comment cmnt(masm_, "Rewritten parameter");
ASSERT_NOT_NULL(property);
// Rewritten parameter accesses are of the form "slot[literal]".
// Assert that the object is in a slot.
Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
ASSERT_NOT_NULL(object_var);
Slot* object_slot = object_var->AsSlot();
ASSERT_NOT_NULL(object_slot);
// Load the object.
MemOperand object_loc = EmitSlotSearch(object_slot, rax);
__ movq(rdx, object_loc);
// Assert that the key is a smi.
Literal* key_literal = property->key()->AsLiteral();
ASSERT_NOT_NULL(key_literal);
ASSERT(key_literal->handle()->IsSmi());
// Load the key.
__ Move(rax, key_literal->handle());
// Do a keyed property load.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
context()->Plug(rax);
}
}
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
Comment cmnt(masm_, "[ RegExpLiteral");
Label materialized;
// Registers will be used as follows:
// rdi = JS function.
// rcx = literals array.
// rbx = regexp literal.
// rax = regexp literal clone.
__ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
int literal_offset =
FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
__ movq(rbx, FieldOperand(rcx, literal_offset));
__ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
__ j(not_equal, &materialized);
// Create the regexp literal using the runtime function.
// The result will be in rax.
__ push(rcx);
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(expr->pattern());
__ Push(expr->flags());
__ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
__ movq(rbx, rax);
__ bind(&materialized);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);
__ push(rbx);
__ Push(Smi::FromInt(size));
__ CallRuntime(Runtime::kAllocateInNewSpace, 1);
__ pop(rbx);
__ bind(&allocated);
// Copy the content into the newly allocated memory.
// (Unroll copy loop once for better throughput).
for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
__ movq(rdx, FieldOperand(rbx, i));
__ movq(rcx, FieldOperand(rbx, i + kPointerSize));
__ movq(FieldOperand(rax, i), rdx);
__ movq(FieldOperand(rax, i + kPointerSize), rcx);
}
if ((size % (2 * kPointerSize)) != 0) {
__ movq(rdx, FieldOperand(rbx, size - kPointerSize));
__ movq(FieldOperand(rax, size - kPointerSize), rdx);
}
context()->Plug(rax);
}
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
__ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(expr->constant_properties());
int flags = expr->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
flags |= expr->has_function()
? ObjectLiteral::kHasFunction
: ObjectLiteral::kNoFlags;
__ Push(Smi::FromInt(flags));
if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
} else {
__ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
}
// If result_saved is true the result is on top of the stack. If
// result_saved is false the result is in rax.
bool result_saved = false;
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
expr->CalculateEmitStore();
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
Literal* key = property->key();
Expression* value = property->value();
if (!result_saved) {
__ push(rax); // Save result on the stack
result_saved = true;
}
switch (property->kind()) {
case ObjectLiteral::Property::CONSTANT:
UNREACHABLE();
case ObjectLiteral::Property::MATERIALIZED_LITERAL:
ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
// Fall through.
case ObjectLiteral::Property::COMPUTED:
if (key->handle()->IsSymbol()) {
if (property->emit_store()) {
VisitForAccumulatorValue(value);
__ Move(rcx, key->handle());
__ movq(rdx, Operand(rsp, 0));
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
}
break;
}
// Fall through.
case ObjectLiteral::Property::PROTOTYPE:
__ push(Operand(rsp, 0)); // Duplicate receiver.
VisitForStackValue(key);
VisitForStackValue(value);
if (property->emit_store()) {
__ Push(Smi::FromInt(NONE)); // PropertyAttributes
__ CallRuntime(Runtime::kSetProperty, 4);
} else {
__ Drop(3);
}
break;
case ObjectLiteral::Property::SETTER:
case ObjectLiteral::Property::GETTER:
__ push(Operand(rsp, 0)); // Duplicate receiver.
VisitForStackValue(key);
__ Push(property->kind() == ObjectLiteral::Property::SETTER ?
Smi::FromInt(1) :
Smi::FromInt(0));
VisitForStackValue(value);
__ CallRuntime(Runtime::kDefineAccessor, 4);
break;
}
}
if (expr->has_function()) {
ASSERT(result_saved);
__ push(Operand(rsp, 0));
__ CallRuntime(Runtime::kToFastProperties, 1);
}
if (result_saved) {
context()->PlugTOS();
} else {
context()->Plug(rax);
}
}
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Comment cmnt(masm_, "[ ArrayLiteral");
ZoneList<Expression*>* subexprs = expr->values();
int length = subexprs->length();
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(expr->constant_elements());
if (expr->constant_elements()->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
__ CallStub(&stub);
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
} else if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
__ CallStub(&stub);
}
bool result_saved = false; // Is the result saved to the stack?
// Emit code to evaluate all the non-constant subexpressions and to store
// them into the newly cloned array.
for (int i = 0; i < length; i++) {
Expression* subexpr = subexprs->at(i);
// If the subexpression is a literal or a simple materialized literal it
// is already set in the cloned array.
if (subexpr->AsLiteral() != NULL ||
CompileTimeValue::IsCompileTimeValue(subexpr)) {
continue;
}
if (!result_saved) {
__ push(rax);
result_saved = true;
}
VisitForAccumulatorValue(subexpr);
// Store the subexpression value in the array's elements.
__ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
__ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
int offset = FixedArray::kHeaderSize + (i * kPointerSize);
__ movq(FieldOperand(rbx, offset), result_register());
// Update the write barrier for the array store.
__ RecordWrite(rbx, offset, result_register(), rcx);
PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
}
if (result_saved) {
context()->PlugTOS();
} else {
context()->Plug(rax);
}
}
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
Comment cmnt(masm_, "[ Assignment");
// Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
// on the left-hand side.
if (!expr->target()->IsValidLeftHandSide()) {
VisitForEffect(expr->target());
return;
}
// Left-hand side can only be a property, a global or a (parameter or local)
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* property = expr->target()->AsProperty();
if (property != NULL) {
assign_type = (property->key()->IsPropertyName())
? NAMED_PROPERTY
: KEYED_PROPERTY;
}
// Evaluate LHS expression.
switch (assign_type) {
case VARIABLE:
// Nothing to do here.
break;
case NAMED_PROPERTY:
if (expr->is_compound()) {
// We need the receiver both on the stack and in the accumulator.
VisitForAccumulatorValue(property->obj());
__ push(result_register());
} else {
VisitForStackValue(property->obj());
}
break;
case KEYED_PROPERTY: {
if (expr->is_compound()) {
if (property->is_arguments_access()) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
__ push(slot_operand);
__ Move(rax, property->key()->AsLiteral()->handle());
} else {
VisitForStackValue(property->obj());
VisitForAccumulatorValue(property->key());
}
__ movq(rdx, Operand(rsp, 0));
__ push(rax);
} else {
if (property->is_arguments_access()) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
__ push(slot_operand);
__ Push(property->key()->AsLiteral()->handle());
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
}
}
break;
}
}
// For compound assignments we need another deoptimization point after the
// variable/property load.
if (expr->is_compound()) {
{ AccumulatorValueContext context(this);
switch (assign_type) {
case VARIABLE:
EmitVariableLoad(expr->target()->AsVariableProxy()->var());
PrepareForBailout(expr->target(), TOS_REG);
break;
case NAMED_PROPERTY:
EmitNamedPropertyLoad(property);
PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
break;
case KEYED_PROPERTY:
EmitKeyedPropertyLoad(property);
PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
break;
}
}
Token::Value op = expr->binary_op();
__ push(rax); // Left operand goes on the stack.
VisitForAccumulatorValue(expr->value());
OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
? OVERWRITE_RIGHT
: NO_OVERWRITE;
SetSourcePosition(expr->position() + 1);
AccumulatorValueContext context(this);
if (ShouldInlineSmiCase(op)) {
EmitInlineSmiBinaryOp(expr->binary_operation(),
op,
mode,
expr->target(),
expr->value());
} else {
EmitBinaryOp(expr->binary_operation(), op, mode);
}
// Deoptimization point in case the binary operation may have side effects.
PrepareForBailout(expr->binary_operation(), TOS_REG);
} else {
VisitForAccumulatorValue(expr->value());
}
// Record source position before possible IC call.
SetSourcePosition(expr->position());
// Store the value.
switch (assign_type) {
case VARIABLE:
EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
expr->op());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(rax);
break;
case NAMED_PROPERTY:
EmitNamedPropertyAssignment(expr);
break;
case KEYED_PROPERTY:
EmitKeyedPropertyAssignment(expr);
break;
}
}
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Literal* key = prop->key()->AsLiteral();
__ Move(rcx, key->handle());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
}
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
Token::Value op,
OverwriteMode mode,
Expression* left,
Expression* right) {
// Do combined smi check of the operands. Left operand is on the
// stack (popped into rdx). Right operand is in rax but moved into
// rcx to make the shifts easier.
Label done, stub_call, smi_case;
__ pop(rdx);
__ movq(rcx, rax);
__ or_(rax, rdx);
JumpPatchSite patch_site(masm_);
patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
__ bind(&stub_call);
__ movq(rax, rcx);
BinaryOpStub stub(op, mode);
EmitCallIC(stub.GetCode(), &patch_site, expr->id());
__ jmp(&done, Label::kNear);
__ bind(&smi_case);
switch (op) {
case Token::SAR:
__ SmiShiftArithmeticRight(rax, rdx, rcx);
break;
case Token::SHL:
__ SmiShiftLeft(rax, rdx, rcx);
break;
case Token::SHR:
__ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
break;
case Token::ADD:
__ SmiAdd(rax, rdx, rcx, &stub_call);
break;
case Token::SUB:
__ SmiSub(rax, rdx, rcx, &stub_call);
break;
case Token::MUL:
__ SmiMul(rax, rdx, rcx, &stub_call);
break;
case Token::BIT_OR:
__ SmiOr(rax, rdx, rcx);
break;
case Token::BIT_AND:
__ SmiAnd(rax, rdx, rcx);
break;
case Token::BIT_XOR:
__ SmiXor(rax, rdx, rcx);
break;
default:
UNREACHABLE();
break;
}
__ bind(&done);
context()->Plug(rax);
}
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
Token::Value op,
OverwriteMode mode) {
__ pop(rdx);
BinaryOpStub stub(op, mode);
// NULL signals no inlined smi code.
EmitCallIC(stub.GetCode(), NULL, expr->id());
context()->Plug(rax);
}
void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
VisitForEffect(expr);
return;
}
// Left-hand side can only be a property, a global or a (parameter or local)
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* prop = expr->AsProperty();
if (prop != NULL) {
assign_type = (prop->key()->IsPropertyName())
? NAMED_PROPERTY
: KEYED_PROPERTY;
}
switch (assign_type) {
case VARIABLE: {
Variable* var = expr->AsVariableProxy()->var();
EffectContext context(this);
EmitVariableAssignment(var, Token::ASSIGN);
break;
}
case NAMED_PROPERTY: {
__ push(rax); // Preserve value.
VisitForAccumulatorValue(prop->obj());
__ movq(rdx, rax);
__ pop(rax); // Restore value.
__ Move(rcx, prop->key()->AsLiteral()->handle());
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
break;
}
case KEYED_PROPERTY: {
__ push(rax); // Preserve value.
if (prop->is_synthetic()) {
ASSERT(prop->obj()->AsVariableProxy() != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
{ AccumulatorValueContext for_object(this);
EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
}
__ movq(rdx, rax);
__ Move(rcx, prop->key()->AsLiteral()->handle());
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
__ movq(rcx, rax);
__ pop(rdx);
}
__ pop(rax); // Restore value.
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
break;
}
}
PrepareForBailoutForId(bailout_ast_id, TOS_REG);
context()->Plug(rax);
}
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
// Left-hand sides that rewrite to explicit property accesses do not reach
// here.
ASSERT(var != NULL);
ASSERT(var->is_global() || var->AsSlot() != NULL);
if (var->is_global()) {
ASSERT(!var->is_this());
// Assignment to a global variable. Use inline caching for the
// assignment. Right-hand-side value is passed in rax, variable name in
// rcx, and the global object in rdx.
__ Move(rcx, var->name());
__ movq(rdx, GlobalObjectOperand());
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
} else if (op == Token::INIT_CONST) {
// Like var declarations, const declarations are hoisted to function
// scope. However, unlike var initializers, const initializers are able
// to drill a hole to that function context, even from inside a 'with'
// context. We thus bypass the normal static scope lookup.
Slot* slot = var->AsSlot();
Label skip;
switch (slot->type()) {
case Slot::PARAMETER:
// No const parameters.
UNREACHABLE();
break;
case Slot::LOCAL:
__ movq(rdx, Operand(rbp, SlotOffset(slot)));
__ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
__ j(not_equal, &skip);
__ movq(Operand(rbp, SlotOffset(slot)), rax);
break;
case Slot::CONTEXT: {
__ movq(rcx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
__ movq(rdx, ContextOperand(rcx, slot->index()));
__ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
__ j(not_equal, &skip);
__ movq(ContextOperand(rcx, slot->index()), rax);
int offset = Context::SlotOffset(slot->index());
__ movq(rdx, rax); // Preserve the stored value in rax.
__ RecordWrite(rcx, offset, rdx, rbx);
break;
}
case Slot::LOOKUP:
__ push(rax);
__ push(rsi);
__ Push(var->name());
__ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
break;
}
__ bind(&skip);
} else if (var->mode() != Variable::CONST) {
// Perform the assignment for non-const variables. Const assignments
// are simply skipped.
Slot* slot = var->AsSlot();
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
// Perform the assignment.
__ movq(Operand(rbp, SlotOffset(slot)), rax);
break;
case Slot::CONTEXT: {
MemOperand target = EmitSlotSearch(slot, rcx);
// Perform the assignment and issue the write barrier.
__ movq(target, rax);
// The value of the assignment is in rax. RecordWrite clobbers its
// register arguments.
__ movq(rdx, rax);
int offset = Context::SlotOffset(slot->index());
__ RecordWrite(rcx, offset, rdx, rbx);
break;
}
case Slot::LOOKUP:
// Call the runtime for the assignment.
__ push(rax); // Value.
__ push(rsi); // Context.
__ Push(var->name());
__ Push(Smi::FromInt(strict_mode_flag()));
__ CallRuntime(Runtime::kStoreContextSlot, 4);
break;
}
}
}
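// Emit an assignment to a named property via a store IC. On entry the value
// is in rax and the receiver is on the stack; if this store closes an
// initialization block, the receiver stays on the stack until the object has
// been switched back to fast properties.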
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
// Assignment to a property, using a named store IC.
Property* prop = expr->target()->AsProperty();
ASSERT(prop != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
// If the assignment starts a block of assignments to the same object,
// change to slow case to avoid the quadratic behavior of repeatedly
// adding fast properties.
if (expr->starts_initialization_block()) {
__ push(result_register());
__ push(Operand(rsp, kPointerSize)); // Receiver is now under value.
__ CallRuntime(Runtime::kToSlowProperties, 1);
__ pop(result_register());
}
// Record source code position before IC call.
SetSourcePosition(expr->position());
__ Move(rcx, prop->key()->AsLiteral()->handle());
if (expr->ends_initialization_block()) {
__ movq(rdx, Operand(rsp, 0));
} else {
__ pop(rdx);
}
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
__ push(rax); // Result of assignment, saved even if not needed.
__ push(Operand(rsp, kPointerSize)); // Receiver is under value.
__ CallRuntime(Runtime::kToFastProperties, 1);
__ pop(rax);
__ Drop(1);
}
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(rax);
}
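// Emit an assignment to a keyed property via a keyed store IC. On entry the
// value is in rax and the key and receiver are on the stack; as above, the
// receiver survives until a pending initialization block has been closed.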
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
// Assignment to a property, using a keyed store IC.
// If the assignment starts a block of assignments to the same object,
// change to slow case to avoid the quadratic behavior of repeatedly
// adding fast properties.
if (expr->starts_initialization_block()) {
__ push(result_register());
// Receiver is now under the key and value.
__ push(Operand(rsp, 2 * kPointerSize));
__ CallRuntime(Runtime::kToSlowProperties, 1);
__ pop(result_register());
}
__ pop(rcx);
if (expr->ends_initialization_block()) {
__ movq(rdx, Operand(rsp, 0)); // Leave receiver on the stack for later.
} else {
__ pop(rdx);
}
// Record source code position before IC call.
SetSourcePosition(expr->position());
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
__ pop(rdx);
__ push(rax); // Result of assignment, saved even if not needed.
__ push(rdx);
__ CallRuntime(Runtime::kToFastProperties, 1);
__ pop(rax);
}
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(rax);
}
void FullCodeGenerator::VisitProperty(Property* expr) {
Comment cmnt(masm_, "[ Property");
Expression* key = expr->key();
if (key->IsPropertyName()) {
VisitForAccumulatorValue(expr->obj());
EmitNamedPropertyLoad(expr);
context()->Plug(rax);
} else {
VisitForStackValue(expr->obj());
VisitForAccumulatorValue(expr->key());
__ pop(rdx);
EmitKeyedPropertyLoad(expr);
context()->Plug(rax);
}
}
void FullCodeGenerator::EmitCallWithIC(Call* expr,
Handle<Object> name,
RelocInfo::Mode mode) {
// Code common for calls using the IC.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
{ PreservePositionScope scope(masm()->positions_recorder());
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
}
__ Move(rcx, name);
}
// Record source position for debugger.
SetSourcePosition(expr->position());
// Call the IC initialization code.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic =
ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
EmitCallIC(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
context()->Plug(rax);
}
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Expression* key) {
// Load the key.
VisitForAccumulatorValue(key);
// Swap the name of the function and the receiver on the stack to follow
// the calling convention for call ICs.
__ pop(rcx);
__ push(rax);
__ push(rcx);
// Load the arguments.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
{ PreservePositionScope scope(masm()->positions_recorder());
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
}
}
// Record source position for debugger.
SetSourcePosition(expr->position());
// Call the IC initialization code.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic =
ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
__ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, rax); // Drop the key still on the stack.
}
void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
// Code common for calls using the call stub.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
{ PreservePositionScope scope(masm()->positions_recorder());
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
}
}
// Record source position for debugger.
SetSourcePosition(expr->position());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, flags);
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
// Discard the function left on TOS.
context()->DropAndPlug(1, rax);
}
void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
int arg_count) {
// Push copy of the first argument or undefined if it doesn't exist.
if (arg_count > 0) {
__ push(Operand(rsp, arg_count * kPointerSize));
} else {
__ PushRoot(Heap::kUndefinedValueRootIndex);
}
// Push the receiver of the enclosing function and do runtime call.
__ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
// Push the strict mode flag.
__ Push(Smi::FromInt(strict_mode_flag()));
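// Together with the copy of the function pushed by the caller, this makes
// the four arguments consumed by the runtime call below.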
__ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
? Runtime::kResolvePossiblyDirectEvalNoLookup
: Runtime::kResolvePossiblyDirectEval, 4);
}
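// Emit a function call. The callee is dispatched on its syntactic shape:
// possibly-direct calls to eval, calls to global variables (via a call IC),
// calls to lookup slots (dynamically introduced variables), calls to named
// or keyed properties, and otherwise a call through the generic
// CallFunctionStub with the global receiver.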
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
// We want to verify that RecordJSReturnSite gets called on all paths
// through this function. Avoid early returns.
expr->return_is_recorded_ = false;
#endif
Comment cmnt(masm_, "[ Call");
Expression* fun = expr->expression();
Variable* var = fun->AsVariableProxy()->AsVariable();
if (var != NULL && var->is_possibly_eval()) {
// In a call to eval, we first call %ResolvePossiblyDirectEval to
// resolve the function we need to call and the receiver of the
// call. Then we call the resolved function using the given
// arguments.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
{ PreservePositionScope pos_scope(masm()->positions_recorder());
VisitForStackValue(fun);
__ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
// Push the arguments.
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
}
// If we know that eval can only be shadowed by eval-introduced
// variables we attempt to load the global eval function directly
// in generated code. If we succeed, there is no need to perform a
// context lookup in the runtime system.
Label done;
if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
Label slow;
EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
NOT_INSIDE_TYPEOF,
&slow);
// Push the function and resolve eval.
__ push(rax);
EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
__ jmp(&done);
__ bind(&slow);
}
// Push copy of the function (found below the arguments) and
// resolve eval.
__ push(Operand(rsp, (arg_count + 1) * kPointerSize));
EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
if (done.is_linked()) {
__ bind(&done);
}
// The runtime call returns a pair of values in rax (function) and
// rdx (receiver). Touch up the stack with the right values.
__ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
__ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
}
// Record source position for debugger.
SetSourcePosition(expr->position());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, rax);
} else if (var != NULL && !var->is_this() && var->is_global()) {
// Call to a global variable.
// Push global object as receiver for the call IC lookup.
__ push(GlobalObjectOperand());
EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (var != NULL && var->AsSlot() != NULL &&
var->AsSlot()->type() == Slot::LOOKUP) {
// Call to a lookup slot (dynamically introduced variable).
Label slow, done;
{ PreservePositionScope scope(masm()->positions_recorder());
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
NOT_INSIDE_TYPEOF,
&slow,
&done);
__ bind(&slow);
}
// Call the runtime to find the function to call (returned in rax)
// and the object holding it (returned in rdx).
__ push(context_register());
__ Push(var->name());
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ push(rax); // Function.
__ push(rdx); // Receiver.
// If fast case code has been generated, emit code to push the
// function and receiver and have the slow path jump around this
// code.
if (done.is_linked()) {
Label call;
__ jmp(&call, Label::kNear);
__ bind(&done);
// Push function.
__ push(rax);
// Push global receiver.
__ movq(rbx, GlobalObjectOperand());
__ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
__ bind(&call);
}
// The receiver is either the global receiver or an object found
// by LoadContextSlot. That object could be the hole if the
// receiver is implicitly the global object.
EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
} else if (fun->AsProperty() != NULL) {
// Call to an object property.
Property* prop = fun->AsProperty();
Literal* key = prop->key()->AsLiteral();
if (key != NULL && key->handle()->IsSymbol()) {
// Call to a named property, use call IC.
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(prop->obj());
}
EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
} else {
// Call to a keyed property.
// For a synthetic property use a keyed load IC followed by a function call;
// for a regular property use EmitKeyedCallWithIC.
if (prop->is_synthetic()) {
// Do not visit the object and key subexpressions (they are shared
// by all occurrences of the same rewritten parameter).
ASSERT(prop->obj()->AsVariableProxy() != NULL);
ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
MemOperand operand = EmitSlotSearch(slot, rdx);
__ movq(rdx, operand);
ASSERT(prop->key()->AsLiteral() != NULL);
ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
__ Move(rax, prop->key()->AsLiteral()->handle());
// Record source code position for IC call.
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
// Push result (function).
__ push(rax);
// Push Global receiver.
__ movq(rcx, GlobalObjectOperand());
__ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
} else {
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(prop->obj());
}
EmitKeyedCallWithIC(expr, prop->key());
}
}
} else {
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(fun);
}
// Load global receiver object.
__ movq(rbx, GlobalObjectOperand());
__ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
// Emit function call.
EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
}
#ifdef DEBUG
// RecordJSReturnSite should have been called.
ASSERT(expr->return_is_recorded_);
#endif
}
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
Comment cmnt(masm_, "[ CallNew");
// According to ECMA-262, section 11.2.2, page 44, the function
// expression in new calls must be evaluated before the
// arguments.
// Push constructor on the stack. If it's not a function it's used as
// receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
// ignored.
VisitForStackValue(expr->expression());
// Push the arguments ("left-to-right") on the stack.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
}
// Call the construct call builtin that handles allocation and
// constructor invocation.
SetSourcePosition(expr->position());
// Load function and argument count into rdi and rax.
__ Set(rax, arg_count);
__ movq(rdi, Operand(rsp, arg_count * kPointerSize));
Handle<Code> construct_builtin =
isolate()->builtins()->JSConstructCall();
__ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
context()->Plug(rax);
}
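// The Emit* functions below generate inlined versions of selected runtime
// functions; they are reached through EmitInlineRuntimeCall when a call's
// name starts with '_' (see VisitCallRuntime). Each one evaluates its own
// arguments and plugs its result into the current expression context.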
void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ JumpIfSmi(rax, if_true);
__ jmp(if_false);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
Split(non_negative_smi, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ JumpIfSmi(rax, if_false);
__ CompareRoot(rax, Heap::kNullValueRootIndex);
__ j(equal, if_true);
__ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
// Undetectable objects behave like undefined when tested with typeof.
__ testb(FieldOperand(rbx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
__ j(not_zero, if_false);
__ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
__ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE));
__ j(below, if_false);
__ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE));
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(below_equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ JumpIfSmi(rax, if_false);
__ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(above_equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ JumpIfSmi(rax, if_false);
__ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
__ testb(FieldOperand(rbx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(not_zero, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
if (FLAG_debug_code) __ AbortIfSmi(rax);
// Check whether this map has already been checked to be safe for default
// valueOf.
__ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
__ testb(FieldOperand(rbx, Map::kBitField2Offset),
Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
__ j(not_zero, if_true);
// Check for fast case object. Generate false result for slow case object.
__ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
__ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
__ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
__ j(equal, if_false);
// Look for the valueOf symbol in the descriptor array, and indicate false
// if it is found. The type is not checked, so if the entry is a transition
// this is a false negative.
__ LoadInstanceDescriptors(rbx, rbx);
__ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
// rbx: descriptor array
// rcx: length of descriptor array
// Calculate the end of the descriptor array.
SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
__ lea(rcx,
Operand(
rbx, index.reg, index.scale, FixedArray::kHeaderSize));
// Calculate location of the first key name.
__ addq(rbx,
Immediate(FixedArray::kHeaderSize +
DescriptorArray::kFirstIndex * kPointerSize));
// Loop through all the keys in the descriptor array. If one of them is
// the valueOf symbol, the result is false.
Label entry, loop;
__ jmp(&entry);
__ bind(&loop);
__ movq(rdx, FieldOperand(rbx, 0));
__ Cmp(rdx, FACTORY->value_of_symbol());
__ j(equal, if_false);
__ addq(rbx, Immediate(kPointerSize));
__ bind(&entry);
__ cmpq(rbx, rcx);
__ j(not_equal, &loop);
// Reload map as register rbx was used as temporary above.
__ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
// If a valueOf property is not found on the object, check that its
// prototype is the unmodified String prototype. If not, the result is false.
__ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
__ testq(rcx, Immediate(kSmiTagMask));
__ j(zero, if_false);
__ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
__ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
__ cmpq(rcx,
ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
__ j(not_equal, if_false);
// Set the bit in the map to indicate that it has been checked safe for
// default valueOf, and return the true result.
__ or_(FieldOperand(rbx, Map::kBitField2Offset),
Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
__ jmp(if_true);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ JumpIfSmi(rax, if_false);
__ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ JumpIfSmi(rax, if_false);
__ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ JumpIfSmi(rax, if_false);
__ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
ASSERT(args->length() == 0);
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
// Get the frame pointer for the calling frame.
__ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
// Skip the arguments adaptor frame if it exists.
Label check_frame_marker;
__ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &check_frame_marker);
__ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
// Check the marker in the calling frame.
__ bind(&check_frame_marker);
__ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
Smi::FromInt(StackFrame::CONSTRUCT));
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
// Load the two objects into registers and perform the comparison.
VisitForStackValue(args->at(0));
VisitForAccumulatorValue(args->at(1));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ pop(rbx);
__ cmpq(rax, rbx);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
// ArgumentsAccessStub expects the key in rdx and the formal
// parameter count in rax.
VisitForAccumulatorValue(args->at(0));
__ movq(rdx, rax);
__ Move(rax, Smi::FromInt(scope()->num_parameters()));
ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
ASSERT(args->length() == 0);
Label exit;
// Get the number of formal parameters.
__ Move(rax, Smi::FromInt(scope()->num_parameters()));
// Check if the calling frame is an arguments adaptor frame.
__ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &exit, Label::kNear);
// Arguments adaptor case: Read the arguments length from the
// adaptor frame.
__ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ bind(&exit);
if (FLAG_debug_code) __ AbortIfNotSmi(rax);
context()->Plug(rax);
}
void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
Label done, null, function, non_function_constructor;
VisitForAccumulatorValue(args->at(0));
// If the object is a smi, we return null.
__ JumpIfSmi(rax, &null);
// Check that the object is a JS object but take special care of JS
// functions to make sure they have 'Function' as their class.
__ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); // Map is now in rax.
__ j(below, &null);
// As long as JS_FUNCTION_TYPE is the last instance type and it is
// right after LAST_JS_OBJECT_TYPE, we can avoid checking for
// LAST_JS_OBJECT_TYPE.
ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
__ CmpInstanceType(rax, JS_FUNCTION_TYPE);
__ j(equal, &function);
// Check if the constructor in the map is a function.
__ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
__ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
__ j(not_equal, &non_function_constructor);
// rax now contains the constructor function. Grab the
// instance class name from there.
__ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
__ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
__ jmp(&done);
// Functions have class 'Function'.
__ bind(&function);
__ Move(rax, isolate()->factory()->function_class_symbol());
__ jmp(&done);
// Objects with a non-function constructor have class 'Object'.
__ bind(&non_function_constructor);
__ Move(rax, isolate()->factory()->Object_symbol());
__ jmp(&done);
// Non-JS objects have class null.
__ bind(&null);
__ LoadRoot(rax, Heap::kNullValueRootIndex);
// All done.
__ bind(&done);
context()->Plug(rax);
}
void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
// Conditionally generate a log call.
// Args:
// 0 (literal string): The type of logging (corresponds to the flags).
// This is used to determine whether or not to generate the log call.
// 1 (string): Format string. Access the string at argument index 2
// with '%2s' (see Logger::LogRuntime for all the formats).
// 2 (array): Arguments to the format string.
ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallRuntime(Runtime::kLog, 2);
}
#endif
// Finally, we're expected to leave a value on the top of the stack.
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
context()->Plug(rax);
}
void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
ASSERT(args->length() == 0);
Label slow_allocate_heapnumber;
Label heapnumber_allocated;
__ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
__ jmp(&heapnumber_allocated);
__ bind(&slow_allocate_heapnumber);
// Allocate a heap number.
__ CallRuntime(Runtime::kNumberAlloc, 0);
__ movq(rbx, rax);
__ bind(&heapnumber_allocated);
// Return a random uint32 number in rax.
// The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
__ PrepareCallCFunction(1);
#ifdef _WIN64
__ LoadAddress(rcx, ExternalReference::isolate_address());
#else
__ LoadAddress(rdi, ExternalReference::isolate_address());
#endif
__ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
// Convert 32 random bits in rax to 0.(32 random bits) in a double
// by computing:
// (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
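// In detail: 1.0 x 2^20 as a double has bit pattern 0x4130000000000000.
// XORing the 32 random bits into its low mantissa bits produces the double
// 2^20 + r * 2^-32 (20 zero mantissa bits followed by r), so subtracting
// 2^20 leaves r * 2^-32, a value in the interval [0, 1).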
__ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
__ movd(xmm1, rcx);
__ movd(xmm0, rax);
__ cvtss2sd(xmm1, xmm1);
__ xorps(xmm0, xmm1);
__ subsd(xmm0, xmm1);
__ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
__ movq(rax, rbx);
context()->Plug(rax);
}
void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
// Load the arguments on the stack and call the stub.
SubStringStub stub;
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
// Load the arguments on the stack and call the stub.
RegExpExecStub stub;
ASSERT(args->length() == 4);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
VisitForStackValue(args->at(3));
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0)); // Load the object.
Label done;
// If the object is a smi return the object.
__ JumpIfSmi(rax, &done);
// If the object is not a value type, return the object.
__ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
__ j(not_equal, &done);
__ movq(rax, FieldOperand(rax, JSValue::kValueOffset));
__ bind(&done);
context()->Plug(rax);
}
void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
// Load the arguments on the stack and call the runtime function.
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
MathPowStub stub;
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0)); // Load the object.
VisitForAccumulatorValue(args->at(1)); // Load the value.
__ pop(rbx); // rax = value. rbx = object.
Label done;
// If the object is a smi, return the value.
__ JumpIfSmi(rbx, &done);
// If the object is not a value type, return the value.
__ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
__ j(not_equal, &done);
// Store the value.
__ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
// Update the write barrier. Save the value as it will be
// overwritten by the write barrier code and is needed afterward.
__ movq(rdx, rax);
__ RecordWrite(rbx, JSValue::kValueOffset, rdx, rcx);
__ bind(&done);
context()->Plug(rax);
}
void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
ASSERT_EQ(args->length(), 1);
// Load the argument on the stack and call the stub.
VisitForStackValue(args->at(0));
NumberToStringStub stub;
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label done;
StringCharFromCodeGenerator generator(rax, rbx);
generator.GenerateFast(masm_);
__ jmp(&done);
NopRuntimeCallHelper call_helper;
generator.GenerateSlow(masm_, call_helper);
__ bind(&done);
context()->Plug(rbx);
}
void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForAccumulatorValue(args->at(1));
Register object = rbx;
Register index = rax;
Register scratch = rcx;
Register result = rdx;
__ pop(object);
Label need_conversion;
Label index_out_of_range;
Label done;
StringCharCodeAtGenerator generator(object,
index,
scratch,
result,
&need_conversion,
&need_conversion,
&index_out_of_range,
STRING_INDEX_IS_NUMBER);
generator.GenerateFast(masm_);
__ jmp(&done);
__ bind(&index_out_of_range);
// When the index is out of range, the spec requires us to return
// NaN.
__ LoadRoot(result, Heap::kNanValueRootIndex);
__ jmp(&done);
__ bind(&need_conversion);
// Move the undefined value into the result register, which will
// trigger conversion.
__ LoadRoot(result, Heap::kUndefinedValueRootIndex);
__ jmp(&done);
NopRuntimeCallHelper call_helper;
generator.GenerateSlow(masm_, call_helper);
__ bind(&done);
context()->Plug(result);
}
void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForAccumulatorValue(args->at(1));
Register object = rbx;
Register index = rax;
Register scratch1 = rcx;
Register scratch2 = rdx;
Register result = rax;
__ pop(object);
Label need_conversion;
Label index_out_of_range;
Label done;
StringCharAtGenerator generator(object,
index,
scratch1,
scratch2,
result,
&need_conversion,
&need_conversion,
&index_out_of_range,
STRING_INDEX_IS_NUMBER);
generator.GenerateFast(masm_);
__ jmp(&done);
__ bind(&index_out_of_range);
// When the index is out of range, the spec requires us to return
// the empty string.
__ LoadRoot(result, Heap::kEmptyStringRootIndex);
__ jmp(&done);
__ bind(&need_conversion);
// Move smi zero into the result register, which will trigger
// conversion.
__ Move(result, Smi::FromInt(0));
__ jmp(&done);
NopRuntimeCallHelper call_helper;
generator.GenerateSlow(masm_, call_helper);
__ bind(&done);
context()->Plug(result);
}
void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
StringAddStub stub(NO_STRING_ADD_FLAGS);
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
StringCompareStub stub;
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::SIN,
TranscendentalCacheStub::TAGGED);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::COS,
TranscendentalCacheStub::TAGGED);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::LOG,
TranscendentalCacheStub::TAGGED);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the runtime function.
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallRuntime(Runtime::kMath_sqrt, 1);
context()->Plug(rax);
}
void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
ASSERT(args->length() >= 2);
int arg_count = args->length() - 2; // 2 ~ receiver and function.
for (int i = 0; i < arg_count + 1; i++) {
VisitForStackValue(args->at(i));
}
VisitForAccumulatorValue(args->last()); // Function.
// InvokeFunction requires the function in rdi. Move it in there.
__ movq(rdi, result_register());
ParameterCount count(arg_count);
__ InvokeFunction(rdi, count, CALL_FUNCTION,
NullCallWrapper(), CALL_AS_METHOD);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
context()->Plug(rax);
}
void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
RegExpConstructResultStub stub;
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
Label done;
Label slow_case;
Register object = rax;
Register index_1 = rbx;
Register index_2 = rcx;
Register elements = rdi;
Register temp = rdx;
__ movq(object, Operand(rsp, 2 * kPointerSize));
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CmpObjectType(object, JS_ARRAY_TYPE, temp);
__ j(not_equal, &slow_case);
__ testb(FieldOperand(temp, Map::kBitFieldOffset),
Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
__ j(not_zero, &slow_case);
// Check the object's elements are in fast case and writable.
__ movq(elements, FieldOperand(object, JSObject::kElementsOffset));
__ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
Heap::kFixedArrayMapRootIndex);
__ j(not_equal, &slow_case);
// Check that both indices are smis.
__ movq(index_1, Operand(rsp, 1 * kPointerSize));
__ movq(index_2, Operand(rsp, 0 * kPointerSize));
__ JumpIfNotBothSmi(index_1, index_2, &slow_case);
// Check that both indices are valid.
// The JSArray length field is a smi since the array is in fast case mode.
__ movq(temp, FieldOperand(object, JSArray::kLengthOffset));
__ SmiCompare(temp, index_1);
__ j(below_equal, &slow_case);
__ SmiCompare(temp, index_2);
__ j(below_equal, &slow_case);
__ SmiToInteger32(index_1, index_1);
__ SmiToInteger32(index_2, index_2);
// Bring the addresses of the two elements into index_1 and index_2.
__ lea(index_1, FieldOperand(elements, index_1, times_pointer_size,
FixedArray::kHeaderSize));
__ lea(index_2, FieldOperand(elements, index_2, times_pointer_size,
FixedArray::kHeaderSize));
// Swap elements. Use object and temp as scratch registers.
__ movq(object, Operand(index_1, 0));
__ movq(temp, Operand(index_2, 0));
__ movq(Operand(index_2, 0), object);
__ movq(Operand(index_1, 0), temp);
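// If the elements array is in new space, the swapped stores cannot create
// old-to-new pointers, so the write barrier below can be skipped.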
Label new_space;
__ InNewSpace(elements, temp, equal, &new_space);
__ movq(object, elements);
__ RecordWriteHelper(object, index_1, temp);
__ RecordWriteHelper(elements, index_2, temp);
__ bind(&new_space);
// We are done. Drop elements from the stack, and return undefined.
__ addq(rsp, Immediate(3 * kPointerSize));
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
__ jmp(&done);
__ bind(&slow_case);
__ CallRuntime(Runtime::kSwapElements, 3);
__ bind(&done);
context()->Plug(rax);
}
void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
ASSERT_NE(NULL, args->at(0)->AsLiteral());
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
isolate()->global_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
context()->Plug(rax);
return;
}
VisitForAccumulatorValue(args->at(1));
Register key = rax;
Register cache = rbx;
Register tmp = rcx;
__ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
__ movq(cache,
FieldOperand(cache, GlobalObject::kGlobalContextOffset));
__ movq(cache,
ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
__ movq(cache,
FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
Label done, not_found;
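// The cache stores (key, value) pairs inline in the fixed array; the finger
// is the smi offset of a key slot (presumably the most recently used pair).
// The fast path compares the key under the finger and, on a hit, loads the
// value one pointer size further; otherwise the runtime performs the lookup.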
ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
// tmp now holds the finger offset as a smi.
SmiIndex index =
__ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
__ cmpq(key, FieldOperand(cache,
index.reg,
index.scale,
FixedArray::kHeaderSize));
__ j(not_equal, &not_found, Label::kNear);
__ movq(rax, FieldOperand(cache,
index.reg,
index.scale,
FixedArray::kHeaderSize + kPointerSize));
__ jmp(&done, Label::kNear);
__ bind(&not_found);
// Call runtime to perform the lookup.
__ push(cache);
__ push(key);
__ CallRuntime(Runtime::kGetFromCache, 2);
__ bind(&done);
context()->Plug(rax);
}
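// Two regexps are considered equivalent here when they are the same object,
// or when both are JSRegExps that share a map and the same data array
// (which, for this check, stands in for pattern and flags).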
void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
Register right = rax;
Register left = rbx;
Register tmp = rcx;
VisitForStackValue(args->at(0));
VisitForAccumulatorValue(args->at(1));
__ pop(left);
Label done, fail, ok;
__ cmpq(left, right);
__ j(equal, &ok, Label::kNear);
// Fail if either is a non-HeapObject.
Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
__ j(either_smi, &fail, Label::kNear);
__ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
__ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
Immediate(JS_REGEXP_TYPE));
__ j(not_equal, &fail, Label::kNear);
__ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
__ j(not_equal, &fail, Label::kNear);
__ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
__ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
__ j(equal, &ok, Label::kNear);
__ bind(&fail);
__ Move(rax, isolate()->factory()->false_value());
__ jmp(&done, Label::kNear);
__ bind(&ok);
__ Move(rax, isolate()->factory()->true_value());
__ bind(&done);
context()->Plug(rax);
}
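// A string's hash field contains a cached array index precisely when the
// bits covered by String::kContainsCachedArrayIndexMask are all zero, so the
// test below jumps to the true branch on zero.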
void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ testl(FieldOperand(rax, String::kHashFieldOffset),
Immediate(String::kContainsCachedArrayIndexMask));
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ j(zero, if_true);
__ jmp(if_false);
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
if (FLAG_debug_code) {
__ AbortIfNotString(rax);
}
__ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
ASSERT(String::kHashShift >= kSmiTagSize);
__ IndexFromHash(rax, rax);
context()->Plug(rax);
}
void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
Label bailout, return_result, done, one_char_separator, long_separator,
non_trivial_array, not_size_one_array, loop,
loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
ASSERT(args->length() == 2);
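// Overall plan: verify that the receiver is a JSArray of sequential ASCII
// strings, sum up the string lengths, allocate a single sequential ASCII
// result string, and copy the pieces into it with one of three loops
// specialized for an empty, a one-character, or a longer separator. Any
// failed check jumps to the bailout path, which simply produces undefined.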
// We will leave the separator on the stack until the end of the function.
VisitForStackValue(args->at(1));
// Load the array into rax.
VisitForAccumulatorValue(args->at(0));
// All aliases of the same register have disjoint lifetimes.
Register array = rax;
Register elements = no_reg; // Will be rax.
Register index = rdx;
Register string_length = rcx;
Register string = rsi;
Register scratch = rbx;
Register array_length = rdi;
Register result_pos = no_reg; // Will be rdi.
Operand separator_operand = Operand(rsp, 2 * kPointerSize);
Operand result_operand = Operand(rsp, 1 * kPointerSize);
Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
// Separator operand is already pushed. Make room for the two
// other stack fields, and clear the direction flag in anticipation
// of calling CopyBytes.
__ subq(rsp, Immediate(2 * kPointerSize));
__ cld();
// Check that the array is a JSArray.
__ JumpIfSmi(array, &bailout);
__ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
__ j(not_equal, &bailout);
// Check that the array has fast elements.
__ testb(FieldOperand(scratch, Map::kBitField2Offset),
Immediate(1 << Map::kHasFastElements));
__ j(zero, &bailout);
// Array has fast elements, so its length must be a smi.
// If the array has length zero, return the empty string.
__ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
__ SmiCompare(array_length, Smi::FromInt(0));
__ j(not_zero, &non_trivial_array);
__ LoadRoot(rax, Heap::kEmptyStringRootIndex);
__ jmp(&return_result);
// Save the array length on the stack.
__ bind(&non_trivial_array);
__ SmiToInteger32(array_length, array_length);
__ movl(array_length_operand, array_length);
// Save the FixedArray containing array's elements.
// End of array's live range.
elements = array;
__ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
array = no_reg;
// Check that all array elements are sequential ASCII strings, and
// accumulate the sum of their lengths, as a smi-encoded value.
__ Set(index, 0);
__ Set(string_length, 0);
// Loop condition: while (index < array_length).
// Live loop registers: index(int32), array_length(int32), string(String*),
// scratch, string_length(int32), elements(FixedArray*).
if (FLAG_debug_code) {
__ cmpq(index, array_length);
__ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
}
__ bind(&loop);
__ movq(string, FieldOperand(elements,
index,
times_pointer_size,
FixedArray::kHeaderSize));
__ JumpIfSmi(string, &bailout);
__ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
__ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
__ andb(scratch, Immediate(
kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
__ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
__ j(not_equal, &bailout);
__ AddSmiField(string_length,
FieldOperand(string, SeqAsciiString::kLengthOffset));
__ j(overflow, &bailout);
__ incl(index);
__ cmpl(index, array_length);
__ j(less, &loop);
// Live registers:
// string_length: Sum of string lengths.
// elements: FixedArray of strings.
// index: Array length.
// array_length: Array length.
// If array_length is 1, return elements[0], a string.
__ cmpl(array_length, Immediate(1));
__ j(not_equal, &not_size_one_array);
__ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
__ jmp(&return_result);
__ bind(&not_size_one_array);
// End of array_length live range.
result_pos = array_length;
array_length = no_reg;
// Live registers:
// string_length: Sum of string lengths.
// elements: FixedArray of strings.
// index: Array length.
// Check that the separator is a sequential ASCII string.
__ movq(string, separator_operand);
__ JumpIfSmi(string, &bailout);
__ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
__ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
__ andb(scratch, Immediate(
kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
__ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
__ j(not_equal, &bailout);
// Live registers:
// string_length: Sum of string lengths.
// elements: FixedArray of strings.
// index: Array length.
// string: Separator string.
// Add (separator length times (array_length - 1)) to string_length.
__ SmiToInteger32(scratch,
FieldOperand(string, SeqAsciiString::kLengthOffset));
__ decl(index);
__ imull(scratch, index);
__ j(overflow, &bailout);
__ addl(string_length, scratch);
__ j(overflow, &bailout);
// Live registers and stack values:
// string_length: Total length of result string.
// elements: FixedArray of strings.
__ AllocateAsciiString(result_pos, string_length, scratch,
index, string, &bailout);
__ movq(result_operand, result_pos);
__ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
__ movq(string, separator_operand);
__ SmiCompare(FieldOperand(string, SeqAsciiString::kLengthOffset),
Smi::FromInt(1));
__ j(equal, &one_char_separator);
__ j(greater, &long_separator);
// Empty separator case:
__ Set(index, 0);
__ movl(scratch, array_length_operand);
__ jmp(&loop_1_condition);
// Loop condition: while (index < array_length).
__ bind(&loop_1);
// Each iteration of the loop concatenates one string to the result.
// Live values in registers:
// index: which element of the elements array we are adding to the result.
// result_pos: the position to which we are currently copying characters.
// elements: the FixedArray of strings we are joining.
// scratch: array length.
// Get string = array[index].
__ movq(string, FieldOperand(elements, index,
times_pointer_size,
FixedArray::kHeaderSize));
__ SmiToInteger32(string_length,
FieldOperand(string, String::kLengthOffset));
__ lea(string,
FieldOperand(string, SeqAsciiString::kHeaderSize));
__ CopyBytes(result_pos, string, string_length);
__ incl(index);
__ bind(&loop_1_condition);
__ cmpl(index, scratch);
__ j(less, &loop_1); // Loop while (index < array_length).
__ jmp(&done);
// Generic bailout code used from several places.
__ bind(&bailout);
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
__ jmp(&return_result);
// One-character separator case
__ bind(&one_char_separator);
// Get the separator ascii character value.
// Register "string" holds the separator.
__ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
__ Set(index, 0);
// Jump into the loop after the code that copies the separator, so the first
// element is not preceded by a separator.
__ jmp(&loop_2_entry);
// Loop condition: while (index < length).
__ bind(&loop_2);
// Each iteration of the loop concatenates one string to the result.
// Live values in registers:
// elements: The FixedArray of strings we are joining.
// index: which element of the elements array we are adding to the result.
// result_pos: the position to which we are currently copying characters.
// scratch: Separator character.
// Copy the separator character to the result.
__ movb(Operand(result_pos, 0), scratch);
__ incq(result_pos);
__ bind(&loop_2_entry);
// Get string = array[index].
__ movq(string, FieldOperand(elements, index,
times_pointer_size,
FixedArray::kHeaderSize));
__ SmiToInteger32(string_length,
FieldOperand(string, String::kLengthOffset));
__ lea(string,
FieldOperand(string, SeqAsciiString::kHeaderSize));
__ CopyBytes(result_pos, string, string_length);
__ incl(index);
__ cmpl(index, array_length_operand);
__ j(less, &loop_2); // End while (index < length).
__ jmp(&done);
// Long separator case (separator is more than one character).
__ bind(&long_separator);
// Make elements point to end of elements array, and index
// count from -array_length to zero, so we don't need to maintain
// a loop limit.
__ movl(index, array_length_operand);
__ lea(elements, FieldOperand(elements, index, times_pointer_size,
FixedArray::kHeaderSize));
__ neg(index);
// Replace separator string with pointer to its first character, and
// make scratch be its length.
__ movq(string, separator_operand);
__ SmiToInteger32(scratch,
FieldOperand(string, String::kLengthOffset));
__ lea(string,
FieldOperand(string, SeqAsciiString::kHeaderSize));
__ movq(separator_operand, string);
// Jump into the loop after the code that copies the separator, so the first
// element is not preceded by a separator.
__ jmp(&loop_3_entry);
// Loop condition: while (index < length).
__ bind(&loop_3);
// Each iteration of the loop concatenates one string to the result.
// Live values in registers:
// index: which element of the elements array we are adding to the result.
// result_pos: the position to which we are currently copying characters.
// scratch: Separator length.
// separator_operand (rsp[0x10]): Address of first char of separator.
// Copy the separator to the result.
__ movq(string, separator_operand);
__ movl(string_length, scratch);
__ CopyBytes(result_pos, string, string_length, 2);
__ bind(&loop_3_entry);
// Get string = array[index].
__ movq(string, Operand(elements, index, times_pointer_size, 0));
__ SmiToInteger32(string_length,
FieldOperand(string, String::kLengthOffset));
__ lea(string,
FieldOperand(string, SeqAsciiString::kHeaderSize));
__ CopyBytes(result_pos, string, string_length);
__ incq(index);
__ j(not_equal, &loop_3); // Loop while (index < 0).
__ bind(&done);
__ movq(rax, result_operand);
__ bind(&return_result);
// Drop temp values from the stack, and restore context register.
__ addq(rsp, Immediate(3 * kPointerSize));
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
context()->Plug(rax);
}
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {
Comment cmnt(masm_, "[ InlineRuntimeCall");
EmitInlineRuntimeCall(expr);
return;
}
Comment cmnt(masm_, "[ CallRuntime");
ZoneList<Expression*>* args = expr->arguments();
if (expr->is_jsruntime()) {
// Prepare for calling JS runtime function.
__ movq(rax, GlobalObjectOperand());
__ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
}
// Push the arguments ("left-to-right").
int arg_count = args->length();
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
}
if (expr->is_jsruntime()) {
// Call the JS runtime function using a call IC.
__ Move(rcx, expr->name());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
EmitCallIC(ic, mode, expr->id());
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
} else {
__ CallRuntime(expr->function(), arg_count);
}
context()->Plug(rax);
}
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
switch (expr->op()) {
case Token::DELETE: {
Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
Property* prop = expr->expression()->AsProperty();
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
if (prop != NULL) {
if (prop->is_synthetic()) {
// Result of deleting parameters is false, even when they rewrite
// to accesses on the arguments object.
context()->Plug(false);
} else {
VisitForStackValue(prop->obj());
VisitForStackValue(prop->key());
__ Push(Smi::FromInt(strict_mode_flag()));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(rax);
}
} else if (var != NULL) {
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is.
ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
if (var->is_global()) {
__ push(GlobalObjectOperand());
__ Push(var->name());
__ Push(Smi::FromInt(kNonStrictMode));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(rax);
} else if (var->AsSlot() != NULL &&
var->AsSlot()->type() != Slot::LOOKUP) {
// Result of deleting non-global, non-dynamic variables is false.
// The subexpression does not have side effects.
context()->Plug(false);
} else {
// Non-global variable. Call the runtime to try to delete from the
// context where the variable was introduced.
__ push(context_register());
__ Push(var->name());
__ CallRuntime(Runtime::kDeleteContextSlot, 2);
context()->Plug(rax);
}
} else {
// Result of deleting non-property, non-variable reference is true.
// The subexpression may have side effects.
VisitForEffect(expr->expression());
context()->Plug(true);
}
break;
}
case Token::VOID: {
Comment cmnt(masm_, "[ UnaryOperation (VOID)");
VisitForEffect(expr->expression());
context()->Plug(Heap::kUndefinedValueRootIndex);
break;
}
case Token::NOT: {
Comment cmnt(masm_, "[ UnaryOperation (NOT)");
if (context()->IsEffect()) {
// Unary NOT has no side effects so it's only necessary to visit the
// subexpression. Match the optimizing compiler by not branching.
VisitForEffect(expr->expression());
} else {
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
// Notice that the labels are swapped.
context()->PrepareTest(&materialize_true, &materialize_false,
&if_false, &if_true, &fall_through);
if (context()->IsTest()) ForwardBailoutToChild(expr);
VisitForControl(expr->expression(), if_true, if_false, fall_through);
context()->Plug(if_false, if_true); // Labels swapped.
}
break;
}
case Token::TYPEOF: {
Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
{ StackValueContext context(this);
VisitForTypeofValue(expr->expression());
}
__ CallRuntime(Runtime::kTypeof, 1);
context()->Plug(rax);
break;
}
case Token::ADD: {
Comment cmt(masm_, "[ UnaryOperation (ADD)");
VisitForAccumulatorValue(expr->expression());
Label no_conversion;
Condition is_smi = masm_->CheckSmi(result_register());
__ j(is_smi, &no_conversion);
ToNumberStub convert_stub;
__ CallStub(&convert_stub);
__ bind(&no_conversion);
context()->Plug(result_register());
break;
}
case Token::SUB:
EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
break;
case Token::BIT_NOT:
EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
break;
default:
UNREACHABLE();
}
}
void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
const char* comment) {
// TODO(svenpanne): Allowing format strings in Comment would be nice here...
Comment cmt(masm_, comment);
bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
UnaryOverwriteMode overwrite =
can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
UnaryOpStub stub(expr->op(), overwrite);
// UnaryOpStub expects the argument to be in the
// accumulator register rax.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
EmitCallIC(stub.GetCode(), NULL, expr->id());
context()->Plug(rax);
}
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
// Invalid left-hand-sides are rewritten to have a 'throw
// ReferenceError' as the left-hand side.
if (!expr->expression()->IsValidLeftHandSide()) {
VisitForEffect(expr->expression());
return;
}
// Expression can only be a property, a global or a (parameter or local)
// slot. Variables that rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* prop = expr->expression()->AsProperty();
// In case of a property we use the uninitialized expression context
// of the key to detect a named property.
if (prop != NULL) {
assign_type =
(prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
}
// Evaluate expression and get value.
if (assign_type == VARIABLE) {
ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
AccumulatorValueContext context(this);
EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
__ Push(Smi::FromInt(0));
}
if (assign_type == NAMED_PROPERTY) {
VisitForAccumulatorValue(prop->obj());
__ push(rax); // Copy of receiver, needed for later store.
EmitNamedPropertyLoad(prop);
} else {
if (prop->is_arguments_access()) {
VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
__ push(slot_operand);
__ Move(rax, prop->key()->AsLiteral()->handle());
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
}
__ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack.
__ push(rax); // Copy of key, needed for later store.
EmitKeyedPropertyLoad(prop);
}
}
// We need a second deoptimization point after loading the value
// in case evaluating the property load has a side effect.
if (assign_type == VARIABLE) {
PrepareForBailout(expr->expression(), TOS_REG);
} else {
PrepareForBailoutForId(expr->CountId(), TOS_REG);
}
// Call ToNumber only if operand is not a smi.
Label no_conversion;
Condition is_smi = masm_->CheckSmi(rax);
__ j(is_smi, &no_conversion, Label::kNear);
ToNumberStub convert_stub;
__ CallStub(&convert_stub);
__ bind(&no_conversion);
// Save result for postfix expressions.
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
// Save the result on the stack. If we have a named or keyed property
// we store the result in the slot reserved under the receiver (and,
// for keyed properties, the key) currently on the stack.
switch (assign_type) {
case VARIABLE:
__ push(rax);
break;
case NAMED_PROPERTY:
__ movq(Operand(rsp, kPointerSize), rax);
break;
case KEYED_PROPERTY:
__ movq(Operand(rsp, 2 * kPointerSize), rax);
break;
}
}
}
// Inline smi case if we are in a loop.
Label done, stub_call;
JumpPatchSite patch_site(masm_);
if (ShouldInlineSmiCase(expr->op())) {
if (expr->op() == Token::INC) {
__ SmiAddConstant(rax, rax, Smi::FromInt(1));
} else {
__ SmiSubConstant(rax, rax, Smi::FromInt(1));
}
__ j(overflow, &stub_call, Label::kNear);
// We could eliminate this smi check if we split the code at
// the first smi check before calling ToNumber.
patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);
__ bind(&stub_call);
// Call stub. Undo operation first.
if (expr->op() == Token::INC) {
__ SmiSubConstant(rax, rax, Smi::FromInt(1));
} else {
__ SmiAddConstant(rax, rax, Smi::FromInt(1));
}
}
// Record position before stub call.
SetSourcePosition(expr->position());
// Call stub for +1/-1.
BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
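// The stub expects its left operand in rdx and its right operand in
// rax; for INC the operands are swapped, which is fine because
// addition commutes.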
if (expr->op() == Token::INC) {
__ Move(rdx, Smi::FromInt(1));
} else {
__ movq(rdx, rax);
__ Move(rax, Smi::FromInt(1));
}
EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
__ bind(&done);
// Store the value returned in rax.
switch (assign_type) {
case VARIABLE:
if (expr->is_postfix()) {
// Perform the assignment as if via '='.
{ EffectContext context(this);
EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
Token::ASSIGN);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context.Plug(rax);
}
// For all contexts except effect contexts the result is now on
// top of the stack.
if (!context()->IsEffect()) {
context()->PlugTOS();
}
} else {
// Perform the assignment as if via '='.
EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
Token::ASSIGN);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(rax);
}
break;
case NAMED_PROPERTY: {
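// The store IC expects the value in rax, the property name in rcx,
// and the receiver in rdx.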
__ Move(rcx, prop->key()->AsLiteral()->handle());
__ pop(rdx);
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
context()->PlugTOS();
}
} else {
context()->Plug(rax);
}
break;
}
case KEYED_PROPERTY: {
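// The keyed store IC expects the value in rax, the key in rcx, and
// the receiver in rdx.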
__ pop(rcx);
__ pop(rdx);
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
context()->PlugTOS();
}
} else {
context()->Plug(rax);
}
break;
}
}
}
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
VariableProxy* proxy = expr->AsVariableProxy();
ASSERT(!context()->IsEffect());
ASSERT(!context()->IsTest());
if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
Comment cmnt(masm_, "Global variable");
__ Move(rcx, proxy->name());
__ movq(rax, GlobalObjectOperand());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
PrepareForBailout(expr, TOS_REG);
context()->Plug(rax);
} else if (proxy != NULL &&
proxy->var()->AsSlot() != NULL &&
proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
Label done, slow;
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
Slot* slot = proxy->var()->AsSlot();
EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
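// Slow case: look the variable up in the context chain at runtime.
// The NoReferenceError variant returns undefined for undeclared
// variables, matching typeof semantics.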
__ push(rsi);
__ Push(proxy->name());
__ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
PrepareForBailout(expr, TOS_REG);
__ bind(&done);
context()->Plug(rax);
} else {
// This expression cannot throw a reference error at the top level.
context()->HandleExpression(expr);
}
}
bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
Expression* left,
Expression* right,
Label* if_true,
Label* if_false,
Label* fall_through) {
if (op != Token::EQ && op != Token::EQ_STRICT) return false;
// Check for the pattern: typeof <expression> == <string literal>.
Literal* right_literal = right->AsLiteral();
if (right_literal == NULL) return false;
Handle<Object> right_literal_value = right_literal->handle();
if (!right_literal_value->IsString()) return false;
UnaryOperation* left_unary = left->AsUnaryOperation();
if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
Handle<String> check = Handle<String>::cast(right_literal_value);
{ AccumulatorValueContext context(this);
VisitForTypeofValue(left_unary->expression());
}
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
if (check->Equals(isolate()->heap()->number_symbol())) {
__ JumpIfSmi(rax, if_true);
__ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
__ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
Split(equal, if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->string_symbol())) {
__ JumpIfSmi(rax, if_false);
// Check for undetectable objects => false.
__ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
__ j(above_equal, if_false);
__ testb(FieldOperand(rdx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
Split(zero, if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->boolean_symbol())) {
__ CompareRoot(rax, Heap::kTrueValueRootIndex);
__ j(equal, if_true);
__ CompareRoot(rax, Heap::kFalseValueRootIndex);
Split(equal, if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->undefined_symbol())) {
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
__ j(equal, if_true);
__ JumpIfSmi(rax, if_false);
// Check for undetectable objects => true.
__ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
__ testb(FieldOperand(rdx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
Split(not_zero, if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->function_symbol())) {
__ JumpIfSmi(rax, if_false);
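// Instance types from FIRST_FUNCTION_CLASS_TYPE upward report
// typeof "function".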
__ CmpObjectType(rax, FIRST_FUNCTION_CLASS_TYPE, rdx);
Split(above_equal, if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->object_symbol())) {
__ JumpIfSmi(rax, if_false);
__ CompareRoot(rax, Heap::kNullValueRootIndex);
__ j(equal, if_true);
__ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdx);
__ j(below, if_false);
__ CmpInstanceType(rdx, FIRST_FUNCTION_CLASS_TYPE);
__ j(above_equal, if_false);
// Check for undetectable objects => false.
__ testb(FieldOperand(rdx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
Split(zero, if_true, if_false, fall_through);
} else {
if (if_false != fall_through) __ jmp(if_false);
}
return true;
}
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
Comment cmnt(masm_, "[ CompareOperation");
SetSourcePosition(expr->position());
// Always perform the comparison for its control flow. Pack the result
// into the expression's context after the comparison is performed.
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
// First we try a fast inlined version of the compare when one of
// the operands is a literal.
Token::Value op = expr->op();
Expression* left = expr->left();
Expression* right = expr->right();
if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
context()->Plug(if_true, if_false);
return;
}
VisitForStackValue(expr->left());
switch (op) {
case Token::IN:
VisitForStackValue(expr->right());
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
__ CompareRoot(rax, Heap::kTrueValueRootIndex);
Split(equal, if_true, if_false, fall_through);
break;
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
InstanceofStub stub(InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ testq(rax, rax);
// The stub returns 0 for true.
Split(zero, if_true, if_false, fall_through);
break;
}
default: {
VisitForAccumulatorValue(expr->right());
Condition cc = no_condition;
bool strict = false;
switch (op) {
case Token::EQ_STRICT:
strict = true;
// Fall through.
case Token::EQ:
cc = equal;
__ pop(rdx);
break;
case Token::LT:
cc = less;
__ pop(rdx);
break;
case Token::GT:
// Reverse left and right sides to obtain ECMA-262 conversion order.
cc = less;
__ movq(rdx, result_register());
__ pop(rax);
break;
case Token::LTE:
// Reverse left and right sides to obtain ECMA-262 conversion order.
cc = greater_equal;
__ movq(rdx, result_register());
__ pop(rax);
break;
case Token::GTE:
cc = greater_equal;
__ pop(rdx);
break;
case Token::IN:
case Token::INSTANCEOF:
default:
UNREACHABLE();
}
bool inline_smi_code = ShouldInlineSmiCase(op);
JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
Label slow_case;
__ movq(rcx, rdx);
__ or_(rcx, rax);
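// The tag bits or together, so rcx is a smi only if both rdx and
// rax are smis.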
patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
__ cmpq(rdx, rax);
Split(cc, if_true, if_false, NULL);
__ bind(&slow_case);
}
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
EmitCallIC(ic, &patch_site, expr->id());
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ testq(rax, rax);
Split(cc, if_true, if_false, fall_through);
}
}
// Convert the result of the comparison into one expected for this
// expression's context.
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
Comment cmnt(masm_, "[ CompareToNull");
Label materialize_true, materialize_false;
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
VisitForAccumulatorValue(expr->expression());
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ CompareRoot(rax, Heap::kNullValueRootIndex);
if (expr->is_strict()) {
Split(equal, if_true, if_false, fall_through);
} else {
__ j(equal, if_true);
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
__ j(equal, if_true);
Condition is_smi = masm_->CheckSmi(rax);
__ j(is_smi, if_false);
// It can be an undetectable object.
__ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
__ testb(FieldOperand(rdx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
Split(not_zero, if_true, if_false, fall_through);
}
context()->Plug(if_true, if_false);
}
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
__ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
context()->Plug(rax);
}
Register FullCodeGenerator::result_register() {
return rax;
}
Register FullCodeGenerator::context_register() {
return rsi;
}
void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
RelocInfo::Mode mode,
unsigned ast_id) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
mode == RelocInfo::CODE_TARGET_CONTEXT);
Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
__ IncrementCounter(counters->named_load_full(), 1);
break;
case Code::KEYED_LOAD_IC:
__ IncrementCounter(counters->keyed_load_full(), 1);
break;
case Code::STORE_IC:
__ IncrementCounter(counters->named_store_full(), 1);
break;
case Code::KEYED_STORE_IC:
__ IncrementCounter(counters->keyed_store_full(), 1);
break;
default:
break;
}
__ call(ic, mode, ast_id);
}
void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
JumpPatchSite* patch_site,
unsigned ast_id) {
Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
__ IncrementCounter(counters->named_load_full(), 1);
break;
case Code::KEYED_LOAD_IC:
__ IncrementCounter(counters->keyed_load_full(), 1);
break;
case Code::STORE_IC:
__ IncrementCounter(counters->named_store_full(), 1);
break;
case Code::KEYED_STORE_IC:
__ IncrementCounter(counters->keyed_store_full(), 1);
break;
default:
break;
}
__ call(ic, RelocInfo::CODE_TARGET, ast_id);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
} else {
__ nop(); // Signals no inlined code.
}
}
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
ASSERT(IsAligned(frame_offset, kPointerSize));
__ movq(Operand(rbp, frame_offset), value);
}
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
__ movq(dst, ContextOperand(rsi, context_index));
}
// ----------------------------------------------------------------------------
// Non-local control flow support.
void FullCodeGenerator::EnterFinallyBlock() {
ASSERT(!result_register().is(rdx));
ASSERT(!result_register().is(rcx));
// Cook the return address on top of the stack (smi-encoded Code* delta).
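// Storing an offset instead of a raw return address keeps the value
// valid even if the GC moves the code object while the finally block
// executes.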
__ movq(rdx, Operand(rsp, 0));
__ Move(rcx, masm_->CodeObject());
__ subq(rdx, rcx);
__ Integer32ToSmi(rdx, rdx);
__ movq(Operand(rsp, 0), rdx);
// Store result register while executing finally block.
__ push(result_register());
}
void FullCodeGenerator::ExitFinallyBlock() {
ASSERT(!result_register().is(rdx));
ASSERT(!result_register().is(rcx));
// Restore result register from stack.
__ pop(result_register());
// Uncook return address.
__ movq(rdx, Operand(rsp, 0));
__ SmiToInteger32(rdx, rdx);
__ Move(rcx, masm_->CodeObject());
__ addq(rdx, rcx);
__ movq(Operand(rsp, 0), rdx);
// And return.
__ ret(0);
}
#undef __
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_X64