Update V8 to r5901 as required by WebKit r73109
Change-Id: Ic48c5b085ce90e0151e2e7e58c4c5afe87fce9d1
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index c179769..14e3527 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -80,8 +80,9 @@
__ pop(rdx);
__ push(rsi);
__ push(rdx);
+ __ Push(Factory::false_value());
__ push(rcx); // Restore return address.
- __ TailCallRuntime(Runtime::kNewClosure, 2, 1);
+ __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}
@@ -2483,20 +2484,6 @@
}
-void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
- __ PrepareCallApiFunction(kStackSpace);
-#ifdef _WIN64
- // All the parameters should be set up by a caller.
-#else
- // Set 1st parameter register with property name.
- __ movq(rsi, rdx);
- // Second parameter register rdi should be set with pointer to AccessorInfo
- // by a caller.
-#endif
- __ CallApiFunctionAndReturn(fun());
-}
-
-
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
@@ -2549,18 +2536,18 @@
#ifdef _WIN64
// Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
// Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
- __ movq(Operand(rsp, 4 * kPointerSize), r14); // argc.
- __ movq(Operand(rsp, 5 * kPointerSize), r12); // argv.
+ __ movq(StackSpaceOperand(0), r14); // argc.
+ __ movq(StackSpaceOperand(1), r12); // argv.
if (result_size_ < 2) {
// Pass a pointer to the Arguments object as the first argument.
// Return result in single register (rax).
- __ lea(rcx, Operand(rsp, 4 * kPointerSize));
+ __ lea(rcx, StackSpaceOperand(0));
} else {
ASSERT_EQ(2, result_size_);
// Pass a pointer to the result location as the first argument.
- __ lea(rcx, Operand(rsp, 6 * kPointerSize));
+ __ lea(rcx, StackSpaceOperand(2));
// Pass a pointer to the Arguments object as the second argument.
- __ lea(rdx, Operand(rsp, 4 * kPointerSize));
+ __ lea(rdx, StackSpaceOperand(0));
}
#else // _WIN64
@@ -2596,7 +2583,7 @@
__ j(zero, &failure_returned);
// Exit the JavaScript to C++ exit frame.
- __ LeaveExitFrame(result_size_);
+ __ LeaveExitFrame();
__ ret(0);
// Handling of failure.
@@ -2700,7 +2687,12 @@
// builtin once.
// Enter the exit frame that transitions from JavaScript to C++.
- __ EnterExitFrame(result_size_);
+#ifdef _WIN64
+ int arg_stack_space = (result_size_ < 2 ? 2 : 4);
+#else
+ int arg_stack_space = 0;
+#endif
+ __ EnterExitFrame(arg_stack_space);
// rax: Holds the context at this point, but should not be used.
// On entry to code generated by GenerateCore, it must hold
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index e0e4095..5abf3c8 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -568,10 +568,10 @@
void CodeGenerator::LoadGlobal() {
if (in_spilled_code()) {
- frame_->EmitPush(GlobalObject());
+ frame_->EmitPush(GlobalObjectOperand());
} else {
Result temp = allocator_->Allocate();
- __ movq(temp.reg(), GlobalObject());
+ __ movq(temp.reg(), GlobalObjectOperand());
frame_->Push(&temp);
}
}
@@ -580,7 +580,7 @@
void CodeGenerator::LoadGlobalReceiver() {
Result temp = allocator_->Allocate();
Register reg = temp.reg();
- __ movq(reg, GlobalObject());
+ __ movq(reg, GlobalObjectOperand());
__ movq(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
frame_->Push(&temp);
}
@@ -4244,7 +4244,8 @@
void CodeGenerator::InstantiateFunction(
- Handle<SharedFunctionInfo> function_info) {
+ Handle<SharedFunctionInfo> function_info,
+ bool pretenure) {
// The inevitable call will sync frame elements to memory anyway, so
// we do it eagerly to allow us to push the arguments directly into
// place.
@@ -4252,7 +4253,9 @@
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning.
- if (scope()->is_function_scope() && function_info->num_literals() == 0) {
+ if (scope()->is_function_scope() &&
+ function_info->num_literals() == 0 &&
+ !pretenure) {
FastNewClosureStub stub;
frame_->Push(function_info);
Result answer = frame_->CallStub(&stub, 1);
@@ -4262,7 +4265,10 @@
// shared function info.
frame_->EmitPush(rsi);
frame_->EmitPush(function_info);
- Result result = frame_->CallRuntime(Runtime::kNewClosure, 2);
+ frame_->EmitPush(pretenure
+ ? Factory::true_value()
+ : Factory::false_value());
+ Result result = frame_->CallRuntime(Runtime::kNewClosure, 3);
frame_->Push(&result);
}
}
@@ -4279,14 +4285,14 @@
SetStackOverflow();
return;
}
- InstantiateFunction(function_info);
+ InstantiateFunction(function_info, node->pretenure());
}
void CodeGenerator::VisitSharedFunctionInfoLiteral(
SharedFunctionInfoLiteral* node) {
Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
- InstantiateFunction(node->shared_function_info());
+ InstantiateFunction(node->shared_function_info(), false);
}
@@ -5592,6 +5598,18 @@
// Push the receiver onto the frame.
Load(property->obj());
+ // Load the name of the function.
+ Load(property->key());
+
+ // Swap the name of the function and the receiver on the stack to follow
+ // the calling convention for call ICs.
+ Result key = frame_->Pop();
+ Result receiver = frame_->Pop();
+ frame_->Push(&key);
+ frame_->Push(&receiver);
+ key.Unuse();
+ receiver.Unuse();
+
// Load the arguments.
int arg_count = args->length();
for (int i = 0; i < arg_count; i++) {
@@ -5599,14 +5617,13 @@
frame_->SpillTop();
}
- // Load the name of the function.
- Load(property->key());
-
- // Call the IC initialization code.
+ // Place the key on top of stack and call the IC initialization code.
+ frame_->PushElementAt(arg_count + 1);
CodeForSourcePosition(node->position());
Result result = frame_->CallKeyedCallIC(RelocInfo::CODE_TARGET,
arg_count,
loop_nesting());
+ frame_->Drop(); // Drop the key still on the stack.
frame_->RestoreContextRegister();
frame_->Push(&result);
}
@@ -6062,7 +6079,7 @@
__ movq(scratch2_,
FieldOperand(scratch2_, GlobalObject::kGlobalContextOffset));
__ cmpq(scratch1_,
- CodeGenerator::ContextOperand(
+ ContextOperand(
scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
__ j(not_equal, &false_result);
// Set the bit in the map to indicate that it has been checked safe for
@@ -7206,6 +7223,11 @@
}
+void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
+ frame_->Push(Factory::undefined_value());
+}
+
+
void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
if (CheckForInlineRuntimeCall(node)) {
return;
@@ -7219,7 +7241,7 @@
// Push the builtins object found in the current global object.
Result temp = allocator()->Allocate();
ASSERT(temp.is_valid());
- __ movq(temp.reg(), GlobalObject());
+ __ movq(temp.reg(), GlobalObjectOperand());
__ movq(temp.reg(),
FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
frame_->Push(&temp);
diff --git a/src/x64/codegen-x64.h b/src/x64/codegen-x64.h
index 1853c83..1a5e7df 100644
--- a/src/x64/codegen-x64.h
+++ b/src/x64/codegen-x64.h
@@ -341,10 +341,6 @@
bool in_spilled_code() const { return in_spilled_code_; }
void set_in_spilled_code(bool flag) { in_spilled_code_ = flag; }
- static Operand ContextOperand(Register context, int index) {
- return Operand(context, Context::SlotOffset(index));
- }
-
private:
// Type of a member function that generates inline code for a native function.
typedef void (CodeGenerator::*InlineFunctionGenerator)
@@ -417,10 +413,6 @@
JumpTarget* slow);
// Expressions
- static Operand GlobalObject() {
- return ContextOperand(rsi, Context::GLOBAL_INDEX);
- }
-
void LoadCondition(Expression* x,
ControlDestination* destination,
bool force_control);
@@ -588,16 +580,13 @@
void ProcessDeclarations(ZoneList<Declaration*>* declarations);
- static Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);
-
- static Handle<Code> ComputeKeyedCallInitialize(int argc, InLoopFlag in_loop);
-
// Declare global variables and functions in the given array of
// name/value pairs.
void DeclareGlobals(Handle<FixedArray> pairs);
// Instantiate the function based on the shared function info.
- void InstantiateFunction(Handle<SharedFunctionInfo> function_info);
+ void InstantiateFunction(Handle<SharedFunctionInfo> function_info,
+ bool pretenure);
// Support for type checks.
void GenerateIsSmi(ZoneList<Expression*>* args);
@@ -680,6 +669,7 @@
void GenerateHasCachedArrayIndex(ZoneList<Expression*>* args);
void GenerateGetCachedArrayIndex(ZoneList<Expression*>* args);
+ void GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args);
// Simple condition analysis.
enum ConditionAnalysis {
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 00ea684..ee80169 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -36,6 +36,7 @@
#include "full-codegen.h"
#include "parser.h"
#include "scopes.h"
+#include "stub-cache.h"
namespace v8 {
namespace internal {
@@ -836,17 +837,21 @@
}
-void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info) {
+void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
+ bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning.
- if (scope()->is_function_scope() && info->num_literals() == 0) {
+ if (scope()->is_function_scope() &&
+ info->num_literals() == 0 &&
+ !pretenure) {
FastNewClosureStub stub;
__ Push(info);
__ CallStub(&stub);
} else {
__ push(rsi);
__ Push(info);
- __ CallRuntime(Runtime::kNewClosure, 2);
+ __ Push(pretenure ? Factory::true_value() : Factory::false_value());
+ __ CallRuntime(Runtime::kNewClosure, 3);
}
context()->Plug(rax);
}
@@ -912,7 +917,7 @@
// All extension objects were empty and it is safe to use a global
// load IC call.
- __ movq(rax, CodeGenerator::GlobalObject());
+ __ movq(rax, GlobalObjectOperand());
__ Move(rcx, slot->var()->name());
Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
@@ -1016,7 +1021,7 @@
// Use inline caching. Variable name is passed in rcx and the global
// object on the stack.
__ Move(rcx, var->name());
- __ movq(rax, CodeGenerator::GlobalObject());
+ __ movq(rax, GlobalObjectOperand());
Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(rax);
@@ -1555,7 +1560,7 @@
// assignment. Right-hand-side value is passed in rax, variable name in
// rcx, and the global object on the stack.
__ Move(rcx, var->name());
- __ movq(rdx, CodeGenerator::GlobalObject());
+ __ movq(rdx, GlobalObjectOperand());
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1727,8 +1732,7 @@
SetSourcePosition(expr->position(), FORCED_POSITION);
// Call the IC initialization code.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count,
- in_loop);
+ Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
EmitCallIC(ic, mode);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -1739,26 +1743,33 @@
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Expression* key,
RelocInfo::Mode mode) {
- // Code common for calls using the IC.
+ // Load the key.
+ VisitForAccumulatorValue(key);
+
+ // Swap the name of the function and the receiver on the stack to follow
+ // the calling convention for call ICs.
+ __ pop(rcx);
+ __ push(rax);
+ __ push(rcx);
+
+ // Load the arguments.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
{ PreserveStatementPositionScope scope(masm()->positions_recorder());
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
}
- VisitForAccumulatorValue(key);
- __ movq(rcx, rax);
}
// Record source position for debugger.
SetSourcePosition(expr->position(), FORCED_POSITION);
// Call the IC initialization code.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = CodeGenerator::ComputeKeyedCallInitialize(arg_count,
- in_loop);
+ Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+ __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
EmitCallIC(ic, mode);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
- context()->Plug(rax);
+ context()->DropAndPlug(1, rax); // Drop the key still on the stack.
}
@@ -1834,7 +1845,7 @@
} else if (var != NULL && !var->is_this() && var->is_global()) {
// Call to a global variable.
// Push global object as receiver for the call IC lookup.
- __ push(CodeGenerator::GlobalObject());
+ __ push(GlobalObjectOperand());
EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (var != NULL && var->AsSlot() != NULL &&
var->AsSlot()->type() == Slot::LOOKUP) {
@@ -1868,7 +1879,7 @@
// Push function.
__ push(rax);
// Push global receiver.
- __ movq(rbx, CodeGenerator::GlobalObject());
+ __ movq(rbx, GlobalObjectOperand());
__ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
__ bind(&call);
}
@@ -1907,7 +1918,7 @@
// Push result (function).
__ push(rax);
// Push receiver object on stack.
- __ movq(rcx, CodeGenerator::GlobalObject());
+ __ movq(rcx, GlobalObjectOperand());
__ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
EmitCallWithStub(expr);
} else {
@@ -1928,7 +1939,7 @@
VisitForStackValue(fun);
}
// Load global receiver object.
- __ movq(rbx, CodeGenerator::GlobalObject());
+ __ movq(rbx, GlobalObjectOperand());
__ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
// Emit function call.
EmitCallWithStub(expr);
@@ -2788,6 +2799,11 @@
}
+void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
+ context()->Plug(Heap::kUndefinedValueRootIndex);
+}
+
+
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {
@@ -2801,7 +2817,7 @@
if (expr->is_jsruntime()) {
// Prepare for calling JS runtime function.
- __ movq(rax, CodeGenerator::GlobalObject());
+ __ movq(rax, GlobalObjectOperand());
__ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
}
@@ -2815,7 +2831,7 @@
// Call the JS runtime function using a call IC.
__ Move(rcx, expr->name());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count, in_loop);
+ Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2851,7 +2867,7 @@
VisitForStackValue(prop->obj());
VisitForStackValue(prop->key());
} else if (var->is_global()) {
- __ push(CodeGenerator::GlobalObject());
+ __ push(GlobalObjectOperand());
__ Push(var->name());
} else {
// Non-global variable. Call the runtime to look up the context
@@ -3122,7 +3138,7 @@
if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
Comment cmnt(masm_, "Global variable");
__ Move(rcx, proxy->name());
- __ movq(rax, CodeGenerator::GlobalObject());
+ __ movq(rax, GlobalObjectOperand());
Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
// Use a regular load, not a contextual load, to avoid a reference
// error.
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 293d8a5..d919833 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -327,7 +327,7 @@
void MacroAssembler::TailCallStub(CodeStub* stub) {
- ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
@@ -456,6 +456,24 @@
}
+MaybeObject* MacroAssembler::TryTailCallExternalReference(
+ const ExternalReference& ext, int num_arguments, int result_size) {
+ // ----------- S t a t e -------------
+ // -- rsp[0] : return address
+ // -- rsp[8] : argument num_arguments - 1
+ // ...
+ // -- rsp[8 * num_arguments] : argument 0 (receiver)
+ // -----------------------------------
+
+ // TODO(1236192): Most runtime routines don't need the number of
+ // arguments passed in because it is constant. At some point we
+ // should remove this need and make the runtime routine entry code
+ // smarter.
+ Set(rax, num_arguments);
+ return TryJumpToExternalReference(ext, result_size);
+}
+
+
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
int num_arguments,
int result_size) {
@@ -463,6 +481,15 @@
}
+MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
+ int num_arguments,
+ int result_size) {
+ return TryTailCallExternalReference(ExternalReference(fid),
+ num_arguments,
+ result_size);
+}
+
+
static int Offset(ExternalReference ref0, ExternalReference ref1) {
int64_t offset = (ref0.address() - ref1.address());
// Check that fits into int.
@@ -471,12 +498,22 @@
}
-void MacroAssembler::PrepareCallApiFunction(int stack_space) {
- EnterApiExitFrame(stack_space, 0);
+void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
+#ifdef _WIN64
+ // We need to prepare a slot for result handle on stack and put
+ // a pointer to it into 1st arg register.
+ EnterApiExitFrame(arg_stack_space + 1);
+
+ // rcx must be used to pass the pointer to the return value slot.
+ lea(rcx, StackSpaceOperand(arg_stack_space));
+#else
+ EnterApiExitFrame(arg_stack_space);
+#endif
}
-void MacroAssembler::CallApiFunctionAndReturn(ApiFunction* function) {
+MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
+ ApiFunction* function, int stack_space) {
Label empty_result;
Label prologue;
Label promote_scheduled_exception;
@@ -499,7 +536,7 @@
// Allocate HandleScope in callee-save registers.
Register prev_next_address_reg = r14;
Register prev_limit_reg = rbx;
- Register base_reg = kSmiConstantRegister;
+ Register base_reg = r12;
movq(base_reg, next_address);
movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
@@ -528,18 +565,21 @@
cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
j(not_equal, &delete_allocated_handles);
bind(&leave_exit_frame);
- InitializeSmiConstantRegister();
// Check if the function scheduled an exception.
movq(rsi, scheduled_exception_address);
Cmp(Operand(rsi, 0), Factory::the_hole_value());
j(not_equal, &promote_scheduled_exception);
- LeaveExitFrame();
- ret(0);
+ LeaveApiExitFrame();
+ ret(stack_space * kPointerSize);
bind(&promote_scheduled_exception);
- TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+ MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
+ 0, 1);
+ if (result->IsFailure()) {
+ return result;
+ }
bind(&empty_result);
// It was zero; the result is undefined.
@@ -554,6 +594,8 @@
call(rax);
movq(rax, prev_limit_reg);
jmp(&leave_exit_frame);
+
+ return result;
}
@@ -566,6 +608,15 @@
}
+MaybeObject* MacroAssembler::TryJumpToExternalReference(
+ const ExternalReference& ext, int result_size) {
+ // Set the entry point and jump to the C entry runtime stub.
+ movq(rbx, ext);
+ CEntryStub ces(result_size);
+ return TryTailCallStub(&ces);
+}
+
+
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
// Calls are not allowed in some stubs.
ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
@@ -1690,22 +1741,15 @@
store_rax(context_address);
}
-void MacroAssembler::EnterExitFrameEpilogue(int result_size,
- int argc) {
+
+void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space) {
#ifdef _WIN64
- // Reserve space on stack for result and argument structures, if necessary.
- int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
- // Reserve space for the Arguments object. The Windows 64-bit ABI
- // requires us to pass this structure as a pointer to its location on
- // the stack. The structure contains 2 values.
- int argument_stack_space = argc * kPointerSize;
- // We also need backing space for 4 parameters, even though
- // we only pass one or two parameter, and it is in a register.
- int argument_mirror_space = 4 * kPointerSize;
- int total_stack_space =
- argument_mirror_space + argument_stack_space + result_stack_space;
- subq(rsp, Immediate(total_stack_space));
+ const int kShadowSpace = 4;
+ arg_stack_space += kShadowSpace;
#endif
+ if (arg_stack_space > 0) {
+ subq(rsp, Immediate(arg_stack_space * kPointerSize));
+ }
// Get the required frame alignment for the OS.
static const int kFrameAlignment = OS::ActivationFrameAlignment();
@@ -1720,7 +1764,7 @@
}
-void MacroAssembler::EnterExitFrame(int result_size) {
+void MacroAssembler::EnterExitFrame(int arg_stack_space) {
EnterExitFramePrologue(true);
// Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
@@ -1728,25 +1772,17 @@
int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
lea(r12, Operand(rbp, r14, times_pointer_size, offset));
- EnterExitFrameEpilogue(result_size, 2);
+ EnterExitFrameEpilogue(arg_stack_space);
}
-void MacroAssembler::EnterApiExitFrame(int stack_space,
- int argc,
- int result_size) {
+void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
EnterExitFramePrologue(false);
-
- // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
- // so it must be retained across the C-call.
- int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
- lea(r12, Operand(rbp, (stack_space * kPointerSize) + offset));
-
- EnterExitFrameEpilogue(result_size, argc);
+ EnterExitFrameEpilogue(arg_stack_space);
}
-void MacroAssembler::LeaveExitFrame(int result_size) {
+void MacroAssembler::LeaveExitFrame() {
// Registers:
// r12 : argv
@@ -1758,6 +1794,22 @@
// from the caller stack.
lea(rsp, Operand(r12, 1 * kPointerSize));
+ // Push the return address to get ready to return.
+ push(rcx);
+
+ LeaveExitFrameEpilogue();
+}
+
+
+void MacroAssembler::LeaveApiExitFrame() {
+ movq(rsp, rbp);
+ pop(rbp);
+
+ LeaveExitFrameEpilogue();
+}
+
+
+void MacroAssembler::LeaveExitFrameEpilogue() {
// Restore current context from top and clear it in debug mode.
ExternalReference context_address(Top::k_context_address);
movq(kScratchRegister, context_address);
@@ -1766,9 +1818,6 @@
movq(Operand(kScratchRegister, 0), Immediate(0));
#endif
- // Push the return address to get ready to return.
- push(rcx);
-
// Clear the top frame.
ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
movq(kScratchRegister, c_entry_fp_address);
@@ -1840,7 +1889,6 @@
void MacroAssembler::LoadAllocationTopHelper(Register result,
- Register result_end,
Register scratch,
AllocationFlags flags) {
ExternalReference new_space_allocation_top =
@@ -1862,7 +1910,6 @@
// Move address of new object to result. Use scratch register if available,
// and keep address in scratch until call to UpdateAllocationTopHelper.
if (scratch.is_valid()) {
- ASSERT(!scratch.is(result_end));
movq(scratch, new_space_allocation_top);
movq(result, Operand(scratch, 0));
} else if (result.is(rax)) {
@@ -1923,7 +1970,7 @@
ASSERT(!result.is(result_end));
// Load address of new object into result.
- LoadAllocationTopHelper(result, result_end, scratch, flags);
+ LoadAllocationTopHelper(result, scratch, flags);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
@@ -1980,7 +2027,7 @@
ASSERT(!result.is(result_end));
// Load address of new object into result.
- LoadAllocationTopHelper(result, result_end, scratch, flags);
+ LoadAllocationTopHelper(result, scratch, flags);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
@@ -2022,7 +2069,7 @@
ASSERT(!result.is(result_end));
// Load address of new object into result.
- LoadAllocationTopHelper(result, result_end, scratch, flags);
+ LoadAllocationTopHelper(result, scratch, flags);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 2f6e956..0b7e601 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -155,17 +155,23 @@
// debug mode. Expects the number of arguments in register rax and
// sets up the number of arguments in register rdi and the pointer
// to the first argument in register rsi.
- void EnterExitFrame(int result_size = 1);
+ //
+ // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
+ // accessible via StackSpaceOperand.
+ void EnterExitFrame(int arg_stack_space = 0);
- void EnterApiExitFrame(int stack_space,
- int argc,
- int result_size = 1);
+ // Enter specific kind of exit frame. Allocates arg_stack_space * kPointerSize
+ // memory (not GCed) on the stack accessible via StackSpaceOperand.
+ void EnterApiExitFrame(int arg_stack_space);
// Leave the current exit frame. Expects/provides the return value in
// register rax:rdx (untouched) and the pointer to the first
// argument in register rsi.
- void LeaveExitFrame(int result_size = 1);
+ void LeaveExitFrame();
+ // Leave the current exit frame. Expects/provides the return value in
+ // register rax (untouched).
+ void LeaveApiExitFrame();
// ---------------------------------------------------------------------------
// JavaScript invokes
@@ -813,22 +819,38 @@
int num_arguments,
int result_size);
+ MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
+ const ExternalReference& ext, int num_arguments, int result_size);
+
// Convenience function: tail call a runtime routine (jump).
void TailCallRuntime(Runtime::FunctionId fid,
int num_arguments,
int result_size);
+ MUST_USE_RESULT MaybeObject* TryTailCallRuntime(Runtime::FunctionId fid,
+ int num_arguments,
+ int result_size);
+
// Jump to a runtime routine.
void JumpToExternalReference(const ExternalReference& ext, int result_size);
- // Prepares stack to put arguments (aligns and so on).
- // Uses calle-saved esi to restore stack state after call.
- void PrepareCallApiFunction(int stack_space);
+ // Jump to a runtime routine.
+ MaybeObject* TryJumpToExternalReference(const ExternalReference& ext,
+ int result_size);
- // Tail call an API function (jump). Allocates HandleScope, extracts
- // returned value from handle and propogates exceptions.
- // Clobbers ebx, edi and caller-save registers.
- void CallApiFunctionAndReturn(ApiFunction* function);
+ // Prepares stack to put arguments (aligns and so on).
+ // WIN64 calling convention requires to put the pointer to the return value
+ // slot into rcx (rcx must be preserved until TryCallApiFunctionAndReturn).
+ // Saves context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
+ // inside the exit frame (not GCed) accessible via StackSpaceOperand.
+ void PrepareCallApiFunction(int arg_stack_space);
+
+ // Calls an API function. Allocates HandleScope, extracts
+ // returned value from handle and propagates exceptions.
+ // Clobbers r12, r14, rbx and caller-save registers. Restores context.
+ // On return removes stack_space * kPointerSize (GCed).
+ MUST_USE_RESULT MaybeObject* TryCallApiFunctionAndReturn(
+ ApiFunction* function, int stack_space);
// Before calling a C-function from generated code, align arguments on stack.
// After aligning the frame, arguments must be stored in esp[0], esp[4],
@@ -919,16 +941,18 @@
void LeaveFrame(StackFrame::Type type);
void EnterExitFramePrologue(bool save_rax);
- void EnterExitFrameEpilogue(int result_size, int argc);
+
+ // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
+ // accessible via StackSpaceOperand.
+ void EnterExitFrameEpilogue(int arg_stack_space);
+
+ void LeaveExitFrameEpilogue();
// Allocation support helpers.
// Loads the top of new-space into the result register.
- // If flags contains RESULT_CONTAINS_TOP then result_end is valid and
- // already contains the top of new-space, and scratch is invalid.
// Otherwise the address of the new-space top is loaded into scratch (if
// scratch is valid), and the new-space top is loaded into result.
void LoadAllocationTopHelper(Register result,
- Register result_end,
Register scratch,
AllocationFlags flags);
// Update allocation top with value in result_end register.
@@ -982,6 +1006,28 @@
}
+static inline Operand ContextOperand(Register context, int index) {
+ return Operand(context, Context::SlotOffset(index));
+}
+
+
+static inline Operand GlobalObjectOperand() {
+ return ContextOperand(rsi, Context::GLOBAL_INDEX);
+}
+
+
+// Provides access to exit frame stack space (not GCed).
+static inline Operand StackSpaceOperand(int index) {
+#ifdef _WIN64
+ const int kShadowSpace = 4;
+ return Operand(rsp, (index + kShadowSpace) * kPointerSize);
+#else
+ return Operand(rsp, index * kPointerSize);
+#endif
+}
+
+
+
#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 24609bf..7ba482c 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -497,8 +497,10 @@
__ ret(0);
}
+// Number of pointers to be reserved on stack for fast API call.
+static const int kFastApiCallArguments = 3;
-// Reserves space for the extra arguments to FastHandleApiCall in the
+// Reserves space for the extra arguments to API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiCall.
@@ -508,48 +510,48 @@
// -- rsp[8] : last argument in the internal frame of the caller
// -----------------------------------
__ movq(scratch, Operand(rsp, 0));
- __ subq(rsp, Immediate(4 * kPointerSize));
+ __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
__ movq(Operand(rsp, 0), scratch);
__ Move(scratch, Smi::FromInt(0));
- __ movq(Operand(rsp, 1 * kPointerSize), scratch);
- __ movq(Operand(rsp, 2 * kPointerSize), scratch);
- __ movq(Operand(rsp, 3 * kPointerSize), scratch);
- __ movq(Operand(rsp, 4 * kPointerSize), scratch);
+ for (int i = 1; i <= kFastApiCallArguments; i++) {
+ __ movq(Operand(rsp, i * kPointerSize), scratch);
+ }
}
// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
// ----------- S t a t e -------------
- // -- rsp[0] : return address
- // -- rsp[8] : last fast api call extra argument
+ // -- rsp[0] : return address.
+ // -- rsp[8] : last fast api call extra argument.
// -- ...
- // -- rsp[32] : first fast api call extra argument
- // -- rsp[40] : last argument in the internal frame
+ // -- rsp[kFastApiCallArguments * 8] : first fast api call extra argument.
+ // -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
+ // frame.
// -----------------------------------
__ movq(scratch, Operand(rsp, 0));
- __ movq(Operand(rsp, 4 * kPointerSize), scratch);
- __ addq(rsp, Immediate(kPointerSize * 4));
+ __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
+ __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
}
-// Generates call to FastHandleApiCall builtin.
-static void GenerateFastApiCall(MacroAssembler* masm,
+// Generates call to API function.
+static bool GenerateFastApiCall(MacroAssembler* masm,
const CallOptimization& optimization,
- int argc) {
+ int argc,
+ Failure** failure) {
// ----------- S t a t e -------------
// -- rsp[0] : return address
// -- rsp[8] : object passing the type check
// (last fast api call extra argument,
// set by CheckPrototypes)
- // -- rsp[16] : api call data
- // -- rsp[24] : api callback
- // -- rsp[32] : api function
+ // -- rsp[16] : api function
// (first fast api call extra argument)
- // -- rsp[40] : last argument
+ // -- rsp[24] : api call data
+ // -- rsp[32] : last argument
// -- ...
- // -- rsp[(argc + 5) * 8] : first argument
- // -- rsp[(argc + 6) * 8] : receiver
+ // -- rsp[(argc + 3) * 8] : first argument
+ // -- rsp[(argc + 4) * 8] : receiver
// -----------------------------------
// Get the function and setup the context.
@@ -557,38 +559,58 @@
__ Move(rdi, Handle<JSFunction>(function));
__ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
- // Pass the additional arguments FastHandleApiCall expects.
- __ movq(Operand(rsp, 4 * kPointerSize), rdi);
- bool info_loaded = false;
- Object* callback = optimization.api_call_info()->callback();
- if (Heap::InNewSpace(callback)) {
- info_loaded = true;
- __ Move(rcx, Handle<CallHandlerInfo>(optimization.api_call_info()));
- __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kCallbackOffset));
+ // Pass the additional arguments.
+ __ movq(Operand(rsp, 2 * kPointerSize), rdi);
+ Object* call_data = optimization.api_call_info()->data();
+ Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
+ if (Heap::InNewSpace(call_data)) {
+ __ Move(rcx, api_call_info_handle);
+ __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
__ movq(Operand(rsp, 3 * kPointerSize), rbx);
} else {
- __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(callback));
- }
- Object* call_data = optimization.api_call_info()->data();
- if (Heap::InNewSpace(call_data)) {
- if (!info_loaded) {
- __ Move(rcx, Handle<CallHandlerInfo>(optimization.api_call_info()));
- }
- __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
- __ movq(Operand(rsp, 2 * kPointerSize), rbx);
- } else {
- __ Move(Operand(rsp, 2 * kPointerSize), Handle<Object>(call_data));
+ __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(call_data));
}
- // Set the number of arguments.
- __ movq(rax, Immediate(argc + 4));
+ // Prepare arguments.
+ __ lea(rbx, Operand(rsp, 3 * kPointerSize));
- // Jump to the fast api call builtin (tail call).
- Handle<Code> code = Handle<Code>(
- Builtins::builtin(Builtins::FastHandleApiCall));
- ParameterCount expected(0);
- __ InvokeCode(code, expected, expected,
- RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+ Object* callback = optimization.api_call_info()->callback();
+ Address api_function_address = v8::ToCData<Address>(callback);
+ ApiFunction fun(api_function_address);
+
+#ifdef _WIN64
+ // Win64 uses first register--rcx--for returned value.
+ Register arguments_arg = rdx;
+#else
+ Register arguments_arg = rdi;
+#endif
+
+ // Allocate the v8::Arguments structure in the arguments' space since
+ // it's not controlled by GC.
+ const int kApiStackSpace = 4;
+
+ __ PrepareCallApiFunction(kApiStackSpace);
+
+ __ movq(StackSpaceOperand(0), rbx); // v8::Arguments::implicit_args_.
+ __ addq(rbx, Immediate(argc * kPointerSize));
+ __ movq(StackSpaceOperand(1), rbx); // v8::Arguments::values_.
+ __ Set(StackSpaceOperand(2), argc); // v8::Arguments::length_.
+ // v8::Arguments::is_construct_call_.
+ __ Set(StackSpaceOperand(3), 0);
+
+ // v8::InvocationCallback's argument.
+ __ lea(arguments_arg, StackSpaceOperand(0));
+ // Emitting a stub call may try to allocate (if the code is not
+ // already generated). Do not allow the assembler to perform a
+ // garbage collection but instead return the allocation failure
+ // object.
+ MaybeObject* result =
+ masm->TryCallApiFunctionAndReturn(&fun, argc + kFastApiCallArguments + 1);
+ if (result->IsFailure()) {
+ *failure = Failure::cast(result);
+ return false;
+ }
+ return true;
}
@@ -601,7 +623,7 @@
arguments_(arguments),
name_(name) {}
- void Compile(MacroAssembler* masm,
+ bool Compile(MacroAssembler* masm,
JSObject* object,
JSObject* holder,
String* name,
@@ -610,7 +632,8 @@
Register scratch1,
Register scratch2,
Register scratch3,
- Label* miss) {
+ Label* miss,
+ Failure** failure) {
ASSERT(holder->HasNamedInterceptor());
ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
@@ -620,17 +643,18 @@
CallOptimization optimization(lookup);
if (optimization.is_constant_call()) {
- CompileCacheable(masm,
- object,
- receiver,
- scratch1,
- scratch2,
- scratch3,
- holder,
- lookup,
- name,
- optimization,
- miss);
+ return CompileCacheable(masm,
+ object,
+ receiver,
+ scratch1,
+ scratch2,
+ scratch3,
+ holder,
+ lookup,
+ name,
+ optimization,
+ miss,
+ failure);
} else {
CompileRegular(masm,
object,
@@ -641,11 +665,12 @@
name,
holder,
miss);
+ return true;
}
}
private:
- void CompileCacheable(MacroAssembler* masm,
+ bool CompileCacheable(MacroAssembler* masm,
JSObject* object,
Register receiver,
Register scratch1,
@@ -655,7 +680,8 @@
LookupResult* lookup,
String* name,
const CallOptimization& optimization,
- Label* miss_label) {
+ Label* miss_label,
+ Failure** failure) {
ASSERT(optimization.is_constant_call());
ASSERT(!lookup->holder()->IsGlobalObject());
@@ -717,7 +743,13 @@
// Invoke function.
if (can_do_fast_api_call) {
- GenerateFastApiCall(masm, optimization, arguments_.immediate());
+ bool success = GenerateFastApiCall(masm,
+ optimization,
+ arguments_.immediate(),
+ failure);
+ if (!success) {
+ return false;
+ }
} else {
__ InvokeFunction(optimization.constant_function(), arguments_,
JUMP_FUNCTION);
@@ -735,6 +767,8 @@
if (can_do_fast_api_call) {
FreeSpaceForFastApiCall(masm, scratch1);
}
+
+ return true;
}
void CompileRegular(MacroAssembler* masm,
@@ -958,7 +992,9 @@
if (depth != kInvalidProtoDepth) {
__ IncrementCounter(&Counters::call_const_fast_api, 1);
- ReserveSpaceForFastApiCall(masm(), rax);
+ // Allocate space for v8::Arguments implicit values. Must be initialized
+ // before to call any runtime function.
+ __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
}
// Check that the maps haven't changed.
@@ -1036,7 +1072,17 @@
}
if (depth != kInvalidProtoDepth) {
- GenerateFastApiCall(masm(), optimization, argc);
+ Failure* failure;
+ // Move the return address on top of the stack.
+ __ movq(rax, Operand(rsp, 3 * kPointerSize));
+ __ movq(Operand(rsp, 0 * kPointerSize), rax);
+
+ // rsp[2 * kPointerSize] is uninitialized, rsp[3 * kPointerSize] contains
+ // duplicate of return address and will be overwritten.
+ bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
+ if (!success) {
+ return failure;
+ }
} else {
__ InvokeFunction(function, arguments(), JUMP_FUNCTION);
}
@@ -1044,7 +1090,7 @@
// Handle call cache miss.
__ bind(&miss);
if (depth != kInvalidProtoDepth) {
- FreeSpaceForFastApiCall(masm(), rax);
+ __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
}
// Handle call cache miss.
@@ -1723,16 +1769,21 @@
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
CallInterceptorCompiler compiler(this, arguments(), rcx);
- compiler.Compile(masm(),
- object,
- holder,
- name,
- &lookup,
- rdx,
- rbx,
- rdi,
- rax,
- &miss);
+ Failure* failure;
+ bool success = compiler.Compile(masm(),
+ object,
+ holder,
+ name,
+ &lookup,
+ rdx,
+ rbx,
+ rdi,
+ rax,
+ &miss,
+ &failure);
+ if (!success) {
+ return failure;
+ }
// Restore receiver.
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
@@ -1844,7 +1895,7 @@
Label miss;
Failure* failure = Failure::InternalError();
- bool success = GenerateLoadCallback(object, holder, rax, rcx, rbx, rdx, rdi,
+ bool success = GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi,
callback, name, &miss, &failure);
if (!success) {
miss.Unuse();
@@ -2537,8 +2588,8 @@
__ push(receiver);
__ push(holder_reg);
__ Move(holder_reg, Handle<AccessorInfo>(callback));
- __ push(holder_reg);
__ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
+ __ push(holder_reg);
__ push(name_reg);
__ push(scratch2); // restore return address
@@ -2585,16 +2636,15 @@
Handle<AccessorInfo> callback_handle(callback);
- __ EnterInternalFrame();
- // Push the stack address where the list of arguments ends.
- __ movq(scratch2, rsp);
- __ subq(scratch2, Immediate(2 * kPointerSize));
- __ push(scratch2);
+ // Insert additional parameters into the stack frame above return address.
+ ASSERT(!scratch2.is(reg));
+ __ pop(scratch2); // Get return address to place it below.
+
__ push(receiver); // receiver
__ push(reg); // holder
if (Heap::InNewSpace(callback_handle->data())) {
- __ Move(scratch2, callback_handle);
- __ push(FieldOperand(scratch2, AccessorInfo::kDataOffset)); // data
+ __ Move(scratch1, callback_handle);
+ __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); // data
} else {
__ Push(Handle<Object>(callback_handle->data()));
}
@@ -2607,42 +2657,43 @@
Register accessor_info_arg = r8;
Register name_arg = rdx;
#else
- Register accessor_info_arg = rdx; // temporary, copied to rsi by the stub.
+ Register accessor_info_arg = rsi;
Register name_arg = rdi;
#endif
- __ movq(accessor_info_arg, rsp);
- __ addq(accessor_info_arg, Immediate(4 * kPointerSize));
+ ASSERT(!name_arg.is(scratch2));
__ movq(name_arg, rsp);
+ __ push(scratch2); // Restore return address.
// Do call through the api.
- ASSERT_EQ(5, ApiGetterEntryStub::kStackSpace);
Address getter_address = v8::ToCData<Address>(callback->getter());
ApiFunction fun(getter_address);
- ApiGetterEntryStub stub(callback_handle, &fun);
-#ifdef _WIN64
- // We need to prepare a slot for result handle on stack and put
- // a pointer to it into 1st arg register.
- __ push(Immediate(0));
- __ movq(rcx, rsp);
-#endif
+
+ // 3 elements array for v8::Agruments::values_ and handler for name.
+ const int kStackSpace = 4;
+
+ // Allocate v8::AccessorInfo in non-GCed stack space.
+ const int kArgStackSpace = 1;
+
+ __ PrepareCallApiFunction(kArgStackSpace);
+ __ lea(rax, Operand(name_arg, 3 * kPointerSize));
+
+ // v8::AccessorInfo::args_.
+ __ movq(StackSpaceOperand(0), rax);
+
+ // The context register (rsi) has been saved in PrepareCallApiFunction and
+ // could be used to pass arguments.
+ __ lea(accessor_info_arg, StackSpaceOperand(0));
+
// Emitting a stub call may try to allocate (if the code is not
// already generated). Do not allow the assembler to perform a
// garbage collection but instead return the allocation failure
// object.
- MaybeObject* result = masm()->TryCallStub(&stub);
+ MaybeObject* result = masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
if (result->IsFailure()) {
*failure = Failure::cast(result);
return false;
}
-#ifdef _WIN64
- // Discard allocated slot.
- __ addq(rsp, Immediate(kPointerSize));
-#endif
- __ LeaveInternalFrame();
-
- __ ret(0);
-
return true;
}
@@ -2839,8 +2890,7 @@
// Specialized stub for constructing objects from functions which only have only
// simple assignments of the form this.x = ...; in their body.
-MaybeObject* ConstructStubCompiler::CompileConstructStub(
- SharedFunctionInfo* shared) {
+MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
// ----------- S t a t e -------------
// -- rax : argc
// -- rdi : constructor
@@ -2913,6 +2963,7 @@
// r9: first in-object property of the JSObject
// Fill the initialized properties with a constant value or a passed argument
// depending on the this.x = ...; assignment in the function.
+ SharedFunctionInfo* shared = function->shared();
for (int i = 0; i < shared->this_property_assignments_count(); i++) {
if (shared->IsThisPropertyAssignmentArgument(i)) {
// Check if the argument assigned to the property is actually passed.
@@ -2932,8 +2983,9 @@
}
// Fill the unused in-object property fields with undefined.
+ ASSERT(function->has_initial_map());
for (int i = shared->this_property_assignments_count();
- i < shared->CalculateInObjectProperties();
+ i < function->initial_map()->inobject_properties();
i++) {
__ movq(Operand(r9, i * kPointerSize), r8);
}
diff --git a/src/x64/virtual-frame-x64.cc b/src/x64/virtual-frame-x64.cc
index e88a993..3f7b1db 100644
--- a/src/x64/virtual-frame-x64.cc
+++ b/src/x64/virtual-frame-x64.cc
@@ -32,6 +32,7 @@
#include "codegen-inl.h"
#include "register-allocator-inl.h"
#include "scopes.h"
+#include "stub-cache.h"
#include "virtual-frame-inl.h"
namespace v8 {
@@ -1194,7 +1195,7 @@
// and dropped by the call. The IC expects the name in rcx and the rest
// on the stack, and drops them all.
InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = cgen()->ComputeCallInitialize(arg_count, in_loop);
+ Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
Result name = Pop();
// Spill args, receiver, and function. The call will drop args and
// receiver.
@@ -1213,7 +1214,7 @@
// on the stack, and drops them all.
InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic =
- cgen()->ComputeKeyedCallInitialize(arg_count, in_loop);
+ StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
Result name = Pop();
// Spill args, receiver, and function. The call will drop args and
// receiver.