Revert "Revert "Upgrade to 5.0.71.48"" DO NOT MERGE
This reverts commit f2e3994fa5148cc3d9946666f0b0596290192b0e,
and updates the x64 makefile properly so it doesn't break that
build.
FPIIM-449
Change-Id: Ib83e35bfbae6af627451c926a9650ec57c045605
(cherry picked from commit 109988c7ccb6f3fd1a58574fa3dfb88beaef6632)
diff --git a/src/x87/builtins-x87.cc b/src/x87/builtins-x87.cc
index 55ec55f..ce07908 100644
--- a/src/x87/builtins-x87.cc
+++ b/src/x87/builtins-x87.cc
@@ -60,42 +60,45 @@
__ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
-
-static void CallRuntimePassFunction(
- MacroAssembler* masm, Runtime::FunctionId function_id) {
+static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
+ Runtime::FunctionId function_id) {
// ----------- S t a t e -------------
+ // -- eax : argument count (preserved for callee)
// -- edx : new target (preserved for callee)
// -- edi : target function (preserved for callee)
// -----------------------------------
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ // Push the number of arguments to the callee.
+ __ SmiTag(eax);
+ __ push(eax);
+ // Push a copy of the target function and the new target.
+ __ push(edi);
+ __ push(edx);
+ // Function is also the parameter to the runtime call.
+ __ push(edi);
- FrameScope scope(masm, StackFrame::INTERNAL);
- // Push a copy of the target function and the new target.
- __ push(edi);
- __ push(edx);
- // Function is also the parameter to the runtime call.
- __ push(edi);
+ __ CallRuntime(function_id, 1);
+ __ mov(ebx, eax);
- __ CallRuntime(function_id, 1);
- // Restore target function and new target.
- __ pop(edx);
- __ pop(edi);
+ // Restore target function and new target.
+ __ pop(edx);
+ __ pop(edi);
+ __ pop(eax);
+ __ SmiUntag(eax);
+ }
+
+ __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
+ __ jmp(ebx);
}
-
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
- __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
- __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kCodeOffset));
- __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
- __ jmp(eax);
+ __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset));
+ __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
+ __ jmp(ebx);
}
-
-static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
- __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
- __ jmp(eax);
-}
-
-
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However,
@@ -108,17 +111,16 @@
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, Label::kNear);
- CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
- GenerateTailCallToReturnedCode(masm);
+ GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
__ bind(&ok);
GenerateTailCallToSharedCode(masm);
}
-
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
- bool create_implicit_receiver) {
+ bool create_implicit_receiver,
+ bool check_derived_construct) {
// ----------- S t a t e -------------
// -- eax: number of arguments
// -- edi: constructor function
@@ -137,148 +139,20 @@
__ push(eax);
if (create_implicit_receiver) {
- __ push(edi);
- __ push(edx);
+ // Allocate the new receiver object.
+ __ Push(edi);
+ __ Push(edx);
+ FastNewObjectStub stub(masm->isolate());
+ __ CallStub(&stub);
+ __ mov(ebx, eax);
+ __ Pop(edx);
+ __ Pop(edi);
- // Try to allocate the object without transitioning into C code. If any of
- // the preconditions is not met, the code bails out to the runtime call.
- Label rt_call, allocated;
- if (FLAG_inline_new) {
- // Verify that the new target is a JSFunction.
- __ CmpObjectType(edx, JS_FUNCTION_TYPE, ebx);
- __ j(not_equal, &rt_call);
-
- // Load the initial map and verify that it is in fact a map.
- // edx: new target
- __ mov(eax,
- FieldOperand(edx, JSFunction::kPrototypeOrInitialMapOffset));
- // Will both indicate a NULL and a Smi
- __ JumpIfSmi(eax, &rt_call);
- // edi: constructor
- // eax: initial map (if proven valid below)
- __ CmpObjectType(eax, MAP_TYPE, ebx);
- __ j(not_equal, &rt_call);
-
- // Fall back to runtime if the expected base constructor and base
- // constructor differ.
- __ cmp(edi, FieldOperand(eax, Map::kConstructorOrBackPointerOffset));
- __ j(not_equal, &rt_call);
-
- // Check that the constructor is not constructing a JSFunction (see
- // comments in Runtime_NewObject in runtime.cc). In which case the
- // initial map's instance type would be JS_FUNCTION_TYPE.
- // edi: constructor
- // eax: initial map
- __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
- __ j(equal, &rt_call);
-
- // Now allocate the JSObject on the heap.
- // edi: constructor
- // eax: initial map
- __ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
- __ shl(edi, kPointerSizeLog2);
-
- __ Allocate(edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
-
- Factory* factory = masm->isolate()->factory();
-
- // Allocated the JSObject, now initialize the fields.
- // eax: initial map
- // ebx: JSObject (not HeapObject tagged - the actual address).
- // edi: start of next object
- __ mov(Operand(ebx, JSObject::kMapOffset), eax);
- __ mov(ecx, factory->empty_fixed_array());
- __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
- __ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
- __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
-
- // Add the object tag to make the JSObject real, so that we can continue
- // and jump into the continuation code at any time from now on.
- __ or_(ebx, Immediate(kHeapObjectTag));
-
- // Fill all the in-object properties with the appropriate filler.
- // ebx: JSObject (tagged)
- // ecx: First in-object property of JSObject (not tagged)
- __ mov(edx, factory->undefined_value());
-
- if (!is_api_function) {
- Label no_inobject_slack_tracking;
-
- // The code below relies on these assumptions.
- STATIC_ASSERT(Map::kNoSlackTracking == 0);
- STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
- // Check if slack tracking is enabled.
- __ mov(esi, FieldOperand(eax, Map::kBitField3Offset));
- __ shr(esi, Map::ConstructionCounter::kShift);
- __ j(zero, &no_inobject_slack_tracking); // Map::kNoSlackTracking
- __ push(esi); // Save allocation count value.
- // Decrease generous allocation count.
- __ sub(FieldOperand(eax, Map::kBitField3Offset),
- Immediate(1 << Map::ConstructionCounter::kShift));
-
- // Allocate object with a slack.
- __ movzx_b(esi, FieldOperand(eax, Map::kUnusedPropertyFieldsOffset));
- __ neg(esi);
- __ lea(esi, Operand(edi, esi, times_pointer_size, 0));
- // esi: offset of first field after pre-allocated fields
- if (FLAG_debug_code) {
- __ cmp(ecx, esi);
- __ Assert(less_equal,
- kUnexpectedNumberOfPreAllocatedPropertyFields);
- }
- __ InitializeFieldsWithFiller(ecx, esi, edx);
-
- // To allow truncation fill the remaining fields with one pointer
- // filler map.
- __ mov(edx, factory->one_pointer_filler_map());
- __ InitializeFieldsWithFiller(ecx, edi, edx);
-
- __ pop(esi); // Restore allocation count value before decreasing.
- __ cmp(esi, Map::kSlackTrackingCounterEnd);
- __ j(not_equal, &allocated);
-
- // Push the object to the stack, and then the initial map as
- // an argument to the runtime call.
- __ push(ebx);
- __ push(eax); // initial map
- __ CallRuntime(Runtime::kFinalizeInstanceSize);
- __ pop(ebx);
-
- // Continue with JSObject being successfully allocated
- // ebx: JSObject (tagged)
- __ jmp(&allocated);
-
- __ bind(&no_inobject_slack_tracking);
- }
-
- __ InitializeFieldsWithFiller(ecx, edi, edx);
-
- // Continue with JSObject being successfully allocated
- // ebx: JSObject (tagged)
- __ jmp(&allocated);
- }
-
- // Allocate the new receiver object using the runtime call.
- // edx: new target
- __ bind(&rt_call);
- int offset = kPointerSize;
-
- // Must restore esi (context) and edi (constructor) before calling
- // runtime.
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- __ mov(edi, Operand(esp, offset));
- __ push(edi); // constructor function
- __ push(edx); // new target
- __ CallRuntime(Runtime::kNewObject);
- __ mov(ebx, eax); // store result in ebx
-
- // New object allocated.
- // ebx: newly allocated object
- __ bind(&allocated);
-
- // Restore the parameters.
- __ pop(edx); // new.target
- __ pop(edi); // Constructor function.
+ // ----------- S t a t e -------------
+ // -- edi: constructor function
+ // -- ebx: newly allocated object
+ // -- edx: new target
+ // -----------------------------------
// Retrieve smi-tagged arguments count from the stack.
__ mov(eax, Operand(esp, 0));
@@ -359,6 +233,19 @@
// Leave construct frame.
}
+ // ES6 9.2.2. Step 13+
+ // Check that the result is not a Smi, indicating that the constructor result
+ // from a derived class is neither undefined nor an Object.
+ if (check_derived_construct) {
+ Label dont_throw;
+ __ JumpIfNotSmi(eax, &dont_throw);
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
+ }
+ __ bind(&dont_throw);
+ }
+
// Remove caller arguments from the stack and return.
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ pop(ecx);
@@ -372,17 +259,23 @@
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, true);
+ Generate_JSConstructStubHelper(masm, false, true, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, true);
+ Generate_JSConstructStubHelper(masm, true, false, false);
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, false);
+ Generate_JSConstructStubHelper(masm, false, false, false);
+}
+
+
+void Builtins::Generate_JSBuiltinsConstructStubForDerived(
+ MacroAssembler* masm) {
+ Generate_JSConstructStubHelper(masm, false, false, true);
}
@@ -513,10 +406,8 @@
// o ebp: the caller's frame pointer
// o esp: stack pointer (pointing to return address)
//
-// The function builds a JS frame. Please see JavaScriptFrameConstants in
-// frames-ia32.h for its layout.
-// TODO(rmcilroy): We will need to include the current bytecode pointer in the
-// frame.
+// The function builds an interpreter frame. See InterpreterFrameConstants in
+// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Open a frame scope to indicate that there is a frame on the stack. The
// MANUAL indicates that the scope shouldn't actually generate code to set up
@@ -528,14 +419,17 @@
__ push(edi); // Callee's JS function.
__ push(edx); // Callee's new target.
- // Push zero for bytecode array offset.
- __ push(Immediate(0));
-
// Get the bytecode array from the function object and load the pointer to the
// first entry into edi (InterpreterBytecodeRegister).
__ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+
+ Label load_debug_bytecode_array, bytecode_array_loaded;
+ __ cmp(FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset),
+ Immediate(DebugInfo::uninitialized()));
+ __ j(not_equal, &load_debug_bytecode_array);
__ mov(kInterpreterBytecodeArrayRegister,
FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
+ __ bind(&bytecode_array_loaded);
if (FLAG_debug_code) {
// Check function data field is actually a BytecodeArray object.
@@ -545,6 +439,11 @@
__ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
}
+ // Push bytecode array.
+ __ push(kInterpreterBytecodeArrayRegister);
+ // Push zero for bytecode array offset.
+ __ push(Immediate(0));
+
// Allocate the local and temporary register file on the stack.
{
// Load frame size from the BytecodeArray object.
@@ -578,24 +477,9 @@
// TODO(rmcilroy): List of things not currently dealt with here but done in
// fullcodegen's prologue:
- // - Support profiler (specifically profiling_counter).
// - Call ProfileEntryHookStub when isolate has a function_entry_hook.
- // - Allow simulator stop operations if FLAG_stop_at is set.
// - Code aging of the BytecodeArray object.
- // Perform stack guard check.
- {
- Label ok;
- ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit(masm->isolate());
- __ cmp(esp, Operand::StaticVariable(stack_limit));
- __ j(above_equal, &ok);
- __ push(kInterpreterBytecodeArrayRegister);
- __ CallRuntime(Runtime::kStackGuard);
- __ pop(kInterpreterBytecodeArrayRegister);
- __ bind(&ok);
- }
-
// Load accumulator, register file, bytecode offset, dispatch table into
// registers.
__ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
@@ -604,10 +488,8 @@
Immediate(InterpreterFrameConstants::kRegisterFilePointerFromFp));
__ mov(kInterpreterBytecodeOffsetRegister,
Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
- // Since the dispatch table root might be set after builtins are generated,
- // load directly from the roots table.
- __ LoadRoot(ebx, Heap::kInterpreterTableRootIndex);
- __ add(ebx, Immediate(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ mov(ebx, Immediate(ExternalReference::interpreter_dispatch_table_address(
+ masm->isolate())));
// Push dispatch table as a stack located parameter to the bytecode handler.
DCHECK_EQ(-1, kInterpreterDispatchTableSpillSlot);
@@ -625,8 +507,17 @@
// and header removal.
__ add(ebx, Immediate(Code::kHeaderSize - kHeapObjectTag));
__ call(ebx);
- __ nop(); // Ensure that return address still counts as interpreter entry
- // trampoline.
+
+ // Even though the first bytecode handler was called, we will never return.
+ __ Abort(kUnexpectedReturnFromBytecodeHandler);
+
+ // Load debug copy of the bytecode array.
+ __ bind(&load_debug_bytecode_array);
+ Register debug_info = kInterpreterBytecodeArrayRegister;
+ __ mov(debug_info, FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset));
+ __ mov(kInterpreterBytecodeArrayRegister,
+ FieldOperand(debug_info, DebugInfo::kAbstractCodeIndex));
+ __ jmp(&bytecode_array_loaded);
}
@@ -671,7 +562,8 @@
// static
-void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndCallImpl(
+ MacroAssembler* masm, TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- ebx : the address of the first argument to be pushed. Subsequent
@@ -694,7 +586,9 @@
// Call the target.
__ Push(edx); // Re-push return address.
- __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+ __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
+ tail_call_mode),
+ RelocInfo::CODE_TARGET);
}
@@ -739,33 +633,16 @@
}
-static void Generate_InterpreterNotifyDeoptimizedHelper(
- MacroAssembler* masm, Deoptimizer::BailoutType type) {
- // Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(kInterpreterAccumulatorRegister); // Save accumulator register.
-
- // Pass the deoptimization type to the runtime system.
- __ Push(Smi::FromInt(static_cast<int>(type)));
-
- __ CallRuntime(Runtime::kNotifyDeoptimized);
-
- __ Pop(kInterpreterAccumulatorRegister); // Restore accumulator register.
- // Tear down internal frame.
- }
-
+static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
// Initialize register file register.
__ mov(kInterpreterRegisterFileRegister, ebp);
__ add(kInterpreterRegisterFileRegister,
Immediate(InterpreterFrameConstants::kRegisterFilePointerFromFp));
// Get the bytecode array pointer from the frame.
- __ mov(ebx, Operand(kInterpreterRegisterFileRegister,
- InterpreterFrameConstants::kFunctionFromRegisterPointer));
- __ mov(ebx, FieldOperand(ebx, JSFunction::kSharedFunctionInfoOffset));
__ mov(kInterpreterBytecodeArrayRegister,
- FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));
+ Operand(kInterpreterRegisterFileRegister,
+ InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));
if (FLAG_debug_code) {
// Check function data field is actually a BytecodeArray object.
@@ -782,12 +659,13 @@
InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
__ SmiUntag(kInterpreterBytecodeOffsetRegister);
- // Push dispatch table as a stack located parameter to the bytecode handler -
- // overwrite the state slot (we don't use these for interpreter deopts).
- __ LoadRoot(ebx, Heap::kInterpreterTableRootIndex);
- __ add(ebx, Immediate(FixedArray::kHeaderSize - kHeapObjectTag));
+ // Push dispatch table as a stack located parameter to the bytecode handler.
+ __ mov(ebx, Immediate(ExternalReference::interpreter_dispatch_table_address(
+ masm->isolate())));
DCHECK_EQ(-1, kInterpreterDispatchTableSpillSlot);
- __ mov(Operand(esp, kPointerSize), ebx);
+ __ Pop(esi);
+ __ Push(ebx);
+ __ Push(esi);
// Dispatch to the target bytecode.
__ movzx_b(esi, Operand(kInterpreterBytecodeArrayRegister,
@@ -795,8 +673,6 @@
__ mov(ebx, Operand(ebx, esi, times_pointer_size, 0));
// Get the context from the frame.
- // TODO(rmcilroy): Update interpreter frame to expect current context at the
- // context slot instead of the function context.
__ mov(kContextRegister,
Operand(kInterpreterRegisterFileRegister,
InterpreterFrameConstants::kContextFromRegisterPointer));
@@ -808,6 +684,32 @@
}
+static void Generate_InterpreterNotifyDeoptimizedHelper(
+ MacroAssembler* masm, Deoptimizer::BailoutType type) {
+ // Enter an internal frame.
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Pass the deoptimization type to the runtime system.
+ __ Push(Smi::FromInt(static_cast<int>(type)));
+ __ CallRuntime(Runtime::kNotifyDeoptimized);
+ // Tear down internal frame.
+ }
+
+  // Drop state (we don't use these for interpreter deopts) and pop the
+ // accumulator value into the accumulator register and push PC at top
+ // of stack (to simulate initial call to bytecode handler in interpreter entry
+ // trampoline).
+ __ Pop(ebx);
+ __ Drop(1);
+ __ Pop(kInterpreterAccumulatorRegister);
+ __ Push(ebx);
+
+ // Enter the bytecode dispatch.
+ Generate_EnterBytecodeDispatch(masm);
+}
+
+
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
@@ -822,22 +724,30 @@
Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
+void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
+ // Set the address of the interpreter entry trampoline as a return address.
+ // This simulates the initial call to bytecode handlers in interpreter entry
+ // trampoline. The return will never actually be taken, but our stack walker
+ // uses this address to determine whether a frame is interpreted.
+ __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline());
+
+ Generate_EnterBytecodeDispatch(masm);
+}
+
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
- CallRuntimePassFunction(masm, Runtime::kCompileLazy);
- GenerateTailCallToReturnedCode(masm);
+ GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
- CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent);
- GenerateTailCallToReturnedCode(masm);
+ GenerateTailCallToReturnedCode(masm,
+ Runtime::kCompileOptimized_NotConcurrent);
}
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
- CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent);
- GenerateTailCallToReturnedCode(masm);
+ GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
@@ -1375,6 +1285,140 @@
// static
+void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
+ // ----------- S t a t e -------------
+ // -- eax : number of arguments
+ // -- esp[0] : return address
+ // -- esp[(argc - n) * 8] : arg[n] (zero-based)
+ // -- esp[(argc + 1) * 8] : receiver
+ // -----------------------------------
+ Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
+ Heap::RootListIndex const root_index =
+ (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
+ : Heap::kMinusInfinityValueRootIndex;
+ const int reg_sel = (kind == MathMaxMinKind::kMin) ? 1 : 0;
+
+ // Load the accumulator with the default return value (either -Infinity or
+ // +Infinity), with the tagged value in edx and the double value in stx_0.
+ __ LoadRoot(edx, root_index);
+ __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
+ __ Move(ecx, eax);
+
+ Label done_loop, loop;
+ __ bind(&loop);
+ {
+ // Check if all parameters done.
+ __ test(ecx, ecx);
+ __ j(zero, &done_loop);
+
+ // Load the next parameter tagged value into ebx.
+ __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
+
+ // Load the double value of the parameter into stx_1, maybe converting the
+ // parameter to a number first using the ToNumberStub if necessary.
+ Label convert, convert_smi, convert_number, done_convert;
+ __ bind(&convert);
+ __ JumpIfSmi(ebx, &convert_smi);
+ __ JumpIfRoot(FieldOperand(ebx, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex, &convert_number);
+ {
+ // Parameter is not a Number, use the ToNumberStub to convert it.
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ SmiTag(eax);
+ __ SmiTag(ecx);
+ __ Push(eax);
+ __ Push(ecx);
+ __ Push(edx);
+ __ mov(eax, ebx);
+ ToNumberStub stub(masm->isolate());
+ __ CallStub(&stub);
+ __ mov(ebx, eax);
+ __ Pop(edx);
+ __ Pop(ecx);
+ __ Pop(eax);
+ {
+ // Restore the double accumulator value (stX_0).
+ Label restore_smi, done_restore;
+ __ JumpIfSmi(edx, &restore_smi, Label::kNear);
+ __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
+ __ jmp(&done_restore, Label::kNear);
+ __ bind(&restore_smi);
+ __ SmiUntag(edx);
+ __ push(edx);
+ __ fild_s(Operand(esp, 0));
+ __ pop(edx);
+ __ SmiTag(edx);
+ __ bind(&done_restore);
+ }
+ __ SmiUntag(ecx);
+ __ SmiUntag(eax);
+ }
+ __ jmp(&convert);
+ __ bind(&convert_number);
+ // Load another value into stx_1
+ __ fld_d(FieldOperand(ebx, HeapNumber::kValueOffset));
+ __ fxch();
+ __ jmp(&done_convert, Label::kNear);
+ __ bind(&convert_smi);
+ __ SmiUntag(ebx);
+ __ push(ebx);
+ __ fild_s(Operand(esp, 0));
+ __ pop(ebx);
+ __ fxch();
+ __ SmiTag(ebx);
+ __ bind(&done_convert);
+
+ // Perform the actual comparison with the accumulator value on the left hand
+ // side (stx_0) and the next parameter value on the right hand side (stx_1).
+ Label compare_equal, compare_nan, compare_swap, done_compare;
+
+ // Duplicates the 2 float data for FCmp
+ __ fld(1);
+ __ fld(1);
+ __ FCmp();
+ __ j(parity_even, &compare_nan, Label::kNear);
+ __ j(cc, &done_compare, Label::kNear);
+ __ j(equal, &compare_equal, Label::kNear);
+
+ // Result is on the right hand side(stx_0).
+ __ bind(&compare_swap);
+ __ fxch();
+ __ mov(edx, ebx);
+ __ jmp(&done_compare, Label::kNear);
+
+ // At least one side is NaN, which means that the result will be NaN too.
+ __ bind(&compare_nan);
+ // Set the result on the right hand side (stx_0) to nan
+ __ fstp(0);
+ __ LoadRoot(edx, Heap::kNanValueRootIndex);
+ __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
+ __ jmp(&done_compare, Label::kNear);
+
+ // Left and right hand side are equal, check for -0 vs. +0.
+ __ bind(&compare_equal);
+ // Check the sign of the value in reg_sel
+ __ fld(reg_sel);
+ __ FXamSign();
+ __ j(not_zero, &compare_swap);
+
+ __ bind(&done_compare);
+ // The right result is on the right hand side(stx_0)
+ // and can remove the useless stx_1 now.
+ __ fxch();
+ __ fstp(0);
+ __ dec(ecx);
+ __ jmp(&loop);
+ }
+
+ __ bind(&done_loop);
+ __ PopReturnAddressTo(ecx);
+ __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
+ __ PushReturnAddressFrom(ecx);
+ __ mov(eax, edx);
+ __ Ret();
+}
+
+// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : number of arguments
@@ -1472,9 +1516,8 @@
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(ebx); // the first argument
- __ Push(edi); // constructor function
- __ Push(edx); // new target
- __ CallRuntime(Runtime::kNewObject);
+ FastNewObjectStub stub(masm->isolate());
+ __ CallStub(&stub);
__ Pop(FieldOperand(eax, JSValue::kValueOffset));
}
__ Ret();
@@ -1606,9 +1649,8 @@
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(ebx); // the first argument
- __ Push(edi); // constructor function
- __ Push(edx); // new target
- __ CallRuntime(Runtime::kNewObject);
+ FastNewObjectStub stub(masm->isolate());
+ __ CallStub(&stub);
__ Pop(FieldOperand(eax, JSValue::kValueOffset));
}
__ Ret();
@@ -1724,9 +1766,7 @@
// Try to create the list from an arguments object.
__ bind(&create_arguments);
- __ mov(ebx,
- FieldOperand(eax, JSObject::kHeaderSize +
- Heap::kArgumentsLengthIndex * kPointerSize));
+ __ mov(ebx, FieldOperand(eax, JSArgumentsObject::kLengthOffset));
__ mov(ecx, FieldOperand(eax, JSObject::kElementsOffset));
__ cmp(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
__ j(not_equal, &create_runtime);
@@ -1815,10 +1855,138 @@
}
}
+namespace {
+
+// Drops top JavaScript frame and an arguments adaptor frame below it (if
+// present) preserving all the arguments prepared for current call.
+// Does nothing if debugger is currently active.
+// ES6 14.6.3. PrepareForTailCall
+//
+// Stack structure for the function g() tail calling f():
+//
+// ------- Caller frame: -------
+// | ...
+// | g()'s arg M
+// | ...
+// | g()'s arg 1
+// | g()'s receiver arg
+// | g()'s caller pc
+// ------- g()'s frame: -------
+// | g()'s caller fp <- fp
+// | g()'s context
+// | function pointer: g
+// | -------------------------
+// | ...
+// | ...
+// | f()'s arg N
+// | ...
+// | f()'s arg 1
+// | f()'s receiver arg
+// | f()'s caller pc <- sp
+// ----------------------
+//
+void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
+ Register scratch1, Register scratch2,
+ Register scratch3) {
+ DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
+ Comment cmnt(masm, "[ PrepareForTailCall");
+
+ // Prepare for tail call only if the debugger is not active.
+ Label done;
+ ExternalReference debug_is_active =
+ ExternalReference::debug_is_active_address(masm->isolate());
+ __ movzx_b(scratch1, Operand::StaticVariable(debug_is_active));
+ __ cmp(scratch1, Immediate(0));
+ __ j(not_equal, &done, Label::kNear);
+
+ // Drop possible interpreter handler/stub frame.
+ {
+ Label no_interpreter_frame;
+ __ cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
+ Immediate(Smi::FromInt(StackFrame::STUB)));
+ __ j(not_equal, &no_interpreter_frame, Label::kNear);
+ __ mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ bind(&no_interpreter_frame);
+ }
+
+ // Check if next frame is an arguments adaptor frame.
+ Label no_arguments_adaptor, formal_parameter_count_loaded;
+ __ mov(scratch2, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ cmp(Operand(scratch2, StandardFrameConstants::kContextOffset),
+ Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ j(not_equal, &no_arguments_adaptor, Label::kNear);
+
+ // Drop arguments adaptor frame and load arguments count.
+ __ mov(ebp, scratch2);
+ __ mov(scratch1, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ SmiUntag(scratch1);
+ __ jmp(&formal_parameter_count_loaded, Label::kNear);
+
+ __ bind(&no_arguments_adaptor);
+ // Load caller's formal parameter count
+ __ mov(scratch1, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ mov(scratch1,
+ FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
+ __ mov(
+ scratch1,
+ FieldOperand(scratch1, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ SmiUntag(scratch1);
+
+ __ bind(&formal_parameter_count_loaded);
+
+ // Calculate the destination address where we will put the return address
+ // after we drop current frame.
+ Register new_sp_reg = scratch2;
+ __ sub(scratch1, args_reg);
+ __ lea(new_sp_reg, Operand(ebp, scratch1, times_pointer_size,
+ StandardFrameConstants::kCallerPCOffset));
+
+ if (FLAG_debug_code) {
+ __ cmp(esp, new_sp_reg);
+ __ Check(below, kStackAccessBelowStackPointer);
+ }
+
+ // Copy receiver and return address as well.
+ Register count_reg = scratch1;
+ __ lea(count_reg, Operand(args_reg, 2));
+
+ // Copy return address from caller's frame to current frame's return address
+ // to avoid its trashing and let the following loop copy it to the right
+ // place.
+ Register tmp_reg = scratch3;
+ __ mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
+ __ mov(Operand(esp, 0), tmp_reg);
+
+ // Restore caller's frame pointer now as it could be overwritten by
+ // the copying loop.
+ __ mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+
+ Operand src(esp, count_reg, times_pointer_size, 0);
+ Operand dst(new_sp_reg, count_reg, times_pointer_size, 0);
+
+ // Now copy callee arguments to the caller frame going backwards to avoid
+ // callee arguments corruption (source and destination areas could overlap).
+ Label loop, entry;
+ __ jmp(&entry, Label::kNear);
+ __ bind(&loop);
+ __ dec(count_reg);
+ __ mov(tmp_reg, src);
+ __ mov(dst, tmp_reg);
+ __ bind(&entry);
+ __ cmp(count_reg, Immediate(0));
+ __ j(not_equal, &loop, Label::kNear);
+
+ // Leave current frame.
+ __ mov(esp, new_sp_reg);
+
+ __ bind(&done);
+}
+} // namespace
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
- ConvertReceiverMode mode) {
+ ConvertReceiverMode mode,
+ TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- edi : the function to call (checked to be a JSFunction)
@@ -1907,6 +2075,12 @@
// -- esi : the function context.
// -----------------------------------
+ if (tail_call_mode == TailCallMode::kAllow) {
+ PrepareForTailCall(masm, eax, ebx, ecx, edx);
+ // Reload shared function info.
+ __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ }
+
__ mov(ebx,
FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
__ SmiUntag(ebx);
@@ -2012,13 +2186,18 @@
// static
-void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
+void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
+ TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- edi : the function to call (checked to be a JSBoundFunction)
// -----------------------------------
__ AssertBoundFunction(edi);
+ if (tail_call_mode == TailCallMode::kAllow) {
+ PrepareForTailCall(masm, eax, ebx, ecx, edx);
+ }
+
// Patch the receiver to [[BoundThis]].
__ mov(ebx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
__ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ebx);
@@ -2036,7 +2215,8 @@
// static
-void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
+void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
+ TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- edi : the target to call (can be any Object).
@@ -2046,14 +2226,24 @@
__ JumpIfSmi(edi, &non_callable);
__ bind(&non_smi);
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
- __ j(equal, masm->isolate()->builtins()->CallFunction(mode),
+ __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
RelocInfo::CODE_TARGET);
__ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
- __ j(equal, masm->isolate()->builtins()->CallBoundFunction(),
+ __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
RelocInfo::CODE_TARGET);
+
+ // Check if target has a [[Call]] internal method.
+ __ test_b(FieldOperand(ecx, Map::kBitFieldOffset), 1 << Map::kIsCallable);
+ __ j(zero, &non_callable);
+
__ CmpInstanceType(ecx, JS_PROXY_TYPE);
__ j(not_equal, &non_function);
+ // 0. Prepare for tail call if necessary.
+ if (tail_call_mode == TailCallMode::kAllow) {
+ PrepareForTailCall(masm, eax, ebx, ecx, edx);
+ }
+
// 1. Runtime fallback for Proxy [[Call]].
__ PopReturnAddressTo(ecx);
__ Push(edi);
@@ -2068,15 +2258,12 @@
// 2. Call to something else, which might have a [[Call]] internal method (if
// not we raise an exception).
__ bind(&non_function);
- // Check if target has a [[Call]] internal method.
- __ test_b(FieldOperand(ecx, Map::kBitFieldOffset), 1 << Map::kIsCallable);
- __ j(zero, &non_callable, Label::kNear);
// Overwrite the original receiver with the (original) target.
__ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
// Let the "call_as_function_delegate" take care of the rest.
__ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
__ Jump(masm->isolate()->builtins()->CallFunction(
- ConvertReceiverMode::kNotNullOrUndefined),
+ ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
RelocInfo::CODE_TARGET);
// 3. Call to something that is not callable.
@@ -2394,14 +2581,12 @@
// Load the next prototype.
__ bind(&next_prototype);
__ mov(receiver, FieldOperand(receiver, HeapObject::kMapOffset));
- __ mov(receiver, FieldOperand(receiver, Map::kPrototypeOffset));
- // End if the prototype is null or not hidden.
- __ CompareRoot(receiver, Heap::kNullValueRootIndex);
- __ j(equal, receiver_check_failed);
- __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
- __ test(FieldOperand(scratch0, Map::kBitField3Offset),
- Immediate(Map::IsHiddenPrototype::kMask));
+ __ test(FieldOperand(receiver, Map::kBitField3Offset),
+ Immediate(Map::HasHiddenPrototype::kMask));
__ j(zero, receiver_check_failed);
+
+ __ mov(receiver, FieldOperand(receiver, Map::kPrototypeOffset));
+ __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
// Iterate.
__ jmp(&prototype_loop_start, Label::kNear);