Upgrade V8 to 5.1.281.57 DO NOT MERGE
FPIIM-449
Change-Id: Id981b686b4d587ac31697662eb98bb34be42ad90
(cherry picked from commit 3b9bc31999c9787eb726ecdbfd5796bfdec32a18)
diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc
index 09f4d59..9693a52 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -149,17 +149,15 @@
// -- sp[(argc - n) * 8] : arg[n] (zero-based)
// -- sp[(argc + 1) * 8] : receiver
// -----------------------------------
- Condition const cc = (kind == MathMaxMinKind::kMin) ? ge : le;
Heap::RootListIndex const root_index =
(kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
: Heap::kMinusInfinityValueRootIndex;
- DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? f2 : f0;
// Load the accumulator with the default return value (either -Infinity or
// +Infinity), with the tagged value in a1 and the double value in f0.
__ LoadRoot(a1, root_index);
__ ldc1(f0, FieldMemOperand(a1, HeapNumber::kValueOffset));
- __ mov(a3, a0);
+ __ Addu(a3, a0, Operand(1));
Label done_loop, loop;
__ bind(&loop);
@@ -211,21 +209,24 @@
__ SmiToDoubleFPURegister(a2, f2, t0);
__ bind(&done_convert);
- // Perform the actual comparison with the accumulator value on the left hand
- // side (f0) and the next parameter value on the right hand side (f2).
- Label compare_equal, compare_nan, compare_swap;
- __ BranchF(&compare_equal, &compare_nan, eq, f0, f2);
- __ BranchF(&compare_swap, nullptr, cc, f0, f2);
- __ Branch(&loop);
-
- // Left and right hand side are equal, check for -0 vs. +0.
- __ bind(&compare_equal);
- __ FmoveHigh(t0, reg);
- __ Branch(&loop, ne, t0, Operand(0x80000000));
-
- // Result is on the right hand side.
- __ bind(&compare_swap);
- __ mov_d(f0, f2);
+  // Perform the actual comparison, using Min/Max macro instructions, with
+  // the accumulator value on the left hand side (f0) and the next parameter
+  // value on the right hand side (f2).
+ // We need to work out which HeapNumber (or smi) the result came from.
+ Label compare_nan, set_value;
+ __ BranchF(nullptr, &compare_nan, eq, f0, f2);
+ __ Move(t0, t1, f0);
+ if (kind == MathMaxMinKind::kMin) {
+ __ MinNaNCheck_d(f0, f0, f2);
+ } else {
+ DCHECK(kind == MathMaxMinKind::kMax);
+ __ MaxNaNCheck_d(f0, f0, f2);
+ }
+ __ Move(at, t8, f0);
+ __ Branch(&set_value, ne, t0, Operand(at));
+ __ Branch(&set_value, ne, t1, Operand(t8));
+ __ jmp(&loop);
+ __ bind(&set_value);
__ mov(a1, a2);
__ jmp(&loop);
@@ -238,8 +239,8 @@
__ bind(&done_loop);
__ Lsa(sp, sp, a3, kPointerSizeLog2);
- __ mov(v0, a1);
- __ DropAndRet(1);
+ __ Ret(USE_DELAY_SLOT);
+ __ mov(v0, a1); // In delay slot.
}
// static
@@ -530,6 +531,7 @@
// -- a1 : constructor function
// -- a2 : allocation site or undefined
// -- a3 : new target
+ // -- cp : context
// -- ra : return address
// -- sp[...]: constructor arguments
// -----------------------------------
@@ -543,7 +545,7 @@
// Preserve the incoming parameters on the stack.
__ AssertUndefinedOrAllocationSite(a2, t0);
__ SmiTag(a0);
- __ Push(a2, a0);
+ __ Push(cp, a2, a0);
if (create_implicit_receiver) {
// Allocate the new receiver object.
@@ -618,7 +620,7 @@
}
// Restore context from the frame.
- __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
if (create_implicit_receiver) {
// If the result is an object (in the ECMA sense), we should get rid
@@ -750,9 +752,6 @@
// -----------------------------------
ProfileEntryHookStub::MaybeCallEntryHook(masm);
- // Clear the context before we push it when entering the JS frame.
- __ mov(cp, zero_reg);
-
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -847,9 +846,7 @@
// MANUAL indicates that the scope shouldn't actually generate code to set up
// the frame (that is done below).
FrameScope frame_scope(masm, StackFrame::MANUAL);
-
- __ Push(ra, fp, cp, a1);
- __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
+ __ PushStandardFrame(a1);
// Get the bytecode array from the function object and load the pointer to the
// first entry into kInterpreterBytecodeRegister.
@@ -1205,8 +1202,7 @@
__ MultiPop(saved_regs);
// Perform prologue operations usually performed by the young code stub.
- __ Push(ra, fp, cp, a1);
- __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
+ __ PushStandardFrame(a1);
// Jump to point after the code-age stub.
__ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
@@ -1435,23 +1431,6 @@
}
-void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
- // We check the stack limit as indicator that recompilation might be done.
- Label ok;
- __ LoadRoot(at, Heap::kStackLimitRootIndex);
- __ Branch(&ok, hs, sp, Operand(at));
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ CallRuntime(Runtime::kStackGuard);
- }
- __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
- RelocInfo::CODE_TARGET);
-
- __ bind(&ok);
- __ Ret();
-}
-
-
// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
int field_index) {
@@ -1498,6 +1477,27 @@
__ TailCallRuntime(Runtime::kThrowNotDateError);
}
+// static
+void Builtins::Generate_FunctionHasInstance(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a0 : argc
+ // -- sp[0] : first argument (left-hand side)
+ // -- sp[4] : receiver (right-hand side)
+ // -----------------------------------
+
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ lw(InstanceOfDescriptor::LeftRegister(),
+ MemOperand(fp, 2 * kPointerSize)); // Load left-hand side.
+ __ lw(InstanceOfDescriptor::RightRegister(),
+ MemOperand(fp, 3 * kPointerSize)); // Load right-hand side.
+ InstanceOfStub stub(masm->isolate(), true);
+ __ CallStub(&stub);
+ }
+
+ // Pop the argument and the receiver.
+ __ DropAndRet(2);
+}
// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
@@ -1961,18 +1961,20 @@
DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
Comment cmnt(masm, "[ PrepareForTailCall");
- // Prepare for tail call only if the debugger is not active.
+ // Prepare for tail call only if ES2015 tail call elimination is enabled.
Label done;
- ExternalReference debug_is_active =
- ExternalReference::debug_is_active_address(masm->isolate());
- __ li(at, Operand(debug_is_active));
+ ExternalReference is_tail_call_elimination_enabled =
+ ExternalReference::is_tail_call_elimination_enabled_address(
+ masm->isolate());
+ __ li(at, Operand(is_tail_call_elimination_enabled));
__ lb(scratch1, MemOperand(at));
- __ Branch(&done, ne, scratch1, Operand(zero_reg));
+ __ Branch(&done, eq, scratch1, Operand(zero_reg));
// Drop possible interpreter handler/stub frame.
{
Label no_interpreter_frame;
- __ lw(scratch3, MemOperand(fp, StandardFrameConstants::kMarkerOffset));
+ __ lw(scratch3,
+ MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
__ Branch(&no_interpreter_frame, ne, scratch3,
Operand(Smi::FromInt(StackFrame::STUB)));
__ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
@@ -1980,72 +1982,37 @@
}
// Check if next frame is an arguments adaptor frame.
+ Register caller_args_count_reg = scratch1;
Label no_arguments_adaptor, formal_parameter_count_loaded;
__ lw(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ lw(scratch3, MemOperand(scratch2, StandardFrameConstants::kContextOffset));
+ __ lw(scratch3,
+ MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
__ Branch(&no_arguments_adaptor, ne, scratch3,
Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- // Drop arguments adaptor frame and load arguments count.
+ // Drop current frame and load arguments count from arguments adaptor frame.
__ mov(fp, scratch2);
- __ lw(scratch1,
+ __ lw(caller_args_count_reg,
MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ SmiUntag(scratch1);
+ __ SmiUntag(caller_args_count_reg);
__ Branch(&formal_parameter_count_loaded);
__ bind(&no_arguments_adaptor);
// Load caller's formal parameter count
- __ lw(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ lw(scratch1,
+ MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
__ lw(scratch1,
FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
- __ lw(scratch1,
+ __ lw(caller_args_count_reg,
FieldMemOperand(scratch1,
SharedFunctionInfo::kFormalParameterCountOffset));
- __ SmiUntag(scratch1);
+ __ SmiUntag(caller_args_count_reg);
__ bind(&formal_parameter_count_loaded);
- // Calculate the end of destination area where we will put the arguments
- // after we drop current frame. We add kPointerSize to count the receiver
- // argument which is not included into formal parameters count.
- Register dst_reg = scratch2;
- __ Lsa(dst_reg, fp, scratch1, kPointerSizeLog2);
- __ Addu(dst_reg, dst_reg,
- Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));
-
- Register src_reg = scratch1;
- __ Lsa(src_reg, sp, args_reg, kPointerSizeLog2);
- // Count receiver argument as well (not included in args_reg).
- __ Addu(src_reg, src_reg, Operand(kPointerSize));
-
- if (FLAG_debug_code) {
- __ Check(lo, kStackAccessBelowStackPointer, src_reg, Operand(dst_reg));
- }
-
- // Restore caller's frame pointer and return address now as they will be
- // overwritten by the copying loop.
- __ lw(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
- __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
-
- // Now copy callee arguments to the caller frame going backwards to avoid
- // callee arguments corruption (source and destination areas could overlap).
-
- // Both src_reg and dst_reg are pointing to the word after the one to copy,
- // so they must be pre-decremented in the loop.
- Register tmp_reg = scratch3;
- Label loop, entry;
- __ Branch(&entry);
- __ bind(&loop);
- __ Subu(src_reg, src_reg, Operand(kPointerSize));
- __ Subu(dst_reg, dst_reg, Operand(kPointerSize));
- __ lw(tmp_reg, MemOperand(src_reg));
- __ sw(tmp_reg, MemOperand(dst_reg));
- __ bind(&entry);
- __ Branch(&loop, ne, sp, Operand(src_reg));
-
- // Leave current frame.
- __ mov(sp, dst_reg);
-
+ ParameterCount callee_args_count(args_reg);
+ __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
+ scratch3);
__ bind(&done);
}
} // namespace
@@ -2556,27 +2523,6 @@
{ // Too few parameters: Actual < expected.
__ bind(&too_few);
-
- // If the function is strong we need to throw an error.
- Label no_strong_error;
- __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
- __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kCompilerHintsOffset));
- __ And(t3, t2, Operand(1 << (SharedFunctionInfo::kStrongModeFunction +
- kSmiTagSize)));
- __ Branch(&no_strong_error, eq, t3, Operand(zero_reg));
-
- // What we really care about is the required number of arguments.
- __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kLengthOffset));
- __ SmiUntag(t2);
- __ Branch(&no_strong_error, ge, a0, Operand(t2));
-
- {
- FrameScope frame(masm, StackFrame::MANUAL);
- EnterArgumentsAdaptorFrame(masm);
- __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
- }
-
- __ bind(&no_strong_error);
EnterArgumentsAdaptorFrame(masm);
ArgumentAdaptorStackCheck(masm, &stack_overflow);