Merge V8 5.3.332.45. DO NOT MERGE
Test: Manual
FPIIM-449
Change-Id: Id3254828b068abdea3cb10442e0172a8c9a98e03
(cherry picked from commit 13e2dadd00298019ed862f2b2fc5068bba730bcf)
diff --git a/src/mips64/code-stubs-mips64.cc b/src/mips64/code-stubs-mips64.cc
index 5702c78..89eff90 100644
--- a/src/mips64/code-stubs-mips64.cc
+++ b/src/mips64/code-stubs-mips64.cc
@@ -20,70 +20,29 @@
namespace v8 {
namespace internal {
+#define __ ACCESS_MASM(masm)
-static void InitializeArrayConstructorDescriptor(
- Isolate* isolate, CodeStubDescriptor* descriptor,
- int constant_stack_parameter_count) {
- Address deopt_handler = Runtime::FunctionForId(
- Runtime::kArrayConstructor)->entry;
-
- if (constant_stack_parameter_count == 0) {
- descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- } else {
- descriptor->Initialize(a0, deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- }
+void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
+ __ dsll(t9, a0, kPointerSizeLog2);
+ __ Daddu(t9, sp, t9);
+ __ sd(a1, MemOperand(t9, 0));
+ __ Push(a1);
+ __ Push(a2);
+ __ Daddu(a0, a0, 3);
+ __ TailCallRuntime(Runtime::kNewArray);
}
-
-static void InitializeInternalArrayConstructorDescriptor(
- Isolate* isolate, CodeStubDescriptor* descriptor,
- int constant_stack_parameter_count) {
- Address deopt_handler = Runtime::FunctionForId(
- Runtime::kInternalArrayConstructor)->entry;
-
- if (constant_stack_parameter_count == 0) {
- descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- } else {
- descriptor->Initialize(a0, deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- }
-}
-
-
-void ArraySingleArgumentConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
-}
-
-
-void ArrayNArgumentsConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
-}
-
-
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
-void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
+void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
- InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
+ Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
+ descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
-
-void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
-}
-
-
-#define __ ACCESS_MASM(masm)
-
static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
Condition cc);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
@@ -1019,7 +978,7 @@
CEntryStub::GenerateAheadOfTime(isolate);
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
- ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
CreateWeakCellStub::GenerateAheadOfTime(isolate);
BinaryOpICStub::GenerateAheadOfTime(isolate);
@@ -1436,7 +1395,6 @@
&miss, // When not a string.
&miss, // When not a number.
&miss, // When index out of range.
- STRING_INDEX_IS_ARRAY_INDEX,
RECEIVER_IS_STRING);
char_at_generator.GenerateFast(masm);
__ Ret();
@@ -1923,6 +1881,7 @@
// a2 : feedback vector
// a3 : slot in feedback vector (Smi)
Label initialize, done, miss, megamorphic, not_array_function;
+ Label done_initialize_count, done_increment_count;
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->megamorphic_symbol());
@@ -1942,7 +1901,7 @@
Register feedback_map = a6;
Register weak_value = t0;
__ ld(weak_value, FieldMemOperand(a5, WeakCell::kValueOffset));
- __ Branch(&done, eq, a1, Operand(weak_value));
+ __ Branch(&done_increment_count, eq, a1, Operand(weak_value));
__ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
__ Branch(&done, eq, a5, Operand(at));
__ ld(feedback_map, FieldMemOperand(a5, HeapObject::kMapOffset));
@@ -1964,7 +1923,7 @@
// Make sure the function is the Array() function
__ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5);
__ Branch(&megamorphic, ne, a1, Operand(a5));
- __ jmp(&done);
+ __ jmp(&done_increment_count);
__ bind(&miss);
@@ -1992,12 +1951,31 @@
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub);
- __ Branch(&done);
+ __ Branch(&done_initialize_count);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub);
+
+ __ bind(&done_initialize_count);
+ // Initialize the call counter.
+
+ __ SmiScale(a4, a3, kPointerSizeLog2);
+ __ Daddu(a4, a2, Operand(a4));
+ __ li(a5, Operand(Smi::FromInt(1)));
+ __ Branch(USE_DELAY_SLOT, &done);
+ __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + kPointerSize));
+
+ __ bind(&done_increment_count);
+
+ // Increment the call count for monomorphic function calls.
+ __ SmiScale(a4, a3, kPointerSizeLog2);
+ __ Daddu(a5, a2, Operand(a4));
+ __ ld(a4, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize));
+ __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
+ __ sd(a4, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize));
+
__ bind(&done);
}
@@ -2100,7 +2078,7 @@
__ dsrl(t0, a3, 32 - kPointerSizeLog2);
__ Daddu(a3, a2, Operand(t0));
__ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
- __ Daddu(t0, t0, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
+ __ Daddu(t0, t0, Operand(Smi::FromInt(1)));
__ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
__ mov(a2, a4);
@@ -2148,7 +2126,7 @@
__ dsrl(t0, a3, 32 - kPointerSizeLog2);
__ Daddu(a3, a2, Operand(t0));
__ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
- __ Daddu(t0, t0, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
+ __ Daddu(t0, t0, Operand(Smi::FromInt(1)));
__ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
__ bind(&call_function);
@@ -2221,7 +2199,7 @@
// Initialize the call counter.
__ dsrl(at, a3, 32 - kPointerSizeLog2);
__ Daddu(at, a2, Operand(at));
- __ li(t0, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
+ __ li(t0, Operand(Smi::FromInt(1)));
__ sd(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
// Store the function. Use a stub since we need a frame for allocation.
@@ -2282,13 +2260,7 @@
} else {
__ Push(object_, index_);
}
- if (index_flags_ == STRING_INDEX_IS_NUMBER) {
- __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
- } else {
- DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
- // NumberToSmi discards numbers that are not exact integers.
- __ CallRuntime(Runtime::kNumberToSmi);
- }
+ __ CallRuntime(Runtime::kNumberToSmi);
// Save the conversion result before the pop instructions below
// have a chance to overwrite it.
@@ -2625,74 +2597,12 @@
// a3: from index (untagged)
__ SmiTag(a3);
StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime,
- STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
+ RECEIVER_IS_STRING);
generator.GenerateFast(masm);
__ DropAndRet(3);
generator.SkipSlow(masm, &runtime);
}
-
-void ToNumberStub::Generate(MacroAssembler* masm) {
- // The ToNumber stub takes one argument in a0.
- Label not_smi;
- __ JumpIfNotSmi(a0, &not_smi);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0);
- __ bind(&not_smi);
-
- Label not_heap_number;
- __ GetObjectType(a0, a1, a1);
- // a0: receiver
- // a1: receiver instance type
- __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0);
- __ bind(&not_heap_number);
-
- NonNumberToNumberStub stub(masm->isolate());
- __ TailCallStub(&stub);
-}
-
-void NonNumberToNumberStub::Generate(MacroAssembler* masm) {
- // The NonNumberToNumber stub takes on argument in a0.
- __ AssertNotNumber(a0);
-
- Label not_string;
- __ GetObjectType(a0, a1, a1);
- // a0: receiver
- // a1: receiver instance type
- __ Branch(&not_string, hs, a1, Operand(FIRST_NONSTRING_TYPE));
- StringToNumberStub stub(masm->isolate());
- __ TailCallStub(&stub);
- __ bind(&not_string);
-
- Label not_oddball;
- __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
- __ Ret(USE_DELAY_SLOT);
- __ ld(v0, FieldMemOperand(a0, Oddball::kToNumberOffset)); // In delay slot.
- __ bind(&not_oddball);
-
- __ Push(a0); // Push argument.
- __ TailCallRuntime(Runtime::kToNumber);
-}
-
-void StringToNumberStub::Generate(MacroAssembler* masm) {
- // The StringToNumber stub takes on argument in a0.
- __ AssertString(a0);
-
- // Check if string has a cached array index.
- Label runtime;
- __ lwu(a2, FieldMemOperand(a0, String::kHashFieldOffset));
- __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask));
- __ Branch(&runtime, ne, at, Operand(zero_reg));
- __ IndexFromHash(a2, v0);
- __ Ret();
-
- __ bind(&runtime);
- __ Push(a0); // Push argument.
- __ TailCallRuntime(Runtime::kStringToNumber);
-}
-
void ToStringStub::Generate(MacroAssembler* masm) {
// The ToString stub takes on argument in a0.
Label is_number;
@@ -2873,7 +2783,7 @@
// Load a2 with the allocation site. We stick an undefined dummy value here
// and replace it with the real allocation site later when we instantiate this
// stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
- __ li(a2, handle(isolate()->heap()->undefined_value()));
+ __ li(a2, isolate()->factory()->undefined_value());
// Make sure that we actually patched the allocation site.
if (FLAG_debug_code) {
@@ -3768,14 +3678,14 @@
void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
__ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
- LoadICStub stub(isolate(), state());
+ LoadICStub stub(isolate());
stub.GenerateForTrampoline(masm);
}
void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
__ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
- KeyedLoadICStub stub(isolate(), state());
+ KeyedLoadICStub stub(isolate());
stub.GenerateForTrampoline(masm);
}
@@ -4394,19 +4304,13 @@
}
}
-
-void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
isolate);
ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
isolate);
- ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
- isolate);
-}
-
-
-void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
- Isolate* isolate) {
+ ArrayNArgumentsConstructorStub stub(isolate);
+ stub.GetCode();
ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
for (int i = 0; i < 2; i++) {
// For internal arrays we only need a few things.
@@ -4414,8 +4318,6 @@
stubh1.GetCode();
InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
stubh2.GetCode();
- InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
- stubh3.GetCode();
}
}
@@ -4434,13 +4336,15 @@
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
- CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
+ ArrayNArgumentsConstructorStub stub(masm->isolate());
+ __ TailCallStub(&stub);
} else if (argument_count() == NONE) {
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
} else if (argument_count() == ONE) {
CreateArrayDispatchOneArgument(masm, mode);
} else if (argument_count() == MORE_THAN_ONE) {
- CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
+ ArrayNArgumentsConstructorStub stub(masm->isolate());
+ __ TailCallStub(&stub);
} else {
UNREACHABLE();
}
@@ -4524,7 +4428,7 @@
InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
__ TailCallStub(&stub0, lo, a0, Operand(1));
- InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
+ ArrayNArgumentsConstructorStub stubN(isolate());
__ TailCallStub(&stubN, hi, a0, Operand(1));
if (IsFastPackedElementsKind(kind)) {
@@ -4750,10 +4654,10 @@
Label rest_parameters;
__ SmiLoadUntag(
a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
- __ lw(a1,
- FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset));
- __ Dsubu(a0, a0, Operand(a1));
+ __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ __ lw(a3,
+ FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ Dsubu(a0, a0, Operand(a3));
__ Branch(&rest_parameters, gt, a0, Operand(zero_reg));
// Return an empty rest parameter array.
@@ -4800,15 +4704,16 @@
// ----------- S t a t e -------------
// -- cp : context
// -- a0 : number of rest parameters
+ // -- a1 : function
// -- a2 : pointer to first rest parameters
// -- ra : return address
// -----------------------------------
// Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate;
- __ li(a1, Operand(JSArray::kSize + FixedArray::kHeaderSize));
- __ Dlsa(a1, a1, a0, kPointerSizeLog2);
- __ Allocate(a1, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
+ __ li(a5, Operand(JSArray::kSize + FixedArray::kHeaderSize));
+ __ Dlsa(a5, a5, a0, kPointerSizeLog2);
+ __ Allocate(a5, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Compute arguments.length in a4.
@@ -4843,18 +4748,26 @@
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a3); // In delay slot
- // Fall back to %AllocateInNewSpace.
+ // Fall back to %AllocateInNewSpace (if not too big).
+ Label too_big_for_new_space;
__ bind(&allocate);
+ __ Branch(&too_big_for_new_space, gt, a5,
+ Operand(Page::kMaxRegularHeapObjectSize));
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(a0);
- __ SmiTag(a1);
- __ Push(a0, a2, a1);
+ __ SmiTag(a5);
+ __ Push(a0, a2, a5);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(a0, a2);
__ SmiUntag(a0);
}
__ jmp(&done_allocate);
+
+ // Fall back to %NewStrictArguments.
+ __ bind(&too_big_for_new_space);
+ __ Push(a1);
+ __ TailCallRuntime(Runtime::kNewStrictArguments);
}
}
@@ -5126,9 +5039,9 @@
__ Branch(&arguments_adaptor, eq, a0,
Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
{
- __ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ lw(a0,
- FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset));
+ FieldMemOperand(a4, SharedFunctionInfo::kFormalParameterCountOffset));
__ Dlsa(a2, a2, a0, kPointerSizeLog2);
__ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
1 * kPointerSize));
@@ -5147,15 +5060,16 @@
// ----------- S t a t e -------------
// -- cp : context
// -- a0 : number of rest parameters
+ // -- a1 : function
// -- a2 : pointer to first rest parameters
// -- ra : return address
// -----------------------------------
// Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate;
- __ li(a1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
- __ Dlsa(a1, a1, a0, kPointerSizeLog2);
- __ Allocate(a1, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
+ __ li(a5, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
+ __ Dlsa(a5, a5, a0, kPointerSizeLog2);
+ __ Allocate(a5, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Compute arguments.length in a4.
@@ -5190,48 +5104,26 @@
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a3); // In delay slot
- // Fall back to %AllocateInNewSpace.
+ // Fall back to %AllocateInNewSpace (if not too big).
+ Label too_big_for_new_space;
__ bind(&allocate);
+ __ Branch(&too_big_for_new_space, gt, a5,
+ Operand(Page::kMaxRegularHeapObjectSize));
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(a0);
- __ SmiTag(a1);
- __ Push(a0, a2, a1);
+ __ SmiTag(a5);
+ __ Push(a0, a2, a5);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(a0, a2);
__ SmiUntag(a0);
}
__ jmp(&done_allocate);
-}
-
-void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
- Register context_reg = cp;
- Register slot_reg = a2;
- Register result_reg = v0;
- Label slow_case;
-
- // Go up context chain to the script context.
- for (int i = 0; i < depth(); ++i) {
- __ ld(result_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX));
- context_reg = result_reg;
- }
-
- // Load the PropertyCell value at the specified slot.
- __ Dlsa(at, context_reg, slot_reg, kPointerSizeLog2);
- __ ld(result_reg, ContextMemOperand(at, 0));
- __ ld(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset));
-
- // Check that value is not the_hole.
- __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
- __ Branch(&slow_case, eq, result_reg, Operand(at));
- __ Ret();
-
- // Fallback to the runtime.
- __ bind(&slow_case);
- __ SmiTag(slot_reg);
- __ Push(slot_reg);
- __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
+ // Fall back to %NewStrictArguments.
+ __ bind(&too_big_for_new_space);
+ __ Push(a1);
+ __ TailCallRuntime(Runtime::kNewStrictArguments);
}