Upgrade to V8 3.4
Merge 3.4.14.35
Simple merge; required updates to makefiles only.
Bug: 568872
Change-Id: I403a38452c547e06fcfa951c12eca12a1bc40978
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 7075e66..1a6efcb 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -125,18 +125,17 @@
__ movq(rcx, Operand(rsp, 1 * kPointerSize));
// Setup the object header.
- __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
+ __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
__ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
__ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
// Setup the fixed slots.
__ Set(rbx, 0); // Set to NULL.
__ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
- __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
- __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
+ __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi);
__ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);
- // Copy the global object from the surrounding context.
+ // Copy the global object from the previous context.
__ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
@@ -152,7 +151,7 @@
// Need to collect. Call into runtime system.
__ bind(&gc);
- __ TailCallRuntime(Runtime::kNewContext, 1, 1);
+ __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
}
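
For orientation, the hunk above tracks the 3.4 context refactor: function contexts get their own map, the FCONTEXT slot is gone, and PREVIOUS now records the calling context (rsi) rather than a NULL placeholder. A rough sketch of the slots the stub now writes (illustrative only; real offsets come from Context::SlotOffset):

    // Hedged sketch, not V8 code: the fields FastNewContextStub fills in.
    struct FunctionContextSketch {
      void* map;        // Heap::kFunctionContextMapRootIndex (was kContextMapRootIndex)
      void* length;     // Smi::FromInt(length)
      void* closure;    // CLOSURE_INDEX: the function, from the stack (rcx)
      void* previous;   // PREVIOUS_INDEX: the calling context (rsi); FCONTEXT removed
      void* extension;  // EXTENSION_INDEX: NULL (rbx)
      void* global;     // GLOBAL_INDEX: copied from the previous context
    };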
@@ -231,8 +230,11 @@
}
+// The stub returns zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
Label false_result, true_result, not_string;
+ const Register map = rdx;
+
__ movq(rax, Operand(rsp, 1 * kPointerSize));
// undefined -> false
@@ -248,51 +250,47 @@
// Smis: 0 -> false, all other -> true
__ Cmp(rax, Smi::FromInt(0));
__ j(equal, &false_result);
- Condition is_smi = __ CheckSmi(rax);
- __ j(is_smi, &true_result);
+ __ JumpIfSmi(rax, &true_result);
- // 'null' => false.
+ // 'null' -> false.
__ CompareRoot(rax, Heap::kNullValueRootIndex);
__ j(equal, &false_result, Label::kNear);
- // Get the map and type of the heap object.
- // We don't use CmpObjectType because we manipulate the type field.
- __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
- __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));
+ // Get the map of the heap object.
+ __ movq(map, FieldOperand(rax, HeapObject::kMapOffset));
- // Undetectable => false.
- __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
- __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
+ // Undetectable -> false.
+ __ testb(FieldOperand(map, Map::kBitFieldOffset),
+ Immediate(1 << Map::kIsUndetectable));
__ j(not_zero, &false_result, Label::kNear);
- // JavaScript object => true.
- __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
+ // JavaScript object -> true.
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, &true_result, Label::kNear);
- // String value => false iff empty.
- __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE));
+ // String value -> false iff empty.
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(above_equal, &not_string, Label::kNear);
- __ movq(rdx, FieldOperand(rax, String::kLengthOffset));
- __ SmiTest(rdx);
+ __ cmpq(FieldOperand(rax, String::kLengthOffset), Immediate(0));
__ j(zero, &false_result, Label::kNear);
__ jmp(&true_result, Label::kNear);
__ bind(&not_string);
- __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
- __ j(not_equal, &true_result, Label::kNear);
- // HeapNumber => false iff +0, -0, or NaN.
+ // HeapNumber -> false iff +0, -0, or NaN.
// These three cases set the zero flag when compared to zero using ucomisd.
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &true_result, Label::kNear);
__ xorps(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
__ j(zero, &false_result, Label::kNear);
// Fall through to |true_result|.
- // Return 1/0 for true/false in rax.
+ // Return 1/0 for true/false in tos_.
__ bind(&true_result);
- __ Set(rax, 1);
+ __ Set(tos_, 1);
__ ret(1 * kPointerSize);
__ bind(&false_result);
- __ Set(rax, 0);
+ __ Set(tos_, 0);
__ ret(1 * kPointerSize);
}
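
The ladder above is the ECMAScript ToBoolean algorithm specialized for tagged values. As a cross-check, a minimal C++ sketch of the same decision order (the Tag enum and Value struct are assumptions for illustration, not V8 types):

    enum class Tag { kSmi, kUndefined, kNull, kUndetectable,
                     kSpecObject, kString, kHeapNumber };
    struct Value { Tag tag; long smi; int string_length; double number; };

    // Returns 0 for false and non-zero for true, like the stub.
    int ToBooleanSketch(const Value& v) {
      if (v.tag == Tag::kUndefined) return 0;                  // undefined -> false
      if (v.tag == Tag::kSmi) return v.smi != 0;               // 0 -> false, others -> true
      if (v.tag == Tag::kNull) return 0;                       // null -> false
      if (v.tag == Tag::kUndetectable) return 0;               // undetectable -> false
      if (v.tag == Tag::kSpecObject) return 1;                 // JS objects -> true
      if (v.tag == Tag::kString) return v.string_length != 0;  // "" -> false
      // HeapNumber: +0, -0 and NaN -> false; ucomisd sets ZF for all three.
      return v.number == v.number && v.number != 0.0;
    }

The last line folds the NaN test (NaN != NaN) and the signed-zero test into one expression, mirroring what the single ucomisd-against-zero achieves in the stub.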
@@ -406,12 +404,6 @@
}
-Handle<Code> GetUnaryOpStub(int key, UnaryOpIC::TypeInfo type_info) {
- UnaryOpStub stub(key, type_info);
- return stub.GetCode();
-}
-
-
void UnaryOpStub::Generate(MacroAssembler* masm) {
switch (operand_type_) {
case UnaryOpIC::UNINITIALIZED:
@@ -432,12 +424,10 @@
void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
__ pop(rcx); // Save return address.
- __ push(rax);
- // Left and right arguments are now on top.
- // Push this stub's key. Although the operation and the type info are
- // encoded into the key, the encoding is opaque, so push them too.
- __ Push(Smi::FromInt(MinorKey()));
+
+ __ push(rax); // the operand
__ Push(Smi::FromInt(op_));
+ __ Push(Smi::FromInt(mode_));
__ Push(Smi::FromInt(operand_type_));
__ push(rcx); // Push return address.
@@ -445,10 +435,7 @@
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kUnaryOp_Patch),
- masm->isolate()),
- 4,
- 1);
+ ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
}
@@ -655,33 +642,17 @@
}
-const char* UnaryOpStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
+void UnaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name = NULL; // Make g++ happy.
switch (mode_) {
case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
}
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "UnaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- UnaryOpIC::GetName(operand_type_));
- return name_;
-}
-
-
-Handle<Code> GetBinaryOpStub(int key,
- BinaryOpIC::TypeInfo type_info,
- BinaryOpIC::TypeInfo result_type_info) {
- BinaryOpStub stub(key, type_info, result_type_info);
- return stub.GetCode();
+ stream->Add("UnaryOpStub_%s_%s_%s",
+ op_name,
+ overwrite_name,
+ UnaryOpIC::GetName(operand_type_));
}
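
This GetName-to-PrintName rewrite (repeated for BinaryOpStub and CompareStub below) replaces a hand-managed, bootstrapper-allocated char buffer and its "OOM" fallback with a caller-supplied stream. A minimal sketch of the pattern, assuming only vsnprintf and std::string (V8's actual StringStream is arena-backed, so this is illustrative):

    #include <cstdarg>
    #include <cstdio>
    #include <string>

    // Stand-in for V8's StringStream: callers append with Add() and never
    // touch the underlying buffer or worry about its lifetime.
    class StringStreamSketch {
     public:
      void Add(const char* format, ...) {
        char buf[128];
        va_list args;
        va_start(args, format);
        vsnprintf(buf, sizeof(buf), format, args);
        va_end(args);
        out_ += buf;
      }
      const std::string& str() const { return out_; }
     private:
      std::string out_;
    };

    // Usage mirroring UnaryOpStub::PrintName:
    //   stream.Add("UnaryOpStub_%s_%s_%s", op_name, overwrite_name, type_name);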
@@ -742,12 +713,7 @@
}
-const char* BinaryOpStub::GetName() {
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
+void BinaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name;
switch (mode_) {
@@ -756,13 +722,10 @@
case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
default: overwrite_name = "UnknownOverwrite"; break;
}
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "BinaryOpStub_%s_%s_%s",
- op_name,
- overwrite_name,
- BinaryOpIC::GetName(operands_type_));
- return name_;
+ stream->Add("BinaryOpStub_%s_%s_%s",
+ op_name,
+ overwrite_name,
+ BinaryOpIC::GetName(operands_type_));
}
@@ -1959,49 +1922,288 @@
}
-void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
+void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
+ // Stack layout:
+ // rsp[0] : return address
+ // rsp[8] : number of parameters (tagged)
+ // rsp[16] : receiver displacement
+ // rsp[24] : function
+ // Registers used over the whole function:
+ // rbx: the mapped parameter count (untagged)
+ // rax: the allocated object (tagged).
+
+ Factory* factory = masm->isolate()->factory();
+
+ __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize));
+ // rbx = parameter count (untagged)
+
+ // Check if the calling frame is an arguments adaptor frame.
+ Label runtime;
+ Label adaptor_frame, try_allocate;
+ __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+ __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
+ __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ j(equal, &adaptor_frame);
+
+ // No adaptor, parameter count = argument count.
+ __ movq(rcx, rbx);
+ __ jmp(&try_allocate, Label::kNear);
+
+ // We have an adaptor frame. Patch the parameters pointer.
+ __ bind(&adaptor_frame);
+ __ SmiToInteger64(rcx,
+ Operand(rdx,
+ ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
+ StandardFrameConstants::kCallerSPOffset));
+ __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+
+ // rbx = parameter count (untagged)
+ // rcx = argument count (untagged)
+ // Compute the mapped parameter count = min(rbx, rcx) in rbx.
+ __ cmpq(rbx, rcx);
+ __ j(less_equal, &try_allocate, Label::kNear);
+ __ movq(rbx, rcx);
+
+ __ bind(&try_allocate);
+
+ // Compute the sizes of backing store, parameter map, and arguments object.
+ // 1. Parameter map, has 2 extra words containing context and backing store.
+ const int kParameterMapHeaderSize =
+ FixedArray::kHeaderSize + 2 * kPointerSize;
+ Label no_parameter_map;
+ __ testq(rbx, rbx);
+ __ j(zero, &no_parameter_map, Label::kNear);
+ __ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
+ __ bind(&no_parameter_map);
+
+ // 2. Backing store.
+ __ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));
+
+ // 3. Arguments object.
+ __ addq(r8, Immediate(Heap::kArgumentsObjectSize));
+
+ // Do the allocation of all three objects in one go.
+ __ AllocateInNewSpace(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);
+
+ // rax = address of new object(s) (tagged)
+ // rcx = argument count (untagged)
+ // Get the arguments boilerplate from the current (global) context into rdi.
+ Label has_mapped_parameters, copy;
+ __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
+ __ testq(rbx, rbx);
+ __ j(not_zero, &has_mapped_parameters, Label::kNear);
+
+ const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX;
+ __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
+ __ jmp(&copy, Label::kNear);
+
+ const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX;
+ __ bind(&has_mapped_parameters);
+ __ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
+ __ bind(&copy);
+
+ // rax = address of new object (tagged)
+ // rbx = mapped parameter count (untagged)
+ // rcx = argument count (untagged)
+ // rdi = address of boilerplate object (tagged)
+ // Copy the JS object part.
+ for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
+ __ movq(rdx, FieldOperand(rdi, i));
+ __ movq(FieldOperand(rax, i), rdx);
+ }
+
+ // Setup the callee in-object property.
+ STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
+ __ movq(rdx, Operand(rsp, 3 * kPointerSize));
+ __ movq(FieldOperand(rax, JSObject::kHeaderSize +
+ Heap::kArgumentsCalleeIndex * kPointerSize),
+ rdx);
+
+ // Use the length (smi tagged) and set that as an in-object property too.
+ // Note: rcx is tagged from here on.
+ STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
+ __ Integer32ToSmi(rcx, rcx);
+ __ movq(FieldOperand(rax, JSObject::kHeaderSize +
+ Heap::kArgumentsLengthIndex * kPointerSize),
+ rcx);
+
+ // Setup the elements pointer in the allocated arguments object.
+ // If we allocated a parameter map, rdi will point there, otherwise to the
+ // backing store.
+ __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
+ __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
+
+ // rax = address of new object (tagged)
+ // rbx = mapped parameter count (untagged)
+ // rcx = argument count (tagged)
+ // rdi = address of parameter map or backing store (tagged)
+
+ // Initialize parameter map. If there are no mapped arguments, we're done.
+ Label skip_parameter_map;
+ __ testq(rbx, rbx);
+ __ j(zero, &skip_parameter_map);
+
+ __ LoadRoot(kScratchRegister, Heap::kNonStrictArgumentsElementsMapRootIndex);
+ // rbx contains the untagged mapped parameter count. Add 2 and tag to write.
+ __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
+ __ Integer64PlusConstantToSmi(r9, rbx, 2);
+ __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
+ __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
+ __ lea(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
+ __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);
+
+ // Copy the parameter slots and the holes in the arguments.
+ // We need to fill in mapped_parameter_count slots. They index the context,
+ // where parameters are stored in reverse order, at
+ // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
+ // The mapped parameters thus need to get indices
+ // MIN_CONTEXT_SLOTS+parameter_count-1 ..
+ // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
+ // We loop from right to left.
+ Label parameters_loop, parameters_test;
+
+ // Load tagged parameter count into r9.
+ __ movq(r9, Operand(rsp, 1 * kPointerSize));
+ __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
+ __ addq(r8, Operand(rsp, 3 * kPointerSize));
+ __ subq(r8, r9);
+ __ Move(r11, factory->the_hole_value());
+ __ movq(rdx, rdi);
+ __ SmiToInteger64(kScratchRegister, r9);
+ __ lea(rdi, Operand(rdi, kScratchRegister,
+ times_pointer_size,
+ kParameterMapHeaderSize));
+ // r9 = loop variable (tagged)
+ // r8 = mapping index (tagged)
+ // r11 = the hole value
+ // rdx = address of parameter map (tagged)
+ // rdi = address of backing store (tagged)
+ __ jmp(&parameters_test, Label::kNear);
+
+ __ bind(&parameters_loop);
+ __ SmiSubConstant(r9, r9, Smi::FromInt(1));
+ __ SmiToInteger64(kScratchRegister, r9);
+ __ movq(FieldOperand(rdx, kScratchRegister,
+ times_pointer_size,
+ kParameterMapHeaderSize),
+ r8);
+ __ movq(FieldOperand(rdi, kScratchRegister,
+ times_pointer_size,
+ FixedArray::kHeaderSize),
+ r11);
+ __ SmiAddConstant(r8, r8, Smi::FromInt(1));
+ __ bind(&parameters_test);
+ __ SmiTest(r9);
+ __ j(not_zero, &parameters_loop, Label::kNear);
+
+ __ bind(&skip_parameter_map);
+
+ // rcx = argument count (tagged)
+ // rdi = address of backing store (tagged)
+ // Copy arguments header and remaining slots (if there are any).
+ __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
+ factory->fixed_array_map());
+ __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
+
+ Label arguments_loop, arguments_test;
+ __ movq(r8, rbx);
+ __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+ // Untag rcx and r8 for the loop below.
+ __ SmiToInteger64(rcx, rcx);
+ __ SmiToInteger64(r8, r8);
+ __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
+ __ subq(rdx, kScratchRegister);
+ __ jmp(&arguments_test, Label::kNear);
+
+ __ bind(&arguments_loop);
+ __ subq(rdx, Immediate(kPointerSize));
+ __ movq(r9, Operand(rdx, 0));
+ __ movq(FieldOperand(rdi, r8,
+ times_pointer_size,
+ FixedArray::kHeaderSize),
+ r9);
+ __ addq(r8, Immediate(1));
+
+ __ bind(&arguments_test);
+ __ cmpq(r8, rcx);
+ __ j(less, &arguments_loop, Label::kNear);
+
+ // Return and remove the on-stack parameters.
+ __ ret(3 * kPointerSize);
+
+ // Do the runtime call to allocate the arguments object.
+ // rcx = argument count (untagged)
+ __ bind(&runtime);
+ __ Integer32ToSmi(rcx, rcx);
+ __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count.
+ __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
+}
+
+
+void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
+ // rsp[0] : return address
+ // rsp[8] : number of parameters
+ // rsp[16] : receiver displacement
+ // rsp[24] : function
+
+ // Check if the calling frame is an arguments adaptor frame.
+ Label runtime;
+ __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+ __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
+ __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ j(not_equal, &runtime);
+
+ // Patch the arguments.length and the parameters pointer.
+ __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ movq(Operand(rsp, 1 * kPointerSize), rcx);
+ __ SmiToInteger64(rcx, rcx);
+ __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
+ StandardFrameConstants::kCallerSPOffset));
+ __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+
+ __ bind(&runtime);
+ __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
+}
+
+
+void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// rsp[0] : return address
// rsp[8] : number of parameters
// rsp[16] : receiver displacement
// rsp[24] : function
- // The displacement is used for skipping the return address and the
- // frame pointer on the stack. It is the offset of the last
- // parameter (if any) relative to the frame pointer.
- static const int kDisplacement = 2 * kPointerSize;
-
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, try_allocate, runtime;
__ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
+ __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adaptor_frame);
// Get the length from the frame.
- __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
+ __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ __ SmiToInteger64(rcx, rcx);
__ jmp(&try_allocate);
// Patch the arguments.length and the parameters pointer.
__ bind(&adaptor_frame);
- __ SmiToInteger32(rcx,
- Operand(rdx,
- ArgumentsAdaptorFrameConstants::kLengthOffset));
- // Space on stack must already hold a smi.
- __ Integer32ToSmiField(Operand(rsp, 1 * kPointerSize), rcx);
- // Do not clobber the length index for the indexing operation since
- // it is used compute the size for allocation later.
- __ lea(rdx, Operand(rdx, rcx, times_pointer_size, kDisplacement));
+ __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ movq(Operand(rsp, 1 * kPointerSize), rcx);
+ __ SmiToInteger64(rcx, rcx);
+ __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
+ StandardFrameConstants::kCallerSPOffset));
__ movq(Operand(rsp, 2 * kPointerSize), rdx);
// Try the new space allocation. Start out with computing the size of
// the arguments object and the elements array.
Label add_arguments_object;
__ bind(&try_allocate);
- __ testl(rcx, rcx);
- __ j(zero, &add_arguments_object);
- __ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
+ __ testq(rcx, rcx);
+ __ j(zero, &add_arguments_object, Label::kNear);
+ __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
__ bind(&add_arguments_object);
- __ addl(rcx, Immediate(GetArgumentsObjectSize()));
+ __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));
// Do the allocation of both objects in one go.
__ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
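
GenerateNewNonStrictFast above sizes three objects and allocates them with a single AllocateInNewSpace call. A sketch of that size arithmetic (the free-standing function and its constants are illustrative assumptions; the real constants come from Heap and FixedArray):

    // Hedged sketch of the stub's size computation, not V8 code.
    // mapped = min(parameter count, argument count), both untagged.
    size_t ArgumentsAllocationSize(size_t mapped, size_t argc) {
      const size_t kPointerSize = 8;            // x64
      const size_t kFixedArrayHeaderSize = 16;  // assumed for illustration
      const size_t kArgumentsObjectSize = 48;   // assumed for illustration
      size_t total = 0;
      // 1. Parameter map: header plus two extra words (context, backing store).
      if (mapped != 0) {
        total += kFixedArrayHeaderSize + (2 + mapped) * kPointerSize;
      }
      // 2. Backing store holding the actual arguments.
      total += kFixedArrayHeaderSize + argc * kPointerSize;
      // 3. The arguments JSObject itself.
      total += kArgumentsObjectSize;
      return total;
    }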
@@ -2009,59 +2211,51 @@
// Get the arguments boilerplate from the current (global) context.
__ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
- __ movq(rdi, Operand(rdi,
- Context::SlotOffset(GetArgumentsBoilerplateIndex())));
+ const int offset =
+ Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
+ __ movq(rdi, Operand(rdi, offset));
// Copy the JS object part.
- STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
- __ movq(kScratchRegister, FieldOperand(rdi, 0 * kPointerSize));
- __ movq(rdx, FieldOperand(rdi, 1 * kPointerSize));
- __ movq(rbx, FieldOperand(rdi, 2 * kPointerSize));
- __ movq(FieldOperand(rax, 0 * kPointerSize), kScratchRegister);
- __ movq(FieldOperand(rax, 1 * kPointerSize), rdx);
- __ movq(FieldOperand(rax, 2 * kPointerSize), rbx);
-
- if (type_ == NEW_NON_STRICT) {
- // Setup the callee in-object property.
- ASSERT(Heap::kArgumentsCalleeIndex == 1);
- __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
- __ movq(FieldOperand(rax, JSObject::kHeaderSize +
- Heap::kArgumentsCalleeIndex * kPointerSize),
- kScratchRegister);
+ for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
+ __ movq(rbx, FieldOperand(rdi, i));
+ __ movq(FieldOperand(rax, i), rbx);
}
// Get the length (smi tagged) and set that as an in-object property too.
- ASSERT(Heap::kArgumentsLengthIndex == 0);
+ STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
__ movq(rcx, Operand(rsp, 1 * kPointerSize));
__ movq(FieldOperand(rax, JSObject::kHeaderSize +
- Heap::kArgumentsLengthIndex * kPointerSize),
+ Heap::kArgumentsLengthIndex * kPointerSize),
rcx);
// If there are no actual arguments, we're done.
Label done;
- __ SmiTest(rcx);
+ __ testq(rcx, rcx);
__ j(zero, &done);
- // Get the parameters pointer from the stack and untag the length.
+ // Get the parameters pointer from the stack.
__ movq(rdx, Operand(rsp, 2 * kPointerSize));
// Setup the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
- __ lea(rdi, Operand(rax, GetArgumentsObjectSize()));
+ __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
__ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
__ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
__ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
+
+
__ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
- __ SmiToInteger32(rcx, rcx); // Untag length for the loop below.
+ // Untag the length for the loop below.
+ __ SmiToInteger64(rcx, rcx);
// Copy the fixed array slots.
Label loop;
__ bind(&loop);
- __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
- __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
+ __ movq(rbx, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
+ __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), rbx);
__ addq(rdi, Immediate(kPointerSize));
__ subq(rdx, Immediate(kPointerSize));
- __ decl(rcx);
+ __ decq(rcx);
__ j(not_zero, &loop);
// Return and remove the on-stack parameters.
@@ -2070,7 +2264,7 @@
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
- __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
+ __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
}
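
The strict variant above deliberately skips two things the non-strict fast path builds: the callee in-object property and the parameter map that aliases formals into the context, which is why it uses the smaller Heap::kArgumentsObjectSizeStrict and a plain FixedArray of the actuals. A schematic contrast (illustrative slots only, not V8's real object layout):

    // Hedged sketch of the two arguments-object shapes.
    struct NonStrictArgumentsSketch {
      void* length;  // in-object slot 0 (Heap::kArgumentsLengthIndex == 0)
      void* callee;  // in-object slot 1 (Heap::kArgumentsCalleeIndex == 1)
      // elements -> parameter map (when formals are mapped) or backing store
    };
    struct StrictArgumentsSketch {
      void* length;  // the only in-object slot; no callee, hence the smaller size
      // elements -> always a plain FixedArray copy of the actual arguments
    };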
@@ -2335,6 +2529,7 @@
#else
// Already there in AMD64 calling convention.
ASSERT(arg1.is(rdi));
+ USE(arg1);
#endif
// Locate the code entry and call it.
@@ -2711,8 +2906,8 @@
factory->heap_number_map());
__ j(equal, &heap_number, Label::kNear);
if (cc_ != equal) {
- // Call runtime on identical JSObjects. Otherwise return equal.
- __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
+ // Call runtime on identical objects. Otherwise return equal.
+ __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
__ j(above_equal, &not_identical, Label::kNear);
}
__ Set(rax, EQUAL);
@@ -2768,9 +2963,9 @@
// There is no test for undetectability in strict equality.
// If the first object is a JS object, we have done pointer comparison.
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
Label first_non_object;
- __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
+ __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
__ j(below, &first_non_object, Label::kNear);
// Return non-zero (eax (not rax) is not zero)
Label return_not_equal;
@@ -2783,7 +2978,7 @@
__ CmpInstanceType(rcx, ODDBALL_TYPE);
__ j(equal, &return_not_equal);
- __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
+ __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
__ j(above_equal, &return_not_equal);
// Check for oddballs: true, false, null, undefined.
@@ -2879,9 +3074,9 @@
__ lea(rcx, Operand(rax, rdx, times_1, 0));
__ testb(rcx, Immediate(kSmiTagMask));
__ j(not_zero, &not_both_objects, Label::kNear);
- __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
+ __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
__ j(below, &not_both_objects, Label::kNear);
- __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
+ __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
__ j(below, &not_both_objects, Label::kNear);
__ testb(FieldOperand(rbx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
@@ -3004,6 +3199,7 @@
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
Handle<Code> adaptor =
Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
+ __ SetCallKind(rcx, CALL_AS_METHOD);
__ Jump(adaptor, RelocInfo::CODE_TARGET);
}
@@ -3240,9 +3436,7 @@
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
Label invoke, exit;
-#ifdef ENABLE_LOGGING_AND_PROFILING
Label not_outermost_js, not_outermost_js_2;
-#endif
{ // NOLINT. Scope block confuses linter.
MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
// Setup frame.
@@ -3287,7 +3481,6 @@
__ push(c_entry_fp_operand);
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
// If this is the outermost JS call, set js_entry_sp value.
ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
__ Load(rax, js_entry_sp);
@@ -3301,7 +3494,6 @@
__ bind(&not_outermost_js);
__ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
__ bind(&cont);
-#endif
// Call a faked try-block that does the invoke.
__ call(&invoke);
@@ -3345,7 +3537,6 @@
__ PopTryHandler();
__ bind(&exit);
-#ifdef ENABLE_LOGGING_AND_PROFILING
// Check if the current stack frame is marked as the outermost JS frame.
__ pop(rbx);
__ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
@@ -3353,7 +3544,6 @@
__ movq(kScratchRegister, js_entry_sp);
__ movq(Operand(kScratchRegister, 0), Immediate(0));
__ bind(&not_outermost_js_2);
-#endif
// Restore the top frame descriptor from the stack.
{ Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
@@ -3417,9 +3607,9 @@
__ JumpIfSmi(rax, &slow);
// Check that the left hand is a JS object. Leave its map in rax.
- __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
+ __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
__ j(below, &slow);
- __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE);
+ __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
__ j(above, &slow);
// Get the prototype of the function.
@@ -3444,9 +3634,9 @@
// Check that the function prototype is a JS object.
__ JumpIfSmi(rbx, &slow);
- __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
+ __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
__ j(below, &slow);
- __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
+ __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE);
__ j(above, &slow);
// Register mapping:
@@ -3562,15 +3752,8 @@
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
-const char* CompareStub::GetName() {
+void CompareStub::PrintName(StringStream* stream) {
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
-
- if (name_ != NULL) return name_;
- const int kMaxNameLength = 100;
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
- kMaxNameLength);
- if (name_ == NULL) return "OOM";
-
const char* cc_name;
switch (cc_) {
case less: cc_name = "LT"; break;
@@ -3581,35 +3764,12 @@
case not_equal: cc_name = "NE"; break;
default: cc_name = "UnknownCondition"; break;
}
-
- const char* strict_name = "";
- if (strict_ && (cc_ == equal || cc_ == not_equal)) {
- strict_name = "_STRICT";
- }
-
- const char* never_nan_nan_name = "";
- if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
- never_nan_nan_name = "_NO_NAN";
- }
-
- const char* include_number_compare_name = "";
- if (!include_number_compare_) {
- include_number_compare_name = "_NO_NUMBER";
- }
-
- const char* include_smi_compare_name = "";
- if (!include_smi_compare_) {
- include_smi_compare_name = "_NO_SMI";
- }
-
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
- "CompareStub_%s%s%s%s",
- cc_name,
- strict_name,
- never_nan_nan_name,
- include_number_compare_name,
- include_smi_compare_name);
- return name_;
+ bool is_equality = cc_ == equal || cc_ == not_equal;
+ stream->Add("CompareStub_%s", cc_name);
+ if (strict_ && is_equality) stream->Add("_STRICT");
+ if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
+ if (!include_number_compare_) stream->Add("_NO_NUMBER");
+ if (!include_smi_compare_) stream->Add("_NO_SMI");
}
@@ -3815,15 +3975,12 @@
// Make sure that both arguments are strings if not known in advance.
if (flags_ == NO_STRING_ADD_FLAGS) {
- Condition is_smi;
- is_smi = masm->CheckSmi(rax);
- __ j(is_smi, &string_add_runtime);
+ __ JumpIfSmi(rax, &string_add_runtime);
__ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
__ j(above_equal, &string_add_runtime);
// First argument is a string, test second.
- is_smi = masm->CheckSmi(rdx);
- __ j(is_smi, &string_add_runtime);
+ __ JumpIfSmi(rdx, &string_add_runtime);
__ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
__ j(above_equal, &string_add_runtime);
} else {