Revert "Revert "Upgrade to 5.0.71.48"" DO NOT MERGE
This reverts commit f2e3994fa5148cc3d9946666f0b0596290192b0e,
and updates the x64 makefile properly so it doesn't break that
build.
FPIIM-449
Change-Id: Ib83e35bfbae6af627451c926a9650ec57c045605
(cherry picked from commit 109988c7ccb6f3fd1a58574fa3dfb88beaef6632)
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 2141333..82fb51d 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -92,9 +92,8 @@
#define __ ACCESS_MASM(masm)
-
static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
- Condition cond, Strength strength);
+ Condition cond);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
Register lhs,
Register rhs,
@@ -238,7 +237,7 @@
// Equality is almost reflexive (everything but NaN), so this is a test
// for "identity and not NaN".
static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
- Condition cond, Strength strength) {
+ Condition cond) {
Label not_identical;
Label heap_number, return_equal;
__ cmp(r0, r1);
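Equality is reflexive for every value except NaN, so once the operands are known to be identical the only open question is whether they are a NaN heap number. A compilable one-line sketch of the invariant the stub relies on (not part of the patch):

    // v == v is false only for NaN, hence "identity and not NaN" decides
    // equality without a full comparison.
    static bool IdenticalValuesEqual(double v) { return v == v; }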
@@ -258,14 +257,6 @@
// Call runtime on identical SIMD values since we must throw a TypeError.
__ cmp(r4, Operand(SIMD128_VALUE_TYPE));
__ b(eq, slow);
- if (is_strong(strength)) {
- // Call the runtime on anything that is converted in the semantics, since
- // we need to throw a TypeError. Smis have already been ruled out.
- __ cmp(r4, Operand(HEAP_NUMBER_TYPE));
- __ b(eq, &return_equal);
- __ tst(r4, Operand(kIsNotStringMask));
- __ b(ne, slow);
- }
} else {
__ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
__ b(eq, &heap_number);
@@ -279,13 +270,6 @@
// Call runtime on identical SIMD values since we must throw a TypeError.
__ cmp(r4, Operand(SIMD128_VALUE_TYPE));
__ b(eq, slow);
- if (is_strong(strength)) {
- // Call the runtime on anything that is converted in the semantics,
- // since we need to throw a TypeError. Smis and heap numbers have
- // already been ruled out.
- __ tst(r4, Operand(kIsNotStringMask));
- __ b(ne, slow);
- }
// Normally here we fall through to return_equal, but undefined is
// special: (undefined == undefined) == true, but
// (undefined <= undefined) == false! See ECMAScript 11.8.5.
@@ -495,44 +479,52 @@
// Fast negative check for internalized-to-internalized equality.
static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
- Register lhs,
- Register rhs,
+ Register lhs, Register rhs,
Label* possible_strings,
- Label* not_both_strings) {
+ Label* runtime_call) {
DCHECK((lhs.is(r0) && rhs.is(r1)) ||
(lhs.is(r1) && rhs.is(r0)));
// r2 is object type of rhs.
- Label object_test;
+ Label object_test, return_unequal, undetectable;
STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
__ tst(r2, Operand(kIsNotStringMask));
__ b(ne, &object_test);
__ tst(r2, Operand(kIsNotInternalizedMask));
__ b(ne, possible_strings);
__ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE);
- __ b(ge, not_both_strings);
+ __ b(ge, runtime_call);
__ tst(r3, Operand(kIsNotInternalizedMask));
__ b(ne, possible_strings);
- // Both are internalized. We already checked they weren't the same pointer
- // so they are not equal.
- __ mov(r0, Operand(NOT_EQUAL));
+ // Both are internalized. We already checked they weren't the same pointer so
+ // they are not equal. Return non-equal by returning the non-zero object
+ // pointer in r0.
__ Ret();
__ bind(&object_test);
- __ cmp(r2, Operand(FIRST_JS_RECEIVER_TYPE));
- __ b(lt, not_both_strings);
- __ CompareObjectType(lhs, r2, r3, FIRST_JS_RECEIVER_TYPE);
- __ b(lt, not_both_strings);
- // If both objects are undetectable, they are equal. Otherwise, they
- // are not equal, since they are different objects and an object is not
- // equal to undefined.
+ __ ldr(r2, FieldMemOperand(lhs, HeapObject::kMapOffset));
__ ldr(r3, FieldMemOperand(rhs, HeapObject::kMapOffset));
- __ ldrb(r2, FieldMemOperand(r2, Map::kBitFieldOffset));
- __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
- __ and_(r0, r2, Operand(r3));
- __ and_(r0, r0, Operand(1 << Map::kIsUndetectable));
- __ eor(r0, r0, Operand(1 << Map::kIsUndetectable));
+ __ ldrb(r4, FieldMemOperand(r2, Map::kBitFieldOffset));
+ __ ldrb(r5, FieldMemOperand(r3, Map::kBitFieldOffset));
+ __ tst(r4, Operand(1 << Map::kIsUndetectable));
+ __ b(ne, &undetectable);
+ __ tst(r5, Operand(1 << Map::kIsUndetectable));
+ __ b(ne, &return_unequal);
+
+ __ CompareInstanceType(r2, r2, FIRST_JS_RECEIVER_TYPE);
+ __ b(lt, runtime_call);
+ __ CompareInstanceType(r3, r3, FIRST_JS_RECEIVER_TYPE);
+ __ b(lt, runtime_call);
+
+ __ bind(&return_unequal);
+ // Return non-equal by returning the non-zero object pointer in r0.
+ __ Ret();
+
+ __ bind(&undetectable);
+ __ tst(r5, Operand(1 << Map::kIsUndetectable));
+ __ b(eq, &return_unequal);
+ __ mov(r0, Operand(EQUAL));
__ Ret();
}
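The rewritten tail drops the explicit mov(r0, NOT_EQUAL): in the comparison-stub convention EQUAL is 0, and a tagged heap pointer is never 0, so the object pointer already sitting in r0 can serve as the non-equal result. A minimal sketch of that assumption:

    #include <cstdint>
    // Any HeapObject pointer is non-zero, so it doubles as a NOT_EQUAL
    // (!= 0) result without an extra move.
    static std::intptr_t NotEqualResult(const void* heap_object) {
      return reinterpret_cast<std::intptr_t>(heap_object);
    }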
@@ -583,7 +575,7 @@
// Handle the case where the objects are identical. Either returns the answer
// or goes to slow. Only falls through if the objects were not identical.
- EmitIdenticalObjectComparison(masm, &slow, cc, strength());
+ EmitIdenticalObjectComparison(masm, &slow, cc);
// If either is a Smi (we know that not both are), then they can only
// be strictly equal if the other is a HeapNumber.
@@ -679,11 +671,19 @@
__ bind(&slow);
- __ Push(lhs, rhs);
- // Figure out which native to call and setup the arguments.
if (cc == eq) {
- __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
+ {
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+ __ Push(lhs, rhs);
+ __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
+ }
+ // Turn true into 0 and false into some non-zero value.
+ STATIC_ASSERT(EQUAL == 0);
+ __ LoadRoot(r1, Heap::kTrueValueRootIndex);
+ __ sub(r0, r0, r1);
+ __ Ret();
} else {
+ __ Push(lhs, rhs);
int ncr; // NaN compare result
if (cc == lt || cc == le) {
ncr = GREATER;
@@ -696,8 +696,7 @@
// Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
// tagged as a small integer.
- __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
- : Runtime::kCompare);
+ __ TailCallRuntime(Runtime::kCompare);
}
__ bind(&miss);
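The new eq slow path leans on the same convention: Runtime::kEqual / kStrictEqual returns the true or false oddball, and since EQUAL == 0, subtracting the true oddball's address maps true to 0 and false to a non-zero value, which is exactly what the sub above computes. A sketch under those assumptions:

    #include <cstdint>
    // Result is 0 iff the runtime returned the 'true' oddball (EQUAL == 0).
    static std::intptr_t BooleanToCompareResult(const void* result,
                                                const void* true_oddball) {
      return reinterpret_cast<std::intptr_t>(result) -
             reinterpret_cast<std::intptr_t>(true_oddball);
    }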
@@ -895,7 +894,6 @@
__ vcvt_f64_s32(double_exponent, single_scratch);
// Returning or bailing out.
- Counters* counters = isolate()->counters();
if (exponent_type() == ON_STACK) {
// The arguments are still on the stack.
__ bind(&call_runtime);
@@ -909,7 +907,6 @@
__ vstr(double_result,
FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
DCHECK(heapnumber.is(r0));
- __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
__ Ret(2);
} else {
__ push(lr);
@@ -925,7 +922,6 @@
__ MovFromFloatResult(double_result);
__ bind(&done);
- __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
__ Ret();
}
}
@@ -999,11 +995,9 @@
// r1: pointer to the first argument (C callee-saved)
// r5: pointer to builtin function (C callee-saved)
- // Result returned in r0 or r0+r1 by default.
-
-#if V8_HOST_ARCH_ARM
int frame_alignment = MacroAssembler::ActivationFrameAlignment();
int frame_alignment_mask = frame_alignment - 1;
+#if V8_HOST_ARCH_ARM
if (FLAG_debug_code) {
if (frame_alignment > kPointerSize) {
Label alignment_as_expected;
@@ -1018,8 +1012,25 @@
#endif
// Call C built-in.
- // r0 = argc, r1 = argv
- __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
+ int result_stack_size;
+ if (result_size() <= 2) {
+ // r0 = argc, r1 = argv, r2 = isolate
+ __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
+ result_stack_size = 0;
+ } else {
+ DCHECK_EQ(3, result_size());
+ // Allocate additional space for the result.
+ result_stack_size =
+ ((result_size() * kPointerSize) + frame_alignment_mask) &
+ ~frame_alignment_mask;
+ __ sub(sp, sp, Operand(result_stack_size));
+
+ // r0 = hidden result argument, r1 = argc, r2 = argv, r3 = isolate.
+ __ mov(r3, Operand(ExternalReference::isolate_address(isolate())));
+ __ mov(r2, Operand(r1));
+ __ mov(r1, Operand(r0));
+ __ mov(r0, Operand(sp));
+ }
// To let the GC traverse the return address of the exit frames, we need to
// know where the return address is. The CEntryStub is unmovable, so
@@ -1032,11 +1043,19 @@
// Prevent literal pool emission before return address.
Assembler::BlockConstPoolScope block_const_pool(masm);
__ add(lr, pc, Operand(4));
- __ str(lr, MemOperand(sp, 0));
+ __ str(lr, MemOperand(sp, result_stack_size));
__ Call(r5);
}
+ if (result_size() > 2) {
+ DCHECK_EQ(3, result_size());
+ // Read result values stored on stack.
+ __ ldr(r2, MemOperand(r0, 2 * kPointerSize));
+ __ ldr(r1, MemOperand(r0, 1 * kPointerSize));
+ __ ldr(r0, MemOperand(r0, 0 * kPointerSize));
+ }
+ // Result returned in r0, r1:r0 or r2:r1:r0 - do not destroy these registers!
- __ VFPEnsureFPSCRState(r2);
+ __ VFPEnsureFPSCRState(r3);
// Check result for exception sentinel.
Label exception_returned;
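The result_size() == 3 path follows the ARM procedure-call standard: an aggregate too large for registers is returned through a caller-allocated buffer whose address is passed as a hidden first argument, which is why argc/argv/isolate shift from r0..r2 to r1..r3 and why the three words are read back from the stack after the call. A C-level illustration (the builtin signature is a hypothetical stand-in):

    #include <cstdint>
    struct ThreeWords { std::intptr_t a, b, c; };  // too big for r0/r1
    // The compiler lowers this to builtin(&result_buffer, argc, argv, isolate),
    // matching the register shuffle the stub performs before the call.
    ThreeWords builtin(int argc, std::intptr_t* argv, void* isolate);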
@@ -1049,9 +1068,9 @@
Label okay;
ExternalReference pending_exception_address(
Isolate::kPendingExceptionAddress, isolate());
- __ mov(r2, Operand(pending_exception_address));
- __ ldr(r2, MemOperand(r2));
- __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
+ __ mov(r3, Operand(pending_exception_address));
+ __ ldr(r3, MemOperand(r3));
+ __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
// Cannot use check here as it attempts to generate call into runtime.
__ b(eq, &okay);
__ stop("Unexpected pending exception");
@@ -1461,286 +1480,6 @@
}
-void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
- // The displacement is the offset of the last parameter (if any)
- // relative to the frame pointer.
- const int kDisplacement =
- StandardFrameConstants::kCallerSPOffset - kPointerSize;
- DCHECK(r1.is(ArgumentsAccessReadDescriptor::index()));
- DCHECK(r0.is(ArgumentsAccessReadDescriptor::parameter_count()));
-
- // Check that the key is a smi.
- Label slow;
- __ JumpIfNotSmi(r1, &slow);
-
- // Check if the calling frame is an arguments adaptor frame.
- Label adaptor;
- __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
- __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ b(eq, &adaptor);
-
- // Check index against formal parameters count limit passed in
- // through register r0. Use unsigned comparison to get negative
- // check for free.
- __ cmp(r1, r0);
- __ b(hs, &slow);
-
- // Read the argument from the stack and return it.
- __ sub(r3, r0, r1);
- __ add(r3, fp, Operand::PointerOffsetFromSmiKey(r3));
- __ ldr(r0, MemOperand(r3, kDisplacement));
- __ Jump(lr);
-
- // Arguments adaptor case: Check index against actual arguments
- // limit found in the arguments adaptor frame. Use unsigned
- // comparison to get negative check for free.
- __ bind(&adaptor);
- __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ cmp(r1, r0);
- __ b(cs, &slow);
-
- // Read the argument from the adaptor frame and return it.
- __ sub(r3, r0, r1);
- __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r3));
- __ ldr(r0, MemOperand(r3, kDisplacement));
- __ Jump(lr);
-
- // Slow-case: Handle non-smi or out-of-bounds access to arguments
- // by calling the runtime system.
- __ bind(&slow);
- __ push(r1);
- __ TailCallRuntime(Runtime::kArguments);
-}
-
-
-void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
- // r1 : function
- // r2 : number of parameters (tagged)
- // r3 : parameters pointer
-
- DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
- DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count()));
- DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
-
- // Check if the calling frame is an arguments adaptor frame.
- Label runtime;
- __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
- __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ b(ne, &runtime);
-
- // Patch the arguments.length and the parameters pointer in the current frame.
- __ ldr(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ add(r4, r4, Operand(r2, LSL, 1));
- __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
-
- __ bind(&runtime);
- __ Push(r1, r3, r2);
- __ TailCallRuntime(Runtime::kNewSloppyArguments);
-}
-
-
-void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
- // r1 : function
- // r2 : number of parameters (tagged)
- // r3 : parameters pointer
- // Registers used over whole function:
- // r5 : arguments count (tagged)
- // r6 : mapped parameter count (tagged)
-
- DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
- DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count()));
- DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
-
- // Check if the calling frame is an arguments adaptor frame.
- Label adaptor_frame, try_allocate, runtime;
- __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
- __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ b(eq, &adaptor_frame);
-
- // No adaptor, parameter count = argument count.
- __ mov(r5, r2);
- __ mov(r6, r2);
- __ b(&try_allocate);
-
- // We have an adaptor frame. Patch the parameters pointer.
- __ bind(&adaptor_frame);
- __ ldr(r5, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ add(r4, r4, Operand(r5, LSL, 1));
- __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
-
- // r5 = argument count (tagged)
- // r6 = parameter count (tagged)
- // Compute the mapped parameter count = min(r6, r5) in r6.
- __ mov(r6, r2);
- __ cmp(r6, Operand(r5));
- __ mov(r6, Operand(r5), LeaveCC, gt);
-
- __ bind(&try_allocate);
-
- // Compute the sizes of backing store, parameter map, and arguments object.
- // 1. Parameter map, has 2 extra words containing context and backing store.
- const int kParameterMapHeaderSize =
- FixedArray::kHeaderSize + 2 * kPointerSize;
- // If there are no mapped parameters, we do not need the parameter_map.
- __ cmp(r6, Operand(Smi::FromInt(0)));
- __ mov(r9, Operand::Zero(), LeaveCC, eq);
- __ mov(r9, Operand(r6, LSL, 1), LeaveCC, ne);
- __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
-
- // 2. Backing store.
- __ add(r9, r9, Operand(r5, LSL, 1));
- __ add(r9, r9, Operand(FixedArray::kHeaderSize));
-
- // 3. Arguments object.
- __ add(r9, r9, Operand(Heap::kSloppyArgumentsObjectSize));
-
- // Do the allocation of all three objects in one go.
- __ Allocate(r9, r0, r9, r4, &runtime, TAG_OBJECT);
-
- // r0 = address of new object(s) (tagged)
- // r2 = argument count (smi-tagged)
- // Get the arguments boilerplate from the current native context into r4.
- const int kNormalOffset =
- Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
- const int kAliasedOffset =
- Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
-
- __ ldr(r4, NativeContextMemOperand());
- __ cmp(r6, Operand::Zero());
- __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
- __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
-
- // r0 = address of new object (tagged)
- // r2 = argument count (smi-tagged)
- // r4 = address of arguments map (tagged)
- // r6 = mapped parameter count (tagged)
- __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
- __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
- __ str(r9, FieldMemOperand(r0, JSObject::kPropertiesOffset));
- __ str(r9, FieldMemOperand(r0, JSObject::kElementsOffset));
-
- // Set up the callee in-object property.
- STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
- __ AssertNotSmi(r1);
- const int kCalleeOffset = JSObject::kHeaderSize +
- Heap::kArgumentsCalleeIndex * kPointerSize;
- __ str(r1, FieldMemOperand(r0, kCalleeOffset));
-
- // Use the length (smi tagged) and set that as an in-object property too.
- __ AssertSmi(r5);
- STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
- const int kLengthOffset = JSObject::kHeaderSize +
- Heap::kArgumentsLengthIndex * kPointerSize;
- __ str(r5, FieldMemOperand(r0, kLengthOffset));
-
- // Set up the elements pointer in the allocated arguments object.
- // If we allocated a parameter map, r4 will point there, otherwise
- // it will point to the backing store.
- __ add(r4, r0, Operand(Heap::kSloppyArgumentsObjectSize));
- __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
-
- // r0 = address of new object (tagged)
- // r2 = argument count (tagged)
- // r4 = address of parameter map or backing store (tagged)
- // r6 = mapped parameter count (tagged)
- // Initialize parameter map. If there are no mapped arguments, we're done.
- Label skip_parameter_map;
- __ cmp(r6, Operand(Smi::FromInt(0)));
- // Move backing store address to r1, because it is
- // expected there when filling in the unmapped arguments.
- __ mov(r1, r4, LeaveCC, eq);
- __ b(eq, &skip_parameter_map);
-
- __ LoadRoot(r5, Heap::kSloppyArgumentsElementsMapRootIndex);
- __ str(r5, FieldMemOperand(r4, FixedArray::kMapOffset));
- __ add(r5, r6, Operand(Smi::FromInt(2)));
- __ str(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
- __ str(cp, FieldMemOperand(r4, FixedArray::kHeaderSize + 0 * kPointerSize));
- __ add(r5, r4, Operand(r6, LSL, 1));
- __ add(r5, r5, Operand(kParameterMapHeaderSize));
- __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
-
- // Copy the parameter slots and the holes in the arguments.
- // We need to fill in mapped_parameter_count slots. They index the context,
- // where parameters are stored in reverse order, at
- // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
- // The mapped parameter thus need to get indices
- // MIN_CONTEXT_SLOTS+parameter_count-1 ..
- // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
- // We loop from right to left.
- Label parameters_loop, parameters_test;
- __ mov(r5, r6);
- __ add(r9, r2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
- __ sub(r9, r9, Operand(r6));
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ add(r1, r4, Operand(r5, LSL, 1));
- __ add(r1, r1, Operand(kParameterMapHeaderSize));
-
- // r1 = address of backing store (tagged)
- // r4 = address of parameter map (tagged), which is also the address of new
- // object + Heap::kSloppyArgumentsObjectSize (tagged)
- // r0 = temporary scratch (a.o., for address calculation)
- // r5 = loop variable (tagged)
- // ip = the hole value
- __ jmp(&parameters_test);
-
- __ bind(&parameters_loop);
- __ sub(r5, r5, Operand(Smi::FromInt(1)));
- __ mov(r0, Operand(r5, LSL, 1));
- __ add(r0, r0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
- __ str(r9, MemOperand(r4, r0));
- __ sub(r0, r0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
- __ str(ip, MemOperand(r1, r0));
- __ add(r9, r9, Operand(Smi::FromInt(1)));
- __ bind(&parameters_test);
- __ cmp(r5, Operand(Smi::FromInt(0)));
- __ b(ne, &parameters_loop);
-
- // Restore r0 = new object (tagged) and r5 = argument count (tagged).
- __ sub(r0, r4, Operand(Heap::kSloppyArgumentsObjectSize));
- __ ldr(r5, FieldMemOperand(r0, kLengthOffset));
-
- __ bind(&skip_parameter_map);
- // r0 = address of new object (tagged)
- // r1 = address of backing store (tagged)
- // r5 = argument count (tagged)
- // r6 = mapped parameter count (tagged)
- // r9 = scratch
- // Copy arguments header and remaining slots (if there are any).
- __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
- __ str(r9, FieldMemOperand(r1, FixedArray::kMapOffset));
- __ str(r5, FieldMemOperand(r1, FixedArray::kLengthOffset));
-
- Label arguments_loop, arguments_test;
- __ sub(r3, r3, Operand(r6, LSL, 1));
- __ jmp(&arguments_test);
-
- __ bind(&arguments_loop);
- __ sub(r3, r3, Operand(kPointerSize));
- __ ldr(r4, MemOperand(r3, 0));
- __ add(r9, r1, Operand(r6, LSL, 1));
- __ str(r4, FieldMemOperand(r9, FixedArray::kHeaderSize));
- __ add(r6, r6, Operand(Smi::FromInt(1)));
-
- __ bind(&arguments_test);
- __ cmp(r6, Operand(r5));
- __ b(lt, &arguments_loop);
-
- // Return.
- __ Ret();
-
- // Do the runtime call to allocate the arguments object.
- // r0 = address of new object (tagged)
- // r5 = argument count (tagged)
- __ bind(&runtime);
- __ Push(r1, r3, r5);
- __ TailCallRuntime(Runtime::kNewSloppyArguments);
-}
-
-
void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
// Return address is in lr.
Label slow;
@@ -1764,117 +1503,6 @@
}
-void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
- // r1 : function
- // r2 : number of parameters (tagged)
- // r3 : parameters pointer
-
- DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
- DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count()));
- DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
-
- // Check if the calling frame is an arguments adaptor frame.
- Label try_allocate, runtime;
- __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
- __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ b(ne, &try_allocate);
-
- // Patch the arguments.length and the parameters pointer.
- __ ldr(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ add(r4, r4, Operand::PointerOffsetFromSmiKey(r2));
- __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
-
- // Try the new space allocation. Start out with computing the size
- // of the arguments object and the elements array in words.
- Label add_arguments_object;
- __ bind(&try_allocate);
- __ SmiUntag(r9, r2, SetCC);
- __ b(eq, &add_arguments_object);
- __ add(r9, r9, Operand(FixedArray::kHeaderSize / kPointerSize));
- __ bind(&add_arguments_object);
- __ add(r9, r9, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize));
-
- // Do the allocation of both objects in one go.
- __ Allocate(r9, r0, r4, r5, &runtime,
- static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
-
- // Get the arguments boilerplate from the current native context.
- __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r4);
-
- __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
- __ LoadRoot(r5, Heap::kEmptyFixedArrayRootIndex);
- __ str(r5, FieldMemOperand(r0, JSObject::kPropertiesOffset));
- __ str(r5, FieldMemOperand(r0, JSObject::kElementsOffset));
-
- // Get the length (smi tagged) and set that as an in-object property too.
- STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
- __ AssertSmi(r2);
- __ str(r2,
- FieldMemOperand(r0, JSObject::kHeaderSize +
- Heap::kArgumentsLengthIndex * kPointerSize));
-
- // If there are no actual arguments, we're done.
- Label done;
- __ cmp(r2, Operand::Zero());
- __ b(eq, &done);
-
- // Set up the elements pointer in the allocated arguments object and
- // initialize the header in the elements fixed array.
- __ add(r4, r0, Operand(Heap::kStrictArgumentsObjectSize));
- __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
- __ LoadRoot(r5, Heap::kFixedArrayMapRootIndex);
- __ str(r5, FieldMemOperand(r4, FixedArray::kMapOffset));
- __ str(r2, FieldMemOperand(r4, FixedArray::kLengthOffset));
- __ SmiUntag(r2);
-
- // Copy the fixed array slots.
- Label loop;
- // Set up r4 to point to the first array slot.
- __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ bind(&loop);
- // Pre-decrement r3 with kPointerSize on each iteration.
- // Pre-decrement in order to skip receiver.
- __ ldr(r5, MemOperand(r3, kPointerSize, NegPreIndex));
- // Post-increment r4 with kPointerSize on each iteration.
- __ str(r5, MemOperand(r4, kPointerSize, PostIndex));
- __ sub(r2, r2, Operand(1));
- __ cmp(r2, Operand::Zero());
- __ b(ne, &loop);
-
- // Return.
- __ bind(&done);
- __ Ret();
-
- // Do the runtime call to allocate the arguments object.
- __ bind(&runtime);
- __ Push(r1, r3, r2);
- __ TailCallRuntime(Runtime::kNewStrictArguments);
-}
-
-
-void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
- // r2 : number of parameters (tagged)
- // r3 : parameters pointer
- // r4 : rest parameter index (tagged)
-
- Label runtime;
- __ ldr(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ldr(r0, MemOperand(r5, StandardFrameConstants::kContextOffset));
- __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ b(ne, &runtime);
-
- // Patch the arguments.length and the parameters pointer.
- __ ldr(r2, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ add(r3, r5, Operand::PointerOffsetFromSmiKey(r2));
- __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
-
- __ bind(&runtime);
- __ Push(r2, r3, r4);
- __ TailCallRuntime(Runtime::kNewRestParam);
-}
-
-
void RegExpExecStub::Generate(MacroAssembler* masm) {
// Just jump directly to runtime if native RegExp is not selected at compile
// time or if regexp entry in generated code is turned off by a runtime switch or
@@ -1953,34 +1581,33 @@
__ ldr(subject, MemOperand(sp, kSubjectOffset));
__ JumpIfSmi(subject, &runtime);
__ mov(r3, subject); // Make a copy of the original subject string.
- __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
- __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
// subject: subject string
// r3: subject string
- // r0: subject string instance type
// regexp_data: RegExp data (FixedArray)
// Handle subject string according to its encoding and representation:
- // (1) Sequential string? If yes, go to (5).
- // (2) Anything but sequential or cons? If yes, go to (6).
- // (3) Cons string. If the string is flat, replace subject with first string.
- // Otherwise bailout.
- // (4) Is subject external? If yes, go to (7).
- // (5) Sequential string. Load regexp code according to encoding.
+ // (1) Sequential string? If yes, go to (4).
+ // (2) Sequential or cons? If not, go to (5).
+ // (3) Cons string. If the string is flat, replace subject with first string
+ // and go to (1). Otherwise bail out to runtime.
+ // (4) Sequential string. Load regexp code according to encoding.
// (E) Carry on.
/// [...]
// Deferred code at the end of the stub:
- // (6) Not a long external string? If yes, go to (8).
- // (7) External string. Make it, offset-wise, look like a sequential string.
- // Go to (5).
- // (8) Short external string or not a string? If yes, bail out to runtime.
- // (9) Sliced string. Replace subject with parent. Go to (4).
+ // (5) Long external string? If not, go to (7).
+ // (6) External string. Make it, offset-wise, look like a sequential string.
+ // Go to (4).
+ // (7) Short external string or not a string? If yes, bail out to runtime.
+ // (8) Sliced string. Replace subject with parent. Go to (1).
- Label seq_string /* 5 */, external_string /* 7 */,
- check_underlying /* 4 */, not_seq_nor_cons /* 6 */,
- not_long_external /* 8 */;
+ Label seq_string /* 4 */, external_string /* 6 */, check_underlying /* 1 */,
+ not_seq_nor_cons /* 5 */, not_long_external /* 7 */;
- // (1) Sequential string? If yes, go to (5).
+ __ bind(&check_underlying);
+ __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
+ __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
+
+ // (1) Sequential string? If yes, go to (4).
__ and_(r1,
r0,
Operand(kIsNotStringMask |
@@ -1988,15 +1615,15 @@
kShortExternalStringMask),
SetCC);
STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
- __ b(eq, &seq_string); // Go to (5).
+ __ b(eq, &seq_string); // Go to (4).
- // (2) Anything but sequential or cons? If yes, go to (6).
+ // (2) Sequential or cons? If not, go to (5).
STATIC_ASSERT(kConsStringTag < kExternalStringTag);
STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
__ cmp(r1, Operand(kExternalStringTag));
- __ b(ge, &not_seq_nor_cons); // Go to (6).
+ __ b(ge, &not_seq_nor_cons); // Go to (5).
// (3) Cons string. Check that it's flat.
// Replace subject with first string and reload instance type.
@@ -2004,19 +1631,9 @@
__ CompareRoot(r0, Heap::kempty_stringRootIndex);
__ b(ne, &runtime);
__ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
+ __ jmp(&check_underlying);
- // (4) Is subject external? If yes, go to (7).
- __ bind(&check_underlying);
- __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
- __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
- STATIC_ASSERT(kSeqStringTag == 0);
- __ tst(r0, Operand(kStringRepresentationMask));
- // The underlying external string is never a short external string.
- STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
- STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
- __ b(ne, &external_string); // Go to (7).
-
- // (5) Sequential string. Load regexp code according to encoding.
+ // (4) Sequential string. Load regexp code according to encoding.
__ bind(&seq_string);
// subject: sequential subject string (or look-alike, external string)
// r3: original subject string
@@ -2249,12 +1866,12 @@
__ TailCallRuntime(Runtime::kRegExpExec);
// Deferred code for string handling.
- // (6) Not a long external string? If yes, go to (8).
+ // (5) Long external string? If not, go to (7).
__ bind(&not_seq_nor_cons);
// Compare flags are still set.
- __ b(gt, &not_long_external); // Go to (8).
+ __ b(gt, &not_long_external); // Go to (7).
- // (7) External string. Make it, offset-wise, look like a sequential string.
+ // (6) External string. Make it, offset-wise, look like a sequential string.
__ bind(&external_string);
__ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
__ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
@@ -2271,15 +1888,15 @@
__ sub(subject,
subject,
Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- __ jmp(&seq_string); // Go to (5).
+ __ jmp(&seq_string); // Go to (4).
- // (8) Short external string or not a string? If yes, bail out to runtime.
+ // (7) Short external string or not a string? If yes, bail out to runtime.
__ bind(&not_long_external);
STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
__ tst(r1, Operand(kIsNotStringMask | kShortExternalStringMask));
__ b(ne, &runtime);
- // (9) Sliced string. Replace subject with parent. Go to (4).
+ // (8) Sliced string. Replace subject with parent. Go to (4).
// Load offset into r9 and replace subject string with parent.
__ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset));
__ SmiUntag(r9);
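With the renumbering, steps (3) and (8) both feed back into the shared shape check at (1), so the cons/sliced handling amounts to the following peeling loop (all helper names here are hypothetical):

    // Keep unwrapping until the subject is sequential or external, then
    // redo the shape dispatch; anything else bails out to the runtime.
    while (IsConsString(subject) || IsSlicedString(subject)) {
      subject = IsConsString(subject) ? ConsFirst(subject)
                                      : SlicedParent(subject);
    }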
@@ -2506,7 +2123,8 @@
__ bind(&call_function);
__ mov(r0, Operand(argc));
- __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
+ __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
+ tail_call_mode()),
RelocInfo::CODE_TARGET);
__ bind(&extra_checks_or_miss);
@@ -2545,7 +2163,7 @@
__ bind(&call);
__ mov(r0, Operand(argc));
- __ Jump(masm->isolate()->builtins()->Call(convert_mode()),
+ __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
RelocInfo::CODE_TARGET);
__ bind(&uninitialized);
@@ -3096,6 +2714,37 @@
}
+void ToNameStub::Generate(MacroAssembler* masm) {
+ // The ToName stub takes one argument in r0.
+ Label is_number;
+ __ JumpIfSmi(r0, &is_number);
+
+ STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
+ __ CompareObjectType(r0, r1, r1, LAST_NAME_TYPE);
+ // r0: receiver
+ // r1: receiver instance type
+ __ Ret(ls);
+
+ Label not_heap_number;
+ __ cmp(r1, Operand(HEAP_NUMBER_TYPE));
+ __ b(ne, &not_heap_number);
+ __ bind(&is_number);
+ NumberToStringStub stub(isolate());
+ __ TailCallStub(&stub);
+ __ bind(&not_heap_number);
+
+ Label not_oddball;
+ __ cmp(r1, Operand(ODDBALL_TYPE));
+ __ b(ne, &not_oddball);
+ __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
+ __ Ret();
+ __ bind(&not_oddball);
+
+ __ push(r0); // Push argument.
+ __ TailCallRuntime(Runtime::kToName);
+}
+
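The new ToNameStub encodes roughly the following dispatch; every helper name below is a hypothetical stand-in, only Runtime::kToName comes from the diff itself:

    Object* ToName(Object* x) {
      if (IsSmi(x)) return NumberToString(x);
      if (IsName(x)) return x;                      // type <= LAST_NAME_TYPE
      if (IsHeapNumber(x)) return NumberToString(x);
      if (IsOddball(x)) return OddballToString(x);  // cached to_string
      return CallRuntime_ToName(x);                 // Runtime::kToName
    }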
+
void StringHelper::GenerateFlatOneByteStringEquals(
MacroAssembler* masm, Register left, Register right, Register scratch1,
Register scratch2, Register scratch3) {
@@ -3263,18 +2912,14 @@
__ CheckMap(r1, r2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
__ CheckMap(r0, r3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
- if (op() != Token::EQ_STRICT && is_strong(strength())) {
- __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
- } else {
- if (!Token::IsEqualityOp(op())) {
- __ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset));
- __ AssertSmi(r1);
- __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
- __ AssertSmi(r0);
- }
- __ sub(r0, r1, r0);
- __ Ret();
+ if (!Token::IsEqualityOp(op())) {
+ __ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset));
+ __ AssertSmi(r1);
+ __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
+ __ AssertSmi(r0);
}
+ __ sub(r0, r1, r0);
+ __ Ret();
__ bind(&miss);
GenerateMiss(masm);
@@ -3354,7 +2999,7 @@
__ bind(&unordered);
__ bind(&generic_stub);
- CompareICStub stub(isolate(), op(), strength(), CompareICState::GENERIC,
+ CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
CompareICState::GENERIC, CompareICState::GENERIC);
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
@@ -3572,8 +3217,6 @@
if (Token::IsEqualityOp(op())) {
__ sub(r0, r0, Operand(r1));
__ Ret();
- } else if (is_strong(strength())) {
- __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
} else {
if (op() == Token::LT || op() == Token::LTE) {
__ mov(r2, Operand(Smi::FromInt(GREATER)));
@@ -3939,11 +3582,8 @@
regs_.scratch0(),
&dont_need_remembered_set);
- __ CheckPageFlag(regs_.object(),
- regs_.scratch0(),
- 1 << MemoryChunk::SCAN_ON_SCAVENGE,
- ne,
- &dont_need_remembered_set);
+ __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
+ &dont_need_remembered_set);
// First notify the incremental marker if necessary, then update the
// remembered set.
@@ -4918,6 +4558,584 @@
}
+void FastNewObjectStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r1 : target
+ // -- r3 : new target
+ // -- cp : context
+ // -- lr : return address
+ // -----------------------------------
+ __ AssertFunction(r1);
+ __ AssertReceiver(r3);
+
+ // Verify that the new target is a JSFunction.
+ Label new_object;
+ __ CompareObjectType(r3, r2, r2, JS_FUNCTION_TYPE);
+ __ b(ne, &new_object);
+
+ // Load the initial map and verify that it's in fact a map.
+ __ ldr(r2, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
+ __ JumpIfSmi(r2, &new_object);
+ __ CompareObjectType(r2, r0, r0, MAP_TYPE);
+ __ b(ne, &new_object);
+
+ // Fall back to runtime if the target differs from the new target's
+ // initial map constructor.
+ __ ldr(r0, FieldMemOperand(r2, Map::kConstructorOrBackPointerOffset));
+ __ cmp(r0, r1);
+ __ b(ne, &new_object);
+
+ // Allocate the JSObject on the heap.
+ Label allocate, done_allocate;
+ __ ldrb(r4, FieldMemOperand(r2, Map::kInstanceSizeOffset));
+ __ Allocate(r4, r0, r5, r6, &allocate, SIZE_IN_WORDS);
+ __ bind(&done_allocate);
+
+ // Initialize the JSObject fields.
+ __ str(r2, MemOperand(r0, JSObject::kMapOffset));
+ __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
+ __ str(r3, MemOperand(r0, JSObject::kPropertiesOffset));
+ __ str(r3, MemOperand(r0, JSObject::kElementsOffset));
+ STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
+ __ add(r1, r0, Operand(JSObject::kHeaderSize));
+
+ // ----------- S t a t e -------------
+ // -- r0 : result (untagged)
+ // -- r1 : result fields (untagged)
+ // -- r5 : result end (untagged)
+ // -- r2 : initial map
+ // -- cp : context
+ // -- lr : return address
+ // -----------------------------------
+
+ // Perform in-object slack tracking if requested.
+ Label slack_tracking;
+ STATIC_ASSERT(Map::kNoSlackTracking == 0);
+ __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
+ __ ldr(r3, FieldMemOperand(r2, Map::kBitField3Offset));
+ __ tst(r3, Operand(Map::ConstructionCounter::kMask));
+ __ b(ne, &slack_tracking);
+ {
+ // Initialize all in-object fields with undefined.
+ __ InitializeFieldsWithFiller(r1, r5, r6);
+
+ // Add the object tag to make the JSObject real.
+ STATIC_ASSERT(kHeapObjectTag == 1);
+ __ add(r0, r0, Operand(kHeapObjectTag));
+ __ Ret();
+ }
+ __ bind(&slack_tracking);
+ {
+ // Decrease generous allocation count.
+ STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
+ __ sub(r3, r3, Operand(1 << Map::ConstructionCounter::kShift));
+ __ str(r3, FieldMemOperand(r2, Map::kBitField3Offset));
+
+ // Initialize the in-object fields with undefined.
+ __ ldrb(r4, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
+ __ sub(r4, r5, Operand(r4, LSL, kPointerSizeLog2));
+ __ InitializeFieldsWithFiller(r1, r4, r6);
+
+ // Initialize the remaining (reserved) fields with one pointer filler map.
+ __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
+ __ InitializeFieldsWithFiller(r1, r5, r6);
+
+ // Add the object tag to make the JSObject real.
+ STATIC_ASSERT(kHeapObjectTag == 1);
+ __ add(r0, r0, Operand(kHeapObjectTag));
+
+ // Check if we can finalize the instance size.
+ STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
+ __ tst(r3, Operand(Map::ConstructionCounter::kMask));
+ __ Ret(ne);
+
+ // Finalize the instance size.
+ {
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+ __ Push(r0, r2);
+ __ CallRuntime(Runtime::kFinalizeInstanceSize);
+ __ Pop(r0);
+ }
+ __ Ret();
+ }
+
+ // Fall back to %AllocateInNewSpace.
+ __ bind(&allocate);
+ {
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+ STATIC_ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize == 1);
+ __ mov(r4, Operand(r4, LSL, kPointerSizeLog2 + 1));
+ __ Push(r2, r4);
+ __ CallRuntime(Runtime::kAllocateInNewSpace);
+ __ Pop(r2);
+ }
+ STATIC_ASSERT(kHeapObjectTag == 1);
+ __ sub(r0, r0, Operand(kHeapObjectTag));
+ __ ldrb(r5, FieldMemOperand(r2, Map::kInstanceSizeOffset));
+ __ add(r5, r0, Operand(r5, LSL, kPointerSizeLog2));
+ __ b(&done_allocate);
+
+ // Fall back to %NewObject.
+ __ bind(&new_object);
+ __ Push(r1, r3);
+ __ TailCallRuntime(Runtime::kNewObject);
+}
+
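The slack_tracking branch above is the assembly form of in-object slack tracking: while the map's construction counter is non-zero, the reserved tail of each new instance is filled with one-pointer filler maps so the instance size can still be shrunk, and once the counter runs out the size is finalized via Runtime::kFinalizeInstanceSize. A schematic with assumed accessor names:

    // Schematic only; the accessors are assumptions, not V8 API.
    if (map->construction_counter() != 0) {
      map->decrement_construction_counter();
      FillWithUndefined(fields_start, used_end);
      FillWithOnePointerFiller(used_end, object_end);  // reserved slack
      if (map->construction_counter() == 0)
        FinalizeInstanceSize(map);  // Runtime::kFinalizeInstanceSize
    }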
+
+void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r1 : function
+ // -- cp : context
+ // -- fp : frame pointer
+ // -- lr : return address
+ // -----------------------------------
+ __ AssertFunction(r1);
+
+ // For Ignition we need to skip all possible handler/stub frames until
+ // we reach the JavaScript frame for the function (similar to what the
+ // runtime fallback implementation does). So make r2 point to that
+ // JavaScript frame.
+ {
+ Label loop, loop_entry;
+ __ mov(r2, fp);
+ __ b(&loop_entry);
+ __ bind(&loop);
+ __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
+ __ bind(&loop_entry);
+ __ ldr(ip, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
+ __ cmp(ip, r1);
+ __ b(ne, &loop);
+ }
+
+ // Check if we have rest parameters (only possible if we have an
+ // arguments adaptor frame below the function frame).
+ Label no_rest_parameters;
+ __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
+ __ ldr(ip, MemOperand(r2, StandardFrameConstants::kContextOffset));
+ __ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ b(ne, &no_rest_parameters);
+
+ // Check if the arguments adaptor frame contains more arguments than
+ // specified by the function's internal formal parameter count.
+ Label rest_parameters;
+ __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r1,
+ FieldMemOperand(r1, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ sub(r0, r0, r1, SetCC);
+ __ b(gt, &rest_parameters);
+
+ // Return an empty rest parameter array.
+ __ bind(&no_rest_parameters);
+ {
+ // ----------- S t a t e -------------
+ // -- cp : context
+ // -- lr : return address
+ // -----------------------------------
+
+ // Allocate an empty rest parameter array.
+ Label allocate, done_allocate;
+ __ Allocate(JSArray::kSize, r0, r1, r2, &allocate, TAG_OBJECT);
+ __ bind(&done_allocate);
+
+ // Setup the rest parameter array in r0.
+ __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r1);
+ __ str(r1, FieldMemOperand(r0, JSArray::kMapOffset));
+ __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
+ __ str(r1, FieldMemOperand(r0, JSArray::kPropertiesOffset));
+ __ str(r1, FieldMemOperand(r0, JSArray::kElementsOffset));
+ __ mov(r1, Operand(0));
+ __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset));
+ STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
+ __ Ret();
+
+ // Fall back to %AllocateInNewSpace.
+ __ bind(&allocate);
+ {
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+ __ Push(Smi::FromInt(JSArray::kSize));
+ __ CallRuntime(Runtime::kAllocateInNewSpace);
+ }
+ __ jmp(&done_allocate);
+ }
+
+ __ bind(&rest_parameters);
+ {
+ // Compute the pointer to the first rest parameter (skipping the receiver).
+ __ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1));
+ __ add(r2, r2,
+ Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
+
+ // ----------- S t a t e -------------
+ // -- cp : context
+ // -- r0 : number of rest parameters (tagged)
+ // -- r2 : pointer to the first rest parameter
+ // -- lr : return address
+ // -----------------------------------
+
+ // Allocate space for the rest parameter array plus the backing store.
+ Label allocate, done_allocate;
+ __ mov(r1, Operand(JSArray::kSize + FixedArray::kHeaderSize));
+ __ add(r1, r1, Operand(r0, LSL, kPointerSizeLog2 - 1));
+ __ Allocate(r1, r3, r4, r5, &allocate, TAG_OBJECT);
+ __ bind(&done_allocate);
+
+ // Setup the elements array in r3.
+ __ LoadRoot(r1, Heap::kFixedArrayMapRootIndex);
+ __ str(r1, FieldMemOperand(r3, FixedArray::kMapOffset));
+ __ str(r0, FieldMemOperand(r3, FixedArray::kLengthOffset));
+ __ add(r4, r3, Operand(FixedArray::kHeaderSize));
+ {
+ Label loop, done_loop;
+ __ add(r1, r4, Operand(r0, LSL, kPointerSizeLog2 - 1));
+ __ bind(&loop);
+ __ cmp(r4, r1);
+ __ b(eq, &done_loop);
+ __ ldr(ip, MemOperand(r2, 1 * kPointerSize, NegPostIndex));
+ __ str(ip, FieldMemOperand(r4, 0 * kPointerSize));
+ __ add(r4, r4, Operand(1 * kPointerSize));
+ __ b(&loop);
+ __ bind(&done_loop);
+ }
+
+ // Setup the rest parameter array in r4.
+ __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r1);
+ __ str(r1, FieldMemOperand(r4, JSArray::kMapOffset));
+ __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
+ __ str(r1, FieldMemOperand(r4, JSArray::kPropertiesOffset));
+ __ str(r3, FieldMemOperand(r4, JSArray::kElementsOffset));
+ __ str(r0, FieldMemOperand(r4, JSArray::kLengthOffset));
+ STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
+ __ mov(r0, r4);
+ __ Ret();
+
+ // Fall back to %AllocateInNewSpace.
+ __ bind(&allocate);
+ {
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+ __ SmiTag(r1);
+ __ Push(r0, r2, r1);
+ __ CallRuntime(Runtime::kAllocateInNewSpace);
+ __ mov(r3, r0);
+ __ Pop(r0, r2);
+ }
+ __ jmp(&done_allocate);
+ }
+}
+
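The rest-parameter count computed by the sub/b(gt) pair above is simply the surplus of actual over formal arguments, clamped at zero; in plain-integer form (the stub itself works on smi-tagged values):

    int rest_count =
        (actual_argc > formal_count) ? actual_argc - formal_count : 0;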
+
+void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r1 : function
+ // -- cp : context
+ // -- fp : frame pointer
+ // -- lr : return address
+ // -----------------------------------
+ __ AssertFunction(r1);
+
+ // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r2,
+ FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ add(r3, fp, Operand(r2, LSL, kPointerSizeLog2 - 1));
+ __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
+
+ // r1 : function
+ // r2 : number of parameters (tagged)
+ // r3 : parameters pointer
+ // Registers used over whole function:
+ // r5 : arguments count (tagged)
+ // r6 : mapped parameter count (tagged)
+
+ // Check if the calling frame is an arguments adaptor frame.
+ Label adaptor_frame, try_allocate, runtime;
+ __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
+ __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ b(eq, &adaptor_frame);
+
+ // No adaptor, parameter count = argument count.
+ __ mov(r5, r2);
+ __ mov(r6, r2);
+ __ b(&try_allocate);
+
+ // We have an adaptor frame. Patch the parameters pointer.
+ __ bind(&adaptor_frame);
+ __ ldr(r5, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ add(r4, r4, Operand(r5, LSL, 1));
+ __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
+
+ // r5 = argument count (tagged)
+ // r6 = parameter count (tagged)
+ // Compute the mapped parameter count = min(r6, r5) in r6.
+ __ mov(r6, r2);
+ __ cmp(r6, Operand(r5));
+ __ mov(r6, Operand(r5), LeaveCC, gt);
+
+ __ bind(&try_allocate);
+
+ // Compute the sizes of backing store, parameter map, and arguments object.
+ // 1. Parameter map, has 2 extra words containing context and backing store.
+ const int kParameterMapHeaderSize =
+ FixedArray::kHeaderSize + 2 * kPointerSize;
+ // If there are no mapped parameters, we do not need the parameter_map.
+ __ cmp(r6, Operand(Smi::FromInt(0)));
+ __ mov(r9, Operand::Zero(), LeaveCC, eq);
+ __ mov(r9, Operand(r6, LSL, 1), LeaveCC, ne);
+ __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
+
+ // 2. Backing store.
+ __ add(r9, r9, Operand(r5, LSL, 1));
+ __ add(r9, r9, Operand(FixedArray::kHeaderSize));
+
+ // 3. Arguments object.
+ __ add(r9, r9, Operand(JSSloppyArgumentsObject::kSize));
+
+ // Do the allocation of all three objects in one go.
+ __ Allocate(r9, r0, r9, r4, &runtime, TAG_OBJECT);
+
+ // r0 = address of new object(s) (tagged)
+ // r2 = argument count (smi-tagged)
+ // Get the arguments boilerplate from the current native context into r4.
+ const int kNormalOffset =
+ Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
+ const int kAliasedOffset =
+ Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
+
+ __ ldr(r4, NativeContextMemOperand());
+ __ cmp(r6, Operand::Zero());
+ __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
+ __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
+
+ // r0 = address of new object (tagged)
+ // r2 = argument count (smi-tagged)
+ // r4 = address of arguments map (tagged)
+ // r6 = mapped parameter count (tagged)
+ __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
+ __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
+ __ str(r9, FieldMemOperand(r0, JSObject::kPropertiesOffset));
+ __ str(r9, FieldMemOperand(r0, JSObject::kElementsOffset));
+
+ // Set up the callee in-object property.
+ __ AssertNotSmi(r1);
+ __ str(r1, FieldMemOperand(r0, JSSloppyArgumentsObject::kCalleeOffset));
+
+ // Use the length (smi tagged) and set that as an in-object property too.
+ __ AssertSmi(r5);
+ __ str(r5, FieldMemOperand(r0, JSSloppyArgumentsObject::kLengthOffset));
+
+ // Set up the elements pointer in the allocated arguments object.
+ // If we allocated a parameter map, r4 will point there, otherwise
+ // it will point to the backing store.
+ __ add(r4, r0, Operand(JSSloppyArgumentsObject::kSize));
+ __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
+
+ // r0 = address of new object (tagged)
+ // r2 = argument count (tagged)
+ // r4 = address of parameter map or backing store (tagged)
+ // r6 = mapped parameter count (tagged)
+ // Initialize parameter map. If there are no mapped arguments, we're done.
+ Label skip_parameter_map;
+ __ cmp(r6, Operand(Smi::FromInt(0)));
+ // Move backing store address to r1, because it is
+ // expected there when filling in the unmapped arguments.
+ __ mov(r1, r4, LeaveCC, eq);
+ __ b(eq, &skip_parameter_map);
+
+ __ LoadRoot(r5, Heap::kSloppyArgumentsElementsMapRootIndex);
+ __ str(r5, FieldMemOperand(r4, FixedArray::kMapOffset));
+ __ add(r5, r6, Operand(Smi::FromInt(2)));
+ __ str(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
+ __ str(cp, FieldMemOperand(r4, FixedArray::kHeaderSize + 0 * kPointerSize));
+ __ add(r5, r4, Operand(r6, LSL, 1));
+ __ add(r5, r5, Operand(kParameterMapHeaderSize));
+ __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
+
+ // Copy the parameter slots and the holes in the arguments.
+ // We need to fill in mapped_parameter_count slots. They index the context,
+ // where parameters are stored in reverse order, at
+ // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
+ // The mapped parameter thus need to get indices
+ // MIN_CONTEXT_SLOTS+parameter_count-1 ..
+ // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
+ // We loop from right to left.
+ Label parameters_loop, parameters_test;
+ __ mov(r5, r6);
+ __ add(r9, r2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
+ __ sub(r9, r9, Operand(r6));
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ add(r1, r4, Operand(r5, LSL, 1));
+ __ add(r1, r1, Operand(kParameterMapHeaderSize));
+
+ // r1 = address of backing store (tagged)
+ // r4 = address of parameter map (tagged), which is also the address of new
+ // object + JSSloppyArgumentsObject::kSize (tagged)
+ // r0 = temporary scratch (a.o., for address calculation)
+ // r5 = loop variable (tagged)
+ // ip = the hole value
+ __ jmp(&parameters_test);
+
+ __ bind(&parameters_loop);
+ __ sub(r5, r5, Operand(Smi::FromInt(1)));
+ __ mov(r0, Operand(r5, LSL, 1));
+ __ add(r0, r0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
+ __ str(r9, MemOperand(r4, r0));
+ __ sub(r0, r0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
+ __ str(ip, MemOperand(r1, r0));
+ __ add(r9, r9, Operand(Smi::FromInt(1)));
+ __ bind(&parameters_test);
+ __ cmp(r5, Operand(Smi::FromInt(0)));
+ __ b(ne, &parameters_loop);
+
+ // Restore r0 = new object (tagged) and r5 = argument count (tagged).
+ __ sub(r0, r4, Operand(JSSloppyArgumentsObject::kSize));
+ __ ldr(r5, FieldMemOperand(r0, JSSloppyArgumentsObject::kLengthOffset));
+
+ __ bind(&skip_parameter_map);
+ // r0 = address of new object (tagged)
+ // r1 = address of backing store (tagged)
+ // r5 = argument count (tagged)
+ // r6 = mapped parameter count (tagged)
+ // r9 = scratch
+ // Copy arguments header and remaining slots (if there are any).
+ __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
+ __ str(r9, FieldMemOperand(r1, FixedArray::kMapOffset));
+ __ str(r5, FieldMemOperand(r1, FixedArray::kLengthOffset));
+
+ Label arguments_loop, arguments_test;
+ __ sub(r3, r3, Operand(r6, LSL, 1));
+ __ jmp(&arguments_test);
+
+ __ bind(&arguments_loop);
+ __ sub(r3, r3, Operand(kPointerSize));
+ __ ldr(r4, MemOperand(r3, 0));
+ __ add(r9, r1, Operand(r6, LSL, 1));
+ __ str(r4, FieldMemOperand(r9, FixedArray::kHeaderSize));
+ __ add(r6, r6, Operand(Smi::FromInt(1)));
+
+ __ bind(&arguments_test);
+ __ cmp(r6, Operand(r5));
+ __ b(lt, &arguments_loop);
+
+ // Return.
+ __ Ret();
+
+ // Do the runtime call to allocate the arguments object.
+ // r0 = address of new object (tagged)
+ // r5 = argument count (tagged)
+ __ bind(&runtime);
+ __ Push(r1, r3, r5);
+ __ TailCallRuntime(Runtime::kNewSloppyArguments);
+}
+
+
+void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r1 : function
+ // -- cp : context
+ // -- fp : frame pointer
+ // -- lr : return address
+ // -----------------------------------
+ __ AssertFunction(r1);
+
+ // For Ignition we need to skip all possible handler/stub frames until
+ // we reach the JavaScript frame for the function (similar to what the
+ // runtime fallback implementation does). So make r2 point to that
+ // JavaScript frame.
+ {
+ Label loop, loop_entry;
+ __ mov(r2, fp);
+ __ b(&loop_entry);
+ __ bind(&loop);
+ __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
+ __ bind(&loop_entry);
+ __ ldr(ip, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
+ __ cmp(ip, r1);
+ __ b(ne, &loop);
+ }
+
+ // Check if we have an arguments adaptor frame below the function frame.
+ Label arguments_adaptor, arguments_done;
+ __ ldr(r3, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
+ __ ldr(ip, MemOperand(r3, StandardFrameConstants::kContextOffset));
+ __ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ b(eq, &arguments_adaptor);
+ {
+ __ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r0, FieldMemOperand(
+ r1, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1));
+ __ add(r2, r2,
+ Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
+ }
+ __ b(&arguments_done);
+ __ bind(&arguments_adaptor);
+ {
+ __ ldr(r0, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ add(r2, r3, Operand(r0, LSL, kPointerSizeLog2 - 1));
+ __ add(r2, r2,
+ Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
+ }
+ __ bind(&arguments_done);
+
+ // ----------- S t a t e -------------
+ // -- cp : context
+ // -- r0 : number of rest parameters (tagged)
+ // -- r2 : pointer to the first rest parameter
+ // -- lr : return address
+ // -----------------------------------
+
+ // Allocate space for the strict arguments object plus the backing store.
+ Label allocate, done_allocate;
+ __ mov(r1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
+ __ add(r1, r1, Operand(r0, LSL, kPointerSizeLog2 - 1));
+ __ Allocate(r1, r3, r4, r5, &allocate, TAG_OBJECT);
+ __ bind(&done_allocate);
+
+ // Setup the elements array in r3.
+ __ LoadRoot(r1, Heap::kFixedArrayMapRootIndex);
+ __ str(r1, FieldMemOperand(r3, FixedArray::kMapOffset));
+ __ str(r0, FieldMemOperand(r3, FixedArray::kLengthOffset));
+ __ add(r4, r3, Operand(FixedArray::kHeaderSize));
+ {
+ Label loop, done_loop;
+ __ add(r1, r4, Operand(r0, LSL, kPointerSizeLog2 - 1));
+ __ bind(&loop);
+ __ cmp(r4, r1);
+ __ b(eq, &done_loop);
+ __ ldr(ip, MemOperand(r2, 1 * kPointerSize, NegPostIndex));
+ __ str(ip, FieldMemOperand(r4, 0 * kPointerSize));
+ __ add(r4, r4, Operand(1 * kPointerSize));
+ __ b(&loop);
+ __ bind(&done_loop);
+ }
+
+ // Setup the strict arguments object in r4.
+ __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r1);
+ __ str(r1, FieldMemOperand(r4, JSStrictArgumentsObject::kMapOffset));
+ __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
+ __ str(r1, FieldMemOperand(r4, JSStrictArgumentsObject::kPropertiesOffset));
+ __ str(r3, FieldMemOperand(r4, JSStrictArgumentsObject::kElementsOffset));
+ __ str(r0, FieldMemOperand(r4, JSStrictArgumentsObject::kLengthOffset));
+ STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
+ __ mov(r0, r4);
+ __ Ret();
+
+ // Fall back to %AllocateInNewSpace.
+ __ bind(&allocate);
+ {
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+ __ SmiTag(r1);
+ __ Push(r0, r2, r1);
+ __ CallRuntime(Runtime::kAllocateInNewSpace);
+ __ mov(r3, r0);
+ __ Pop(r0, r2);
+ }
+ __ b(&done_allocate);
+}
+
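The element-copy loops in the two new stubs walk in opposite directions: the NegPostIndex load reads arguments downwards from the caller frame while the FixedArray is filled upwards. In C terms, with hypothetical pointers:

    // 'from' starts at the first (highest-addressed) argument; 'elements'
    // is the FixedArray body.
    for (int i = 0; i < count; ++i) {
      elements[i] = *from--;  // matches ldr ..., NegPostIndex / str ..., add
    }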
+
void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context = cp;
Register result = r0;
@@ -5206,11 +5424,10 @@
__ jmp(&leave_exit_frame);
}
-
static void CallApiFunctionStubHelper(MacroAssembler* masm,
const ParameterCount& argc,
bool return_first_arg,
- bool call_data_undefined) {
+ bool call_data_undefined, bool is_lazy) {
// ----------- S t a t e -------------
// -- r0 : callee
// -- r4 : call_data
@@ -5246,8 +5463,10 @@
// context save
__ push(context);
- // load context from callee
- __ ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset));
+ if (!is_lazy) {
+ // load context from callee
+ __ ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset));
+ }
// callee
__ push(callee);
@@ -5339,7 +5558,7 @@
void CallApiFunctionStub::Generate(MacroAssembler* masm) {
bool call_data_undefined = this->call_data_undefined();
CallApiFunctionStubHelper(masm, ParameterCount(r3), false,
- call_data_undefined);
+ call_data_undefined, false);
}
@@ -5347,41 +5566,47 @@
bool is_store = this->is_store();
int argc = this->argc();
bool call_data_undefined = this->call_data_undefined();
+ bool is_lazy = this->is_lazy();
CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
- call_data_undefined);
+ call_data_undefined, is_lazy);
}
void CallApiGetterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- sp[0] : name
- // -- sp[4 - kArgsLength*4] : PropertyCallbackArguments object
+ // -- sp[0] : name
+ // -- sp[4 .. (4 + kArgsLength*4)] : v8::PropertyCallbackInfo::args_
// -- ...
- // -- r2 : api_function_address
+ // -- r2 : api_function_address
// -----------------------------------
Register api_function_address = ApiGetterDescriptor::function_address();
DCHECK(api_function_address.is(r2));
- __ mov(r0, sp); // r0 = Handle<Name>
- __ add(r1, r0, Operand(1 * kPointerSize)); // r1 = PCA
+ // v8::PropertyCallbackInfo::args_ array and name handle.
+ const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
+
+ // Load address of v8::PropertyAccessorInfo::args_ array and name handle.
+ __ mov(r0, sp); // r0 = Handle<Name>
+ __ add(r1, r0, Operand(1 * kPointerSize)); // r1 = v8::PCI::args_
const int kApiStackSpace = 1;
FrameScope frame_scope(masm, StackFrame::MANUAL);
__ EnterExitFrame(false, kApiStackSpace);
- // Create PropertyAccessorInfo instance on the stack above the exit frame with
- // r1 (internal::Object** args_) as the data.
+ // Create a v8::PropertyCallbackInfo object on the stack and initialize
+ // its args_ field.
__ str(r1, MemOperand(sp, 1 * kPointerSize));
- __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo&
-
- const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
+ __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = v8::PropertyCallbackInfo&
ExternalReference thunk_ref =
ExternalReference::invoke_accessor_getter_callback(isolate());
+
+ // +3 is to skip prolog, return address and name handle.
+ MemOperand return_value_operand(
+ fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
- kStackUnwindSpace, NULL,
- MemOperand(fp, 6 * kPointerSize), NULL);
+ kStackUnwindSpace, NULL, return_value_operand, NULL);
}