Upgrade V8 to 5.1.281.57 DO NOT MERGE
FPIIM-449
Change-Id: Id981b686b4d587ac31697662eb98bb34be42ad90
(cherry picked from commit 3b9bc31999c9787eb726ecdbfd5796bfdec32a18)
diff --git a/src/crankshaft/x64/lithium-codegen-x64.cc b/src/crankshaft/x64/lithium-codegen-x64.cc
index 849b4b3..fbda59b 100644
--- a/src/crankshaft/x64/lithium-codegen-x64.cc
+++ b/src/crankshaft/x64/lithium-codegen-x64.cc
@@ -125,7 +125,7 @@
DCHECK(!frame_is_built_);
frame_is_built_ = true;
if (info()->IsStub()) {
- __ StubPrologue();
+ __ StubPrologue(StackFrame::STUB);
} else {
__ Prologue(info()->GeneratePreagedPrologue());
}
@@ -306,34 +306,27 @@
if (needs_frame.is_linked()) {
__ bind(&needs_frame);
/* stack layout
- 4: return address <-- rsp
- 3: garbage
+ 3: return address <-- rsp
2: garbage
1: garbage
0: garbage
*/
- // Reserve space for context and stub marker.
- __ subp(rsp, Immediate(2 * kPointerSize));
- __ Push(MemOperand(rsp, 2 * kPointerSize)); // Copy return address.
- __ Push(kScratchRegister); // Save entry address for ret(0)
+ // Reserve space for stub marker.
+ __ subp(rsp, Immediate(TypedFrameConstants::kFrameTypeSize));
+ __ Push(MemOperand(
+ rsp, TypedFrameConstants::kFrameTypeSize)); // Copy return address.
+ __ Push(kScratchRegister);
/* stack layout
- 4: return address
- 3: garbage
+ 3: return address
2: garbage
1: return address
0: entry address <-- rsp
*/
- // Remember context pointer.
- __ movp(kScratchRegister,
- MemOperand(rbp, StandardFrameConstants::kContextOffset));
- // Save context pointer into the stack frame.
- __ movp(MemOperand(rsp, 3 * kPointerSize), kScratchRegister);
-
// Create a stack frame.
- __ movp(MemOperand(rsp, 4 * kPointerSize), rbp);
- __ leap(rbp, MemOperand(rsp, 4 * kPointerSize));
+ __ movp(MemOperand(rsp, 3 * kPointerSize), rbp);
+ __ leap(rbp, MemOperand(rsp, 3 * kPointerSize));
// This variant of deopt can only be used with stubs. Since we don't
// have a function pointer to install in the stack frame that we're
@@ -342,8 +335,7 @@
__ Move(MemOperand(rsp, 2 * kPointerSize), Smi::FromInt(StackFrame::STUB));
/* stack layout
- 4: old rbp
- 3: context pointer
+ 3: old rbp
2: stub marker
1: return address
0: entry address <-- rsp
@@ -379,9 +371,8 @@
frame_is_built_ = true;
// Build the frame in such a way that esi isn't trashed.
__ pushq(rbp); // Caller's frame pointer.
- __ Push(Operand(rbp, StandardFrameConstants::kContextOffset));
__ Push(Smi::FromInt(StackFrame::STUB));
- __ leap(rbp, Operand(rsp, 2 * kPointerSize));
+ __ leap(rbp, Operand(rsp, TypedFrameConstants::kFixedFrameSizeFromFp));
Comment(";;; Deferred code");
}
code->Generate();
@@ -2012,16 +2003,17 @@
__ cmpp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
EmitBranch(instr, not_equal);
} else {
- ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
+ ToBooleanICStub::Types expected =
+ instr->hydrogen()->expected_input_types();
// Avoid deopts in the case where we've never executed this path before.
- if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
+ if (expected.IsEmpty()) expected = ToBooleanICStub::Types::Generic();
- if (expected.Contains(ToBooleanStub::UNDEFINED)) {
+ if (expected.Contains(ToBooleanICStub::UNDEFINED)) {
// undefined -> false.
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
__ j(equal, instr->FalseLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::BOOLEAN)) {
+ if (expected.Contains(ToBooleanICStub::BOOLEAN)) {
// true -> true.
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
__ j(equal, instr->TrueLabel(chunk_));
@@ -2029,13 +2021,13 @@
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
__ j(equal, instr->FalseLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
+ if (expected.Contains(ToBooleanICStub::NULL_TYPE)) {
// 'null' -> false.
__ CompareRoot(reg, Heap::kNullValueRootIndex);
__ j(equal, instr->FalseLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::SMI)) {
+ if (expected.Contains(ToBooleanICStub::SMI)) {
// Smis: 0 -> false, all other -> true.
__ Cmp(reg, Smi::FromInt(0));
__ j(equal, instr->FalseLabel(chunk_));
@@ -2058,13 +2050,13 @@
}
}
- if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
+ if (expected.Contains(ToBooleanICStub::SPEC_OBJECT)) {
// spec object -> true.
__ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
__ j(above_equal, instr->TrueLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::STRING)) {
+ if (expected.Contains(ToBooleanICStub::STRING)) {
// String value -> false iff empty.
Label not_string;
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
@@ -2075,19 +2067,19 @@
__ bind(&not_string);
}
- if (expected.Contains(ToBooleanStub::SYMBOL)) {
+ if (expected.Contains(ToBooleanICStub::SYMBOL)) {
// Symbol value -> true.
__ CmpInstanceType(map, SYMBOL_TYPE);
__ j(equal, instr->TrueLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::SIMD_VALUE)) {
+ if (expected.Contains(ToBooleanICStub::SIMD_VALUE)) {
// SIMD value -> true.
__ CmpInstanceType(map, SIMD128_VALUE_TYPE);
__ j(equal, instr->TrueLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
+ if (expected.Contains(ToBooleanICStub::HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
Label not_heap_number;
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
@@ -2317,11 +2309,10 @@
DCHECK(ToRegister(instr->left()).is(rdx));
DCHECK(ToRegister(instr->right()).is(rax));
- Handle<Code> code = CodeFactory::StringCompare(isolate()).code();
+ Handle<Code> code = CodeFactory::StringCompare(isolate(), instr->op()).code();
CallCode(code, RelocInfo::CODE_TARGET, instr);
- __ testp(rax, rax);
-
- EmitBranch(instr, TokenToCondition(instr->op(), false));
+ __ CompareRoot(rax, Heap::kTrueValueRootIndex);
+ EmitBranch(instr, equal);
}
@@ -2393,11 +2384,12 @@
__ JumpIfSmi(input, is_false);
- __ CmpObjectType(input, JS_FUNCTION_TYPE, temp);
+ __ CmpObjectType(input, FIRST_FUNCTION_TYPE, temp);
+ STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
if (String::Equals(isolate()->factory()->Function_string(), class_name)) {
- __ j(equal, is_true);
+ __ j(above_equal, is_true);
} else {
- __ j(equal, is_false);
+ __ j(above_equal, is_false);
}
// Check if the constructor in the map is a function.
@@ -3010,11 +3002,11 @@
if (instr->hydrogen()->from_inlined()) {
__ leap(result, Operand(rsp, -kFPOnStackSize + -kPCOnStackSize));
- } else {
+ } else if (instr->hydrogen()->arguments_adaptor()) {
// Check for arguments adapter frame.
Label done, adapted;
__ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
+ __ Cmp(Operand(result, CommonFrameConstants::kContextOrFrameTypeOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adapted, Label::kNear);
@@ -3029,6 +3021,8 @@
// Result is the frame pointer for the frame if not adapted and for the real
// frame below the adaptor frame if adapted.
__ bind(&done);
+ } else {
+ __ movp(result, rbp);
}
}
@@ -3140,13 +3134,24 @@
// Invoke the function.
__ bind(&invoke);
+
+ InvokeFlag flag = CALL_FUNCTION;
+ if (instr->hydrogen()->tail_call_mode() == TailCallMode::kAllow) {
+ DCHECK(!info()->saves_caller_doubles());
+ // TODO(ishell): drop current frame before pushing arguments to the stack.
+ flag = JUMP_FUNCTION;
+ ParameterCount actual(rax);
+ // It is safe to use rbx, rcx and r8 as scratch registers here given that
+ // 1) we are not going to return to caller function anyway,
+ // 2) rbx (expected number of arguments) will be initialized below.
+ PrepareForTailCall(actual, rbx, rcx, r8);
+ }
+
DCHECK(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
- SafepointGenerator safepoint_generator(
- this, pointers, Safepoint::kLazyDeopt);
+ SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
ParameterCount actual(rax);
- __ InvokeFunction(function, no_reg, actual, CALL_FUNCTION,
- safepoint_generator);
+ __ InvokeFunction(function, no_reg, actual, flag, safepoint_generator);
}
@@ -3185,10 +3190,9 @@
CallRuntime(Runtime::kDeclareGlobals, instr);
}
-
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int formal_parameter_count, int arity,
- LInstruction* instr) {
+ bool is_tail_call, LInstruction* instr) {
bool dont_adapt_arguments =
formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
bool can_invoke_directly =
@@ -3205,23 +3209,36 @@
__ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
__ Set(rax, arity);
+ bool is_self_call = function.is_identical_to(info()->closure());
+
// Invoke function.
- if (function.is_identical_to(info()->closure())) {
- __ CallSelf();
+ if (is_self_call) {
+ Handle<Code> self(reinterpret_cast<Code**>(__ CodeObject().location()));
+ if (is_tail_call) {
+ __ Jump(self, RelocInfo::CODE_TARGET);
+ } else {
+ __ Call(self, RelocInfo::CODE_TARGET);
+ }
} else {
- __ Call(FieldOperand(function_reg, JSFunction::kCodeEntryOffset));
+ Operand target = FieldOperand(function_reg, JSFunction::kCodeEntryOffset);
+ if (is_tail_call) {
+ __ Jump(target);
+ } else {
+ __ Call(target);
+ }
}
- // Set up deoptimization.
- RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
+ if (!is_tail_call) {
+ // Set up deoptimization.
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
+ }
} else {
// We need to adapt arguments.
- SafepointGenerator generator(
- this, pointers, Safepoint::kLazyDeopt);
- ParameterCount count(arity);
+ SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount actual(arity);
ParameterCount expected(formal_parameter_count);
- __ InvokeFunction(function_reg, no_reg, expected, count, CALL_FUNCTION,
- generator);
+ InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION;
+ __ InvokeFunction(function_reg, no_reg, expected, actual, flag, generator);
}
}
@@ -3263,39 +3280,6 @@
}
-void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
- DCHECK(ToRegister(instr->function()).is(rdi));
- DCHECK(ToRegister(instr->result()).is(rax));
-
- // Change context.
- __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
-
- // Always initialize new target and number of actual arguments.
- __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
- __ Set(rax, instr->arity());
-
- LPointerMap* pointers = instr->pointer_map();
- SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
-
- bool is_self_call = false;
- if (instr->hydrogen()->function()->IsConstant()) {
- Handle<JSFunction> jsfun = Handle<JSFunction>::null();
- HConstant* fun_const = HConstant::cast(instr->hydrogen()->function());
- jsfun = Handle<JSFunction>::cast(fun_const->handle(isolate()));
- is_self_call = jsfun.is_identical_to(info()->closure());
- }
-
- if (is_self_call) {
- __ CallSelf();
- } else {
- Operand target = FieldOperand(rdi, JSFunction::kCodeEntryOffset);
- generator.BeforeCall(__ CallSize(target));
- __ Call(target);
- }
- generator.AfterCall();
-}
-
-
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
Register input_reg = ToRegister(instr->value());
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
@@ -3401,8 +3385,14 @@
}
}
+void LCodeGen::DoMathFloorD(LMathFloorD* instr) {
+ XMMRegister output_reg = ToDoubleRegister(instr->result());
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
+ CpuFeatureScope scope(masm(), SSE4_1);
+ __ Roundsd(output_reg, input_reg, kRoundDown);
+}
-void LCodeGen::DoMathFloor(LMathFloor* instr) {
+void LCodeGen::DoMathFloorI(LMathFloorI* instr) {
XMMRegister xmm_scratch = double_scratch0();
Register output_reg = ToRegister(instr->result());
XMMRegister input_reg = ToDoubleRegister(instr->value());
@@ -3460,8 +3450,23 @@
}
}
+void LCodeGen::DoMathRoundD(LMathRoundD* instr) {
+ XMMRegister xmm_scratch = double_scratch0();
+ XMMRegister output_reg = ToDoubleRegister(instr->result());
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
+ CpuFeatureScope scope(masm(), SSE4_1);
+ Label done;
+ __ Roundsd(output_reg, input_reg, kRoundUp);
+ __ Move(xmm_scratch, -0.5);
+ __ Addsd(xmm_scratch, output_reg);
+ __ Ucomisd(xmm_scratch, input_reg);
+ __ j(below_equal, &done, Label::kNear);
+ __ Move(xmm_scratch, 1.0);
+ __ Subsd(output_reg, xmm_scratch);
+ __ bind(&done);
+}
-void LCodeGen::DoMathRound(LMathRound* instr) {
+void LCodeGen::DoMathRoundI(LMathRoundI* instr) {
const XMMRegister xmm_scratch = double_scratch0();
Register output_reg = ToRegister(instr->result());
XMMRegister input_reg = ToDoubleRegister(instr->value());
@@ -3654,54 +3659,77 @@
__ Lzcntl(result, input);
}
+void LCodeGen::PrepareForTailCall(const ParameterCount& actual,
+ Register scratch1, Register scratch2,
+ Register scratch3) {
+#if DEBUG
+ if (actual.is_reg()) {
+ DCHECK(!AreAliased(actual.reg(), scratch1, scratch2, scratch3));
+ } else {
+ DCHECK(!AreAliased(scratch1, scratch2, scratch3));
+ }
+#endif
+ if (FLAG_code_comments) {
+ if (actual.is_reg()) {
+ Comment(";;; PrepareForTailCall, actual: %s {", actual.reg().ToString());
+ } else {
+ Comment(";;; PrepareForTailCall, actual: %d {", actual.immediate());
+ }
+ }
+
+ // Check if next frame is an arguments adaptor frame.
+ Register caller_args_count_reg = scratch1;
+ Label no_arguments_adaptor, formal_parameter_count_loaded;
+ __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+ __ Cmp(Operand(scratch2, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ j(not_equal, &no_arguments_adaptor, Label::kNear);
+
+ // Drop current frame and load arguments count from arguments adaptor frame.
+ __ movp(rbp, scratch2);
+ __ SmiToInteger32(
+ caller_args_count_reg,
+ Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ jmp(&formal_parameter_count_loaded, Label::kNear);
+
+ __ bind(&no_arguments_adaptor);
+ // Load caller's formal parameter count.
+ __ movp(caller_args_count_reg,
+ Immediate(info()->literal()->parameter_count()));
+
+ __ bind(&formal_parameter_count_loaded);
+ __ PrepareForTailCall(actual, caller_args_count_reg, scratch2, scratch3,
+ ReturnAddressState::kNotOnStack);
+ Comment(";;; }");
+}
void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
+ HInvokeFunction* hinstr = instr->hydrogen();
DCHECK(ToRegister(instr->context()).is(rsi));
DCHECK(ToRegister(instr->function()).is(rdi));
DCHECK(instr->HasPointerMap());
- Handle<JSFunction> known_function = instr->hydrogen()->known_function();
+ bool is_tail_call = hinstr->tail_call_mode() == TailCallMode::kAllow;
+
+ if (is_tail_call) {
+ DCHECK(!info()->saves_caller_doubles());
+ ParameterCount actual(instr->arity());
+ // It is safe to use rbx, rcx and r8 as scratch registers here given that
+ // 1) we are not going to return to caller function anyway,
+ // 2) rbx (expected number of arguments) will be initialized below.
+ PrepareForTailCall(actual, rbx, rcx, r8);
+ }
+
+ Handle<JSFunction> known_function = hinstr->known_function();
if (known_function.is_null()) {
LPointerMap* pointers = instr->pointer_map();
SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
- ParameterCount count(instr->arity());
- __ InvokeFunction(rdi, no_reg, count, CALL_FUNCTION, generator);
+ ParameterCount actual(instr->arity());
+ InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION;
+ __ InvokeFunction(rdi, no_reg, actual, flag, generator);
} else {
- CallKnownFunction(known_function,
- instr->hydrogen()->formal_parameter_count(),
- instr->arity(), instr);
- }
-}
-
-
-void LCodeGen::DoCallFunction(LCallFunction* instr) {
- HCallFunction* hinstr = instr->hydrogen();
- DCHECK(ToRegister(instr->context()).is(rsi));
- DCHECK(ToRegister(instr->function()).is(rdi));
- DCHECK(ToRegister(instr->result()).is(rax));
-
- int arity = instr->arity();
-
- ConvertReceiverMode mode = hinstr->convert_mode();
- if (hinstr->HasVectorAndSlot()) {
- Register slot_register = ToRegister(instr->temp_slot());
- Register vector_register = ToRegister(instr->temp_vector());
- DCHECK(slot_register.is(rdx));
- DCHECK(vector_register.is(rbx));
-
- AllowDeferredHandleDereference vector_structure_check;
- Handle<TypeFeedbackVector> vector = hinstr->feedback_vector();
- int index = vector->GetIndex(hinstr->slot());
-
- __ Move(vector_register, vector);
- __ Move(slot_register, Smi::FromInt(index));
-
- Handle<Code> ic =
- CodeFactory::CallICInOptimizedCode(isolate(), arity, mode).code();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
- } else {
- __ Set(rax, arity);
- CallCode(isolate()->builtins()->Call(mode), RelocInfo::CODE_TARGET, instr);
+ CallKnownFunction(known_function, hinstr->formal_parameter_count(),
+ instr->arity(), is_tail_call, instr);
}
}
@@ -5207,13 +5235,6 @@
}
-void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
- DCHECK(ToRegister(instr->value()).is(rax));
- __ Push(rax);
- CallRuntime(Runtime::kToFastProperties, 1, instr);
-}
-
-
void LCodeGen::DoTypeof(LTypeof* instr) {
DCHECK(ToRegister(instr->context()).is(rsi));
DCHECK(ToRegister(instr->value()).is(rbx));
@@ -5573,13 +5594,6 @@
__ bind(&done);
}
-
-void LCodeGen::DoStoreFrameContext(LStoreFrameContext* instr) {
- Register context = ToRegister(instr->context());
- __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), context);
-}
-
-
#undef __
} // namespace internal