Update V8 to r6768 (x64 macro-assembler changes) as required by WebKit r78450
Change-Id: Ib8868ff7147a76547a8d1d85f257ebe8546a3d3f
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index f95755d..56a2d6f 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -68,7 +68,9 @@
}
-void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
+void MacroAssembler::CompareRoot(const Operand& with,
+ Heap::RootListIndex index) {
+ ASSERT(!with.AddressUsesRegister(kScratchRegister));
LoadRoot(kScratchRegister, index);
cmpq(with, kScratchRegister);
}
@@ -375,6 +377,16 @@
}
+void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
+ Runtime::Function* function = Runtime::FunctionForId(id);
+ Set(rax, function->nargs);
+ movq(rbx, ExternalReference(function));
+ CEntryStub ces(1);
+ ces.SaveDoubles();
+ CallStub(&ces);
+}
+
+
MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
int num_arguments) {
return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
@@ -885,6 +897,13 @@
}
+Condition MacroAssembler::CheckSmi(const Operand& src) {
+ ASSERT_EQ(0, kSmiTag);
+ testb(src, Immediate(kSmiTagMask));
+ return zero;
+}
+
+
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
ASSERT_EQ(0, kSmiTag);
// Make mask 0x8000000000000001 and test that both bits are zero.
@@ -960,6 +979,27 @@
}
+void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
+ if (dst.is(src)) {
+ andl(dst, Immediate(kSmiTagMask));
+ } else {
+ movl(dst, Immediate(kSmiTagMask));
+ andl(dst, src);
+ }
+}
+
+
+void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
+ if (!(src.AddressUsesRegister(dst))) {
+ movl(dst, Immediate(kSmiTagMask));
+ andl(dst, src);
+ } else {
+ movl(dst, src);
+ andl(dst, Immediate(kSmiTagMask));
+ }
+}
+
+
void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
if (constant->value() == 0) {
if (!dst.is(src)) {
@@ -1386,6 +1426,68 @@
}
+void MacroAssembler::Pushad() {
+ push(rax);
+ push(rcx);
+ push(rdx);
+ push(rbx);
+ // Not pushing rsp or rbp.
+ push(rsi);
+ push(rdi);
+ push(r8);
+ push(r9);
+ // r10 is kScratchRegister.
+ push(r11);
+ push(r12);
+ // r13 is kRootRegister.
+ push(r14);
+ // r15 is kSmiConstantRegister
+}
+
+
+void MacroAssembler::Popad() {
+ pop(r14);
+ pop(r12);
+ pop(r11);
+ pop(r9);
+ pop(r8);
+ pop(rdi);
+ pop(rsi);
+ pop(rbx);
+ pop(rdx);
+ pop(rcx);
+ pop(rax);
+}
+
+
+void MacroAssembler::Dropad() {
+ const int kRegistersPushedByPushad = 11;
+ addq(rsp, Immediate(kRegistersPushedByPushad * kPointerSize));
+}
+
+
+// Order general registers are pushed by Pushad:
+// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14.
+int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
+ 0,
+ 1,
+ 2,
+ 3,
+ -1,
+ -1,
+ 4,
+ 5,
+ 6,
+ 7,
+ -1,
+ 8,
+ 9,
+ -1,
+ 10,
+ -1
+};
+
+
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type) {
// Adjust this code if not the case.
@@ -1439,6 +1541,18 @@
}
+void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
+ if (is_uint16(bytes_dropped)) {
+ ret(bytes_dropped);
+ } else {
+ pop(scratch);
+ addq(rsp, Immediate(bytes_dropped));
+ push(scratch);
+ ret(0);
+ }
+}
+
+
void MacroAssembler::FCmp() {
fucomip();
fstp(0);
@@ -1670,10 +1784,18 @@
Move(rdi, Handle<JSFunction>(function));
movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
- // Invoke the cached code.
- Handle<Code> code(function->code());
- ParameterCount expected(function->shared()->formal_parameter_count());
- InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
+ if (V8::UseCrankshaft()) {
+ // Since Crankshaft can recompile a function, we need to load
+ // the Code object every time we call the function.
+ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ InvokeCode(rdx, expected, actual, flag);
+ } else {
+ // Invoke the cached code.
+ Handle<Code> code(function->code());
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
+ }
}
@@ -1734,12 +1856,24 @@
}
-void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space) {
+void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
+ bool save_doubles) {
#ifdef _WIN64
- const int kShaddowSpace = 4;
- arg_stack_space += kShaddowSpace;
+ const int kShadowSpace = 4;
+ arg_stack_space += kShadowSpace;
#endif
- if (arg_stack_space > 0) {
+ // Optionally save all XMM registers.
+ if (save_doubles) {
+ CpuFeatures::Scope scope(SSE2);
+ int space = XMMRegister::kNumRegisters * kDoubleSize +
+ arg_stack_space * kPointerSize;
+ subq(rsp, Immediate(space));
+ int offset = -2 * kPointerSize;
+ for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
+ XMMRegister reg = XMMRegister::FromAllocationIndex(i);
+ movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
+ }
+ } else if (arg_stack_space > 0) {
subq(rsp, Immediate(arg_stack_space * kPointerSize));
}
@@ -1756,7 +1890,7 @@
}
-void MacroAssembler::EnterExitFrame(int arg_stack_space) {
+void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
EnterExitFramePrologue(true);
// Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
@@ -1764,25 +1898,31 @@
int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
lea(r12, Operand(rbp, r14, times_pointer_size, offset));
- EnterExitFrameEpilogue(arg_stack_space);
+ EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
EnterExitFramePrologue(false);
- EnterExitFrameEpilogue(arg_stack_space);
+ EnterExitFrameEpilogue(arg_stack_space, false);
}
-void MacroAssembler::LeaveExitFrame() {
+void MacroAssembler::LeaveExitFrame(bool save_doubles) {
// Registers:
// r12 : argv
-
+ if (save_doubles) {
+ int offset = -2 * kPointerSize;
+ for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
+ XMMRegister reg = XMMRegister::FromAllocationIndex(i);
+ movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
+ }
+ }
// Get the return address from the stack and restore the frame pointer.
movq(rcx, Operand(rbp, 1 * kPointerSize));
movq(rbp, Operand(rbp, 0 * kPointerSize));
- // Pop everything up to and including the arguments and the receiver
+ // Drop everything up to and including the arguments and the receiver
// from the caller stack.
lea(rsp, Operand(r12, 1 * kPointerSize));
@@ -1970,11 +2110,11 @@
Register top_reg = result_end.is_valid() ? result_end : result;
- if (top_reg.is(result)) {
- addq(top_reg, Immediate(object_size));
- } else {
- lea(top_reg, Operand(result, object_size));
+ if (!top_reg.is(result)) {
+ movq(top_reg, result);
}
+ addq(top_reg, Immediate(object_size));
+ j(carry, gc_required);
movq(kScratchRegister, new_space_allocation_limit);
cmpq(top_reg, Operand(kScratchRegister, 0));
j(above, gc_required);
@@ -2024,7 +2164,12 @@
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
- lea(result_end, Operand(result, element_count, element_size, header_size));
+
+ // We assume that element_count*element_size + header_size does not
+ // overflow.
+ lea(result_end, Operand(element_count, element_size, header_size));
+ addq(result_end, result);
+ j(carry, gc_required);
movq(kScratchRegister, new_space_allocation_limit);
cmpq(result_end, Operand(kScratchRegister, 0));
j(above, gc_required);
@@ -2070,6 +2215,7 @@
movq(result_end, object_size);
}
addq(result_end, result);
+ j(carry, gc_required);
movq(kScratchRegister, new_space_allocation_limit);
cmpq(result_end, Operand(kScratchRegister, 0));
j(above, gc_required);