Push version 1.3.9 to trunk.
Optimized stack guard checks on ARM.
Optimized API operations by inlining more code in the API.
Optimized creation of objects from simple constructor functions.
Enabled a number of missing optimizations in the 64-bit port.
Implemented native-code support for regular expressions on ARM.
Stopped using the 'sahf' instruction on 64-bit machines that do not support it.
Fixed a bug in the support for forceful termination of JavaScript execution.
git-svn-id: http://v8.googlecode.com/svn/trunk@2811 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 0f79fba..ae45eab 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -46,6 +46,22 @@
}
+void MacroAssembler::LoadRoot(Register destination,
+ Heap::RootListIndex index) {
+ movq(destination, Operand(r13, index << kPointerSizeLog2));
+}
+
+
+void MacroAssembler::PushRoot(Heap::RootListIndex index) {
+ push(Operand(r13, index << kPointerSizeLog2));
+}
+
+
+void MacroAssembler::CompareRoot(Register with,
+ Heap::RootListIndex index) {
+ cmpq(with, Operand(r13, index << kPointerSizeLog2));
+}
+
static void RecordWriteHelper(MacroAssembler* masm,
Register object,
@@ -276,7 +292,7 @@
if (num_arguments > 0) {
addq(rsp, Immediate(num_arguments * kPointerSize));
}
- movq(rax, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT);
+ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}
@@ -634,7 +650,7 @@
// If the prototype or initial map is the hole, don't return it and
// simply miss the cache instead. This will allow us to allocate a
// prototype object on-demand in the runtime system.
- Cmp(result, Factory::the_hole_value());
+ CompareRoot(result, Heap::kTheHoleValueRootIndex);
j(equal, miss);
// If the function does not have an initial map, we're done.
@@ -1000,12 +1016,6 @@
}
#endif
- // Reserve space for the Arguments object. The Windows 64-bit ABI
- // requires us to pass this structure as a pointer to its location on
- // the stack. We also need backing space for the pointer, even though
- // it is passed in a register.
- subq(rsp, Immediate(3 * kPointerSize));
-
// Get the required frame alignment for the OS.
static const int kFrameAlignment = OS::ActivationFrameAlignment();
if (kFrameAlignment > 0) {
@@ -1014,6 +1024,17 @@
and_(rsp, kScratchRegister);
}
+#ifdef _WIN64
+ // Reserve space for the Arguments object. The Windows 64-bit ABI
+ // requires us to pass this structure as a pointer to its location on
+ // the stack. The structure contains 2 pointers.
+ // The structure on the stack must be 16-byte aligned.
+ // We also need backing space for 4 parameters, even though
+ // we only pass one parameter, and it is in a register.
+ subq(rsp, Immediate(6 * kPointerSize));
+ ASSERT(kFrameAlignment == 2 * kPointerSize); // Change the padding if needed.
+#endif
+
// Patch the saved entry sp.
movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
@@ -1188,12 +1209,12 @@
// Preserve original value of holder_reg.
push(holder_reg);
movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
- Cmp(holder_reg, Factory::null_value());
+ CompareRoot(holder_reg, Heap::kNullValueRootIndex);
Check(not_equal, "JSGlobalProxy::context() should not be null.");
// Read the first word and compare to global_context_map(),
movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
- Cmp(holder_reg, Factory::global_context_map());
+ CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
Check(equal, "JSGlobalObject::global_context should be a global context.");
pop(holder_reg);
}
@@ -1210,4 +1231,156 @@
}
+void MacroAssembler::LoadAllocationTopHelper(
+ Register result,
+ Register result_end,
+ Register scratch,
+ bool result_contains_top_on_entry) {
+ ExternalReference new_space_allocation_top =
+ ExternalReference::new_space_allocation_top_address();
+
+ // Just return if allocation top is already known.
+ if (result_contains_top_on_entry) {
+ // No use of scratch if allocation top is provided.
+ ASSERT(scratch.is(no_reg));
+ return;
+ }
+
+ // Move address of new object to result. Use scratch register if available.
+ if (scratch.is(no_reg)) {
+ movq(kScratchRegister, new_space_allocation_top);
+ movq(result, Operand(kScratchRegister, 0));
+ } else {
+ ASSERT(!scratch.is(result_end));
+ movq(scratch, new_space_allocation_top);
+ movq(result, Operand(scratch, 0));
+ }
+}
+
+
+void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
+ Register scratch) {
+ ExternalReference new_space_allocation_top =
+ ExternalReference::new_space_allocation_top_address();
+
+ // Update new top.
+ if (result_end.is(rax)) {
+ // rax can be stored directly to a memory location.
+ store_rax(new_space_allocation_top);
+ } else {
+ // Register required - use scratch provided if available.
+ if (scratch.is(no_reg)) {
+ movq(kScratchRegister, new_space_allocation_top);
+ movq(Operand(kScratchRegister, 0), result_end);
+ } else {
+ movq(Operand(scratch, 0), result_end);
+ }
+ }
+}
+
+
+void MacroAssembler::AllocateObjectInNewSpace(
+ int object_size,
+ Register result,
+ Register result_end,
+ Register scratch,
+ Label* gc_required,
+ bool result_contains_top_on_entry) {
+ ASSERT(!result.is(result_end));
+
+ // Load address of new object into result.
+ LoadAllocationTopHelper(result,
+ result_end,
+ scratch,
+ result_contains_top_on_entry);
+
+ // Calculate new top and bail out if new space is exhausted.
+ ExternalReference new_space_allocation_limit =
+ ExternalReference::new_space_allocation_limit_address();
+ lea(result_end, Operand(result, object_size));
+ movq(kScratchRegister, new_space_allocation_limit);
+ cmpq(result_end, Operand(kScratchRegister, 0));
+ j(above, gc_required);
+
+ // Update allocation top.
+ UpdateAllocationTopHelper(result_end, scratch);
+}
+
+
+void MacroAssembler::AllocateObjectInNewSpace(
+ int header_size,
+ ScaleFactor element_size,
+ Register element_count,
+ Register result,
+ Register result_end,
+ Register scratch,
+ Label* gc_required,
+ bool result_contains_top_on_entry) {
+ ASSERT(!result.is(result_end));
+
+ // Load address of new object into result.
+ LoadAllocationTopHelper(result,
+ result_end,
+ scratch,
+ result_contains_top_on_entry);
+
+ // Calculate new top and bail out if new space is exhausted.
+ ExternalReference new_space_allocation_limit =
+ ExternalReference::new_space_allocation_limit_address();
+ lea(result_end, Operand(result, element_count, element_size, header_size));
+ movq(kScratchRegister, new_space_allocation_limit);
+ cmpq(result_end, Operand(kScratchRegister, 0));
+ j(above, gc_required);
+
+ // Update allocation top.
+ UpdateAllocationTopHelper(result_end, scratch);
+}
+
+
+void MacroAssembler::AllocateObjectInNewSpace(
+ Register object_size,
+ Register result,
+ Register result_end,
+ Register scratch,
+ Label* gc_required,
+ bool result_contains_top_on_entry) {
+
+ // Load address of new object into result.
+ LoadAllocationTopHelper(result,
+ result_end,
+ scratch,
+ result_contains_top_on_entry);
+
+
+ // Calculate new top and bail out if new space is exhausted.
+ ExternalReference new_space_allocation_limit =
+ ExternalReference::new_space_allocation_limit_address();
+ if (!object_size.is(result_end)) {
+ movq(result_end, object_size);
+ }
+ addq(result_end, result);
+ movq(kScratchRegister, new_space_allocation_limit);
+ cmpq(result_end, Operand(kScratchRegister, 0));
+ j(above, gc_required);
+
+ // Update allocation top.
+ UpdateAllocationTopHelper(result_end, scratch);
+}
+
+
+void MacroAssembler::UndoAllocationInNewSpace(Register object) {
+ ExternalReference new_space_allocation_top =
+ ExternalReference::new_space_allocation_top_address();
+
+ // Make sure the object has no tag before resetting top.
+ and_(object, Immediate(~kHeapObjectTagMask));
+ movq(kScratchRegister, new_space_allocation_top);
+#ifdef DEBUG
+ cmpq(object, Operand(kScratchRegister, 0));
+ Check(below, "Undo allocation of non allocated memory");
+#endif
+ movq(Operand(kScratchRegister, 0), object);
+}
+
+
} } // namespace v8::internal