Merge V8 5.2.361.47 DO NOT MERGE
https://chromium.googlesource.com/v8/v8/+/5.2.361.47
FPIIM-449
Change-Id: Ibec421b85a9b88cb3a432ada642e469fe7e78346
(cherry picked from commit bcf72ee8e3b26f1d0726869c7ddb3921c68b09a8)
diff --git a/src/crankshaft/arm64/lithium-codegen-arm64.cc b/src/crankshaft/arm64/lithium-codegen-arm64.cc
index 9bbc8b8..ebc5277 100644
--- a/src/crankshaft/arm64/lithium-codegen-arm64.cc
+++ b/src/crankshaft/arm64/lithium-codegen-arm64.cc
@@ -599,7 +599,7 @@
Comment(";;; Prologue begin");
// Allocate a local context if needed.
- if (info()->num_heap_slots() > 0) {
+ if (info()->scope()->num_heap_slots() > 0) {
Comment(";;; Allocate local context");
bool need_write_barrier = true;
// Argument to NewContext is the function, which is in x1.
@@ -775,8 +775,6 @@
// table.
__ Bl(&call_deopt_entry);
}
- info()->LogDeoptCallPosition(masm()->pc_offset(),
- table_entry->deopt_info.inlining_id);
masm()->CheckConstPool(false, false);
}
@@ -892,7 +890,7 @@
__ Bind(&dont_trap);
}
- Deoptimizer::DeoptInfo deopt_info = MakeDeoptInfo(instr, deopt_reason);
+ Deoptimizer::DeoptInfo deopt_info = MakeDeoptInfo(instr, deopt_reason, id);
DCHECK(info()->IsStub() || frame_is_built_);
// Go through jump table if we need to build frame, or restore caller doubles.
@@ -900,7 +898,6 @@
frame_is_built_ && !info()->saves_caller_doubles()) {
DeoptComment(deopt_info);
__ Call(entry, RelocInfo::RUNTIME_ENTRY);
- info()->LogDeoptCallPosition(masm()->pc_offset(), deopt_info.inlining_id);
} else {
Deoptimizer::JumpTableEntry* table_entry =
new (zone()) Deoptimizer::JumpTableEntry(
@@ -1416,7 +1413,7 @@
Register temp2 = ToRegister(instr->temp2());
// Allocate memory for the object.
- AllocationFlags flags = TAG_OBJECT;
+ AllocationFlags flags = NO_ALLOCATION_FLAGS;
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
@@ -1426,6 +1423,11 @@
flags = static_cast<AllocationFlags>(flags | PRETENURE);
}
+ if (instr->hydrogen()->IsAllocationFoldingDominator()) {
+ flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDING_DOMINATOR);
+ }
+ DCHECK(!instr->hydrogen()->IsAllocationFolded());
+
if (instr->size()->IsConstantOperand()) {
int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
CHECK(size <= Page::kMaxRegularHeapObjectSize);
@@ -1487,6 +1489,49 @@
CallRuntimeFromDeferred(
Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
__ StoreToSafepointRegisterSlot(x0, ToRegister(instr->result()));
+
+ if (instr->hydrogen()->IsAllocationFoldingDominator()) {
+ AllocationFlags allocation_flags = NO_ALLOCATION_FLAGS;
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
+ DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+ allocation_flags = static_cast<AllocationFlags>(flags | PRETENURE);
+ }
+ // If the allocation folding dominator allocate triggered a GC, allocation
+ // happened in the runtime. We have to reset the top pointer to virtually
+ // undo the allocation.
+ ExternalReference allocation_top =
+ AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
+ Register top_address = x10;
+ __ Sub(x0, x0, Operand(kHeapObjectTag));
+ __ Mov(top_address, Operand(allocation_top));
+ __ Str(x0, MemOperand(top_address));
+ __ Add(x0, x0, Operand(kHeapObjectTag));
+ }
+}
+
+void LCodeGen::DoFastAllocate(LFastAllocate* instr) {
+ DCHECK(instr->hydrogen()->IsAllocationFolded());
+ DCHECK(!instr->hydrogen()->IsAllocationFoldingDominator());
+ Register result = ToRegister(instr->result());
+ Register scratch1 = ToRegister(instr->temp1());
+ Register scratch2 = ToRegister(instr->temp2());
+
+ AllocationFlags flags = ALLOCATION_FOLDED;
+ if (instr->hydrogen()->MustAllocateDoubleAligned()) {
+ flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
+ }
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
+ DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+ flags = static_cast<AllocationFlags>(flags | PRETENURE);
+ }
+ if (instr->size()->IsConstantOperand()) {
+ int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+ CHECK(size <= Page::kMaxRegularHeapObjectSize);
+ __ FastAllocate(size, result, scratch1, scratch2, flags);
+ } else {
+ Register size = ToRegister(instr->size());
+ __ FastAllocate(size, result, scratch1, scratch2, flags);
+ }
}
@@ -2758,16 +2803,6 @@
}
-void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
- DCHECK(ToRegister(instr->context()).is(cp));
- DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
- DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
- DCHECK(ToRegister(instr->result()).is(x0));
- InstanceOfStub stub(isolate());
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-}
-
-
void LCodeGen::DoHasInPrototypeChainAndBranch(
LHasInPrototypeChainAndBranch* instr) {
Register const object = ToRegister(instr->object());