Merge V8 5.3.332.45.  DO NOT MERGE

Test: Manual

FPIIM-449

Change-Id: Id3254828b068abdea3cb10442e0172a8c9a98e03
(cherry picked from commit 13e2dadd00298019ed862f2b2fc5068bba730bcf)

diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index f9c9235..d6c509e 100644
--- a/src/heap/heap-inl.h
+++ b/src/heap/heap-inl.h
@@ -393,14 +393,30 @@
   return OldGenerationSpaceAvailable() < 0;
 }
 
-
+template <PromotionMode promotion_mode>
 bool Heap::ShouldBePromoted(Address old_address, int object_size) {
   Page* page = Page::FromAddress(old_address);
   Address age_mark = new_space_.age_mark();
+
+  if (promotion_mode == PROMOTE_MARKED) {
+    MarkBit mark_bit = Marking::MarkBitFrom(old_address);
+    if (!Marking::IsWhite(mark_bit)) {
+      return true;
+    }
+  }
+
   return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
          (!page->ContainsLimit(age_mark) || old_address < age_mark);
 }
 
+PromotionMode Heap::CurrentPromotionMode() {
+  if (incremental_marking()->IsMarking()) {
+    return PROMOTE_MARKED;
+  } else {
+    return DEFAULT_PROMOTION;
+  }
+}
+
 void Heap::RecordWrite(Object* object, int offset, Object* o) {
   if (!InNewSpace(o) || !object->IsHeapObject() || InNewSpace(object)) {
     return;
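
Note on the hunk above: the new CurrentPromotionMode() is meant to pick the promotion policy once per GC and feed it into the templated ShouldBePromoted<promotion_mode>(). The standalone sketch below is not V8 code and not this change's actual call sites; it reduces the mark-bit and age-mark checks to plain booleans just to show the dispatch pattern, and every name besides PromotionMode, ShouldBePromoted and CurrentPromotionMode is invented for illustration.

  #include <cstdio>

  enum PromotionMode { PROMOTE_MARKED, DEFAULT_PROMOTION };

  template <PromotionMode promotion_mode>
  bool ShouldBePromoted(bool is_marked, bool below_age_mark) {
    // Under PROMOTE_MARKED, anything the incremental marker has already
    // marked is promoted regardless of its position relative to the age mark.
    if (promotion_mode == PROMOTE_MARKED && is_marked) return true;
    return below_age_mark;
  }

  PromotionMode CurrentPromotionMode(bool incremental_marking_active) {
    // Stand-in for incremental_marking()->IsMarking() in the hunk above.
    return incremental_marking_active ? PROMOTE_MARKED : DEFAULT_PROMOTION;
  }

  int main() {
    PromotionMode mode = CurrentPromotionMode(/*incremental_marking_active=*/true);
    bool promoted =
        (mode == PROMOTE_MARKED)
            ? ShouldBePromoted<PROMOTE_MARKED>(/*is_marked=*/true,
                                               /*below_age_mark=*/false)
            : ShouldBePromoted<DEFAULT_PROMOTION>(true, false);
    std::printf("promoted: %s\n", promoted ? "yes" : "no");
    return 0;
  }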
@@ -460,6 +476,31 @@
             static_cast<size_t>(byte_size / kPointerSize));
 }
 
+bool Heap::PurgeLeftTrimmedObject(Object** object) {
+  HeapObject* current = reinterpret_cast<HeapObject*>(*object);
+  const MapWord map_word = current->map_word();
+  if (current->IsFiller() && !map_word.IsForwardingAddress()) {
+#ifdef DEBUG
+    // We need to find a FixedArrayBase map after walking the fillers.
+    while (current->IsFiller()) {
+      Address next = reinterpret_cast<Address>(current);
+      if (current->map() == one_pointer_filler_map()) {
+        next += kPointerSize;
+      } else if (current->map() == two_pointer_filler_map()) {
+        next += 2 * kPointerSize;
+      } else {
+        next += current->Size();
+      }
+      current = reinterpret_cast<HeapObject*>(next);
+    }
+    DCHECK(current->IsFixedArrayBase());
+#endif  // DEBUG
+    *object = nullptr;
+    return true;
+  }
+  return false;
+}
+
 template <Heap::FindMementoMode mode>
 AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
   // Check if there is potentially a memento behind the object. If
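
Note on PurgeLeftTrimmedObject() above: the DEBUG walk steps over the fillers that left-trimming leaves in front of the surviving FixedArrayBase; one-word and two-word fillers are sized by their map, while larger free space carries its own size. A self-contained sketch of that walk, with invented stand-in types (FakeObject, FillerKind) in place of real heap objects:

  #include <cstdint>
  #include <cstdio>

  const int kPointerSize = sizeof(void*);

  enum FillerKind {
    ONE_POINTER_FILLER,
    TWO_POINTER_FILLER,
    FREE_SPACE_FILLER,
    NOT_A_FILLER
  };

  struct FakeObject {
    FillerKind kind;
    int size;  // only consulted for FREE_SPACE_FILLER
  };

  int main() {
    // A left-trimmed array leaves a run of fillers before the surviving
    // object; advance over them case by case, as the DCHECK loop does.
    FakeObject objects[] = {{ONE_POINTER_FILLER, 0},
                            {TWO_POINTER_FILLER, 0},
                            {FREE_SPACE_FILLER, 48},
                            {NOT_A_FILLER, 0}};
    std::uintptr_t offset = 0;  // stand-in for the Address being advanced
    int i = 0;
    while (objects[i].kind != NOT_A_FILLER) {
      if (objects[i].kind == ONE_POINTER_FILLER) {
        offset += kPointerSize;
      } else if (objects[i].kind == TWO_POINTER_FILLER) {
        offset += 2 * kPointerSize;
      } else {
        offset += objects[i].size;
      }
      ++i;
    }
    std::printf("skipped %d fillers covering %ju bytes\n", i,
                static_cast<std::uintmax_t>(offset));
    return 0;
  }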
@@ -510,7 +551,7 @@
 
 template <Heap::UpdateAllocationSiteMode mode>
 void Heap::UpdateAllocationSite(HeapObject* object,
-                                HashMap* pretenuring_feedback) {
+                                base::HashMap* pretenuring_feedback) {
   DCHECK(InFromSpace(object));
   if (!FLAG_allocation_site_pretenuring ||
       !AllocationSite::CanTrack(object->map()->instance_type()))
@@ -538,7 +579,7 @@
     // to dereference the allocation site and rather have to postpone all checks
     // till actually merging the data.
     Address key = memento_candidate->GetAllocationSiteUnchecked();
-    HashMap::Entry* e =
+    base::HashMap::Entry* e =
         pretenuring_feedback->LookupOrInsert(key, ObjectHash(key));
     DCHECK(e != nullptr);
     (*bit_cast<intptr_t*>(&e->value))++;
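
Note on the hunk above: the feedback map accumulates a per-allocation-site memento count, keyed by the site address, which is merged into the heap's global pretenuring data later. The sketch below shows the same lookup-or-insert-then-increment accumulation with a std::unordered_map; it is illustrative only and does not use V8's base::HashMap API, and all names here are invented.

  #include <cstdint>
  #include <cstdio>
  #include <unordered_map>

  // Counts mementos per allocation-site address before the local feedback
  // is merged back into the global pretenuring data.
  using PretenuringFeedback = std::unordered_map<std::uintptr_t, std::intptr_t>;

  void RecordMemento(PretenuringFeedback* feedback, std::uintptr_t site_key) {
    // Lookup-or-insert plus increment: operator[] value-initializes the
    // counter to 0 on first use, mirroring the
    // (*bit_cast<intptr_t*>(&e->value))++ in the hunk above.
    ++(*feedback)[site_key];
  }

  int main() {
    PretenuringFeedback feedback;
    RecordMemento(&feedback, 0x1000);
    RecordMemento(&feedback, 0x1000);
    RecordMemento(&feedback, 0x2000);
    for (const auto& entry : feedback) {
      std::printf("site %#jx: %jd memento(s)\n",
                  static_cast<std::uintmax_t>(entry.first),
                  static_cast<std::intmax_t>(entry.second));
    }
    return 0;
  }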
@@ -596,12 +637,12 @@
   for (int i = 0; i < new_space_strings_.length(); ++i) {
     Object* obj = Object::cast(new_space_strings_[i]);
     DCHECK(heap_->InNewSpace(obj));
-    DCHECK(obj != heap_->the_hole_value());
+    DCHECK(!obj->IsTheHole(heap_->isolate()));
   }
   for (int i = 0; i < old_space_strings_.length(); ++i) {
     Object* obj = Object::cast(old_space_strings_[i]);
     DCHECK(!heap_->InNewSpace(obj));
-    DCHECK(obj != heap_->the_hole_value());
+    DCHECK(!obj->IsTheHole(heap_->isolate()));
   }
 #endif
 }
@@ -710,6 +751,17 @@
   set_interpreter_entry_return_pc_offset(Smi::FromInt(pc_offset));
 }
 
+int Heap::GetNextTemplateSerialNumber() {
+  int next_serial_number = next_template_serial_number()->value() + 1;
+  set_next_template_serial_number(Smi::FromInt(next_serial_number));
+  return next_serial_number;
+}
+
+void Heap::SetSerializedTemplates(FixedArray* templates) {
+  DCHECK_EQ(empty_fixed_array(), serialized_templates());
+  set_serialized_templates(templates);
+}
+
 AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
     : heap_(isolate->heap()) {
   heap_->always_allocate_scope_count_.Increment(1);
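
Note on GetNextTemplateSerialNumber() above: the counter lives in a Smi root, so each call reads the boxed value, increments it, and stores it back before handing out the new number. A minimal standalone sketch of that read-increment-store pattern, with FakeHeap and a toy Smi as invented stand-ins:

  #include <cstdio>

  struct Smi {  // toy stand-in for v8::internal::Smi
    int value_;
    int value() const { return value_; }
    static Smi FromInt(int v) { return Smi{v}; }
  };

  struct FakeHeap {  // stand-in for the Heap root accessors used above
    Smi next_template_serial_number_ = Smi::FromInt(0);
    Smi next_template_serial_number() const {
      return next_template_serial_number_;
    }
    void set_next_template_serial_number(Smi s) {
      next_template_serial_number_ = s;
    }

    int GetNextTemplateSerialNumber() {
      // Read the boxed counter, bump it, store it back, return the new value.
      int next_serial_number = next_template_serial_number().value() + 1;
      set_next_template_serial_number(Smi::FromInt(next_serial_number));
      return next_serial_number;
    }
  };

  int main() {
    FakeHeap heap;
    std::printf("%d\n", heap.GetNextTemplateSerialNumber());  // prints 1
    std::printf("%d\n", heap.GetNextTemplateSerialNumber());  // prints 2
    return 0;
  }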