Merge V8 5.3.332.45. DO NOT MERGE
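
This pulls in the V8 5.3.332.45 changes to src/heap/incremental-marking.cc:
object-stats visitors are registered for incremental marking when
FLAG_track_gc_object_stats is set, IsUndefined()/IsKey() callers now pass the
Isolate explicitly, page walks over the paged, new, and large object spaces
use range-based for loops, and the embedder heap tracer's TracePrologue() is
invoked when marking starts.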

Test: Manual

FPIIM-449

Change-Id: Id3254828b068abdea3cb10442e0172a8c9a98e03
(cherry picked from commit 13e2dadd00298019ed862f2b2fc5068bba730bcf)
diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc
index c250b90..f578d43 100644
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -10,8 +10,9 @@
 #include "src/heap/gc-idle-time-handler.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/mark-compact-inl.h"
-#include "src/heap/objects-visiting.h"
+#include "src/heap/object-stats.h"
 #include "src/heap/objects-visiting-inl.h"
+#include "src/heap/objects-visiting.h"
 #include "src/tracing/trace-event.h"
 #include "src/v8.h"
 
@@ -175,6 +176,9 @@
     table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
     table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
     table_.Register(kVisitJSRegExp, &VisitJSRegExp);
+    if (FLAG_track_gc_object_stats) {
+      IncrementalMarkingObjectStatsVisitor::Initialize(&table_);
+    }
   }
 
   static const int kProgressBarScanningChunk = 32 * 1024;
@@ -231,7 +235,7 @@
     // Note that GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
     Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX);
-    if (!cache->IsUndefined()) {
+    if (!cache->IsUndefined(map->GetIsolate())) {
       MarkObjectGreyDoNotEnqueue(cache);
     }
     VisitNativeContext(map, context);
@@ -341,9 +345,7 @@
 
 void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     PagedSpace* space) {
-  PageIterator it(space);
-  while (it.has_next()) {
-    Page* p = it.next();
+  for (Page* p : *space) {
     SetOldSpacePageFlags(p, false, false);
   }
 }
@@ -351,9 +353,7 @@
 
 void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     NewSpace* space) {
-  NewSpacePageIterator it(space);
-  while (it.has_next()) {
-    Page* p = it.next();
+  for (Page* p : *space) {
     SetNewSpacePageFlags(p, false);
   }
 }
@@ -365,27 +365,21 @@
   DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
 
-  LargePage* lop = heap_->lo_space()->first_page();
-  while (LargePage::IsValid(lop)) {
+  for (LargePage* lop : *heap_->lo_space()) {
     SetOldSpacePageFlags(lop, false, false);
-    lop = lop->next_page();
   }
 }
 
 
 void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
-  PageIterator it(space);
-  while (it.has_next()) {
-    Page* p = it.next();
+  for (Page* p : *space) {
     SetOldSpacePageFlags(p, true, is_compacting_);
   }
 }
 
 
 void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
-  NewSpacePageIterator it(space->ToSpaceStart(), space->ToSpaceEnd());
-  while (it.has_next()) {
-    Page* p = it.next();
+  for (Page* p : *space) {
     SetNewSpacePageFlags(p, true);
   }
 }
@@ -397,10 +391,8 @@
   ActivateIncrementalWriteBarrier(heap_->code_space());
   ActivateIncrementalWriteBarrier(heap_->new_space());
 
-  LargePage* lop = heap_->lo_space()->first_page();
-  while (LargePage::IsValid(lop)) {
+  for (LargePage* lop : *heap_->lo_space()) {
     SetOldSpacePageFlags(lop, true, is_compacting_);
-    lop = lop->next_page();
   }
 }
 
@@ -469,9 +461,10 @@
   UnseededNumberDictionary* stubs = heap->code_stubs();
 
   int capacity = stubs->Capacity();
+  Isolate* isolate = heap->isolate();
   for (int i = 0; i < capacity; i++) {
     Object* k = stubs->KeyAt(i);
-    if (stubs->IsKey(k)) {
+    if (stubs->IsKey(isolate, k)) {
       uint32_t key = NumberToUint32(k);
 
       if (CodeStub::MajorKeyFromKey(key) == CodeStub::RecordWrite) {
@@ -537,6 +530,10 @@
 
   state_ = MARKING;
 
+  if (heap_->UsingEmbedderHeapTracer()) {
+    heap_->mark_compact_collector()->embedder_heap_tracer()->TracePrologue();
+  }
+
   RecordWriteStub::Mode mode = is_compacting_
                                    ? RecordWriteStub::INCREMENTAL_COMPACTION
                                    : RecordWriteStub::INCREMENTAL;
@@ -930,12 +927,12 @@
   }
 
   Object* context = heap_->native_contexts_list();
-  while (!context->IsUndefined()) {
+  while (!context->IsUndefined(heap_->isolate())) {
     // GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
     HeapObject* cache = HeapObject::cast(
         Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
-    if (!cache->IsUndefined()) {
+    if (!cache->IsUndefined(heap_->isolate())) {
       MarkBit mark_bit = Marking::MarkBitFrom(cache);
       if (Marking::IsGrey(mark_bit)) {
         Marking::GreyToBlack(mark_bit);
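
A note on the iterator hunks above: the old PageIterator/NewSpacePageIterator
while-loops are replaced by range-based for over the space object itself,
which only requires the space to expose begin()/end() and the iterator to
support operator*, operator++, and operator!=. Below is a minimal,
self-contained sketch of that pattern; Page, PageIterator, and PagedSpace here
are hypothetical stand-ins for illustration, not V8's real classes.

#include <cstdio>

// Hypothetical stand-ins for illustration; not V8's actual types.
struct Page {
  int id;
  Page* next;  // pages form an intrusive singly linked list
};

// The three members below are all that range-based for requires.
class PageIterator {
 public:
  explicit PageIterator(Page* p) : p_(p) {}
  Page* operator*() const { return p_; }
  PageIterator& operator++() {
    p_ = p_->next;
    return *this;
  }
  bool operator!=(const PageIterator& other) const { return p_ != other.p_; }

 private:
  Page* p_;
};

struct PagedSpace {
  Page* first_page;
  PageIterator begin() { return PageIterator(first_page); }
  PageIterator end() { return PageIterator(nullptr); }
};

int main() {
  Page c = {2, nullptr}, b = {1, &c}, a = {0, &b};
  PagedSpace space = {&a};
  // Equivalent to the old has_next()/next() loop; in the patch, a call
  // such as SetOldSpacePageFlags(p, false, false) sits in this body.
  for (Page* p : space) {
    std::printf("visiting page %d\n", p->id);
  }
  return 0;
}

In the patch the spaces are reached through pointers, hence the dereference
in "for (Page* p : *space)".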