Upgrade V8 to 5.1.281.57  DO NOT MERGE

FPIIM-449

Change-Id: Id981b686b4d587ac31697662eb98bb34be42ad90
(cherry picked from commit 3b9bc31999c9787eb726ecdbfd5796bfdec32a18)
diff --git a/test/cctest/heap/test-heap.cc b/test/cctest/heap/test-heap.cc
index 88aee8a..424e987 100644
--- a/test/cctest/heap/test-heap.cc
+++ b/test/cctest/heap/test-heap.cc
@@ -1164,7 +1164,7 @@
   // Allocate a JS array to OLD_SPACE and NEW_SPACE
   objs[next_objs_index++] = factory->NewJSArray(10);
   objs[next_objs_index++] =
-      factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, Strength::WEAK, TENURED);
+      factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, TENURED);
 
   // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
   objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
@@ -1572,8 +1572,7 @@
   Factory* factory = isolate->factory();
   Heap* heap = isolate->heap();
   CompilationCache* compilation_cache = isolate->compilation_cache();
-  LanguageMode language_mode =
-      construct_language_mode(FLAG_use_strict, FLAG_use_strong);
+  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
 
   v8::HandleScope scope(CcTest::isolate());
   const char* raw_source =
@@ -2068,7 +2067,7 @@
       heap->new_space()->AllocateRawAligned(size, alignment);
   HeapObject* obj = NULL;
   allocation.To(&obj);
-  heap->CreateFillerObjectAt(obj->address(), size);
+  heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
   return obj;
 }
 
@@ -2171,7 +2170,7 @@
       heap->old_space()->AllocateRawAligned(size, alignment);
   HeapObject* obj = NULL;
   allocation.To(&obj);
-  heap->CreateFillerObjectAt(obj->address(), size);
+  heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
   return obj;
 }
 
@@ -2334,11 +2333,7 @@
   Heap* heap = CcTest::heap();
   NewSpace* new_space = heap->new_space();
 
-  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
-      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
-    // The max size cannot exceed the reserved size, since semispaces must be
-    // always within the reserved space.  We can't test new space growing and
-    // shrinking if the reserved size is the same as the minimum (initial) size.
+  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
     return;
   }
 
@@ -2383,11 +2378,7 @@
 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
   CcTest::InitializeVM();
   Heap* heap = CcTest::heap();
-  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
-      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
-    // The max size cannot exceed the reserved size, since semispaces must be
-    // always within the reserved space.  We can't test new space growing and
-    // shrinking if the reserved size is the same as the minimum (initial) size.
+  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
     return;
   }
 
@@ -4285,8 +4276,9 @@
           AllocationMemento::kSize + kPointerSize);
   CHECK(allocation.To(&obj));
   Address addr_obj = obj->address();
-  CcTest::heap()->CreateFillerObjectAt(
-      addr_obj, AllocationMemento::kSize + kPointerSize);
+  CcTest::heap()->CreateFillerObjectAt(addr_obj,
+                                       AllocationMemento::kSize + kPointerSize,
+                                       ClearRecordedSlots::kNo);
 
   // Give the array a name, making sure not to allocate strings.
   v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
@@ -4644,7 +4636,7 @@
   // Start incremental marking to active write barrier.
   SimulateIncrementalMarking(heap, false);
   heap->incremental_marking()->AdvanceIncrementalMarking(
-      10000000, 10000000, IncrementalMarking::IdleStepActions());
+      10000000, IncrementalMarking::IdleStepActions());
 
   // Create references from the large object to the object on the evacuation
   // candidate.
@@ -4937,7 +4929,8 @@
                i, i, i, i, i, i, i, i);
       CompileRun(source.start());
     }
-    heap->CollectAllGarbage();
+    // We have to abort incremental marking here to abandon black pages.
+    heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   }
   int elements = 0;
   if (heap->weak_object_to_code_table()->IsHashTable()) {
@@ -5457,6 +5450,9 @@
     CHECK(weak_cell->value()->IsFixedArray());
     weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
   }
+  // Collect all garbage twice to make sure that we also clear
+  // weak cells that were allocated on black pages.
+  heap->CollectAllGarbage();
   heap->CollectAllGarbage();
   CHECK_EQ(*survivor, weak_cells[0]->value());
   for (int i = 1; i < N; i++) {
@@ -6003,7 +5999,7 @@
   Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
   Handle<Name> key = isolate->factory()->stack_trace_symbol();
   Handle<Object> stack_trace =
-      JSObject::GetProperty(exception, key).ToHandleChecked();
+      Object::GetProperty(exception, key).ToHandleChecked();
   Handle<Object> code =
       Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
   CHECK(code->IsCode());
@@ -6277,6 +6273,7 @@
   Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
   fun->ReplaceCode(*isolate->builtins()->CompileLazy());
   fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
+  fun->shared()->ClearBytecodeArray();  // Bytecode is code too.
   isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
 }
 
@@ -6320,14 +6317,14 @@
   int time2 = 200;
   size_t counter2 = 2000;
   tracer->SampleAllocation(time2, 0, counter2);
-  size_t throughput =
-      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
+  size_t throughput = static_cast<size_t>(
+      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100));
   CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
   int time3 = 1000;
   size_t counter3 = 30000;
   tracer->SampleAllocation(time3, 0, counter3);
-  throughput =
-      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
+  throughput = static_cast<size_t>(
+      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100));
   CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
 }
 
@@ -6344,7 +6341,8 @@
   int time2 = 200;
   size_t counter2 = 2000;
   tracer->SampleAllocation(time2, counter2, counter2);
-  size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
+  size_t throughput = static_cast<size_t>(
+      tracer->AllocationThroughputInBytesPerMillisecond(100));
   CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
   int time3 = 1000;
   size_t counter3 = 30000;
@@ -6531,5 +6529,93 @@
   heap->CollectGarbage(NEW_SPACE);
 }
 
+HEAP_TEST(Regress589413) {
+  FLAG_stress_compaction = true;
+  FLAG_manual_evacuation_candidates_selection = true;
+  FLAG_parallel_compaction = false;
+  FLAG_concurrent_sweeping = false;
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+  Heap* heap = CcTest::heap();
+  // Get the heap in clean state.
+  heap->CollectGarbage(OLD_SPACE);
+  heap->CollectGarbage(OLD_SPACE);
+  Isolate* isolate = CcTest::i_isolate();
+  Factory* factory = isolate->factory();
+  // Fill the new space with byte arrays with elements looking like pointers.
+  const int M = 256;
+  ByteArray* byte_array;
+  while (heap->AllocateByteArray(M).To(&byte_array)) {
+    for (int j = 0; j < M; j++) {
+      byte_array->set(j, 0x31);
+    }
+    // Add the array in root set.
+    handle(byte_array);
+  }
+  // Make sure the byte arrays will be promoted on the next GC.
+  heap->CollectGarbage(NEW_SPACE);
+  // This number is close to large free list category threshold.
+  const int N = 0x3eee;
+  {
+    std::vector<FixedArray*> arrays;
+    std::set<Page*> pages;
+    FixedArray* array;
+    // Fill all pages with fixed arrays.
+    heap->set_force_oom(true);
+    while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
+      arrays.push_back(array);
+      pages.insert(Page::FromAddress(array->address()));
+      // Add the array in root set.
+      handle(array);
+    }
+    // Expand and fill one complete page with fixed arrays.
+    heap->set_force_oom(false);
+    while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
+      arrays.push_back(array);
+      pages.insert(Page::FromAddress(array->address()));
+      // Add the array in root set.
+      handle(array);
+      // Do not expand anymore.
+      heap->set_force_oom(true);
+    }
+    // Expand and mark the new page as evacuation candidate.
+    heap->set_force_oom(false);
+    {
+      AlwaysAllocateScope always_allocate(isolate);
+      Handle<HeapObject> ec_obj = factory->NewFixedArray(5000, TENURED);
+      Page* ec_page = Page::FromAddress(ec_obj->address());
+      ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+      // Make all arrays point to evacuation candidate so that
+      // slots are recorded for them.
+      for (size_t j = 0; j < arrays.size(); j++) {
+        array = arrays[j];
+        for (int i = 0; i < N; i++) {
+          array->set(i, *ec_obj);
+        }
+      }
+    }
+    SimulateIncrementalMarking(heap);
+    for (size_t j = 0; j < arrays.size(); j++) {
+      heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1);
+    }
+  }
+  // Force allocation from the free list.
+  heap->set_force_oom(true);
+  heap->CollectGarbage(OLD_SPACE);
+}
+
+TEST(Regress609761) {
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+  Heap* heap = CcTest::heap();
+  Isolate* isolate = heap->isolate();
+
+  intptr_t size_before = heap->SizeOfObjects();
+  Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000);
+  array->Shrink(1);
+  intptr_t size_after = heap->SizeOfObjects();
+  CHECK_EQ(size_after, size_before + array->Size());
+}
+
 }  // namespace internal
 }  // namespace v8