Revision 2.4.4.

Fix a bug that caused hangs on very large sparse arrays.

Try harder to free up memory when running out of space.
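
For embedders, the aggressive path is reached through the existing
low-memory hook. A minimal sketch of the intended usage, assuming (the
api.cc side of this change is not shown in this diff) that
v8::V8::LowMemoryNotification() now routes to the new
Heap::CollectAllAvailableGarbage():

    #include <v8.h>

    // Called by the embedder when the OS signals memory pressure.
    void OnSystemLowMemory() {
      // Clears the compilation cache and reruns the mark-compact
      // collector while weak handle callbacks keep freeing objects
      // (see the heap.cc changes below).
      v8::V8::LowMemoryNotification();
    }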

Add heap snapshot serialization in JSON format to the API.
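
A rough usage sketch of the serialization API; the exact v8-profiler.h
surface is not part of this diff, so the OutputStream subclassing and the
kJSON format name below are assumptions about this revision's headers:

    #include <v8-profiler.h>
    #include <cstdio>

    // Receives the serialized snapshot chunk by chunk and writes it out.
    class StdoutStream : public v8::OutputStream {
     public:
      virtual void EndOfStream() { fflush(stdout); }
      virtual WriteResult WriteAsciiChunk(char* data, int size) {
        fwrite(data, 1, size, stdout);
        return kContinue;
      }
    };

    void DumpHeapAsJson() {
      const v8::HeapSnapshot* snapshot =
          v8::HeapProfiler::TakeSnapshot(v8::String::New("dump"));
      StdoutStream stream;
      snapshot->Serialize(&stream, v8::HeapSnapshot::kJSON);
    }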

Recalibrate benchmarks.
Review URL: http://codereview.chromium.org/3421009

git-svn-id: http://v8.googlecode.com/svn/trunk@5462 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
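
The heap.h counterpart of the heap.cc changes below is not included in this
excerpt. Judging from the hunks (the limit constants removed from heap.cc
but still referenced by the initializers that remain, plus the new
UpdateOldSpaceLimits() calls), it presumably gains something like the
following; this is a hypothetical sketch, and the NORMAL defaults are
assumptions:

    // Hypothetical heap.h additions (not part of this diff):
    class Heap : public AllStatic {
     public:
      enum CollectionPolicy { NORMAL, AGGRESSIVE };

      static void CollectAllGarbage(bool force_compaction,
                                    CollectionPolicy collectionPolicy = NORMAL);
      // Clears caches and collects until weak callbacks stop freeing objects.
      static void CollectAllAvailableGarbage();
      static bool CollectGarbage(int requested_size,
                                 AllocationSpace space,
                                 CollectionPolicy collectionPolicy = NORMAL);

     private:
      // Previously file-level constants in heap.cc (see the second hunk).
      static const int kMinimumPromotionLimit = 2 * MB;
      static const int kMinimumAllocationLimit = 8 * MB;

      // Recomputes the old generation promotion/allocation limits
      // (definition not shown in this excerpt).
      static void UpdateOldSpaceLimits();
    };
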
diff --git a/src/heap.cc b/src/heap.cc
index 443c926..650800f 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -55,7 +55,6 @@
 String* Heap::hidden_symbol_;
 Object* Heap::roots_[Heap::kRootListLength];
 
-
 NewSpace Heap::new_space_;
 OldSpace* Heap::old_pointer_space_ = NULL;
 OldSpace* Heap::old_data_space_ = NULL;
@@ -64,9 +63,6 @@
 CellSpace* Heap::cell_space_ = NULL;
 LargeObjectSpace* Heap::lo_space_ = NULL;
 
-static const int kMinimumPromotionLimit = 2*MB;
-static const int kMinimumAllocationLimit = 8*MB;
-
 int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
 int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
 
@@ -405,17 +401,26 @@
 }
 
 
-void Heap::CollectAllGarbage(bool force_compaction) {
+void Heap::CollectAllGarbage(bool force_compaction,
+                             CollectionPolicy collectionPolicy) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
   MarkCompactCollector::SetForceCompaction(force_compaction);
-  CollectGarbage(0, OLD_POINTER_SPACE);
+  CollectGarbage(0, OLD_POINTER_SPACE, collectionPolicy);
   MarkCompactCollector::SetForceCompaction(false);
 }
 
 
-bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
+void Heap::CollectAllAvailableGarbage() {
+  CompilationCache::Clear();
+  CollectAllGarbage(true, AGGRESSIVE);
+}
+
+
+bool Heap::CollectGarbage(int requested_size,
+                          AllocationSpace space,
+                          CollectionPolicy collectionPolicy) {
   // The VM is in the GC state until exiting this function.
   VMState state(GC);
 
@@ -442,7 +447,7 @@
         ? &Counters::gc_scavenger
         : &Counters::gc_compactor;
     rate->Start();
-    PerformGarbageCollection(space, collector, &tracer);
+    PerformGarbageCollection(collector, &tracer, collectionPolicy);
     rate->Stop();
 
     GarbageCollectionEpilogue();
@@ -475,7 +480,7 @@
 
 void Heap::PerformScavenge() {
   GCTracer tracer;
-  PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer);
+  PerformGarbageCollection(SCAVENGER, &tracer, NORMAL);
 }
 
 
@@ -664,9 +669,9 @@
   survival_rate_ = survival_rate;
 }
 
-void Heap::PerformGarbageCollection(AllocationSpace space,
-                                    GarbageCollector collector,
-                                    GCTracer* tracer) {
+void Heap::PerformGarbageCollection(GarbageCollector collector,
+                                    GCTracer* tracer,
+                                    CollectionPolicy collectionPolicy) {
   VerifySymbolTable();
   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
     ASSERT(!allocation_allowed_);
@@ -696,25 +701,45 @@
 
     UpdateSurvivalRateTrend(start_new_space_size);
 
-    int old_gen_size = PromotedSpaceSize();
-    old_gen_promotion_limit_ =
-        old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
-    old_gen_allocation_limit_ =
-        old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
+    UpdateOldSpaceLimits();
 
-    if (high_survival_rate_during_scavenges &&
-        IsStableOrIncreasingSurvivalTrend()) {
-      // Stable high survival rates of young objects both during partial and
-      // full collection indicate that mutator is either building or modifying
-      // a structure with a long lifetime.
-      // In this case we aggressively raise old generation memory limits to
-      // postpone subsequent mark-sweep collection and thus trade memory
-      // space for the mutation speed.
-      old_gen_promotion_limit_ *= 2;
-      old_gen_allocation_limit_ *= 2;
+    // A major GC invokes weak handle callbacks on weakly reachable
+    // handles, but does not collect the weakly reachable objects until
+    // the next major GC.  Therefore, when collecting aggressively, if any
+    // weak handle callback has been invoked we rerun the major GC to
+    // release objects that have since become garbage.
+    if (collectionPolicy == AGGRESSIVE) {
+      // Note: as weak callbacks can execute arbitrary code, we cannot
+      // assume that weak callback invocations will eventually cease.
+      // Therefore we stop recollecting after a fixed number of attempts.
+      const int kMaxNumberOfAttempts = 7;
+      for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
+        { DisableAssertNoAllocation allow_allocation;
+          GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
+          if (!GlobalHandles::PostGarbageCollectionProcessing()) break;
+        }
+        MarkCompact(tracer);
+        // Weak handle callbacks can allocate data, so keep limits correct.
+        UpdateOldSpaceLimits();
+      }
+    } else {
+      if (high_survival_rate_during_scavenges &&
+          IsStableOrIncreasingSurvivalTrend()) {
+        // Stable high survival rates of young objects, both during partial
+        // and full collections, indicate that the mutator is either
+        // building or modifying a structure with a long lifetime.  In this
+        // case we aggressively raise the old generation memory limits to
+        // postpone the next mark-sweep collection and thus trade memory
+        // space for mutation speed.
+        old_gen_promotion_limit_ *= 2;
+        old_gen_allocation_limit_ *= 2;
+      }
     }
 
-    old_gen_exhausted_ = false;
+    { DisableAssertNoAllocation allow_allocation;
+      GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
+      GlobalHandles::PostGarbageCollectionProcessing();
+    }
   } else {
     tracer_ = tracer;
     Scavenge();
@@ -725,12 +750,6 @@
 
   Counters::objs_since_last_young.Set(0);
 
-  if (collector == MARK_COMPACTOR) {
-    DisableAssertNoAllocation allow_allocation;
-    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-    GlobalHandles::PostGarbageCollectionProcessing();
-  }
-
   // Update relocatables.
   Relocatable::PostGarbageCollectionProcessing();
 
@@ -1834,6 +1853,13 @@
 
   CreateFixedStubs();
 
+  // Allocate the dictionary of intrinsic function names.
+  obj = StringDictionary::Allocate(Runtime::kNumFunctions);
+  if (obj->IsFailure()) return false;
+  obj = Runtime::InitializeIntrinsicFunctionNames(obj);
+  if (obj->IsFailure()) return false;
+  set_intrinsic_function_names(StringDictionary::cast(obj));
+
   if (InitializeNumberStringCache()->IsFailure()) return false;
 
   // Allocate cache for single character ASCII strings.