Push version 1.3.17 to trunk.

Added an API method to get simple heap statistics.
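
A minimal sketch of how an embedder might read the new statistics,
using v8::V8::GetHeapStatistics and the v8::HeapStatistics accessors
added with this change (the helper function itself is illustrative):

    #include <v8.h>
    #include <cstdio>

    // Query committed and used heap sizes through the new API.
    void PrintHeapStatistics() {
      v8::HeapStatistics stats;
      v8::V8::GetHeapStatistics(&stats);
      printf("total heap size: %u bytes\n",
             static_cast<unsigned>(stats.total_heap_size()));
      printf("used heap size:  %u bytes\n",
             static_cast<unsigned>(stats.used_heap_size()));
    }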

Improved heap profiler support.

Fixed the implementation of the resource constraint API so it works when using snapshots.
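
For illustration, a sketch of the constraint API in question; it must
run before V8 is initialized, and the sizes are placeholder values:

    #include <v8.h>

    // Request heap limits up front.  With this fix the values are also
    // honored (capped at the reserved semispace size) when V8 was built
    // with a snapshot.
    bool ConfigureHeapLimits() {
      v8::ResourceConstraints constraints;
      constraints.set_max_young_space_size(2 * 1024 * 1024);    // placeholder
      constraints.set_max_old_space_size(128 * 1024 * 1024);    // placeholder
      return v8::SetResourceConstraints(&constraints);
    }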

Fixed a number of issues in the Windows 64-bit version.

Optimized calls to API getters.
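
Here "API getters" means accessor callbacks registered through the
public API, as in this illustrative sketch (the callback and property
names are made up):

    #include <v8.h>

    // Script reads of obj.x dispatch to XGetter; that call path is what
    // this change makes faster.
    static v8::Handle<v8::Value> XGetter(v8::Local<v8::String> property,
                                         const v8::AccessorInfo& info) {
      return v8::Integer::New(42);
    }

    void AddAccessor(v8::Handle<v8::ObjectTemplate> templ) {
      v8::HandleScope scope;
      templ->SetAccessor(v8::String::New("x"), XGetter);
    }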

Added Valgrind notification of code modifications to the 64-bit version.

Fixed an issue where shared library addresses were logged on Windows at startup but never used.

git-svn-id: http://v8.googlecode.com/svn/trunk@3167 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/heap.cc b/src/heap.cc
index 2082e97..ae18fbe 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -39,9 +39,11 @@
 #include "natives.h"
 #include "scanner.h"
 #include "scopeinfo.h"
+#include "snapshot.h"
 #include "v8threads.h"
 #if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
 #include "regexp-macro-assembler.h"
+#include "arm/regexp-macro-assembler-arm.h"
 #endif
 
 namespace v8 {
@@ -74,28 +76,35 @@
 // semispace_size_ should be a power of 2 and old_generation_size_ should be
 // a multiple of Page::kPageSize.
 #if defined(ANDROID)
-int Heap::semispace_size_  = 512*KB;
-int Heap::old_generation_size_ = 128*MB;
+int Heap::max_semispace_size_  = 512*KB;
+int Heap::max_old_generation_size_ = 128*MB;
 int Heap::initial_semispace_size_ = 128*KB;
 size_t Heap::code_range_size_ = 0;
 #elif defined(V8_TARGET_ARCH_X64)
-int Heap::semispace_size_  = 16*MB;
-int Heap::old_generation_size_ = 1*GB;
+int Heap::max_semispace_size_  = 16*MB;
+int Heap::max_old_generation_size_ = 1*GB;
 int Heap::initial_semispace_size_ = 1*MB;
 size_t Heap::code_range_size_ = 512*MB;
 #else
-int Heap::semispace_size_  = 8*MB;
-int Heap::old_generation_size_ = 512*MB;
+int Heap::max_semispace_size_  = 8*MB;
+int Heap::max_old_generation_size_ = 512*MB;
 int Heap::initial_semispace_size_ = 512*KB;
 size_t Heap::code_range_size_ = 0;
 #endif
 
+// The reserved semispace size will be the default max semispace size
+// if snapshotting is used, and otherwise the semispace size requested
+// through ConfigureHeap.
+int Heap::reserved_semispace_size_ = Heap::max_semispace_size_;
+
 GCCallback Heap::global_gc_prologue_callback_ = NULL;
 GCCallback Heap::global_gc_epilogue_callback_ = NULL;
 
 // Variables set based on semispace_size_ and old_generation_size_ in
 // ConfigureHeap.
-int Heap::young_generation_size_ = 0;  // Will be 2 * semispace_size_.
+
+// The chunk reserved for new space will be 4 * reserved_semispace_size_
+// so that the young generation can be aligned to its size.
 int Heap::survived_since_last_expansion_ = 0;
 int Heap::external_allocation_limit_ = 0;
 
@@ -105,6 +114,7 @@
 int Heap::gc_count_ = 0;
 
 int Heap::always_allocate_scope_depth_ = 0;
+int Heap::linear_allocation_scope_depth_ = 0;
 bool Heap::context_disposed_pending_ = false;
 
 #ifdef DEBUG
@@ -127,6 +137,19 @@
 }
 
 
+int Heap::CommittedMemory() {
+  if (!HasBeenSetup()) return 0;
+
+  return new_space_.CommittedMemory() +
+      old_pointer_space_->CommittedMemory() +
+      old_data_space_->CommittedMemory() +
+      code_space_->CommittedMemory() +
+      map_space_->CommittedMemory() +
+      cell_space_->CommittedMemory() +
+      lo_space_->Size();
+}
+
+
 int Heap::Available() {
   if (!HasBeenSetup()) return 0;
 
@@ -222,19 +245,34 @@
 void Heap::PrintShortHeapStatistics() {
   if (!FLAG_trace_gc_verbose) return;
   PrintF("Memory allocator,   used: %8d, available: %8d\n",
-         MemoryAllocator::Size(), MemoryAllocator::Available());
+         MemoryAllocator::Size(),
+         MemoryAllocator::Available());
   PrintF("New space,          used: %8d, available: %8d\n",
-         Heap::new_space_.Size(), new_space_.Available());
-  PrintF("Old pointers,       used: %8d, available: %8d\n",
-         old_pointer_space_->Size(), old_pointer_space_->Available());
-  PrintF("Old data space,     used: %8d, available: %8d\n",
-         old_data_space_->Size(), old_data_space_->Available());
-  PrintF("Code space,         used: %8d, available: %8d\n",
-         code_space_->Size(), code_space_->Available());
-  PrintF("Map space,          used: %8d, available: %8d\n",
-         map_space_->Size(), map_space_->Available());
+         Heap::new_space_.Size(),
+         new_space_.Available());
+  PrintF("Old pointers,       used: %8d, available: %8d, waste: %8d\n",
+         old_pointer_space_->Size(),
+         old_pointer_space_->Available(),
+         old_pointer_space_->Waste());
+  PrintF("Old data space,     used: %8d, available: %8d, waste: %8d\n",
+         old_data_space_->Size(),
+         old_data_space_->Available(),
+         old_data_space_->Waste());
+  PrintF("Code space,         used: %8d, available: %8d, waste: %8d\n",
+         code_space_->Size(),
+         code_space_->Available(),
+         code_space_->Waste());
+  PrintF("Map space,          used: %8d, available: %8d, waste: %8d\n",
+         map_space_->Size(),
+         map_space_->Available(),
+         map_space_->Waste());
+  PrintF("Cell space,         used: %8d, available: %8d, waste: %8d\n",
+         cell_space_->Size(),
+         cell_space_->Available(),
+         cell_space_->Waste());
   PrintF("Large object space, used: %8d, avaialble: %8d\n",
-         lo_space_->Size(), lo_space_->Available());
+         lo_space_->Size(),
+         lo_space_->Available());
 }
 #endif
 
@@ -478,7 +516,13 @@
 
   Counters::objs_since_last_young.Set(0);
 
-  PostGarbageCollectionProcessing();
+  if (collector == MARK_COMPACTOR) {
+    DisableAssertNoAllocation allow_allocation;
+    GlobalHandles::PostGarbageCollectionProcessing();
+  }
+
+  // Update relocatables.
+  Relocatable::PostGarbageCollectionProcessing();
 
   if (collector == MARK_COMPACTOR) {
     // Register the amount of external allocated memory.
@@ -494,17 +538,6 @@
 }
 
 
-void Heap::PostGarbageCollectionProcessing() {
-  // Process weak handles post gc.
-  {
-    DisableAssertNoAllocation allow_allocation;
-    GlobalHandles::PostGarbageCollectionProcessing();
-  }
-  // Update relocatables.
-  Relocatable::PostGarbageCollectionProcessing();
-}
-
-
 void Heap::MarkCompact(GCTracer* tracer) {
   gc_state_ = MARK_COMPACT;
   mc_count_++;
@@ -1195,6 +1228,41 @@
   if (obj->IsFailure()) return false;
   set_pixel_array_map(Map::cast(obj));
 
+  obj = AllocateMap(EXTERNAL_BYTE_ARRAY_TYPE,
+                    ExternalArray::kAlignedSize);
+  if (obj->IsFailure()) return false;
+  set_external_byte_array_map(Map::cast(obj));
+
+  obj = AllocateMap(EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE,
+                    ExternalArray::kAlignedSize);
+  if (obj->IsFailure()) return false;
+  set_external_unsigned_byte_array_map(Map::cast(obj));
+
+  obj = AllocateMap(EXTERNAL_SHORT_ARRAY_TYPE,
+                    ExternalArray::kAlignedSize);
+  if (obj->IsFailure()) return false;
+  set_external_short_array_map(Map::cast(obj));
+
+  obj = AllocateMap(EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE,
+                    ExternalArray::kAlignedSize);
+  if (obj->IsFailure()) return false;
+  set_external_unsigned_short_array_map(Map::cast(obj));
+
+  obj = AllocateMap(EXTERNAL_INT_ARRAY_TYPE,
+                    ExternalArray::kAlignedSize);
+  if (obj->IsFailure()) return false;
+  set_external_int_array_map(Map::cast(obj));
+
+  obj = AllocateMap(EXTERNAL_UNSIGNED_INT_ARRAY_TYPE,
+                    ExternalArray::kAlignedSize);
+  if (obj->IsFailure()) return false;
+  set_external_unsigned_int_array_map(Map::cast(obj));
+
+  obj = AllocateMap(EXTERNAL_FLOAT_ARRAY_TYPE,
+                    ExternalArray::kAlignedSize);
+  if (obj->IsFailure()) return false;
+  set_external_float_array_map(Map::cast(obj));
+
   obj = AllocateMap(CODE_TYPE, Code::kHeaderSize);
   if (obj->IsFailure()) return false;
   set_code_map(Map::cast(obj));
@@ -1615,6 +1683,35 @@
 }
 
 
+Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) {
+  return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]);
+}
+
+
+Heap::RootListIndex Heap::RootIndexForExternalArrayType(
+    ExternalArrayType array_type) {
+  switch (array_type) {
+    case kExternalByteArray:
+      return kExternalByteArrayMapRootIndex;
+    case kExternalUnsignedByteArray:
+      return kExternalUnsignedByteArrayMapRootIndex;
+    case kExternalShortArray:
+      return kExternalShortArrayMapRootIndex;
+    case kExternalUnsignedShortArray:
+      return kExternalUnsignedShortArrayMapRootIndex;
+    case kExternalIntArray:
+      return kExternalIntArrayMapRootIndex;
+    case kExternalUnsignedIntArray:
+      return kExternalUnsignedIntArrayMapRootIndex;
+    case kExternalFloatArray:
+      return kExternalFloatArrayMapRootIndex;
+    default:
+      UNREACHABLE();
+      return kUndefinedValueRootIndex;
+  }
+}
+
+
 Object* Heap::NewNumberFromDouble(double value, PretenureFlag pretenure) {
   return SmiOrNumberFromDouble(value,
                                true /* number object must be new */,
@@ -1713,10 +1810,10 @@
   }
 
   Map* map;
-  if (length <= String::kMaxShortStringSize) {
+  if (length <= String::kMaxShortSize) {
     map = is_ascii ? short_cons_ascii_string_map()
       : short_cons_string_map();
-  } else if (length <= String::kMaxMediumStringSize) {
+  } else if (length <= String::kMaxMediumSize) {
     map = is_ascii ? medium_cons_ascii_string_map()
       : medium_cons_string_map();
   } else {
@@ -1746,11 +1843,11 @@
   }
 
   Map* map;
-  if (length <= String::kMaxShortStringSize) {
+  if (length <= String::kMaxShortSize) {
     map = buffer->IsAsciiRepresentation() ?
       short_sliced_ascii_string_map() :
       short_sliced_string_map();
-  } else if (length <= String::kMaxMediumStringSize) {
+  } else if (length <= String::kMaxMediumSize) {
     map = buffer->IsAsciiRepresentation() ?
       medium_sliced_ascii_string_map() :
       medium_sliced_string_map();
@@ -1815,9 +1912,9 @@
     ExternalAsciiString::Resource* resource) {
   Map* map;
   int length = resource->length();
-  if (length <= String::kMaxShortStringSize) {
+  if (length <= String::kMaxShortSize) {
     map = short_external_ascii_string_map();
-  } else if (length <= String::kMaxMediumStringSize) {
+  } else if (length <= String::kMaxMediumSize) {
     map = medium_external_ascii_string_map();
   } else {
     map = long_external_ascii_string_map();
@@ -1940,6 +2037,31 @@
 }
 
 
+Object* Heap::AllocateExternalArray(int length,
+                                    ExternalArrayType array_type,
+                                    void* external_pointer,
+                                    PretenureFlag pretenure) {
+  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+
+  // New space can't cope with forced allocation.
+  if (always_allocate()) space = OLD_DATA_SPACE;
+
+  Object* result = AllocateRaw(ExternalArray::kAlignedSize,
+                               space,
+                               OLD_DATA_SPACE);
+
+  if (result->IsFailure()) return result;
+
+  reinterpret_cast<ExternalArray*>(result)->set_map(
+      MapForExternalArrayType(array_type));
+  reinterpret_cast<ExternalArray*>(result)->set_length(length);
+  reinterpret_cast<ExternalArray*>(result)->set_external_pointer(
+      external_pointer);
+
+  return result;
+}
+
+
 Object* Heap::CreateCode(const CodeDesc& desc,
                          ZoneScopeInfo* sinfo,
                          Code::Flags flags,
@@ -2021,7 +2143,9 @@
                                TargetSpaceId(map->instance_type()));
   if (result->IsFailure()) return result;
   HeapObject::cast(result)->set_map(map);
+#ifdef ENABLE_LOGGING_AND_PROFILING
   ProducerHeapProfile::RecordJSObjectAllocation(result);
+#endif
   return result;
 }
 
@@ -2134,9 +2258,8 @@
   // descriptors for these to the initial map as the object cannot be
   // constructed without having these properties.
   ASSERT(in_object_properties <= Map::kMaxPreAllocatedPropertyFields);
-  if (fun->shared()->has_only_this_property_assignments() &&
-      fun->shared()->this_property_assignments_count() > 0 &&
-      fun->shared()->has_only_simple_this_property_assignments()) {
+  if (fun->shared()->has_only_simple_this_property_assignments() &&
+      fun->shared()->this_property_assignments_count() > 0) {
     int count = fun->shared()->this_property_assignments_count();
     if (count > in_object_properties) {
       count = in_object_properties;
@@ -2344,7 +2467,9 @@
     JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
   }
   // Return the new clone.
+#ifdef ENABLE_LOGGING_AND_PROFILING
   ProducerHeapProfile::RecordJSObjectAllocation(clone);
+#endif
   return clone;
 }
 
@@ -2534,18 +2659,18 @@
   Map* map;
 
   if (is_ascii) {
-    if (chars <= String::kMaxShortStringSize) {
+    if (chars <= String::kMaxShortSize) {
       map = short_ascii_symbol_map();
-    } else if (chars <= String::kMaxMediumStringSize) {
+    } else if (chars <= String::kMaxMediumSize) {
       map = medium_ascii_symbol_map();
     } else {
       map = long_ascii_symbol_map();
     }
     size = SeqAsciiString::SizeFor(chars);
   } else {
-    if (chars <= String::kMaxShortStringSize) {
+    if (chars <= String::kMaxShortSize) {
       map = short_symbol_map();
-    } else if (chars <= String::kMaxMediumStringSize) {
+    } else if (chars <= String::kMaxMediumSize) {
       map = medium_symbol_map();
     } else {
       map = long_symbol_map();
@@ -2595,9 +2720,9 @@
 
   // Determine the map based on the string's length.
   Map* map;
-  if (length <= String::kMaxShortStringSize) {
+  if (length <= String::kMaxShortSize) {
     map = short_ascii_string_map();
-  } else if (length <= String::kMaxMediumStringSize) {
+  } else if (length <= String::kMaxMediumSize) {
     map = medium_ascii_string_map();
   } else {
     map = long_ascii_string_map();
@@ -2632,9 +2757,9 @@
 
   // Determine the map based on the string's length.
   Map* map;
-  if (length <= String::kMaxShortStringSize) {
+  if (length <= String::kMaxShortSize) {
     map = short_string_map();
-  } else if (length <= String::kMaxMediumStringSize) {
+  } else if (length <= String::kMaxMediumSize) {
     map = medium_string_map();
   } else {
     map = long_string_map();
@@ -3119,60 +3244,53 @@
 }
 
 
-#ifdef DEBUG
-#define SYNCHRONIZE_TAG(tag) v->Synchronize(tag)
-#else
-#define SYNCHRONIZE_TAG(tag)
-#endif
-
 void Heap::IterateRoots(ObjectVisitor* v) {
   IterateStrongRoots(v);
   v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
-  SYNCHRONIZE_TAG("symbol_table");
+  v->Synchronize("symbol_table");
 }
 
 
 void Heap::IterateStrongRoots(ObjectVisitor* v) {
   v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
-  SYNCHRONIZE_TAG("strong_root_list");
+  v->Synchronize("strong_root_list");
 
   v->VisitPointer(bit_cast<Object**, String**>(&hidden_symbol_));
-  SYNCHRONIZE_TAG("symbol");
+  v->Synchronize("symbol");
 
   Bootstrapper::Iterate(v);
-  SYNCHRONIZE_TAG("bootstrapper");
+  v->Synchronize("bootstrapper");
   Top::Iterate(v);
-  SYNCHRONIZE_TAG("top");
+  v->Synchronize("top");
   Relocatable::Iterate(v);
-  SYNCHRONIZE_TAG("relocatable");
+  v->Synchronize("relocatable");
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   Debug::Iterate(v);
 #endif
-  SYNCHRONIZE_TAG("debug");
+  v->Synchronize("debug");
   CompilationCache::Iterate(v);
-  SYNCHRONIZE_TAG("compilationcache");
+  v->Synchronize("compilationcache");
 
   // Iterate over local handles in handle scopes.
   HandleScopeImplementer::Iterate(v);
-  SYNCHRONIZE_TAG("handlescope");
+  v->Synchronize("handlescope");
 
   // Iterate over the builtin code objects and code stubs in the heap. Note
   // that it is not strictly necessary to iterate over code objects on
   // scavenge collections.  We still do it here because this same function
   // is used by the mark-sweep collector and the deserializer.
   Builtins::IterateBuiltins(v);
-  SYNCHRONIZE_TAG("builtins");
+  v->Synchronize("builtins");
 
   // Iterate over global handles.
   GlobalHandles::IterateRoots(v);
-  SYNCHRONIZE_TAG("globalhandles");
+  v->Synchronize("globalhandles");
 
   // Iterate over pointers being held by inactive threads.
   ThreadManager::Iterate(v);
-  SYNCHRONIZE_TAG("threadmanager");
+  v->Synchronize("threadmanager");
 }
-#undef SYNCHRONIZE_TAG
 
 
 // Flag is set when the heap has been configured.  The heap can be repeatedly
@@ -3182,21 +3300,37 @@
 // TODO(1236194): Since the heap size is configurable on the command line
 // and through the API, we should gracefully handle the case that the heap
 // size is not big enough to fit all the initial objects.
-bool Heap::ConfigureHeap(int semispace_size, int old_gen_size) {
+bool Heap::ConfigureHeap(int max_semispace_size, int max_old_gen_size) {
   if (HasBeenSetup()) return false;
 
-  if (semispace_size > 0) semispace_size_ = semispace_size;
-  if (old_gen_size > 0) old_generation_size_ = old_gen_size;
+  if (max_semispace_size > 0) max_semispace_size_ = max_semispace_size;
+
+  if (Snapshot::IsEnabled()) {
+    // If we are using a snapshot we always reserve the default amount
+    // of memory for each semispace because the write-barrier code in
+    // the snapshot relies on the size and alignment of new space.  We
+    // therefore cannot use a max semispace size larger than the
+    // default reserved semispace size.
+    if (max_semispace_size_ > reserved_semispace_size_) {
+      max_semispace_size_ = reserved_semispace_size_;
+    }
+  } else {
+    // If we are not using snapshots we reserve space for the actual
+    // max semispace size.
+    reserved_semispace_size_ = max_semispace_size_;
+  }
+
+  if (max_old_gen_size > 0) max_old_generation_size_ = max_old_gen_size;
 
   // The new space size must be a power of two to support single-bit testing
   // for containment.
-  semispace_size_ = RoundUpToPowerOf2(semispace_size_);
-  initial_semispace_size_ = Min(initial_semispace_size_, semispace_size_);
-  young_generation_size_ = 2 * semispace_size_;
-  external_allocation_limit_ = 10 * semispace_size_;
+  max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_);
+  reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_);
+  initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
+  external_allocation_limit_ = 10 * max_semispace_size_;
 
   // The old generation is paged.
-  old_generation_size_ = RoundUp(old_generation_size_, Page::kPageSize);
+  max_old_generation_size_ = RoundUp(max_old_generation_size_, Page::kPageSize);
 
   heap_configured = true;
   return true;
@@ -3204,7 +3338,7 @@
 
 
 bool Heap::ConfigureHeapDefault() {
-  return ConfigureHeap(FLAG_new_space_size, FLAG_old_space_size);
+  return ConfigureHeap(FLAG_max_new_space_size / 2, FLAG_max_old_space_size);
 }
 
 
@@ -3240,30 +3374,31 @@
   }
 
   // Setup memory allocator and reserve a chunk of memory for new
-  // space.  The chunk is double the size of the new space to ensure
-  // that we can find a pair of semispaces that are contiguous and
-  // aligned to their size.
-  if (!MemoryAllocator::Setup(MaxCapacity())) return false;
+  // space.  The chunk is double the size of the requested reserved
+  // new space size to ensure that we can find a pair of semispaces that
+  // are contiguous and aligned to their size.
+  if (!MemoryAllocator::Setup(MaxReserved())) return false;
   void* chunk =
-      MemoryAllocator::ReserveInitialChunk(2 * young_generation_size_);
+      MemoryAllocator::ReserveInitialChunk(4 * reserved_semispace_size_);
   if (chunk == NULL) return false;
 
   // Align the pair of semispaces to their size, which must be a power
   // of 2.
-  ASSERT(IsPowerOf2(young_generation_size_));
   Address new_space_start =
-      RoundUp(reinterpret_cast<byte*>(chunk), young_generation_size_);
-  if (!new_space_.Setup(new_space_start, young_generation_size_)) return false;
+      RoundUp(reinterpret_cast<byte*>(chunk), 2 * reserved_semispace_size_);
+  if (!new_space_.Setup(new_space_start, 2 * reserved_semispace_size_)) {
+    return false;
+  }
 
   // Initialize old pointer space.
   old_pointer_space_ =
-      new OldSpace(old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
+      new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
   if (old_pointer_space_ == NULL) return false;
   if (!old_pointer_space_->Setup(NULL, 0)) return false;
 
   // Initialize old data space.
   old_data_space_ =
-      new OldSpace(old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
+      new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
   if (old_data_space_ == NULL) return false;
   if (!old_data_space_->Setup(NULL, 0)) return false;
 
@@ -3278,7 +3413,7 @@
   }
 
   code_space_ =
-      new OldSpace(old_generation_size_, CODE_SPACE, EXECUTABLE);
+      new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE);
   if (code_space_ == NULL) return false;
   if (!code_space_->Setup(NULL, 0)) return false;
 
@@ -3288,7 +3423,7 @@
   if (!map_space_->Setup(NULL, 0)) return false;
 
   // Initialize global property cell space.
-  cell_space_ = new CellSpace(old_generation_size_, CELL_SPACE);
+  cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE);
   if (cell_space_ == NULL) return false;
   if (!cell_space_->Setup(NULL, 0)) return false;
 
@@ -3311,8 +3446,10 @@
   LOG(IntEvent("heap-capacity", Capacity()));
   LOG(IntEvent("heap-available", Available()));
 
+#ifdef ENABLE_LOGGING_AND_PROFILING
   // This should be called only after initial objects have been created.
   ProducerHeapProfile::Setup();
+#endif
 
   return true;
 }