Version 1.2.5.

Fixed bug in initial boundary check for Boyer-Moore text search (issue 349).
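
A minimal sketch of the kind of initial boundary check such a search needs — a simplified Boyer-Moore-Horspool variant for illustration only, not V8's actual string search. Without the up-front length check, the first alignment would index past the end of the subject:

  #include <string>
  #include <vector>

  // Returns the index of the first occurrence of pattern in subject,
  // or -1 if there is none.
  int IndexOf(const std::string& subject, const std::string& pattern) {
    int n = static_cast<int>(subject.size());
    int m = static_cast<int>(pattern.size());
    if (m == 0) return 0;
    if (m > n) return -1;  // Initial boundary check: pattern cannot fit.

    // Bad-character table: shift distance for each byte value.
    std::vector<int> shift(256, m);
    for (int i = 0; i < m - 1; i++) {
      shift[static_cast<unsigned char>(pattern[i])] = m - 1 - i;
    }

    int pos = 0;
    while (pos <= n - m) {
      int j = m - 1;
      while (j >= 0 && subject[pos + j] == pattern[j]) j--;
      if (j < 0) return pos;  // Full match at pos.
      pos += shift[static_cast<unsigned char>(subject[pos + m - 1])];
    }
    return -1;
  }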

Fixed compilation issues with MinGW and gcc 4.3+ and added support for armv7 and cortex-a8 architectures.  Patches by Lei Zhang and Craig Schlenter.

Added a script cache to the debugger.

Optimized compilation performance by improving internal data structures and avoiding expensive property load optimizations for code that's infrequently executed.
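
A hypothetical sketch of the general idea — gating an expensive optimization pass behind an execution counter so that cold code skips it. The names, threshold, and structure below are invented for illustration and do not reflect V8's internals:

  // Per-function bookkeeping (hypothetical).
  struct FunctionInfo {
    int call_count = 0;
    bool optimized = false;
  };

  const int kHotThreshold = 8;  // Hypothetical hotness cutoff.

  void OnFunctionCalled(FunctionInfo* info) {
    info->call_count++;
    // Skip the costly property load optimization until the code has
    // proven itself hot; infrequently executed code stays unoptimized.
    if (!info->optimized && info->call_count >= kHotThreshold) {
      // OptimizePropertyLoads(info);  // Placeholder for the expensive pass.
      info->optimized = true;
    }
  }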

Exposed the calling JavaScript context through the static API function Context::GetCalling().
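
A sketch of how an embedder might use the new function from a native callback, written against the C++ API of this release; the callback itself and its registration are assumed for illustration:

  #include <v8.h>

  v8::Handle<v8::Value> MyNativeCallback(const v8::Arguments& args) {
    v8::HandleScope scope;
    // Context of the JavaScript code that invoked this callback.
    v8::Local<v8::Context> calling = v8::Context::GetCalling();
    if (calling.IsEmpty()) {
      return v8::Undefined();  // No JavaScript caller on the stack.
    }
    // Consult the calling context, e.g. inspect its global object.
    v8::Local<v8::Object> global = calling->Global();
    return scope.Close(global);
  }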


git-svn-id: http://v8.googlecode.com/svn/trunk@2050 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/heap.cc b/src/heap.cc
index a5b7b30..772cf32 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -40,7 +40,8 @@
 #include "scopeinfo.h"
 #include "v8threads.h"
 
-namespace v8 { namespace internal {
+namespace v8 {
+namespace internal {
 
 #define ROOT_ALLOCATION(type, name) type* Heap::name##_;
   ROOT_LIST(ROOT_ALLOCATION)
@@ -283,6 +284,9 @@
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
   ReportStatisticsAfterGC();
 #endif
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  Debug::AfterGarbageCollection();
+#endif
 }
 
 
@@ -664,11 +668,33 @@
   // Copy objects reachable from weak pointers.
   GlobalHandles::IterateWeakRoots(&scavenge_visitor);
 
+#if V8_HOST_ARCH_64_BIT
+  // TODO(X64): Make this go away again. We currently disable RSets for
+  // 64-bit-mode.
+  HeapObjectIterator old_pointer_iterator(old_pointer_space_);
+  while (old_pointer_iterator.has_next()) {
+    HeapObject* heap_object = old_pointer_iterator.next();
+    heap_object->Iterate(&scavenge_visitor);
+  }
+  HeapObjectIterator map_iterator(map_space_);
+  while (map_iterator.has_next()) {
+    HeapObject* heap_object = map_iterator.next();
+    heap_object->Iterate(&scavenge_visitor);
+  }
+  LargeObjectIterator lo_iterator(lo_space_);
+  while (lo_iterator.has_next()) {
+    HeapObject* heap_object = lo_iterator.next();
+    if (heap_object->IsFixedArray()) {
+      heap_object->Iterate(&scavenge_visitor);
+    }
+  }
+#else  // V8_HOST_ARCH_64_BIT
   // Copy objects reachable from the old generation.  By definition,
   // there are no intergenerational pointers in code or data spaces.
   IterateRSet(old_pointer_space_, &ScavengePointer);
   IterateRSet(map_space_, &ScavengePointer);
   lo_space_->IterateRSet(&ScavengePointer);
+#endif  // V8_HOST_ARCH_64_BIT
 
   do {
     ASSERT(new_space_front <= new_space_.top());
@@ -775,6 +801,8 @@
 
 
 int Heap::UpdateRSet(HeapObject* obj) {
+#ifndef V8_HOST_ARCH_64_BIT
+  // TODO(X64) Reenable RSet when we have a working 64-bit layout of Page.
   ASSERT(!InNewSpace(obj));
   // Special handling of fixed arrays to iterate the body based on the start
   // address and offset.  Just iterating the pointers as in UpdateRSetVisitor
@@ -796,6 +824,7 @@
     UpdateRSetVisitor v;
     obj->Iterate(&v);
   }
+#endif  // V8_HOST_ARCH_64_BIT
   return obj->Size();
 }
 
@@ -996,7 +1025,7 @@
   meta_map_ = reinterpret_cast<Map*>(obj);
   meta_map()->set_map(meta_map());
 
-  obj = AllocatePartialMap(FIXED_ARRAY_TYPE, Array::kHeaderSize);
+  obj = AllocatePartialMap(FIXED_ARRAY_TYPE, FixedArray::kHeaderSize);
   if (obj->IsFailure()) return false;
   fixed_array_map_ = Map::cast(obj);
 
@@ -1053,37 +1082,37 @@
   STRING_TYPE_LIST(ALLOCATE_STRING_MAP);
 #undef ALLOCATE_STRING_MAP
 
-  obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kHeaderSize);
+  obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kAlignedSize);
   if (obj->IsFailure()) return false;
   undetectable_short_string_map_ = Map::cast(obj);
   undetectable_short_string_map_->set_is_undetectable();
 
-  obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kHeaderSize);
+  obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize);
   if (obj->IsFailure()) return false;
   undetectable_medium_string_map_ = Map::cast(obj);
   undetectable_medium_string_map_->set_is_undetectable();
 
-  obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kHeaderSize);
+  obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize);
   if (obj->IsFailure()) return false;
   undetectable_long_string_map_ = Map::cast(obj);
   undetectable_long_string_map_->set_is_undetectable();
 
-  obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
+  obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
   if (obj->IsFailure()) return false;
   undetectable_short_ascii_string_map_ = Map::cast(obj);
   undetectable_short_ascii_string_map_->set_is_undetectable();
 
-  obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
+  obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
   if (obj->IsFailure()) return false;
   undetectable_medium_ascii_string_map_ = Map::cast(obj);
   undetectable_medium_ascii_string_map_->set_is_undetectable();
 
-  obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
+  obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
   if (obj->IsFailure()) return false;
   undetectable_long_ascii_string_map_ = Map::cast(obj);
   undetectable_long_ascii_string_map_->set_is_undetectable();
 
-  obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kHeaderSize);
+  obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kAlignedSize);
   if (obj->IsFailure()) return false;
   byte_array_map_ = Map::cast(obj);
 
@@ -1699,7 +1728,7 @@
 
 
 Object* Heap::CreateCode(const CodeDesc& desc,
-                         ScopeInfo<>* sinfo,
+                         ZoneScopeInfo* sinfo,
                          Code::Flags flags,
                          Handle<Object> self_reference) {
   // Compute size
@@ -2635,12 +2664,13 @@
 #endif  // DEBUG
 
 
-void Heap::IterateRSetRange(Address object_start,
-                            Address object_end,
-                            Address rset_start,
-                            ObjectSlotCallback copy_object_func) {
+int Heap::IterateRSetRange(Address object_start,
+                           Address object_end,
+                           Address rset_start,
+                           ObjectSlotCallback copy_object_func) {
   Address object_address = object_start;
   Address rset_address = rset_start;
+  int set_bits_count = 0;
 
   // Loop over all the pointers in [object_start, object_end).
   while (object_address < object_end) {
@@ -2657,6 +2687,7 @@
           // If this pointer does not need to be remembered anymore, clear
           // the remembered set bit.
           if (!Heap::InNewSpace(*object_p)) result_rset &= ~bitmask;
+          set_bits_count++;
         }
         object_address += kPointerSize;
       }
@@ -2670,6 +2701,7 @@
     }
     rset_address += kIntSize;
   }
+  return set_bits_count;
 }
 
 
@@ -2677,11 +2709,20 @@
   ASSERT(Page::is_rset_in_use());
   ASSERT(space == old_pointer_space_ || space == map_space_);
 
+  static void* paged_rset_histogram = StatsTable::CreateHistogram(
+      "V8.RSetPaged",
+      0,
+      Page::kObjectAreaSize / kPointerSize,
+      30);
+
   PageIterator it(space, PageIterator::PAGES_IN_USE);
   while (it.has_next()) {
     Page* page = it.next();
-    IterateRSetRange(page->ObjectAreaStart(), page->AllocationTop(),
-                     page->RSetStart(), copy_object_func);
+    int count = IterateRSetRange(page->ObjectAreaStart(), page->AllocationTop(),
+                                 page->RSetStart(), copy_object_func);
+    if (paged_rset_histogram != NULL) {
+      StatsTable::AddHistogramSample(paged_rset_histogram, count);
+    }
   }
 }