Version 3.2.7

Disabled the original 'classic' V8 code generator.  Crankshaft is now the default on all platforms.

Changed the heap profiler to use more descriptive names.

Performance and stability improvements to isolates on all platforms.

git-svn-id: http://v8.googlecode.com/svn/trunk@7491 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
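
The interesting part of the patch below is in src/heap.cc: the scavenging fast path used to check the logger and CPU profiler for every copied object. Now the scavenging visitor is compiled twice, once per logging/profiling mode, and every scavenge dispatches through a single process-wide callback table that starts out as a copy of the DISABLED instantiation. A minimal sketch of that shape, with illustrative stand-in names (Table, Mode, kVisitorIdCount are not the V8 types):

    static const int kVisitorIdCount = 32;   // stand-in for V8's visitor id space

    enum Mode { DISABLED, ENABLED };

    struct Table {
      void (*callbacks_[kVisitorIdCount])();
      void CopyFrom(Table* other) { *this = *other; }
    };

    template<Mode mode>
    struct Visitor {
      static void Initialize() { /* register mode-specific callbacks */ }
      static Table* GetTable() { return &table_; }
      static Table table_;
    };
    template<Mode mode> Table Visitor<mode>::table_;

    static Table shared_table;  // every scavenge dispatches through this

    static void InitializeTables() {
      Visitor<DISABLED>::Initialize();
      Visitor<ENABLED>::Initialize();
      shared_table.CopyFrom(Visitor<DISABLED>::GetTable());
    }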
diff --git a/src/heap.cc b/src/heap.cc
index 5d1a66e..6250172 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -941,6 +941,8 @@
 
   gc_state_ = SCAVENGE;
 
+  SwitchScavengingVisitorsTableIfProfilingWasEnabled();
+
   Page::FlipMeaningOfInvalidatedWatermarkFlag(this);
 #ifdef DEBUG
   VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID);
@@ -1232,6 +1234,32 @@
 }
 
 
+enum LoggingAndProfiling {
+  LOGGING_AND_PROFILING_ENABLED,
+  LOGGING_AND_PROFILING_DISABLED
+};
+
+
+typedef void (*ScavengingCallback)(Map* map,
+                                   HeapObject** slot,
+                                   HeapObject* object);
+
+
+static Atomic32 scavenging_visitors_table_mode_;
+static VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
+
+
+INLINE(static void DoScavengeObject(Map* map,
+                                    HeapObject** slot,
+                                    HeapObject* obj));
+
+
+void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
+  scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
+}
+
+
+template<LoggingAndProfiling logging_and_profiling_mode>
 class ScavengingVisitor : public StaticVisitorBase {
  public:
   static void Initialize() {
@@ -1240,23 +1268,22 @@
     table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
     table_.Register(kVisitByteArray, &EvacuateByteArray);
     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
+
     table_.Register(kVisitGlobalContext,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                        VisitSpecialized<Context::kSize>);
-
-    typedef ObjectEvacuationStrategy<POINTER_OBJECT> PointerObject;
+                        template VisitSpecialized<Context::kSize>);
 
     table_.Register(kVisitConsString,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                        VisitSpecialized<ConsString::kSize>);
+                        template VisitSpecialized<ConsString::kSize>);
 
     table_.Register(kVisitSharedFunctionInfo,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                        VisitSpecialized<SharedFunctionInfo::kSize>);
+                        template VisitSpecialized<SharedFunctionInfo::kSize>);
 
     table_.Register(kVisitJSFunction,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                    VisitSpecialized<JSFunction::kSize>);
+                        template VisitSpecialized<JSFunction::kSize>);
 
     table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
                                    kVisitDataObject,
@@ -1271,12 +1298,10 @@
                                    kVisitStructGeneric>();
   }
 
-
-  static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
-    table_.GetVisitor(map)(map, slot, obj);
+  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
+    return &table_;
   }
 
-
  private:
   enum ObjectContents  { DATA_OBJECT, POINTER_OBJECT };
   enum SizeRestriction { SMALL, UNKNOWN_SIZE };
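
A purely syntactic consequence shows up in the Register calls above: once ScavengingVisitor is a template, ObjectEvacuationStrategy<POINTER_OBJECT> becomes a dependent type, so taking the address of its VisitSpecialized member template requires the template disambiguator. A stripped-down illustration (all names invented):

    template<int mode>
    struct Outer {
      template<int kind>
      struct Strategy {
        template<int size>
        static void Visit() {}
      };
      static void Register() {
        // Strategy<0> depends on the enclosing template, so 'template'
        // must precede the member-template name when taking its address:
        void (*f)() = &Strategy<0>::template Visit<4>;
        (void) f;
      }
    };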
@@ -1313,21 +1338,24 @@
     // Set the forwarding address.
     source->set_map_word(MapWord::FromForwardingAddress(target));
 
+    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-    // Update NewSpace stats if necessary.
-    RecordCopiedObject(heap, target);
+      // Update NewSpace stats if necessary.
+      RecordCopiedObject(heap, target);
 #endif
-    HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
+      HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
 #if defined(ENABLE_LOGGING_AND_PROFILING)
-    Isolate* isolate = heap->isolate();
-    if (isolate->logger()->is_logging() ||
-        isolate->cpu_profiler()->is_profiling()) {
-      if (target->IsSharedFunctionInfo()) {
-        PROFILE(isolate, SharedFunctionInfoMoveEvent(
-            source->address(), target->address()));
+      Isolate* isolate = heap->isolate();
+      if (isolate->logger()->is_logging() ||
+          isolate->cpu_profiler()->is_profiling()) {
+        if (target->IsSharedFunctionInfo()) {
+          PROFILE(isolate, SharedFunctionInfoMoveEvent(
+              source->address(), target->address()));
+        }
       }
-    }
 #endif
+    }
+
     return target;
   }
 
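
The payoff of the template parameter is visible above: logging_and_profiling_mode is a compile-time constant, so in the LOGGING_AND_PROFILING_DISABLED instantiation the entire profiling block is statically dead and the compiler drops it from the object-copy fast path. The idiom in miniature (the body is hypothetical):

    enum LoggingAndProfiling {
      LOGGING_AND_PROFILING_ENABLED,
      LOGGING_AND_PROFILING_DISABLED
    };

    template<LoggingAndProfiling mode>
    int CopyCost(int words) {
      int cost = words;
      if (mode == LOGGING_AND_PROFILING_ENABLED) {
        cost += 100;  // stands in for the RecordCopiedObject/PROFILE work;
                      // eliminated entirely when mode is DISABLED
      }
      return cost;
    }
    // CopyCost<LOGGING_AND_PROFILING_DISABLED>(n) compiles down to 'return n'.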
@@ -1443,7 +1471,7 @@
         return;
       }
 
-      Scavenge(first->map(), slot, first);
+      DoScavengeObject(first->map(), slot, first);
       object->set_map_word(MapWord::FromForwardingAddress(*slot));
       return;
     }
@@ -1470,13 +1498,51 @@
     }
   };
 
-  typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);
-
-  static VisitorDispatchTable<Callback> table_;
+  static VisitorDispatchTable<ScavengingCallback> table_;
 };
 
 
-VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
+template<LoggingAndProfiling logging_and_profiling_mode>
+VisitorDispatchTable<ScavengingCallback>
+    ScavengingVisitor<logging_and_profiling_mode>::table_;
+
+
+static void InitializeScavengingVisitorsTables() {
+  ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::Initialize();
+  ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::Initialize();
+  scavenging_visitors_table_.CopyFrom(
+      ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::GetTable());
+  scavenging_visitors_table_mode_ = LOGGING_AND_PROFILING_DISABLED;
+}
+
+
+void Heap::SwitchScavengingVisitorsTableIfProfilingWasEnabled() {
+  if (scavenging_visitors_table_mode_ == LOGGING_AND_PROFILING_ENABLED) {
+    // Table was already updated by some isolate.
+    return;
+  }
+
+  if (isolate()->logger()->is_logging() ||
+      isolate()->cpu_profiler()->is_profiling() ||
+      (isolate()->heap_profiler() != NULL &&
+       isolate()->heap_profiler()->is_profiling())) {
+    // If one of the isolates is performing a scavenge at this moment,
+    // it might see the table in an inconsistent state where some of
+    // the callbacks point to
+    // ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED> and others to
+    // ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>.
+    // However, this does not lead to any bugs because such an isolate
+    // does not have profiling enabled, and any isolate with profiling
+    // enabled is guaranteed to see the table in a consistent state.
+    scavenging_visitors_table_.CopyFrom(
+        ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::GetTable());
+
+    // We use Release_Store to prevent this write from being reordered
+    // ahead of the preceding writes to the table.
+    Release_Store(&scavenging_visitors_table_mode_,
+                  LOGGING_AND_PROFILING_ENABLED);
+  }
+}
 
 
 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
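
The switch above is lock-free: the shared table is rewritten with the ENABLED callbacks first, and only then is the mode flag published with a release store, so no reader can observe the flag without also observing the completed table. V8 uses its own Atomic32/Release_Store primitives here; the same pattern in C++11 atomics, as a hedged analogue with an invented helper name, looks like:

    #include <atomic>

    void FillSharedTableWithProfilingCallbacks();  // hypothetical helper

    static std::atomic<int> table_mode(LOGGING_AND_PROFILING_DISABLED);

    void SwitchIfProfilingWasEnabled(bool profiling_now_on) {
      if (table_mode.load(std::memory_order_relaxed) ==
          LOGGING_AND_PROFILING_ENABLED) {
        return;  // some isolate already switched the table
      }
      if (!profiling_now_on) return;

      FillSharedTableWithProfilingCallbacks();  // plain stores into the table

      // Release: this store cannot become visible before the table writes,
      // so a thread that reads ENABLED is guaranteed a fully updated table.
      table_mode.store(LOGGING_AND_PROFILING_ENABLED,
                       std::memory_order_release);
    }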
@@ -1484,7 +1550,7 @@
   MapWord first_word = object->map_word();
   ASSERT(!first_word.IsForwardingAddress());
   Map* map = first_word.ToMap();
-  ScavengingVisitor::Scavenge(map, p, object);
+  DoScavengeObject(map, p, object);
 }
 
 
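ScavengeObjectSlow now funnels through DoScavengeObject, so the per-object work is a single indexed load through the shared table rather than a call into a particular ScavengingVisitor instantiation. Roughly what the GetVisitor dispatch amounts to (the visitor-id field is a stand-in for how V8 maps a Map to a table slot):

    class HeapObject;
    struct Map { int visitor_id; };  // stand-in: V8 derives an id per map

    typedef void (*ScavengingCallback)(Map* map, HeapObject** slot,
                                       HeapObject* object);

    struct DispatchTableSketch {
      ScavengingCallback callbacks_[kVisitorIdCount];
      ScavengingCallback GetVisitor(Map* map) {
        // One indexed load and an indirect call; no logger/profiler
        // checks remain on this path.
        return callbacks_[map->visitor_id];
      }
    };
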
@@ -4757,10 +4823,10 @@
   gc_initializer_mutex->Lock();
   static bool initialized_gc = false;
   if (!initialized_gc) {
-      initialized_gc = true;
-      ScavengingVisitor::Initialize();
-      NewSpaceScavenger::Initialize();
-      MarkCompactCollector::Initialize();
+    initialized_gc = true;
+    InitializeScavengingVisitorsTables();
+    NewSpaceScavenger::Initialize();
+    MarkCompactCollector::Initialize();
   }
   gc_initializer_mutex->Unlock();
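
Finally, the reindented Setup block above now routes through InitializeScavengingVisitorsTables so both template instantiations get registered, and because several isolates can call Setup concurrently, the work stays behind gc_initializer_mutex. The guard reduces to the classic once-per-process pattern (std::mutex stands in for V8's platform mutex):

    #include <mutex>

    void InitializeScavengingVisitorsTables();  // from the patch above

    static std::mutex gc_init_mutex;

    void EnsureGCInitialized() {
      std::lock_guard<std::mutex> lock(gc_init_mutex);
      static bool initialized = false;  // guarded by gc_init_mutex
      if (!initialized) {
        initialized = true;
        InitializeScavengingVisitorsTables();
      }
    }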