Version 3.13.4

Print the reason for disabling optimization. Removed the --trace-bailout flag.

Provided an option to disable the full DEBUG build on Android.

Introduced global contexts to represent lexical global scope(s).

Fixed rounding in Uint8ClampedArray setter. (issue 2294)
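
The intended behavior, per the typed-array spec, is to clamp to [0, 255] and round ties to the nearest even integer. A minimal sketch of that conversion (ClampedToUint8 is an illustrative helper showing the specified behavior, not the code changed by this patch):

// Sketch only: clamp to [0, 255] with round-half-to-even, the rounding
// Uint8ClampedArray setters are specified to use (0.5 -> 0, 1.5 -> 2,
// 2.5 -> 2).
#include <cmath>
#include <cstdint>

static uint8_t ClampedToUint8(double value) {
  if (std::isnan(value) || value <= 0.0) return 0;
  if (value >= 255.0) return 255;
  double rounded = std::floor(value);
  double diff = value - rounded;
  // Round up on a remainder above one half, or on an exact half when the
  // floored value is odd (ties go to the even neighbour).
  if (diff > 0.5 || (diff == 0.5 && std::fmod(rounded, 2.0) != 0.0)) {
    rounded += 1.0;
  }
  return static_cast<uint8_t>(rounded);
}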

Performance and stability improvements on all platforms.

git-svn-id: http://v8.googlecode.com/svn/trunk@12394 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
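
The diff below (src/heap.cc) renames the heap's list of built-in contexts from global_contexts_list_ to native_contexts_list_, freeing the name "global context" for the new lexical global scope. The walk pattern over that list is unchanged; a rough sketch of it, assuming the renamed native_contexts_list() accessor and the NEXT_CONTEXT_LINK slot, neither of which is visible in these hunks:

// Sketch, not part of the patch: visit every native context the heap
// tracks, starting from the renamed list head. Each context links to the
// next one through its NEXT_CONTEXT_LINK slot; the list ends at undefined.
Object* context = heap->native_contexts_list();
while (!context->IsUndefined()) {
  Context* native_context = Context::cast(context);
  // ... operate on native_context (clear caches, update cached maps, ...)
  context = native_context->get(Context::NEXT_CONTEXT_LINK);
}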
diff --git a/src/heap.cc b/src/heap.cc
index 8fc0253..694d9fd 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -175,7 +175,7 @@
   }
 
   memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
-  global_contexts_list_ = NULL;
+  native_contexts_list_ = NULL;
   mark_compact_collector_.heap_ = this;
   external_string_table_.heap_ = this;
   // Put a dummy entry in the remembered pages so we can find the list the
@@ -755,7 +755,7 @@
 void Heap::ClearJSFunctionResultCaches() {
   if (isolate_->bootstrapper()->IsActive()) return;
 
-  Object* context = global_contexts_list_;
+  Object* context = native_contexts_list_;
   while (!context->IsUndefined()) {
     // Get the caches for this context. GC can happen when the context
     // is not fully initialized, so the caches can be undefined.
@@ -782,7 +782,7 @@
     return;
   }
 
-  Object* context = global_contexts_list_;
+  Object* context = native_contexts_list_;
   while (!context->IsUndefined()) {
     // GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
@@ -1295,8 +1295,8 @@
     }
   }
 
-  // Scavenge object reachable from the global contexts list directly.
-  scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
+  // Scavenge object reachable from the native contexts list directly.
+  scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_));
 
   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
   isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
@@ -1456,7 +1456,7 @@
   Object* undefined = undefined_value();
   Object* head = undefined;
   Context* tail = NULL;
-  Object* candidate = global_contexts_list_;
+  Object* candidate = native_contexts_list_;
 
   // We don't record weak slots during marking or scavenges.
   // Instead we do it once when we complete mark-compact cycle.
@@ -1529,7 +1529,7 @@
   }
 
   // Update the head of the list of contexts.
-  global_contexts_list_ = head;
+  native_contexts_list_ = head;
 }
 
 
@@ -1655,7 +1655,7 @@
     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
 
-    table_.Register(kVisitGlobalContext,
+    table_.Register(kVisitNativeContext,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
                         template VisitSpecialized<Context::kSize>);
 
@@ -2049,9 +2049,8 @@
 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
                                       int instance_size) {
   Object* result;
-  { MaybeObject* maybe_result = AllocateRawMap();
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  MaybeObject* maybe_result = AllocateRawMap();
+  if (!maybe_result->ToObject(&result)) return maybe_result;
 
   // Map::cast cannot be used due to uninitialized map field.
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
@@ -2064,8 +2063,7 @@
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
   reinterpret_cast<Map*>(result)->set_bit_field(0);
   reinterpret_cast<Map*>(result)->set_bit_field2(0);
-  reinterpret_cast<Map*>(result)->set_bit_field3(
-      Map::LastAddedBits::encode(Map::kNoneAdded));
+  reinterpret_cast<Map*>(result)->set_bit_field3(0);
   return result;
 }
 
@@ -2092,7 +2090,7 @@
   map->set_unused_property_fields(0);
   map->set_bit_field(0);
   map->set_bit_field2(1 << Map::kIsExtensible);
-  map->set_bit_field3(Map::LastAddedBits::encode(Map::kNoneAdded));
+  map->set_bit_field3(0);
   map->set_elements_kind(elements_kind);
 
   // If the map object is aligned fill the padding area with Smi 0 objects.
@@ -2137,8 +2135,7 @@
   { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE);
     if (!maybe_info->To(&info)) return maybe_info;
   }
-  info->set_ic_total_count(0);
-  info->set_ic_with_type_info_count(0);
+  info->initialize_storage();
   info->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()),
                                 SKIP_WRITE_BARRIER);
   return info;
@@ -2441,10 +2438,16 @@
         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
-  Map* global_context_map = Map::cast(obj);
-  global_context_map->set_dictionary_map(true);
-  global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext);
-  set_global_context_map(global_context_map);
+  set_global_context_map(Map::cast(obj));
+
+  { MaybeObject* maybe_obj =
+        AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
+    if (!maybe_obj->ToObject(&obj)) return false;
+  }
+  Map* native_context_map = Map::cast(obj);
+  native_context_map->set_dictionary_map(true);
+  native_context_map->set_visitor_id(StaticVisitorBase::kVisitNativeContext);
+  set_native_context_map(native_context_map);
 
   { MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE,
                                          SharedFunctionInfo::kAlignedSize);
@@ -3723,7 +3726,7 @@
   // from the function's context, since the function can be from a
   // different context.
   JSFunction* object_function =
-      function->context()->global_context()->object_function();
+      function->context()->native_context()->object_function();
 
   // Each function prototype gets a copy of the object function map.
   // This avoid unwanted sharing of maps between prototypes of different
@@ -3773,12 +3776,12 @@
       !JSFunction::cast(callee)->shared()->is_classic_mode();
   if (strict_mode_callee) {
     boilerplate =
-        isolate()->context()->global_context()->
+        isolate()->context()->native_context()->
             strict_mode_arguments_boilerplate();
     arguments_object_size = kArgumentsObjectSizeStrict;
   } else {
     boilerplate =
-        isolate()->context()->global_context()->arguments_boilerplate();
+        isolate()->context()->native_context()->arguments_boilerplate();
     arguments_object_size = kArgumentsObjectSize;
   }
 
@@ -3884,7 +3887,7 @@
         FieldDescriptor field(name, i, NONE, i + 1);
         descriptors->Set(i, &field, witness);
       }
-      descriptors->Sort(witness);
+      descriptors->Sort();
 
       // The descriptors may contain duplicates because the compiler does not
       // guarantee the uniqueness of property names (it would have required
@@ -4156,8 +4159,9 @@
   for (int i = 0; i < descs->number_of_descriptors(); i++) {
     PropertyDetails details = descs->GetDetails(i);
     ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
-    PropertyDetails d =
-        PropertyDetails(details.attributes(), CALLBACKS, details.index());
+    PropertyDetails d = PropertyDetails(details.attributes(),
+                                        CALLBACKS,
+                                        details.descriptor_index());
     Object* value = descs->GetCallbacksObject(i);
     MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
     if (!maybe_value->ToObject(&value)) return maybe_value;
@@ -4309,7 +4313,7 @@
     map->set_function_with_prototype(true);
     InitializeFunction(JSFunction::cast(object), shared, the_hole_value());
     JSFunction::cast(object)->set_context(
-        isolate()->context()->global_context());
+        isolate()->context()->native_context());
   }
 
   // Put in filler if the new object is smaller than the old.
@@ -4604,10 +4608,10 @@
 MaybeObject* Heap::AllocateJSArray(
     ElementsKind elements_kind,
     PretenureFlag pretenure) {
-  Context* global_context = isolate()->context()->global_context();
-  JSFunction* array_function = global_context->array_function();
+  Context* native_context = isolate()->context()->native_context();
+  JSFunction* array_function = native_context->array_function();
   Map* map = array_function->initial_map();
-  Object* maybe_map_array = global_context->js_array_maps();
+  Object* maybe_map_array = native_context->js_array_maps();
   if (!maybe_map_array->IsUndefined()) {
     Object* maybe_transitioned_map =
         FixedArray::cast(maybe_map_array)->get(elements_kind);
@@ -4890,18 +4894,35 @@
 }
 
 
-MaybeObject* Heap::AllocateGlobalContext() {
+MaybeObject* Heap::AllocateNativeContext() {
   Object* result;
   { MaybeObject* maybe_result =
-        AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
+        AllocateFixedArray(Context::NATIVE_CONTEXT_SLOTS);
+    if (!maybe_result->ToObject(&result)) return maybe_result;
+  }
+  Context* context = reinterpret_cast<Context*>(result);
+  context->set_map_no_write_barrier(native_context_map());
+  context->set_js_array_maps(undefined_value());
+  ASSERT(context->IsNativeContext());
+  ASSERT(result->IsContext());
+  return result;
+}
+
+
+MaybeObject* Heap::AllocateGlobalContext(JSFunction* function,
+                                         ScopeInfo* scope_info) {
+  Object* result;
+  { MaybeObject* maybe_result =
+        AllocateFixedArray(scope_info->ContextLength(), TENURED);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Context* context = reinterpret_cast<Context*>(result);
   context->set_map_no_write_barrier(global_context_map());
-  context->set_js_array_maps(undefined_value());
+  context->set_closure(function);
+  context->set_extension(scope_info);
   ASSERT(context->IsGlobalContext());
   ASSERT(result->IsContext());
-  return result;
+  return context;
 }
 
 
@@ -4930,7 +4951,7 @@
   context->set_closure(function);
   context->set_previous(function->context());
   context->set_extension(Smi::FromInt(0));
-  context->set_global(function->context()->global());
+  context->set_global_object(function->context()->global_object());
   return context;
 }
 
@@ -4950,7 +4971,7 @@
   context->set_closure(function);
   context->set_previous(previous);
   context->set_extension(name);
-  context->set_global(previous->global());
+  context->set_global_object(previous->global_object());
   context->set(Context::THROWN_OBJECT_INDEX, thrown_object);
   return context;
 }
@@ -4968,7 +4989,7 @@
   context->set_closure(function);
   context->set_previous(previous);
   context->set_extension(extension);
-  context->set_global(previous->global());
+  context->set_global_object(previous->global_object());
   return context;
 }
 
@@ -4986,7 +5007,7 @@
   context->set_closure(function);
   context->set_previous(previous);
   context->set_extension(scope_info);
-  context->set_global(previous->global());
+  context->set_global_object(previous->global_object());
   return context;
 }
 
@@ -6163,7 +6184,7 @@
     // Create initial objects
     if (!CreateInitialObjects()) return false;
 
-    global_contexts_list_ = undefined_value();
+    native_contexts_list_ = undefined_value();
   }
 
   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
@@ -6690,8 +6711,8 @@
 }
 
 
-static bool SafeIsGlobalContext(HeapObject* obj) {
-  return obj->map() == obj->GetHeap()->raw_unchecked_global_context_map();
+static bool SafeIsNativeContext(HeapObject* obj) {
+  return obj->map() == obj->GetHeap()->raw_unchecked_native_context_map();
 }
 
 
@@ -6713,7 +6734,7 @@
     return;
   }
 
-  bool is_global_context = SafeIsGlobalContext(obj);
+  bool is_native_context = SafeIsNativeContext(obj);
 
   // not visited yet
   Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
@@ -6723,7 +6744,7 @@
   obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
 
   // Scan the object body.
-  if (is_global_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
+  if (is_native_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
     // This is specialized to scan Context's properly.
     Object** start = reinterpret_cast<Object**>(obj->address() +
                                                 Context::kHeaderSize);
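
Taken together, these hunks split context allocation in two: the renamed AllocateNativeContext() still builds the builtins-bearing context (now tagged with native_context_map), while the new AllocateGlobalContext(function, scope_info) builds a tenured context sized by its ScopeInfo for the lexical global scope. A rough caller-side sketch of the new pair, following the MaybeObject protocol used throughout heap.cc; the variables closure and scope_info are hypothetical stand-ins:

// Sketch only: closure and scope_info are placeholders for whatever the
// bootstrapper supplies; error propagation mirrors the pattern in heap.cc.
Object* native;
{ MaybeObject* maybe_native = heap->AllocateNativeContext();
  if (!maybe_native->ToObject(&native)) return maybe_native;
}

Object* global;
{ MaybeObject* maybe_global =
      heap->AllocateGlobalContext(closure, scope_info);
  if (!maybe_global->ToObject(&global)) return maybe_global;
}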