Push version 1.2.9 to trunk.

Improved math performance on ARM.

Fixed a name-inference bug in the profiler.

Fixed handling of shared libraries in the profiler tick processor scripts.

Fixed the test scripts' handling of tests that time out.

Fixed compilation on Mac OS X version 10.4.

Fixed two bugs in the regular expression engine.

Fixed a bug in string type inference.

Fixed a bug in the handling of 'constant function' properties.
        
Improved overall performance.


git-svn-id: http://v8.googlecode.com/svn/trunk@2249 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/heap.cc b/src/heap.cc
index eb70f21..2573be8 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -500,7 +500,9 @@
 void Heap::MarkCompactPrologue(bool is_compacting) {
   // At any old GC clear the keyed lookup cache to enable collection of unused
   // maps.
-  ClearKeyedLookupCache();
+  KeyedLookupCache::Clear();
+  ContextSlotCache::Clear();
+  DescriptorLookupCache::Clear();
 
   CompilationCache::MarkCompactPrologue();
 
@@ -629,6 +631,9 @@
   // Implements Cheney's copying algorithm
   LOG(ResourceEvent("scavenge", "begin"));
 
+  // Clear descriptor cache.
+  DescriptorLookupCache::Clear();
+
   // Used for updating survived_since_last_expansion_ at function end.
   int survived_watermark = PromotedSpaceSize();
 
@@ -943,17 +948,15 @@
 
   // If the object should be promoted, we try to copy it to old space.
   if (ShouldBePromoted(object->address(), object_size)) {
-    OldSpace* target_space = Heap::TargetSpace(object);
-    ASSERT(target_space == Heap::old_pointer_space_ ||
-           target_space == Heap::old_data_space_);
-    Object* result = target_space->AllocateRaw(object_size);
-    if (!result->IsFailure()) {
-      HeapObject* target = HeapObject::cast(result);
-      if (target_space == Heap::old_pointer_space_) {
+    Object* result;
+    if (object_size > MaxObjectSizeInPagedSpace()) {
+      result = lo_space_->AllocateRawFixedArray(object_size);
+      if (!result->IsFailure()) {
         // Save the from-space object pointer and its map pointer at the
         // top of the to space to be swept and copied later.  Write the
         // forwarding address over the map word of the from-space
         // object.
+        HeapObject* target = HeapObject::cast(result);
         promotion_queue.insert(object, first_word.ToMap());
         object->set_map_word(MapWord::FromForwardingAddress(target));
 
@@ -964,21 +967,45 @@
         node->set_size(object_size);
 
         *p = target;
-      } else {
-        // Objects promoted to the data space can be copied immediately
-        // and not revisited---we will never sweep that space for
-        // pointers and the copied objects do not contain pointers to
-        // new space objects.
-        *p = MigrateObject(object, target, object_size);
-#ifdef DEBUG
-        VerifyNonPointerSpacePointersVisitor v;
-        (*p)->Iterate(&v);
-#endif
+        return;
       }
-      return;
+    } else {
+      OldSpace* target_space = Heap::TargetSpace(object);
+      ASSERT(target_space == Heap::old_pointer_space_ ||
+             target_space == Heap::old_data_space_);
+      result = target_space->AllocateRaw(object_size);
+      if (!result->IsFailure()) {
+        HeapObject* target = HeapObject::cast(result);
+        if (target_space == Heap::old_pointer_space_) {
+          // Save the from-space object pointer and its map pointer at the
+          // top of the to space to be swept and copied later.  Write the
+          // forwarding address over the map word of the from-space
+          // object.
+          promotion_queue.insert(object, first_word.ToMap());
+          object->set_map_word(MapWord::FromForwardingAddress(target));
+
+          // Give the space allocated for the result a proper map by
+          // treating it as a free list node (not linked into the free
+          // list).
+          FreeListNode* node = FreeListNode::FromAddress(target->address());
+          node->set_size(object_size);
+
+          *p = target;
+        } else {
+          // Objects promoted to the data space can be copied immediately
+          // and not revisited---we will never sweep that space for
+          // pointers and the copied objects do not contain pointers to
+          // new space objects.
+          *p = MigrateObject(object, target, object_size);
+#ifdef DEBUG
+          VerifyNonPointerSpacePointersVisitor v;
+          (*p)->Iterate(&v);
+#endif
+        }
+        return;
+      }
     }
   }
-
   // The object should remain in new space or the old space allocation failed.
   Object* result = new_space_.AllocateRaw(object_size);
   // Failed allocation at this point is utterly unexpected.
@@ -1364,7 +1391,13 @@
   last_script_id_ = undefined_value();
 
   // Initialize keyed lookup cache.
-  ClearKeyedLookupCache();
+  KeyedLookupCache::Clear();
+
+  // Initialize context slot cache.
+  ContextSlotCache::Clear();
+
+  // Initialize descriptor cache.
+  DescriptorLookupCache::Clear();
 
   // Initialize compilation cache.
   CompilationCache::Clear();
@@ -1488,6 +1521,8 @@
   share->set_name(name);
   Code* illegal = Builtins::builtin(Builtins::Illegal);
   share->set_code(illegal);
+  Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
+  share->set_construct_stub(construct_stub);
   share->set_expected_nof_properties(0);
   share->set_length(0);
   share->set_formal_parameter_count(0);
@@ -1698,7 +1733,7 @@
   }
   int size = ByteArray::SizeFor(length);
   AllocationSpace space =
-      size > MaxHeapObjectSize() ? LO_SPACE : OLD_DATA_SPACE;
+      size > MaxObjectSizeInPagedSpace() ? LO_SPACE : OLD_DATA_SPACE;
 
   Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
 
@@ -1713,7 +1748,7 @@
 Object* Heap::AllocateByteArray(int length) {
   int size = ByteArray::SizeFor(length);
   AllocationSpace space =
-      size > MaxHeapObjectSize() ? LO_SPACE : NEW_SPACE;
+      size > MaxObjectSizeInPagedSpace() ? LO_SPACE : NEW_SPACE;
 
   Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
 
@@ -1748,7 +1783,7 @@
   int obj_size = Code::SizeFor(body_size, sinfo_size);
   ASSERT(IsAligned(obj_size, Code::kCodeAlignment));
   Object* result;
-  if (obj_size > MaxHeapObjectSize()) {
+  if (obj_size > MaxObjectSizeInPagedSpace()) {
     result = lo_space_->AllocateRawCode(obj_size);
   } else {
     result = code_space_->AllocateRaw(obj_size);
@@ -1788,7 +1823,7 @@
   // Allocate an object the same size as the code object.
   int obj_size = code->Size();
   Object* result;
-  if (obj_size > MaxHeapObjectSize()) {
+  if (obj_size > MaxObjectSizeInPagedSpace()) {
     result = lo_space_->AllocateRawCode(obj_size);
   } else {
     result = code_space_->AllocateRaw(obj_size);
@@ -1963,7 +1998,7 @@
   // Allocate the JSObject.
   AllocationSpace space =
       (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
-  if (map->instance_size() > MaxHeapObjectSize()) space = LO_SPACE;
+  if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
   Object* obj = Allocate(map, space);
   if (obj->IsFailure()) return obj;
 
@@ -2250,7 +2285,7 @@
 
   // Allocate string.
   AllocationSpace space =
-      (size > MaxHeapObjectSize()) ? LO_SPACE : OLD_DATA_SPACE;
+      (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_DATA_SPACE;
   Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
   if (result->IsFailure()) return result;
 
@@ -2272,13 +2307,16 @@
 Object* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   int size = SeqAsciiString::SizeFor(length);
-  if (size > MaxHeapObjectSize()) {
-    space = LO_SPACE;
-  }
 
-  // Use AllocateRaw rather than Allocate because the object's size cannot be
-  // determined from the map.
-  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
+  Object* result = Failure::OutOfMemoryException();
+  if (space == NEW_SPACE) {
+    result = size <= kMaxObjectSizeInNewSpace
+        ? new_space_.AllocateRaw(size)
+        : lo_space_->AllocateRawFixedArray(size);
+  } else {
+    if (size > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
+    result = AllocateRaw(size, space, OLD_DATA_SPACE);
+  }
   if (result->IsFailure()) return result;
 
   // Determine the map based on the string's length.
@@ -2302,13 +2340,16 @@
 Object* Heap::AllocateRawTwoByteString(int length, PretenureFlag pretenure) {
   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   int size = SeqTwoByteString::SizeFor(length);
-  if (size > MaxHeapObjectSize()) {
-    space = LO_SPACE;
-  }
 
-  // Use AllocateRaw rather than Allocate because the object's size cannot be
-  // determined from the map.
-  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
+  Object* result = Failure::OutOfMemoryException();
+  if (space == NEW_SPACE) {
+    result = size <= kMaxObjectSizeInNewSpace
+        ? new_space_.AllocateRaw(size)
+        : lo_space_->AllocateRawFixedArray(size);
+  } else {
+    if (size > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
+    result = AllocateRaw(size, space, OLD_DATA_SPACE);
+  }
   if (result->IsFailure()) return result;
 
   // Determine the map based on the string's length.
@@ -2345,9 +2386,9 @@
   if (always_allocate()) return AllocateFixedArray(length, NOT_TENURED);
   // Allocate the raw data for a fixed array.
   int size = FixedArray::SizeFor(length);
-  return (size > MaxHeapObjectSize())
-      ? lo_space_->AllocateRawFixedArray(size)
-      : new_space_.AllocateRaw(size);
+  return size <= kMaxObjectSizeInNewSpace
+      ? new_space_.AllocateRaw(size)
+      : lo_space_->AllocateRawFixedArray(size);
 }
 
 
@@ -2395,16 +2436,22 @@
   if (length == 0) return empty_fixed_array();
 
   int size = FixedArray::SizeFor(length);
-  Object* result;
-  if (size > MaxHeapObjectSize()) {
-    result = lo_space_->AllocateRawFixedArray(size);
-  } else {
-    AllocationSpace space =
-        (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
-    result = AllocateRaw(size, space, OLD_POINTER_SPACE);
+  Object* result = Failure::OutOfMemoryException();
+  if (pretenure != TENURED) {
+    result = size <= kMaxObjectSizeInNewSpace
+        ? new_space_.AllocateRaw(size)
+        : lo_space_->AllocateRawFixedArray(size);
   }
-  if (result->IsFailure()) return result;
-
+  if (result->IsFailure()) {
+    if (size > MaxObjectSizeInPagedSpace()) {
+      result = lo_space_->AllocateRawFixedArray(size);
+    } else {
+      AllocationSpace space =
+          (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
+      result = AllocateRaw(size, space, OLD_POINTER_SPACE);
+    }
+    if (result->IsFailure()) return result;
+  }
   // Initialize the object.
   reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
   FixedArray* array = FixedArray::cast(result);
@@ -2504,7 +2551,7 @@
   }
   int size = map->instance_size();
   AllocationSpace space =
-      (size > MaxHeapObjectSize()) ? LO_SPACE : OLD_POINTER_SPACE;
+      (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
   Object* result = Heap::Allocate(map, space);
   if (result->IsFailure()) return result;
   Struct::cast(result)->InitializeBody(size);
@@ -3478,6 +3525,58 @@
 }
 
 
+int KeyedLookupCache::Hash(Map* map, String* name) {
+  // Uses only lower 32 bits if pointers are larger.
+  uintptr_t addr_hash =
+      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> 2;
+  return (addr_hash ^ name->Hash()) % kLength;
+}
+
+
+int KeyedLookupCache::Lookup(Map* map, String* name) {
+  int index = Hash(map, name);
+  Key& key = keys_[index];
+  if ((key.map == map) && key.name->Equals(name)) {
+    return field_offsets_[index];
+  }
+  return -1;
+}
+
+
+void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
+  String* symbol;
+  if (Heap::LookupSymbolIfExists(name, &symbol)) {
+    int index = Hash(map, symbol);
+    Key& key = keys_[index];
+    key.map = map;
+    key.name = symbol;
+    field_offsets_[index] = field_offset;
+  }
+}
+
+
+void KeyedLookupCache::Clear() {
+  for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
+}
+
+
+KeyedLookupCache::Key KeyedLookupCache::keys_[KeyedLookupCache::kLength];
+
+
+int KeyedLookupCache::field_offsets_[KeyedLookupCache::kLength];
+
+
+void DescriptorLookupCache::Clear() {
+  for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
+}
+
+
+DescriptorLookupCache::Key
+DescriptorLookupCache::keys_[DescriptorLookupCache::kLength];
+
+int DescriptorLookupCache::results_[DescriptorLookupCache::kLength];
+
+
 #ifdef DEBUG
 bool Heap::GarbageCollectionGreedyCheck() {
   ASSERT(FLAG_gc_greedy);