Version 3.16.6

Made the Isolate parameter mandatory in Locker and Unlocker classes. (issue 2487)
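For reference, a minimal embedder-side sketch of the now-required explicit-Isolate form (illustrative only; RunInIsolate and YieldIsolate are placeholder names, while v8::Locker, v8::Unlocker, v8::Isolate::Scope and v8::HandleScope are the public API as of this release):

  #include <v8.h>

  void RunInIsolate(v8::Isolate* isolate) {
    v8::Locker locker(isolate);                 // passing the Isolate* is now mandatory
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope;
    // ... create a Context and run scripts on |isolate| ...
  }

  void YieldIsolate(v8::Isolate* isolate) {
    // Assumes the calling thread currently holds a v8::Locker on |isolate|.
    v8::Unlocker unlocker(isolate);             // the Isolate* is required here as well
    // ... long-running work that does not touch V8 ...
  }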

Avoided pointer underflow in CopyCharsUnsigned. (issue 2493)
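The class of bug involved, as a generic sketch (this is not V8's CopyCharsUnsigned; CopyCharsSketch/CopyCharsSafe and their signatures are made up for illustration): forming a "last element" pointer with src + n - 1 underflows past the start of the buffer when n == 0.

  #include <stddef.h>
  #include <stdint.h>

  // Hazard: src + n - 1 points before the buffer when n == 0 (pointer
  // underflow); besides being undefined behaviour, a wrapped value can make
  // the p <= last test pass and over-copy.
  void CopyCharsSketch(uint16_t* dest, const uint8_t* src, size_t n) {
    const uint8_t* last = src + n - 1;
    for (const uint8_t* p = src; p <= last; ++p) {
      *dest++ = *p;
    }
  }

  // Safer shape: compare against a one-past-the-end limit, which is
  // well defined even for n == 0.
  void CopyCharsSafe(uint16_t* dest, const uint8_t* src, size_t n) {
    const uint8_t* limit = src + n;
    while (src < limit) {
      *dest++ = *src++;
    }
  }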

Generated shim headers when using system v8. (Chromium issue 165264)

Fixed arguments materialization for inlined apply(). (issue 2489)

Synced laziness between BuildFunctionInfo and MakeFunctionInfo. (Chromium issue 147497)

Added a sanity check to CodeFlusher::AddCandidate. (Chromium issue 169209)

Performance and stability improvements on all platforms.

git-svn-id: http://v8.googlecode.com/svn/trunk@13439 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index e9d7509..c8fae64 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -4159,11 +4159,6 @@
     }
     DeoptimizeIf(below_equal, instr->environment());
   } else {
-    if (instr->hydrogen()->index()->representation().IsTagged() &&
-        !instr->hydrogen()->index()->type().IsSmi()) {
-      __ test(ToRegister(instr->index()), Immediate(kSmiTagMask));
-      DeoptimizeIf(not_zero, instr->environment());
-    }
     __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
     DeoptimizeIf(above_equal, instr->environment());
   }
@@ -5300,6 +5295,8 @@
   Handle<FixedArray> literals(instr->environment()->closure()->literals());
   ElementsKind boilerplate_elements_kind =
       instr->hydrogen()->boilerplate_elements_kind();
+  AllocationSiteMode allocation_site_mode =
+      instr->hydrogen()->allocation_site_mode();
 
   // Deopt if the array literal boilerplate ElementsKind is of a type different
   // than the expected one. The check isn't necessary if the boilerplate has
@@ -5330,7 +5327,7 @@
     ASSERT(instr->hydrogen()->depth() == 1);
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
-    FastCloneShallowArrayStub stub(mode, length);
+    FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   } else if (instr->hydrogen()->depth() > 1) {
     CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
@@ -5339,9 +5336,9 @@
   } else {
     FastCloneShallowArrayStub::Mode mode =
         boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
-            ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
-            : FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    FastCloneShallowArrayStub stub(mode, length);
+        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   }
 }
@@ -5350,10 +5347,14 @@
 void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
                             Register result,
                             Register source,
-                            int* offset) {
+                            int* offset,
+                            AllocationSiteMode mode) {
   ASSERT(!source.is(ecx));
   ASSERT(!result.is(ecx));
 
+  bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
+      object->map()->CanTrackAllocationSite();
+
   if (FLAG_debug_code) {
     __ LoadHeapObject(ecx, object);
     __ cmp(source, ecx);
@@ -5376,8 +5377,13 @@
   // this object and its backing store.
   int object_offset = *offset;
   int object_size = object->map()->instance_size();
-  int elements_offset = *offset + object_size;
   int elements_size = has_elements ? elements->Size() : 0;
+  int elements_offset = *offset + object_size;
+  if (create_allocation_site_info) {
+    elements_offset += AllocationSiteInfo::kSize;
+    *offset += AllocationSiteInfo::kSize;
+  }
+
   *offset += object_size + elements_size;
 
   // Copy object header.
@@ -5402,7 +5408,8 @@
       __ lea(ecx, Operand(result, *offset));
       __ mov(FieldOperand(result, total_offset), ecx);
       __ LoadHeapObject(source, value_object);
-      EmitDeepCopy(value_object, result, source, offset);
+      EmitDeepCopy(value_object, result, source, offset,
+                   DONT_TRACK_ALLOCATION_SITE);
     } else if (value->IsHeapObject()) {
       __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
       __ mov(FieldOperand(result, total_offset), ecx);
@@ -5411,6 +5418,14 @@
     }
   }
 
+  // Write an AllocationSiteInfo after the object if tracking is enabled.
+  if (create_allocation_site_info) {
+    __ mov(FieldOperand(result, object_size),
+           Immediate(Handle<Map>(isolate()->heap()->
+                                 allocation_site_info_map())));
+    __ mov(FieldOperand(result, object_size + kPointerSize), source);
+  }
+
   if (has_elements) {
     // Copy elements backing store header.
     __ LoadHeapObject(source, elements);
@@ -5443,7 +5458,8 @@
           __ lea(ecx, Operand(result, *offset));
           __ mov(FieldOperand(result, total_offset), ecx);
           __ LoadHeapObject(source, value_object);
-          EmitDeepCopy(value_object, result, source, offset);
+          EmitDeepCopy(value_object, result, source, offset,
+                       DONT_TRACK_ALLOCATION_SITE);
         } else if (value->IsHeapObject()) {
           __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
           __ mov(FieldOperand(result, total_offset), ecx);
@@ -5493,7 +5509,8 @@
   __ bind(&allocated);
   int offset = 0;
   __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
-  EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset);
+  EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset,
+               instr->hydrogen()->allocation_site_mode());
   ASSERT_EQ(size, offset);
 }
 
@@ -5758,6 +5775,11 @@
 }
 
 
+void LCodeGen::DoDummyUse(LDummyUse* instr) {
+  // Nothing to see here, move on!
+}
+
+
 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
   LOperand* obj = instr->object();
   LOperand* key = instr->key();