Version 3.16.10

Avoid excessive memory usage during redundant phi elimination. (issue 2510)
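
The fix below replaces the unbounded worklist in HGraph::EliminateRedundantPhis
with a bounded fixed-point sweep over all blocks (see the src/hydrogen.cc hunk).
A minimal, self-contained sketch of that pattern follows; Value, Use and Block
are simplified stand-ins for the HValue/HPhi/HBasicBlock machinery, not V8 types:

    // Sketch only: a phi is "redundant" when all of its operands (other than
    // itself) are the same value, so every use of the phi can be rewired to
    // that value.
    #include <algorithm>
    #include <vector>

    struct Value;

    struct Use {
      Value* user;
      int index;
    };

    struct Value {
      bool is_phi = false;
      std::vector<Value*> operands;  // Only meaningful for phis.
      std::vector<Use> uses;

      Value* RedundantReplacement() {
        if (!is_phi) return nullptr;
        Value* replacement = nullptr;
        for (Value* op : operands) {
          if (op == this) continue;
          if (replacement == nullptr) {
            replacement = op;
          } else if (op != replacement) {
            return nullptr;
          }
        }
        return replacement;
      }

      void SetOperandAt(int index, Value* value) {
        operands[index] = value;
        value->uses.push_back({this, index});
      }
    };

    struct Block {
      std::vector<Value*> phis;
    };

    // No worklist: sweep all blocks repeatedly until replacing a redundant phi
    // no longer rewires another phi, so peak memory stays proportional to the
    // phis of a single block instead of the whole (possibly huge) graph.
    void EliminateRedundantPhis(std::vector<Block*>& blocks) {
      std::vector<Value*> redundant;
      bool need_another_iteration;
      do {
        need_another_iteration = false;
        for (Block* block : blocks) {
          redundant.clear();
          for (Value* phi : block->phis) {
            Value* replacement = phi->RedundantReplacement();
            if (replacement == nullptr) continue;
            redundant.push_back(phi);  // Defer removal; we are iterating phis.
            for (const Use& use : phi->uses) {
              use.user->SetOperandAt(use.index, replacement);
              need_another_iteration |= use.user->is_phi;
            }
          }
          for (Value* phi : redundant) {
            block->phis.erase(
                std::find(block->phis.begin(), block->phis.end(), phi));
          }
        }
      } while (need_another_iteration);
    }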

Fixed additional spec violations with respect to RegExp.lastIndex. (issue 2437)

Added an Isolate parameter to the Persistent class API. (issue 2487)
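
With this change, Persistent<T>::New, MakeWeak, MarkIndependent and Dispose take
the Isolate explicitly, and weak callbacks receive it as their first argument
(see the src/d8.cc and src/d8.h hunks). A minimal embedder-side sketch of the
new calling convention; OnNearDeath and WrapBuffer are illustrative names, not
part of this patch:

    #include <v8.h>

    // Weak callbacks now receive the Isolate instead of relying on
    // Isolate::GetCurrent() (mirrors Shell::ExternalArrayWeakCallback).
    static void OnNearDeath(v8::Isolate* isolate,
                            v8::Persistent<v8::Value> handle,
                            void* data) {
      delete[] static_cast<uint8_t*>(data);  // Free the wrapped backing store.
      handle.Dispose(isolate);
      handle.Clear();
    }

    static v8::Persistent<v8::Object> WrapBuffer(v8::Isolate* isolate,
                                                 v8::Handle<v8::Object> buffer,
                                                 void* backing_store) {
      // New, MakeWeak, MarkIndependent and Dispose all take the Isolate now.
      v8::Persistent<v8::Object> wrapper =
          v8::Persistent<v8::Object>::New(isolate, buffer);
      wrapper.MakeWeak(isolate, backing_store, OnNearDeath);
      wrapper.MarkIndependent(isolate);
      return wrapper;
    }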

Performance and stability improvements on all platforms.

git-svn-id: http://v8.googlecode.com/svn/trunk@13508 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/api.cc b/src/api.cc
index 1561f24..d3d472d 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -626,95 +626,34 @@
 }
 
 
-i::Object** V8::GlobalizeReference(i::Object** obj) {
-  i::Isolate* isolate = i::Isolate::Current();
+i::Object** V8::GlobalizeReference(i::Isolate* isolate, i::Object** obj) {
   if (IsDeadCheck(isolate, "V8::Persistent::New")) return NULL;
   LOG_API(isolate, "Persistent::New");
-  i::Handle<i::Object> result =
-      isolate->global_handles()->Create(*obj);
+  i::Handle<i::Object> result = isolate->global_handles()->Create(*obj);
   return result.location();
 }
 
 
-void V8::MakeWeak(i::Object** object, void* parameters,
-                  WeakReferenceCallback callback) {
-  i::Isolate* isolate = i::Isolate::Current();
-  LOG_API(isolate, "MakeWeak");
-  isolate->global_handles()->MakeWeak(object, parameters,
-                                      callback);
-}
-
-
-void V8::MakeWeak(i::Isolate* isolate, i::Object** object,
-                  void* parameters, WeakReferenceCallback callback) {
+void V8::MakeWeak(i::Isolate* isolate,
+                  i::Object** object,
+                  void* parameters,
+                  WeakReferenceCallback weak_reference_callback,
+                  NearDeathCallback near_death_callback) {
   ASSERT(isolate == i::Isolate::Current());
   LOG_API(isolate, "MakeWeak");
-  isolate->global_handles()->MakeWeak(object, parameters,
-                                      callback);
+  isolate->global_handles()->MakeWeak(object,
+                                      parameters,
+                                      weak_reference_callback,
+                                      near_death_callback);
 }
 
 
-void V8::ClearWeak(i::Object** obj) {
-  i::Isolate* isolate = i::Isolate::Current();
+void V8::ClearWeak(i::Isolate* isolate, i::Object** obj) {
   LOG_API(isolate, "ClearWeak");
   isolate->global_handles()->ClearWeakness(obj);
 }
 
 
-void V8::MarkIndependent(i::Object** object) {
-  i::Isolate* isolate = i::Isolate::Current();
-  LOG_API(isolate, "MarkIndependent");
-  isolate->global_handles()->MarkIndependent(object);
-}
-
-
-void V8::MarkPartiallyDependent(i::Object** object) {
-  i::Isolate* isolate = i::Isolate::Current();
-  LOG_API(isolate, "MarkPartiallyDependent");
-  isolate->global_handles()->MarkPartiallyDependent(object);
-}
-
-
-bool V8::IsGlobalIndependent(i::Object** obj) {
-  i::Isolate* isolate = i::Isolate::Current();
-  LOG_API(isolate, "IsGlobalIndependent");
-  if (!isolate->IsInitialized()) return false;
-  return i::GlobalHandles::IsIndependent(obj);
-}
-
-
-bool V8::IsGlobalNearDeath(i::Object** obj) {
-  i::Isolate* isolate = i::Isolate::Current();
-  LOG_API(isolate, "IsGlobalNearDeath");
-  if (!isolate->IsInitialized()) return false;
-  return i::GlobalHandles::IsNearDeath(obj);
-}
-
-
-bool V8::IsGlobalWeak(i::Object** obj) {
-  i::Isolate* isolate = i::Isolate::Current();
-  LOG_API(isolate, "IsGlobalWeak");
-  if (!isolate->IsInitialized()) return false;
-  return i::GlobalHandles::IsWeak(obj);
-}
-
-
-bool V8::IsGlobalWeak(i::Isolate* isolate, i::Object** obj) {
-  ASSERT(isolate == i::Isolate::Current());
-  LOG_API(isolate, "IsGlobalWeak");
-  if (!isolate->IsInitialized()) return false;
-  return i::GlobalHandles::IsWeak(obj);
-}
-
-
-void V8::DisposeGlobal(i::Object** obj) {
-  i::Isolate* isolate = i::Isolate::Current();
-  LOG_API(isolate, "DisposeGlobal");
-  if (!isolate->IsInitialized()) return;
-  isolate->global_handles()->Destroy(obj);
-}
-
-
 void V8::DisposeGlobal(i::Isolate* isolate, i::Object** obj) {
   ASSERT(isolate == i::Isolate::Current());
   LOG_API(isolate, "DisposeGlobal");
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 780eca3..8889e75 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -4601,6 +4601,76 @@
 }
 
 
+void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
+  // This accepts as a receiver anything JSArray::SetElementsLength accepts
+  // (currently anything except for external arrays which means anything with
+  // elements of FixedArray type).  Value must be a number, but only smis are
+  // accepted as the most common case.
+  Label miss;
+
+  Register receiver;
+  Register value;
+  if (kind() == Code::KEYED_STORE_IC) {
+    // ----------- S t a t e -------------
+    //  -- lr    : return address
+    //  -- r0    : value
+    //  -- r1    : key
+    //  -- r2    : receiver
+    // -----------------------------------
+    __ cmp(r1, Operand(masm->isolate()->factory()->length_symbol()));
+    __ b(ne, &miss);
+    receiver = r2;
+    value = r0;
+  } else {
+    ASSERT(kind() == Code::STORE_IC);
+    // ----------- S t a t e -------------
+    //  -- lr    : return address
+    //  -- r0    : value
+    //  -- r1    : receiver
+    //  -- r2    : key
+    // -----------------------------------
+    receiver = r1;
+    value = r0;
+  }
+  Register scratch = r3;
+
+  // Check that the receiver isn't a smi.
+  __ JumpIfSmi(receiver, &miss);
+
+  // Check that the object is a JS array.
+  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
+  __ b(ne, &miss);
+
+  // Check that elements are FixedArray.
+  // We rely on StoreIC_ArrayLength below to deal with all types of
+  // fast elements (including COW).
+  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
+  __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
+  __ b(ne, &miss);
+
+  // Check that the array has fast properties, otherwise the length
+  // property might have been redefined.
+  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
+  __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
+  __ CompareRoot(scratch, Heap::kHashTableMapRootIndex);
+  __ b(eq, &miss);
+
+  // Check that value is a smi.
+  __ JumpIfNotSmi(value, &miss);
+
+  // Prepare tail call to StoreIC_ArrayLength.
+  __ Push(receiver, value);
+
+  ExternalReference ref =
+      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
+  __ TailCallExternalReference(ref, 2, 1);
+
+  __ bind(&miss);
+
+  StubCompiler::GenerateStoreMiss(masm, kind());
+}
+
+
 Register InstanceofStub::left() { return r0; }
 
 
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index 1867baf..4a5d920 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -1538,62 +1538,6 @@
 }
 
 
-void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- r0    : value
-  //  -- r1    : receiver
-  //  -- r2    : name
-  //  -- lr    : return address
-  // -----------------------------------
-  //
-  // This accepts as a receiver anything JSArray::SetElementsLength accepts
-  // (currently anything except for external arrays which means anything with
-  // elements of FixedArray type).  Value must be a number, but only smis are
-  // accepted as the most common case.
-
-  Label miss;
-
-  Register receiver = r1;
-  Register value = r0;
-  Register scratch = r3;
-
-  // Check that the receiver isn't a smi.
-  __ JumpIfSmi(receiver, &miss);
-
-  // Check that the object is a JS array.
-  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
-  __ b(ne, &miss);
-
-  // Check that elements are FixedArray.
-  // We rely on StoreIC_ArrayLength below to deal with all types of
-  // fast elements (including COW).
-  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
-  __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
-  __ b(ne, &miss);
-
-  // Check that the array has fast properties, otherwise the length
-  // property might have been redefined.
-  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
-  __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
-  __ CompareRoot(scratch, Heap::kHashTableMapRootIndex);
-  __ b(eq, &miss);
-
-  // Check that value is a smi.
-  __ JumpIfNotSmi(value, &miss);
-
-  // Prepare tail call to StoreIC_ArrayLength.
-  __ Push(receiver, value);
-
-  ExternalReference ref =
-      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
-  __ TailCallExternalReference(ref, 2, 1);
-
-  __ bind(&miss);
-
-  GenerateMiss(masm);
-}
-
-
 void StoreIC::GenerateNormal(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r0    : value
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 30c02da..f513c8a 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -592,6 +592,15 @@
 }
 
 
+void StubCompiler::GenerateStoreMiss(MacroAssembler* masm, Code::Kind kind) {
+  ASSERT(kind == Code::STORE_IC || kind == Code::KEYED_STORE_IC);
+  Handle<Code> code = (kind == Code::STORE_IC)
+      ? masm->isolate()->builtins()->StoreIC_Miss()
+      : masm->isolate()->builtins()->KeyedStoreIC_Miss();
+  __ Jump(code, RelocInfo::CODE_TARGET);
+}
+
+
 static void GenerateCallFunction(MacroAssembler* masm,
                                  Handle<Object> object,
                                  const ParameterCount& arguments,
diff --git a/src/builtins.cc b/src/builtins.cc
index 72477c0..16e672c 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -1560,16 +1560,6 @@
 }
 
 
-static void Generate_StoreIC_ArrayLength(MacroAssembler* masm) {
-  StoreIC::GenerateArrayLength(masm);
-}
-
-
-static void Generate_StoreIC_ArrayLength_Strict(MacroAssembler* masm) {
-  StoreIC::GenerateArrayLength(masm);
-}
-
-
 static void Generate_StoreIC_GlobalProxy(MacroAssembler* masm) {
   StoreIC::GenerateGlobalProxy(masm, kNonStrictMode);
 }
diff --git a/src/builtins.h b/src/builtins.h
index c874d8b..b53297a 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -154,8 +154,6 @@
                                                                         \
   V(StoreIC_Initialize,             STORE_IC, UNINITIALIZED,            \
                                     Code::kNoExtraICState)              \
-  V(StoreIC_ArrayLength,            STORE_IC, MONOMORPHIC,              \
-                                    Code::kNoExtraICState)              \
   V(StoreIC_Normal,                 STORE_IC, MONOMORPHIC,              \
                                     Code::kNoExtraICState)              \
   V(StoreIC_Megamorphic,            STORE_IC, MEGAMORPHIC,              \
@@ -164,8 +162,6 @@
                                     Code::kNoExtraICState)              \
   V(StoreIC_Initialize_Strict,      STORE_IC, UNINITIALIZED,            \
                                     kStrictMode)                        \
-  V(StoreIC_ArrayLength_Strict,     STORE_IC, MONOMORPHIC,              \
-                                    kStrictMode)                        \
   V(StoreIC_Normal_Strict,          STORE_IC, MONOMORPHIC,              \
                                     kStrictMode)                        \
   V(StoreIC_Megamorphic_Strict,     STORE_IC, MEGAMORPHIC,              \
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 7b1d1b4..2d23ddb 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -98,7 +98,7 @@
 
   // Copy the generated code into a heap object.
   Code::Flags flags = Code::ComputeFlags(
-      static_cast<Code::Kind>(GetCodeKind()), GetICState());
+      static_cast<Code::Kind>(GetCodeKind()), GetICState(), GetExtraICState());
   Handle<Code> new_object = factory->NewCode(
       desc, flags, masm.CodeObject(), NeedsImmovableCode());
   return new_object;
diff --git a/src/code-stubs.h b/src/code-stubs.h
index 7ec43c6..bf0e261 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -50,6 +50,7 @@
   V(ArrayLength)                         \
   V(StringLength)                        \
   V(FunctionPrototype)                   \
+  V(StoreArrayLength)                    \
   V(RecordWrite)                         \
   V(StoreBufferOverflow)                 \
   V(RegExpExec)                          \
@@ -179,6 +180,9 @@
   virtual InlineCacheState GetICState() {
     return UNINITIALIZED;
   }
+  virtual Code::ExtraICState GetExtraICState() {
+    return Code::kNoExtraICState;
+  }
 
   // Returns whether the code generated for this stub needs to be allocated as
   // a fixed (non-moveable) code object.
@@ -605,6 +609,37 @@
 };
 
 
+class StoreICStub: public ICStub {
+ public:
+  StoreICStub(Code::Kind kind, StrictModeFlag strict_mode)
+      : ICStub(kind), strict_mode_(strict_mode) { }
+
+ protected:
+  virtual Code::ExtraICState GetExtraICState() {
+    return strict_mode_;
+  }
+
+ private:
+  class StrictModeBits: public BitField<bool, 4, 1> {};
+  virtual int MinorKey() {
+    return KindBits::encode(kind()) | StrictModeBits::encode(strict_mode_);
+  }
+
+  StrictModeFlag strict_mode_;
+};
+
+
+class StoreArrayLengthStub: public StoreICStub {
+ public:
+  explicit StoreArrayLengthStub(Code::Kind kind, StrictModeFlag strict_mode)
+      : StoreICStub(kind, strict_mode) { }
+  virtual void Generate(MacroAssembler* masm);
+
+ private:
+  virtual CodeStub::Major MajorKey() { return StoreArrayLength; }
+};
+
+
 class BinaryOpStub: public PlatformCodeStub {
  public:
   BinaryOpStub(Token::Value op, OverwriteMode mode)
diff --git a/src/d8.cc b/src/d8.cc
index 407488f..6d63ef1 100644
--- a/src/d8.cc
+++ b/src/d8.cc
@@ -95,14 +95,14 @@
   explicit Symbols(Isolate* isolate) : isolate_(isolate) {
     HandleScope scope;
 #define INIT_SYMBOL(name, value) \
-    name##_ = Persistent<String>::New(String::NewSymbol(value));
+    name##_ = Persistent<String>::New(isolate, String::NewSymbol(value));
     FOR_EACH_SYMBOL(INIT_SYMBOL)
 #undef INIT_SYMBOL
     isolate->SetData(this);
   }
 
   ~Symbols() {
-#define DISPOSE_SYMBOL(name, value) name##_.Dispose();
+#define DISPOSE_SYMBOL(name, value) name##_.Dispose(isolate_);
     FOR_EACH_SYMBOL(DISPOSE_SYMBOL)
 #undef DISPOSE_SYMBOL
     isolate_->SetData(NULL);  // Not really needed, just to be sure...
@@ -399,9 +399,10 @@
   memset(data, 0, length);
 
   buffer->SetHiddenValue(Symbols::ArrayBufferMarkerPropName(isolate), True());
-  Persistent<Object> persistent_array = Persistent<Object>::New(buffer);
-  persistent_array.MakeWeak(data, ExternalArrayWeakCallback);
-  persistent_array.MarkIndependent();
+  Persistent<Object> persistent_array =
+      Persistent<Object>::New(isolate, buffer);
+  persistent_array.MakeWeak(isolate, data, ExternalArrayWeakCallback);
+  persistent_array.MarkIndependent(isolate);
   V8::AdjustAmountOfExternalAllocatedMemory(length);
 
   buffer->SetIndexedPropertiesToExternalArrayData(
@@ -826,14 +827,15 @@
 }
 
 
-void Shell::ExternalArrayWeakCallback(Persistent<Value> object, void* data) {
+void Shell::ExternalArrayWeakCallback(v8::Isolate* isolate,
+                                      Persistent<Value> object,
+                                      void* data) {
   HandleScope scope;
-  Isolate* isolate = Isolate::GetCurrent();
   int32_t length =
       object->ToObject()->Get(Symbols::byteLength(isolate))->Uint32Value();
   V8::AdjustAmountOfExternalAllocatedMemory(-length);
   delete[] static_cast<uint8_t*>(data);
-  object.Dispose();
+  object.Dispose(isolate);
 }
 
 
@@ -1442,9 +1444,10 @@
   Isolate* isolate = args.GetIsolate();
   Handle<Object> buffer = Object::New();
   buffer->SetHiddenValue(Symbols::ArrayBufferMarkerPropName(isolate), True());
-  Persistent<Object> persistent_buffer = Persistent<Object>::New(buffer);
-  persistent_buffer.MakeWeak(data, ExternalArrayWeakCallback);
-  persistent_buffer.MarkIndependent();
+  Persistent<Object> persistent_buffer =
+      Persistent<Object>::New(isolate, buffer);
+  persistent_buffer.MakeWeak(isolate, data, ExternalArrayWeakCallback);
+  persistent_buffer.MarkIndependent(isolate);
   V8::AdjustAmountOfExternalAllocatedMemory(length);
 
   buffer->SetIndexedPropertiesToExternalArrayData(
@@ -1565,7 +1568,7 @@
       Shell::ExecuteString(str, String::New(filename), false, false);
     }
 
-    thread_context.Dispose();
+    thread_context.Dispose(thread_context->GetIsolate());
     ptr = next_line;
   }
 }
@@ -1649,7 +1652,7 @@
         Context::Scope cscope(context);
         Execute(isolate);
       }
-      context.Dispose();
+      context.Dispose(isolate);
       if (Shell::options.send_idle_notification) {
         const int kLongIdlePauseInMs = 1000;
         V8::ContextDisposedNotification();
@@ -1858,7 +1861,7 @@
       options.isolate_sources[0].Execute(isolate);
     }
     if (!options.last_run) {
-      context.Dispose();
+      context.Dispose(isolate);
       if (options.send_idle_notification) {
         const int kLongIdlePauseInMs = 1000;
         V8::ContextDisposedNotification();
diff --git a/src/d8.h b/src/d8.h
index 63efd66..1c3ce0a 100644
--- a/src/d8.h
+++ b/src/d8.h
@@ -404,7 +404,9 @@
   static Handle<Value> CreateExternalArray(const Arguments& args,
                                            ExternalArrayType type,
                                            int32_t element_size);
-  static void ExternalArrayWeakCallback(Persistent<Value> object, void* data);
+  static void ExternalArrayWeakCallback(Isolate* isolate,
+                                        Persistent<Value> object,
+                                        void* data);
 };
 
 
diff --git a/src/debug.cc b/src/debug.cc
index 7baed88..866839d 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -617,10 +617,10 @@
   Handle<Script> script_ =
       Handle<Script>::cast(
           (global_handles->Create(*script)));
-  global_handles->MakeWeak(
-      reinterpret_cast<Object**>(script_.location()),
-      this,
-      ScriptCache::HandleWeakScript);
+  global_handles->MakeWeak(reinterpret_cast<Object**>(script_.location()),
+                           this,
+                           NULL,
+                           ScriptCache::HandleWeakScript);
   entry->value = script_.location();
 }
 
@@ -663,7 +663,9 @@
 }
 
 
-void ScriptCache::HandleWeakScript(v8::Persistent<v8::Value> obj, void* data) {
+void ScriptCache::HandleWeakScript(v8::Isolate* isolate,
+                                   v8::Persistent<v8::Value> obj,
+                                   void* data) {
   ScriptCache* script_cache = reinterpret_cast<ScriptCache*>(data);
   // Find the location of the global handle.
   Script** location =
@@ -676,7 +678,7 @@
   script_cache->collected_scripts_.Add(id);
 
   // Clear the weak handle.
-  obj.Dispose();
+  obj.Dispose(isolate);
   obj.Clear();
 }
 
@@ -696,8 +698,10 @@
 }
 
 
-void Debug::HandleWeakDebugInfo(v8::Persistent<v8::Value> obj, void* data) {
-  Debug* debug = Isolate::Current()->debug();
+void Debug::HandleWeakDebugInfo(v8::Isolate* isolate,
+                                v8::Persistent<v8::Value> obj,
+                                void* data) {
+  Debug* debug = reinterpret_cast<Isolate*>(isolate)->debug();
   DebugInfoListNode* node = reinterpret_cast<DebugInfoListNode*>(data);
   // We need to clear all breakpoints associated with the function to restore
   // original code and avoid patching the code twice later because
@@ -721,10 +725,10 @@
   // Globalize the request debug info object and make it weak.
   debug_info_ = Handle<DebugInfo>::cast(
       (global_handles->Create(debug_info)));
-  global_handles->MakeWeak(
-      reinterpret_cast<Object**>(debug_info_.location()),
-      this,
-      Debug::HandleWeakDebugInfo);
+  global_handles->MakeWeak(reinterpret_cast<Object**>(debug_info_.location()),
+                           this,
+                           NULL,
+                           Debug::HandleWeakDebugInfo);
 }
 
 
diff --git a/src/debug.h b/src/debug.h
index 4363df9..0558afd 100644
--- a/src/debug.h
+++ b/src/debug.h
@@ -189,7 +189,9 @@
   void Clear();
 
   // Weak handle callback for scripts in the cache.
-  static void HandleWeakScript(v8::Persistent<v8::Value> obj, void* data);
+  static void HandleWeakScript(v8::Isolate* isolate,
+                               v8::Persistent<v8::Value> obj,
+                               void* data);
 
   // List used during GC to temporarily store id's of collected scripts.
   List<int> collected_scripts_;
@@ -384,7 +386,9 @@
   static const int kEstimatedNofBreakPointsInFunction = 16;
 
   // Passed to MakeWeak.
-  static void HandleWeakDebugInfo(v8::Persistent<v8::Value> obj, void* data);
+  static void HandleWeakDebugInfo(v8::Isolate* isolate,
+                                  v8::Persistent<v8::Value> obj,
+                                  void* data);
 
   friend class Debugger;
   friend Handle<FixedArray> GetDebuggedFunctions();  // In test-debug.cc
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index bc0c61c..3d07ca9 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -456,11 +456,13 @@
 }
 
 
-void Deoptimizer::HandleWeakDeoptimizedCode(v8::Persistent<v8::Value> obj,
+void Deoptimizer::HandleWeakDeoptimizedCode(v8::Isolate* isolate,
+                                            v8::Persistent<v8::Value> obj,
                                             void* parameter) {
   DeoptimizingCodeListNode* node =
       reinterpret_cast<DeoptimizingCodeListNode*>(parameter);
-  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
+  DeoptimizerData* data =
+      reinterpret_cast<Isolate*>(isolate)->deoptimizer_data();
   data->RemoveDeoptimizingCode(*node->code());
 #ifdef DEBUG
   for (DeoptimizingCodeListNode* current = data->deoptimizing_code_list_;
@@ -1913,6 +1915,7 @@
   code_ = Handle<Code>::cast(global_handles->Create(code));
   global_handles->MakeWeak(reinterpret_cast<Object**>(code_.location()),
                            this,
+                           NULL,
                            Deoptimizer::HandleWeakDeoptimizedCode);
 }
 
diff --git a/src/deoptimizer.h b/src/deoptimizer.h
index 870fdb2..0c2252e 100644
--- a/src/deoptimizer.h
+++ b/src/deoptimizer.h
@@ -372,8 +372,9 @@
       MacroAssembler* masm, int count, BailoutType type);
 
   // Weak handle callback for deoptimizing code objects.
-  static void HandleWeakDeoptimizedCode(
-      v8::Persistent<v8::Value> obj, void* data);
+  static void HandleWeakDeoptimizedCode(v8::Isolate* isolate,
+                                        v8::Persistent<v8::Value> obj,
+                                        void* data);
 
   // Deoptimize function assuming that function->next_function_link() points
   // to a list that contains all functions that share the same optimized code.
diff --git a/src/global-handles.cc b/src/global-handles.cc
index 23b3918..51e904e 100644
--- a/src/global-handles.cc
+++ b/src/global-handles.cc
@@ -60,12 +60,16 @@
   }
 
   Node() {
-    ASSERT(OFFSET_OF(Node, flags_) == Internals::kNodeFlagsOffset);
     ASSERT(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset);
-    ASSERT(static_cast<int>(IsIndependent::kShift) ==
-           Internals::kNodeIsIndependentShift);
-    ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
-           Internals::kNodeIsPartiallyDependentShift);
+    ASSERT(OFFSET_OF(Node, flags_) == Internals::kNodeFlagsOffset);
+    STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
+                  Internals::kNodeStateMask);
+    STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
+    STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
+    STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
+                  Internals::kNodeIsIndependentShift);
+    STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
+                  Internals::kNodeIsPartiallyDependentShift);
   }
 
 #ifdef DEBUG
@@ -79,7 +83,8 @@
     set_partially_dependent(false);
     set_in_new_space_list(false);
     parameter_or_next_free_.next_free = NULL;
-    callback_ = NULL;
+    weak_reference_callback_ = NULL;
+    near_death_callback_ = NULL;
   }
 #endif
 
@@ -100,7 +105,8 @@
     set_partially_dependent(false);
     set_state(NORMAL);
     parameter_or_next_free_.parameter = NULL;
-    callback_ = NULL;
+    weak_reference_callback_ = NULL;
+    near_death_callback_ = NULL;
     IncreaseBlockUses(global_handles);
   }
 
@@ -189,7 +195,8 @@
   void clear_partially_dependent() { set_partially_dependent(false); }
 
   // Callback accessor.
-  WeakReferenceCallback callback() { return callback_; }
+  // TODO(svenpanne) Re-enable or nuke later.
+  // WeakReferenceCallback callback() { return callback_; }
 
   // Callback parameter accessors.
   void set_parameter(void* parameter) {
@@ -213,11 +220,13 @@
 
   void MakeWeak(GlobalHandles* global_handles,
                 void* parameter,
-                WeakReferenceCallback callback) {
+                WeakReferenceCallback weak_reference_callback,
+                NearDeathCallback near_death_callback) {
     ASSERT(state() != FREE);
     set_state(WEAK);
     set_parameter(parameter);
-    callback_ = callback;
+    weak_reference_callback_ = weak_reference_callback;
+    near_death_callback_ = near_death_callback;
   }
 
   void ClearWeakness(GlobalHandles* global_handles) {
@@ -229,8 +238,8 @@
   bool PostGarbageCollectionProcessing(Isolate* isolate,
                                        GlobalHandles* global_handles) {
     if (state() != Node::PENDING) return false;
-    WeakReferenceCallback func = callback();
-    if (func == NULL) {
+    if (weak_reference_callback_ == NULL &&
+        near_death_callback_ == NULL) {
       Release(global_handles);
       return false;
     }
@@ -248,7 +257,14 @@
              ExternalTwoByteString::cast(object_)->resource() != NULL);
       // Leaving V8.
       VMState state(isolate, EXTERNAL);
-      func(object, par);
+      if (weak_reference_callback_ != NULL) {
+        weak_reference_callback_(object, par);
+      }
+      if (near_death_callback_ != NULL) {
+        near_death_callback_(reinterpret_cast<v8::Isolate*>(isolate),
+                             object,
+                             par);
+      }
     }
     // Absence of explicit cleanup or revival of weak handle
     // in most of the cases would lead to memory leak.
@@ -284,7 +300,8 @@
   uint8_t flags_;
 
   // Handle specific callback.
-  WeakReferenceCallback callback_;
+  WeakReferenceCallback weak_reference_callback_;
+  NearDeathCallback near_death_callback_;
 
   // Provided data for callback.  In FREE state, this is used for
   // the free list link.
@@ -450,10 +467,16 @@
 }
 
 
-void GlobalHandles::MakeWeak(Object** location, void* parameter,
-                             WeakReferenceCallback callback) {
-  ASSERT(callback != NULL);
-  Node::FromLocation(location)->MakeWeak(this, parameter, callback);
+void GlobalHandles::MakeWeak(Object** location,
+                             void* parameter,
+                             WeakReferenceCallback weak_reference_callback,
+                             NearDeathCallback near_death_callback) {
+  ASSERT((weak_reference_callback != NULL) !=
+         (near_death_callback != NULL));
+  Node::FromLocation(location)->MakeWeak(this,
+                                         parameter,
+                                         weak_reference_callback,
+                                         near_death_callback);
 }
 
 
diff --git a/src/global-handles.h b/src/global-handles.h
index 18bfc11..9900144 100644
--- a/src/global-handles.h
+++ b/src/global-handles.h
@@ -126,7 +126,8 @@
   // reason is that Smi::FromInt(0) does not change during garbage collection.
   void MakeWeak(Object** location,
                 void* parameter,
-                WeakReferenceCallback callback);
+                WeakReferenceCallback weak_reference_callback,
+                NearDeathCallback near_death_callback);
 
   void RecordStats(HeapStats* stats);
 
diff --git a/src/handles.cc b/src/handles.cc
index c0dea20..e97dcad 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -350,14 +350,16 @@
 // collector will call the weak callback on the global handle
 // associated with the wrapper and get rid of both the wrapper and the
 // handle.
-static void ClearWrapperCache(Persistent<v8::Value> handle, void*) {
+static void ClearWrapperCache(v8::Isolate* v8_isolate,
+                              Persistent<v8::Value> handle,
+                              void*) {
   Handle<Object> cache = Utils::OpenHandle(*handle);
   JSValue* wrapper = JSValue::cast(*cache);
   Foreign* foreign = Script::cast(wrapper->value())->wrapper();
   ASSERT(foreign->foreign_address() ==
          reinterpret_cast<Address>(cache.location()));
   foreign->set_foreign_address(0);
-  Isolate* isolate = Isolate::Current();
+  Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
   isolate->global_handles()->Destroy(cache.location());
   isolate->counters()->script_wrappers()->Decrement();
 }
@@ -390,7 +392,9 @@
   // for future use. The cache will automatically be cleared by the
   // garbage collector when it is not used anymore.
   Handle<Object> handle = isolate->global_handles()->Create(*result);
-  isolate->global_handles()->MakeWeak(handle.location(), NULL,
+  isolate->global_handles()->MakeWeak(handle.location(),
+                                      NULL,
+                                      NULL,
                                       &ClearWrapperCache);
   script->wrapper()->set_foreign_address(
       reinterpret_cast<Address>(handle.location()));
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index 0bf208b..38f3dac 100644
--- a/src/hydrogen-instructions.cc
+++ b/src/hydrogen-instructions.cc
@@ -571,6 +571,11 @@
 }
 
 
+bool HValue::HasMonomorphicJSObjectType() {
+  return !GetMonomorphicJSObjectMap().is_null();
+}
+
+
 bool HValue::UpdateInferredType() {
   HType type = CalculateInferredType();
   bool result = (!type.Equals(type_));
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index 367d2b3..aa513e7 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -770,6 +770,14 @@
 
   const char* Mnemonic() const;
 
+  // Type information helpers.
+  bool HasMonomorphicJSObjectType();
+
+  // TODO(mstarzinger): For now instructions can override this function to
+  // specify statically known types, once HType can convey more information
+  // it should be based on the HType.
+  virtual Handle<Map> GetMonomorphicJSObjectMap() { return Handle<Map>(); }
+
   // Updated the inferred type of this instruction and returns true if
   // it has changed.
   bool UpdateInferredType();
@@ -5019,6 +5027,10 @@
   virtual Representation RequiredInputRepresentation(int index) {
     return Representation::Tagged();
   }
+  virtual Handle<Map> GetMonomorphicJSObjectMap() {
+    ASSERT(constructor()->has_initial_map());
+    return Handle<Map>(constructor()->initial_map());
+  }
   virtual HType CalculateInferredType();
 
   DECLARE_CONCRETE_INSTRUCTION(AllocateObject)
@@ -5086,6 +5098,9 @@
   virtual Representation RequiredInputRepresentation(int index) {
     return Representation::Tagged();
   }
+  virtual Handle<Map> GetMonomorphicJSObjectMap() {
+    return Handle<Map>(boilerplate()->map());
+  }
   virtual HType CalculateInferredType();
 
   DECLARE_CONCRETE_INSTRUCTION(FastLiteral)
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 8de8e52..e9e5746 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -1329,37 +1329,50 @@
 }
 
 
+// Replace all phis consisting of a single non-loop operand plus any number of
+// loop operands by that single non-loop operand.
 void HGraph::EliminateRedundantPhis() {
   HPhase phase("H_Redundant phi elimination", this);
 
-  // Worklist of phis that can potentially be eliminated. Initialized with
-  // all phi nodes. When elimination of a phi node modifies another phi node
-  // the modified phi node is added to the worklist.
-  ZoneList<HPhi*> worklist(blocks_.length(), zone());
-  for (int i = 0; i < blocks_.length(); ++i) {
-    worklist.AddAll(*blocks_[i]->phis(), zone());
-  }
-
-  while (!worklist.is_empty()) {
-    HPhi* phi = worklist.RemoveLast();
-    HBasicBlock* block = phi->block();
-
-    // Skip phi node if it was already replaced.
-    if (block == NULL) continue;
-
-    // Get replacement value if phi is redundant.
-    HValue* replacement = phi->GetRedundantReplacement();
-
-    if (replacement != NULL) {
-      // Iterate through the uses and replace them all.
-      for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
-        HValue* value = it.value();
-        value->SetOperandAt(it.index(), replacement);
-        if (value->IsPhi()) worklist.Add(HPhi::cast(value), zone());
+  // We do a simple fixed point iteration without any work list, because
+  // machine-generated JavaScript can lead to a very dense Hydrogen graph with
+  // an enormous work list and will consequently result in OOM. Experiments
+  // showed that this simple algorithm is good enough, and even e.g. tracking
+  // the set or range of blocks to consider is not a real improvement.
+  bool need_another_iteration;
+  ZoneList<HPhi*> redundant_phis(blocks_.length(), zone());
+  do {
+    need_another_iteration = false;
+    for (int i = 0; i < blocks_.length(); ++i) {
+      HBasicBlock* block = blocks_[i];
+      for (int j = 0; j < block->phis()->length(); j++) {
+        HPhi* phi = block->phis()->at(j);
+        HValue* replacement = phi->GetRedundantReplacement();
+        if (replacement != NULL) {
+          // Remember phi to avoid concurrent modification of the block's phis.
+          redundant_phis.Add(phi, zone());
+          for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
+            HValue* value = it.value();
+            value->SetOperandAt(it.index(), replacement);
+            need_another_iteration |= value->IsPhi();
+          }
+        }
       }
-      block->RemovePhi(phi);
+      for (int i = 0; i < redundant_phis.length(); i++) {
+        block->RemovePhi(redundant_phis[i]);
+      }
+      redundant_phis.Clear();
+    }
+  } while (need_another_iteration);
+
+#if DEBUG
+  // Make sure that we *really* removed all redundant phis.
+  for (int i = 0; i < blocks_.length(); ++i) {
+    for (int j = 0; j < blocks_[i]->phis()->length(); j++) {
+      ASSERT(blocks_[i]->phis()->at(j)->GetRedundantReplacement() == NULL);
     }
   }
+#endif
 }
 
 
@@ -6796,12 +6809,16 @@
   } else if (expr->key()->IsPropertyName()) {
     Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
     SmallMapList* types = expr->GetReceiverTypes();
+    HValue* object = Top();
 
-    bool monomorphic = expr->IsMonomorphic();
     Handle<Map> map;
+    bool monomorphic = false;
     if (expr->IsMonomorphic()) {
       map = types->first();
-      if (map->is_dictionary_map()) monomorphic = false;
+      monomorphic = !map->is_dictionary_map();
+    } else if (object->HasMonomorphicJSObjectType()) {
+      map = object->GetMonomorphicJSObjectMap();
+      monomorphic = !map->is_dictionary_map();
     }
     if (monomorphic) {
       Handle<JSFunction> getter;
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 4e4a609..35bda31 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -3317,6 +3317,70 @@
 }
 
 
+void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- eax    : value
+  //  -- ecx    : name
+  //  -- edx    : receiver
+  //  -- esp[0] : return address
+  // -----------------------------------
+  //
+  // This accepts as a receiver anything JSArray::SetElementsLength accepts
+  // (currently anything except for external arrays which means anything with
+  // elements of FixedArray type).  Value must be a number, but only smis are
+  // accepted as the most common case.
+
+  Label miss;
+
+  Register receiver = edx;
+  Register value = eax;
+  Register scratch = ebx;
+
+  if (kind() == Code::KEYED_STORE_IC) {
+    __ cmp(ecx, Immediate(masm->isolate()->factory()->length_symbol()));
+    __ j(not_equal, &miss);
+  }
+
+  // Check that the receiver isn't a smi.
+  __ JumpIfSmi(receiver, &miss);
+
+  // Check that the object is a JS array.
+  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
+  __ j(not_equal, &miss);
+
+  // Check that elements are FixedArray.
+  // We rely on StoreIC_ArrayLength below to deal with all types of
+  // fast elements (including COW).
+  __ mov(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
+  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
+  __ j(not_equal, &miss);
+
+  // Check that the array has fast properties, otherwise the length
+  // property might have been redefined.
+  __ mov(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
+  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
+                 Heap::kHashTableMapRootIndex);
+  __ j(equal, &miss);
+
+  // Check that value is a smi.
+  __ JumpIfNotSmi(value, &miss);
+
+  // Prepare tail call to StoreIC_ArrayLength.
+  __ pop(scratch);
+  __ push(receiver);
+  __ push(value);
+  __ push(scratch);  // return address
+
+  ExternalReference ref =
+      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
+  __ TailCallExternalReference(ref, 2, 1);
+
+  __ bind(&miss);
+
+  StubCompiler::GenerateStoreMiss(masm, kind());
+}
+
+
 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   // The key is in edx and the parameter count is in eax.
 
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index bfca89f..3c7b285 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -1426,65 +1426,6 @@
 }
 
 
-void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- eax    : value
-  //  -- ecx    : name
-  //  -- edx    : receiver
-  //  -- esp[0] : return address
-  // -----------------------------------
-  //
-  // This accepts as a receiver anything JSArray::SetElementsLength accepts
-  // (currently anything except for external arrays which means anything with
-  // elements of FixedArray type).  Value must be a number, but only smis are
-  // accepted as the most common case.
-
-  Label miss;
-
-  Register receiver = edx;
-  Register value = eax;
-  Register scratch = ebx;
-
-  // Check that the receiver isn't a smi.
-  __ JumpIfSmi(receiver, &miss);
-
-  // Check that the object is a JS array.
-  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
-  __ j(not_equal, &miss);
-
-  // Check that elements are FixedArray.
-  // We rely on StoreIC_ArrayLength below to deal with all types of
-  // fast elements (including COW).
-  __ mov(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
-  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
-  __ j(not_equal, &miss);
-
-  // Check that the array has fast properties, otherwise the length
-  // property might have been redefined.
-  __ mov(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
-  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
-                 Heap::kHashTableMapRootIndex);
-  __ j(equal, &miss);
-
-  // Check that value is a smi.
-  __ JumpIfNotSmi(value, &miss);
-
-  // Prepare tail call to StoreIC_ArrayLength.
-  __ pop(scratch);
-  __ push(receiver);
-  __ push(value);
-  __ push(scratch);  // return address
-
-  ExternalReference ref =
-      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
-  __ TailCallExternalReference(ref, 2, 1);
-
-  __ bind(&miss);
-
-  GenerateMiss(masm);
-}
-
-
 void StoreIC::GenerateNormal(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax    : value
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 68e7b97..805016e 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -737,6 +737,15 @@
 }
 
 
+void StubCompiler::GenerateStoreMiss(MacroAssembler* masm, Code::Kind kind) {
+  ASSERT(kind == Code::STORE_IC || kind == Code::KEYED_STORE_IC);
+  Handle<Code> code = (kind == Code::STORE_IC)
+      ? masm->isolate()->builtins()->StoreIC_Miss()
+      : masm->isolate()->builtins()->KeyedStoreIC_Miss();
+  __ jmp(code, RelocInfo::CODE_TARGET);
+}
+
+
 void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
   Handle<Code> code =
       masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
diff --git a/src/ic.cc b/src/ic.cc
index 65d3f65..7fa7eaf 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -795,23 +795,23 @@
     return TypeError("non_object_property_call", object, key);
   }
 
-  if (FLAG_use_ic && state != MEGAMORPHIC && object->IsHeapObject()) {
-    ASSERT(state != GENERIC);
+  bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded();
+  ASSERT(!(use_ic && object->IsJSGlobalProxy()));
+
+  if (use_ic && state != MEGAMORPHIC) {
     int argc = target()->arguments_count();
-    Handle<Map> map =
-        isolate()->factory()->non_strict_arguments_elements_map();
-    if (object->IsJSObject() &&
-        Handle<JSObject>::cast(object)->elements()->map() == *map) {
-      Handle<Code> code = isolate()->stub_cache()->ComputeCallArguments(
-          argc, Code::KEYED_CALL_IC);
-      set_target(*code);
-      TRACE_IC("KeyedCallIC", key, state, target());
-    } else if (!object->IsAccessCheckNeeded()) {
-      Handle<Code> code = isolate()->stub_cache()->ComputeCallMegamorphic(
-          argc, Code::KEYED_CALL_IC, Code::kNoExtraICState);
-      set_target(*code);
-      TRACE_IC("KeyedCallIC", key, state, target());
+    Handle<Code> stub = isolate()->stub_cache()->ComputeCallMegamorphic(
+        argc, Code::KEYED_CALL_IC, Code::kNoExtraICState);
+    if (object->IsJSObject()) {
+      Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+      if (receiver->elements()->map() ==
+          isolate()->heap()->non_strict_arguments_elements_map()) {
+        stub = isolate()->stub_cache()->ComputeCallArguments(argc);
+      }
     }
+    ASSERT(!stub.is_null());
+    set_target(*stub);
+    TRACE_IC("KeyedCallIC", key, state, target());
   }
 
   Handle<Object> result = GetProperty(object, key);
@@ -963,6 +963,45 @@
 }
 
 
+void IC::PatchCache(State state,
+                    StrictModeFlag strict_mode,
+                    Handle<JSObject> receiver,
+                    Handle<String> name,
+                    Handle<Code> code) {
+  switch (state) {
+    case UNINITIALIZED:
+    case PREMONOMORPHIC:
+    case MONOMORPHIC_PROTOTYPE_FAILURE:
+    case POLYMORPHIC:
+      set_target(*code);
+      break;
+    case MONOMORPHIC:
+      // Only move to megamorphic if the target changes.
+      if (target() != *code) {
+        // We are transitioning from monomorphic to megamorphic case.
+        // Place the current monomorphic stub and stub compiled for
+        // the receiver into stub cache.
+        Map* map = target()->FindFirstMap();
+        if (map != NULL) {
+          UpdateMegamorphicCache(map, *name, target());
+        }
+        UpdateMegamorphicCache(receiver->map(), *name, *code);
+        set_target((strict_mode == kStrictMode)
+                     ? *megamorphic_stub_strict()
+                     : *megamorphic_stub());
+      }
+      break;
+    case MEGAMORPHIC:
+      // Update the stub cache.
+      UpdateMegamorphicCache(receiver->map(), *name, *code);
+      break;
+    case GENERIC:
+    case DEBUG_STUB:
+      break;
+  }
+}
+
+
 void LoadIC::UpdateCaches(LookupResult* lookup,
                           State state,
                           Handle<Object> object,
@@ -988,42 +1027,12 @@
     if (code.is_null()) return;
   }
 
-  // Patch the call site depending on the state of the cache.
-  switch (state) {
-    case UNINITIALIZED:
-    case PREMONOMORPHIC:
-    case MONOMORPHIC_PROTOTYPE_FAILURE:
-      set_target(*code);
-      break;
-    case MONOMORPHIC:
-      if (target() != *code) {
-        // We are transitioning from monomorphic to megamorphic case.
-        // Place the current monomorphic stub and stub compiled for
-        // the receiver into stub cache.
-        Map* map = target()->FindFirstMap();
-        if (map != NULL) {
-          UpdateMegamorphicCache(map, *name, target());
-        }
-        UpdateMegamorphicCache(receiver->map(), *name, *code);
-        set_target(*megamorphic_stub());
-      }
-      break;
-    case MEGAMORPHIC:
-      UpdateMegamorphicCache(receiver->map(), *name, *code);
-      break;
-    case DEBUG_STUB:
-      break;
-    case POLYMORPHIC:
-    case GENERIC:
-      UNREACHABLE();
-      break;
-  }
-
+  PatchCache(state, kNonStrictMode, receiver, name, code);
   TRACE_IC("LoadIC", name, state, target());
 }
 
 
-void LoadIC::UpdateMegamorphicCache(Map* map, String* name, Code* code) {
+void IC::UpdateMegamorphicCache(Map* map, String* name, Code* code) {
   // Cache code holding map should be consistent with
   // GenerateMonomorphicCacheProbe.
   isolate()->stub_cache()->Set(name, map, code);
@@ -1234,9 +1243,8 @@
     return LoadIC::Load(state, object, Handle<String>::cast(key));
   }
 
-  // Do not use ICs for objects that require access checks (including
-  // the global object).
   bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded();
+  ASSERT(!(use_ic && object->IsJSGlobalProxy()));
 
   if (use_ic) {
     Handle<Code> stub = generic_stub();
@@ -1259,12 +1267,12 @@
     } else {
       TRACE_GENERIC_IC("KeyedLoadIC", "force generic");
     }
-    if (!stub.is_null()) set_target(*stub);
+    ASSERT(!stub.is_null());
+    set_target(*stub);
+    TRACE_IC("KeyedLoadIC", key, state, target());
   }
 
-  TRACE_IC("KeyedLoadIC", key, state, target());
 
-  // Get the property.
   return Runtime::GetObjectProperty(isolate(), object, key);
 }
 
@@ -1398,11 +1406,8 @@
       receiver->IsJSArray() &&
       name->Equals(isolate()->heap()->length_symbol()) &&
       Handle<JSArray>::cast(receiver)->AllowsSetElementsLength() &&
-      receiver->HasFastProperties() &&
-      kind() != Code::KEYED_STORE_IC) {
-    Handle<Code> stub = (strict_mode == kStrictMode)
-        ? isolate()->builtins()->StoreIC_ArrayLength_Strict()
-        : isolate()->builtins()->StoreIC_ArrayLength();
+      receiver->HasFastProperties()) {
+    Handle<Code> stub = StoreArrayLengthStub(kind(), strict_mode).GetCode();
     set_target(*stub);
     TRACE_IC("StoreIC", name, state, *stub);
     return receiver->SetProperty(*name, *value, NONE, strict_mode, store_mode);
@@ -1424,7 +1429,7 @@
   LookupResult lookup(isolate());
   if (LookupForWrite(receiver, name, &lookup)) {
     if (FLAG_use_ic) {
-      UpdateStoreCaches(&lookup, state, strict_mode, receiver, name, value);
+      UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
     }
   } else if (strict_mode == kStrictMode &&
              !(lookup.IsProperty() && lookup.IsReadOnly()) &&
@@ -1438,12 +1443,12 @@
 }
 
 
-void StoreIC::UpdateStoreCaches(LookupResult* lookup,
-                                State state,
-                                StrictModeFlag strict_mode,
-                                Handle<JSObject> receiver,
-                                Handle<String> name,
-                                Handle<Object> value) {
+void StoreIC::UpdateCaches(LookupResult* lookup,
+                           State state,
+                           StrictModeFlag strict_mode,
+                           Handle<JSObject> receiver,
+                           Handle<String> name,
+                           Handle<Object> value) {
   ASSERT(!receiver->IsJSGlobalProxy());
   ASSERT(StoreICableLookup(lookup));
   ASSERT(lookup->IsFound());
@@ -1451,22 +1456,25 @@
   // These are not cacheable, so we never see such LookupResults here.
   ASSERT(!lookup->IsHandler());
 
-  // If the property has a non-field type allowing map transitions
-  // where there is extra room in the object, we leave the IC in its
-  // current state.
-  PropertyType type = lookup->type();
+  Handle<Code> code =
+      ComputeStoreMonomorphic(lookup, strict_mode, receiver, name);
+  if (code.is_null()) return;
 
-  // Compute the code stub for this store; used for rewriting to
-  // monomorphic state and making sure that the code stub is in the
-  // stub cache.
+  PatchCache(state, strict_mode, receiver, name, code);
+  TRACE_IC("StoreIC", name, state, target());
+}
+
+
+Handle<Code> StoreIC::ComputeStoreMonomorphic(LookupResult* lookup,
+                                              StrictModeFlag strict_mode,
+                                              Handle<JSObject> receiver,
+                                              Handle<String> name) {
   Handle<JSObject> holder(lookup->holder());
-  Handle<Code> code;
-  switch (type) {
+  switch (lookup->type()) {
     case FIELD:
-      code = isolate()->stub_cache()->ComputeStoreField(
+      return isolate()->stub_cache()->ComputeStoreField(
           name, receiver, lookup->GetFieldIndex().field_index(),
           Handle<Map>::null(), strict_mode);
-      break;
     case NORMAL:
       if (receiver->IsGlobalObject()) {
         // The stub generated for the global object picks the value directly
@@ -1474,44 +1482,39 @@
         // global object.
         Handle<GlobalObject> global = Handle<GlobalObject>::cast(receiver);
         Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(lookup));
-        code = isolate()->stub_cache()->ComputeStoreGlobal(
+        return isolate()->stub_cache()->ComputeStoreGlobal(
             name, global, cell, strict_mode);
-      } else {
-        if (!holder.is_identical_to(receiver)) return;
-        code = isolate()->stub_cache()->ComputeStoreNormal(strict_mode);
       }
-      break;
+      if (!holder.is_identical_to(receiver)) break;
+      return isolate()->stub_cache()->ComputeStoreNormal(strict_mode);
     case CALLBACKS: {
       Handle<Object> callback(lookup->GetCallbackObject());
       if (callback->IsAccessorInfo()) {
         Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(callback);
-        if (v8::ToCData<Address>(info->setter()) == 0) return;
-        if (!holder->HasFastProperties()) return;
-        if (!info->IsCompatibleReceiver(*receiver)) return;
-        code = isolate()->stub_cache()->ComputeStoreCallback(
+        if (v8::ToCData<Address>(info->setter()) == 0) break;
+        if (!holder->HasFastProperties()) break;
+        if (!info->IsCompatibleReceiver(*receiver)) break;
+        return isolate()->stub_cache()->ComputeStoreCallback(
             name, receiver, holder, info, strict_mode);
       } else if (callback->IsAccessorPair()) {
         Handle<Object> setter(Handle<AccessorPair>::cast(callback)->setter());
-        if (!setter->IsJSFunction()) return;
-        if (holder->IsGlobalObject()) return;
-        if (!holder->HasFastProperties()) return;
-        code = isolate()->stub_cache()->ComputeStoreViaSetter(
+        if (!setter->IsJSFunction()) break;
+        if (holder->IsGlobalObject()) break;
+        if (!holder->HasFastProperties()) break;
+        return isolate()->stub_cache()->ComputeStoreViaSetter(
             name, receiver, holder, Handle<JSFunction>::cast(setter),
             strict_mode);
-      } else {
-        ASSERT(callback->IsForeign());
-        // No IC support for old-style native accessors.
-        return;
       }
+      ASSERT(callback->IsForeign());
+      // No IC support for old-style native accessors.
       break;
     }
     case INTERCEPTOR:
       ASSERT(!receiver->GetNamedInterceptor()->setter()->IsUndefined());
-      code = isolate()->stub_cache()->ComputeStoreInterceptor(
+      return isolate()->stub_cache()->ComputeStoreInterceptor(
           name, receiver, strict_mode);
-      break;
     case CONSTANT_FUNCTION:
-      return;
+      break;
     case TRANSITION: {
       Handle<Map> transition(lookup->GetTransitionTarget());
       int descriptor = transition->LastAdded();
@@ -1519,56 +1522,18 @@
       DescriptorArray* target_descriptors = transition->instance_descriptors();
       PropertyDetails details = target_descriptors->GetDetails(descriptor);
 
-      if (details.type() != FIELD || details.attributes() != NONE) return;
+      if (details.type() != FIELD || details.attributes() != NONE) break;
 
       int field_index = target_descriptors->GetFieldIndex(descriptor);
-      code = isolate()->stub_cache()->ComputeStoreField(
+      return isolate()->stub_cache()->ComputeStoreField(
           name, receiver, field_index, transition, strict_mode);
-
-      break;
     }
     case NONEXISTENT:
     case HANDLER:
       UNREACHABLE();
-      return;
-  }
-
-  // Patch the call site depending on the state of the cache.
-  switch (state) {
-    case UNINITIALIZED:
-    case PREMONOMORPHIC:
-    case MONOMORPHIC_PROTOTYPE_FAILURE:
-      set_target(*code);
-      break;
-    case MONOMORPHIC:
-      // Only move to megamorphic if the target changes.
-      if (target() != *code) {
-        // We are transitioning from monomorphic to megamorphic case.
-        // Place the current monomorphic stub and stub compiled for
-        // the receiver into stub cache.
-        Map* map = target()->FindFirstMap();
-        if (map != NULL) {
-          isolate()->stub_cache()->Set(*name, map, target());
-        }
-        isolate()->stub_cache()->Set(*name, receiver->map(), *code);
-        set_target((strict_mode == kStrictMode)
-                     ? *megamorphic_stub_strict()
-                     : *megamorphic_stub());
-      }
-      break;
-    case MEGAMORPHIC:
-      // Update the stub cache.
-      isolate()->stub_cache()->Set(*name, receiver->map(), *code);
-      break;
-    case DEBUG_STUB:
-      break;
-    case POLYMORPHIC:
-    case GENERIC:
-      UNREACHABLE();
       break;
   }
-
-  TRACE_IC("StoreIC", name, state, target());
+  return Handle<Code>::null();
 }
 
 
@@ -1759,17 +1724,14 @@
   key = TryConvertKey(key, isolate());
 
   if (key->IsSymbol()) {
-    Handle<String> name = Handle<String>::cast(key);
     return StoreIC::Store(state,
                           strict_mode,
                           object,
-                          name,
+                          Handle<String>::cast(key),
                           value,
                           JSReceiver::MAY_BE_STORE_FROM_KEYED);
   }
 
-  // Do not use ICs for objects that require access checks (including
-  // the global object), or are observed.
   bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded() &&
       !(FLAG_harmony_observation && object->IsJSObject() &&
           JSObject::cast(*object)->map()->is_observed());
@@ -1779,60 +1741,42 @@
     Handle<Code> stub = (strict_mode == kStrictMode)
         ? generic_stub_strict()
         : generic_stub();
-    if (object->IsJSObject()) {
-      Handle<JSObject> receiver = Handle<JSObject>::cast(object);
-      if (receiver->elements()->map() ==
-          isolate()->heap()->non_strict_arguments_elements_map()) {
-        stub = non_strict_arguments_stub();
-      } else if (miss_mode != MISS_FORCE_GENERIC) {
-        if (key->IsSmi() && (target() != *non_strict_arguments_stub())) {
+    if (miss_mode != MISS_FORCE_GENERIC) {
+      if (object->IsJSObject()) {
+        Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+        if (receiver->elements()->map() ==
+            isolate()->heap()->non_strict_arguments_elements_map()) {
+          stub = non_strict_arguments_stub();
+        } else if (key->IsSmi() && (target() != *non_strict_arguments_stub())) {
           StubKind stub_kind = GetStubKind(receiver, key, value);
           stub = StoreElementStub(receiver, stub_kind, strict_mode);
         }
-      } else {
-        TRACE_GENERIC_IC("KeyedStoreIC", "force generic");
       }
+    } else {
+      TRACE_GENERIC_IC("KeyedStoreIC", "force generic");
     }
-    if (!stub.is_null()) set_target(*stub);
+    ASSERT(!stub.is_null());
+    set_target(*stub);
+    TRACE_IC("KeyedStoreIC", key, state, target());
   }
 
-  TRACE_IC("KeyedStoreIC", key, state, target());
-
-  // Set the property.
   return Runtime::SetObjectProperty(
       isolate(), object , key, value, NONE, strict_mode);
 }
 
 
-void KeyedStoreIC::UpdateStoreCaches(LookupResult* lookup,
-                                     State state,
-                                     StrictModeFlag strict_mode,
-                                     Handle<JSObject> receiver,
-                                     Handle<String> name,
-                                     Handle<Object> value) {
-  ASSERT(!receiver->IsJSGlobalProxy());
-  ASSERT(StoreICableLookup(lookup));
-  ASSERT(lookup->IsFound());
-
-  // These are not cacheable, so we never see such LookupResults here.
-  ASSERT(!lookup->IsHandler());
-
+Handle<Code> KeyedStoreIC::ComputeStoreMonomorphic(LookupResult* lookup,
+                                                   StrictModeFlag strict_mode,
+                                                   Handle<JSObject> receiver,
+                                                   Handle<String> name) {
   // If the property has a non-field type allowing map transitions
   // where there is extra room in the object, we leave the IC in its
   // current state.
-  PropertyType type = lookup->type();
-
-  // Compute the code stub for this store; used for rewriting to
-  // monomorphic state and making sure that the code stub is in the
-  // stub cache.
-  Handle<Code> code;
-
-  switch (type) {
+  switch (lookup->type()) {
     case FIELD:
-      code = isolate()->stub_cache()->ComputeKeyedStoreField(
+      return isolate()->stub_cache()->ComputeKeyedStoreField(
           name, receiver, lookup->GetFieldIndex().field_index(),
           Handle<Map>::null(), strict_mode);
-      break;
     case TRANSITION: {
       Handle<Map> transition(lookup->GetTransitionTarget());
       int descriptor = transition->LastAdded();
@@ -1842,9 +1786,8 @@
 
       if (details.type() == FIELD && details.attributes() == NONE) {
         int field_index = target_descriptors->GetFieldIndex(descriptor);
-        code = isolate()->stub_cache()->ComputeKeyedStoreField(
+        return isolate()->stub_cache()->ComputeKeyedStoreField(
             name, receiver, field_index, transition, strict_mode);
-        break;
       }
       // fall through.
     }
@@ -1854,43 +1797,15 @@
     case INTERCEPTOR:
       // Always rewrite to the generic case so that we do not
       // repeatedly try to rewrite.
-      code = (strict_mode == kStrictMode)
+      return (strict_mode == kStrictMode)
           ? generic_stub_strict()
           : generic_stub();
-      break;
     case HANDLER:
     case NONEXISTENT:
       UNREACHABLE();
-      return;
-  }
-
-  ASSERT(!code.is_null());
-
-  // Patch the call site depending on the state of the cache.
-  switch (state) {
-    case UNINITIALIZED:
-    case PREMONOMORPHIC:
-    case POLYMORPHIC:
-      set_target(*code);
-      break;
-    case MONOMORPHIC:
-      // Only move to megamorphic if the target changes.
-      if (target() != *code) {
-        set_target((strict_mode == kStrictMode)
-                     ? *megamorphic_stub_strict()
-                     : *megamorphic_stub());
-      }
-      break;
-    case MEGAMORPHIC:
-    case GENERIC:
-    case DEBUG_STUB:
-      break;
-    case MONOMORPHIC_PROTOTYPE_FAILURE:
-      UNREACHABLE();
       break;
   }
-
-  TRACE_IC("KeyedStoreIC", name, state, target());
+  return Handle<Code>::null();
 }
 
 
@@ -2013,9 +1928,9 @@
 #endif
 
   Object* result;
-  { MaybeObject* maybe_result = receiver->SetElementsLength(len);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  MaybeObject* maybe_result = receiver->SetElementsLength(len);
+  if (!maybe_result->To(&result)) return maybe_result;
+
   return len;
 }
 
diff --git a/src/ic.h b/src/ic.h
index 3ffe4e0..cea308b 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -165,6 +165,21 @@
   static inline void SetTargetAtAddress(Address address, Code* target);
   static void PostPatching(Address address, Code* target, Code* old_target);
 
+  void PatchCache(State state,
+                  StrictModeFlag strict_mode,
+                  Handle<JSObject> receiver,
+                  Handle<String> name,
+                  Handle<Code> code);
+  virtual void UpdateMegamorphicCache(Map* map, String* name, Code* code);
+  virtual Handle<Code> megamorphic_stub() {
+    UNREACHABLE();
+    return Handle<Code>::null();
+  }
+  virtual Handle<Code> megamorphic_stub_strict() {
+    UNREACHABLE();
+    return Handle<Code>::null();
+  }
+
  private:
   // Frame pointer for the frame that uses (calls) the IC.
   Address fp_;
@@ -332,10 +347,6 @@
   static void GenerateMegamorphic(MacroAssembler* masm);
   static void GenerateNormal(MacroAssembler* masm);
 
-  // Specialized code generator routines.
-  static void GenerateArrayLength(MacroAssembler* masm);
-  static void GenerateFunctionPrototype(MacroAssembler* masm);
-
   MUST_USE_RESULT MaybeObject* Load(State state,
                                     Handle<Object> object,
                                     Handle<String> name);
@@ -361,7 +372,6 @@
   virtual Handle<Code> ComputeLoadMonomorphic(LookupResult* lookup,
                                               Handle<JSObject> receiver,
                                               Handle<String> name);
-  virtual void UpdateMegamorphicCache(Map* map, String* name, Code* code);
 
  private:
   // Stub accessors.
@@ -469,7 +479,6 @@
   static void GenerateMiss(MacroAssembler* masm);
   static void GenerateMegamorphic(MacroAssembler* masm,
                                   StrictModeFlag strict_mode);
-  static void GenerateArrayLength(MacroAssembler* masm);
   static void GenerateNormal(MacroAssembler* masm);
   static void GenerateGlobalProxy(MacroAssembler* masm,
                                   StrictModeFlag strict_mode);
@@ -502,12 +511,19 @@
 
   // Update the inline cache and the global stub cache based on the
   // lookup result.
-  virtual void UpdateStoreCaches(LookupResult* lookup,
-                                 State state,
-                                 StrictModeFlag strict_mode,
-                                 Handle<JSObject> receiver,
-                                 Handle<String> name,
-                                 Handle<Object> value);
+  void UpdateCaches(LookupResult* lookup,
+                    State state,
+                    StrictModeFlag strict_mode,
+                    Handle<JSObject> receiver,
+                    Handle<String> name,
+                    Handle<Object> value);
+  // Compute the code stub for this store; used for rewriting to
+  // monomorphic state and making sure that the code stub is in the
+  // stub cache.
+  virtual Handle<Code> ComputeStoreMonomorphic(LookupResult* lookup,
+                                               StrictModeFlag strict_mode,
+                                               Handle<JSObject> receiver,
+                                               Handle<String> name);
 
  private:
   void set_target(Code* code) {
@@ -607,13 +623,11 @@
  protected:
   virtual Code::Kind kind() const { return Code::KEYED_STORE_IC; }
 
-  // Update the inline cache.
-  virtual void UpdateStoreCaches(LookupResult* lookup,
-                                 State state,
-                                 StrictModeFlag strict_mode,
-                                 Handle<JSObject> receiver,
-                                 Handle<String> name,
-                                 Handle<Object> value);
+  virtual Handle<Code> ComputeStoreMonomorphic(LookupResult* lookup,
+                                               StrictModeFlag strict_mode,
+                                               Handle<JSObject> receiver,
+                                               Handle<String> name);
+  virtual void UpdateMegamorphicCache(Map* map, String* name, Code* code) { }
 
   virtual Handle<Code> megamorphic_stub() {
     return isolate()->builtins()->KeyedStoreIC_Generic();
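
Taken together, the ic.cc and ic.h hunks replace the per-IC UpdateStoreCaches methods with a shared path: each subclass now only computes a monomorphic stub via ComputeStoreMonomorphic, and the new IC::PatchCache performs the call-site transition, falling back to the virtual megamorphic_stub()/megamorphic_stub_strict() accessors. A minimal sketch of the assumed shape of the shared StoreIC::UpdateCaches after this change (illustrative only, not the literal patch text):

  void StoreIC::UpdateCaches(LookupResult* lookup,
                             State state,
                             StrictModeFlag strict_mode,
                             Handle<JSObject> receiver,
                             Handle<String> name,
                             Handle<Object> value) {
    ASSERT(!receiver->IsJSGlobalProxy());
    ASSERT(StoreICableLookup(lookup));
    // The subclass (StoreIC or KeyedStoreIC) picks the monomorphic stub.
    Handle<Code> code =
        ComputeStoreMonomorphic(lookup, strict_mode, receiver, name);
    if (code.is_null()) return;
    // Shared transition: go monomorphic on the first hit, megamorphic when
    // the target changes, and keep the global stub cache up to date.
    PatchCache(state, strict_mode, receiver, name, code);
    TRACE_IC("StoreIC", name, state, target());
  }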
diff --git a/src/mksnapshot.cc b/src/mksnapshot.cc
index d777551..a3665e9 100644
--- a/src/mksnapshot.cc
+++ b/src/mksnapshot.cc
@@ -384,7 +384,7 @@
   // context even after we have disposed of the context.
   HEAP->CollectAllGarbage(i::Heap::kNoGCFlags, "mksnapshot");
   i::Object* raw_context = *(v8::Utils::OpenHandle(*context));
-  context.Dispose();
+  context.Dispose(context->GetIsolate());
   CppByteSink sink(argv[1]);
   // This results in a somewhat smaller snapshot, probably because it gets rid
   // of some things that are cached between garbage collections.
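
As the one-line mksnapshot.cc change above (and the profile-generator.cc hunk below) shows, disposing a Persistent handle now requires passing the owning Isolate explicitly. A minimal embedder-side sketch, assuming `context` is a v8::Persistent<v8::Context> the embedder created earlier:

  v8::Isolate* isolate = context->GetIsolate();
  context.Dispose(isolate);  // previously: context.Dispose();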
diff --git a/src/objects-inl.h b/src/objects-inl.h
index ef506fa..28d8762 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -3522,6 +3522,8 @@
          kind() == COMPARE_IC ||
          kind() == LOAD_IC ||
          kind() == KEYED_LOAD_IC ||
+         kind() == STORE_IC ||
+         kind() == KEYED_STORE_IC ||
          kind() == TO_BOOLEAN_IC);
   ASSERT(0 <= major && major < 256);
   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
@@ -4927,7 +4929,9 @@
   ASSERT(kind() == COMPARE_IC ||
          kind() == BINARY_OP_IC ||
          kind() == LOAD_IC ||
-         kind() == KEYED_LOAD_IC);
+         kind() == KEYED_LOAD_IC ||
+         kind() == STORE_IC ||
+         kind() == KEYED_STORE_IC);
   WRITE_FIELD(this, kTypeFeedbackInfoOffset, Smi::FromInt(value));
 }
 
@@ -5075,17 +5079,6 @@
 }
 
 
-void JSRegExp::ResetLastIndex(Isolate* isolate,
-                              Handle<JSRegExp> regexp) {
-  // Reset lastIndex property to 0.
-  SetProperty(regexp,
-              isolate->factory()->last_index_symbol(),
-              Handle<Smi>(Smi::FromInt(0)),
-              ::NONE,
-              kNonStrictMode);
-}
-
-
 ElementsKind JSObject::GetElementsKind() {
   ElementsKind kind = map()->elements_kind();
 #if DEBUG
diff --git a/src/objects.h b/src/objects.h
index 958e1ef..975fdf8 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -6622,8 +6622,6 @@
   inline void SetDataAtUnchecked(int index, Object* value, Heap* heap);
   inline Type TypeTagUnchecked();
 
-  static inline void ResetLastIndex(Isolate* isolate, Handle<JSRegExp> regexp);
-
   static int code_index(bool is_ascii) {
     if (is_ascii) {
       return kIrregexpASCIICodeIndex;
diff --git a/src/profile-generator.cc b/src/profile-generator.cc
index a4e6596..0c7dc14 100644
--- a/src/profile-generator.cc
+++ b/src/profile-generator.cc
@@ -66,7 +66,9 @@
   Handle<Object> handle = isolate->global_handles()->Create(token);
   // handle.location() points to a memory cell holding a pointer
   // to a token object in the V8 heap.
-  isolate->global_handles()->MakeWeak(handle.location(), this,
+  isolate->global_handles()->MakeWeak(handle.location(),
+                                      this,
+                                      NULL,
                                       TokenRemovedCallback);
   token_locations_.Add(handle.location());
   token_removed_.Add(false);
@@ -74,11 +76,12 @@
 }
 
 
-void TokenEnumerator::TokenRemovedCallback(v8::Persistent<v8::Value> handle,
+void TokenEnumerator::TokenRemovedCallback(v8::Isolate* isolate,
+                                           v8::Persistent<v8::Value> handle,
                                            void* parameter) {
   reinterpret_cast<TokenEnumerator*>(parameter)->TokenRemoved(
       Utils::OpenHandle(*handle).location());
-  handle.Dispose();
+  handle.Dispose(isolate);
 }
 
 
diff --git a/src/profile-generator.h b/src/profile-generator.h
index f306659..086e5c6 100644
--- a/src/profile-generator.h
+++ b/src/profile-generator.h
@@ -45,7 +45,8 @@
   static const int kInheritsSecurityToken = -2;
 
  private:
-  static void TokenRemovedCallback(v8::Persistent<v8::Value> handle,
+  static void TokenRemovedCallback(v8::Isolate* isolate,
+                                   v8::Persistent<v8::Value> handle,
                                    void* parameter);
   void TokenRemoved(Object** token_location);
 
diff --git a/src/runtime.cc b/src/runtime.cc
index 5047dbd..e8003b6 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -2912,8 +2912,7 @@
 
   int matches = indices.length();
   if (matches == 0) {
-    JSRegExp::ResetLastIndex(isolate, pattern_regexp);
-    return *subject;
+    return isolate->heap()->undefined_value();
   }
 
   // Detect integer overflow.
@@ -3015,8 +3014,7 @@
   int32_t* current_match = global_cache.FetchNext();
   if (current_match == NULL) {
     if (global_cache.HasException()) return Failure::Exception();
-    JSRegExp::ResetLastIndex(isolate, regexp);
-    return *subject;
+    return isolate->heap()->undefined_value();
   }
 
   // Guessing the number of parts that the final result string is built
@@ -3114,8 +3112,7 @@
   int32_t* current_match = global_cache.FetchNext();
   if (current_match == NULL) {
     if (global_cache.HasException()) return Failure::Exception();
-    JSRegExp::ResetLastIndex(isolate, regexp);
-    return *subject;
+    return isolate->heap()->undefined_value();
   }
 
   int start = current_match[0];
diff --git a/src/string.js b/src/string.js
index 60a5abe..b39976c 100644
--- a/src/string.js
+++ b/src/string.js
@@ -194,6 +194,7 @@
     // lastMatchInfo is defined in regexp.js.
     var result = %StringMatch(subject, regexp, lastMatchInfo);
     if (result !== null) lastMatchInfoOverride = null;
+    regexp.lastIndex = 0;
     return result;
   }
   // Non-regexp argument.
@@ -244,13 +245,19 @@
       }
     } else {
       if (lastMatchInfoOverride == null) {
-        return %StringReplaceRegExpWithString(subject,
-                                              search,
-                                              TO_STRING_INLINE(replace),
-                                              lastMatchInfo);
+        var answer = %StringReplaceRegExpWithString(subject,
+                                                    search,
+                                                    TO_STRING_INLINE(replace),
+                                                    lastMatchInfo);
+        if (IS_UNDEFINED(answer)) {  // No match.  Return subject string.
+          search.lastIndex = 0;
+          return subject;
+        }
+        if (search.global) search.lastIndex = 0;
+        return answer;
       } else {
         // We use this hack to detect whether StringReplaceRegExpWithString
-        // found at least one hit.  In that case we need to remove any
+        // found at least one hit. In that case we need to remove any
         // override.
         var saved_subject = lastMatchInfo[LAST_SUBJECT_INDEX];
         lastMatchInfo[LAST_SUBJECT_INDEX] = 0;
@@ -258,11 +265,17 @@
                                                     search,
                                                     TO_STRING_INLINE(replace),
                                                     lastMatchInfo);
+        if (IS_UNDEFINED(answer)) {  // No match.  Return subject string.
+          search.lastIndex = 0;
+          lastMatchInfo[LAST_SUBJECT_INDEX] = saved_subject;
+          return subject;
+        }
         if (%_IsSmi(lastMatchInfo[LAST_SUBJECT_INDEX])) {
           lastMatchInfo[LAST_SUBJECT_INDEX] = saved_subject;
         } else {
           lastMatchInfoOverride = null;
         }
+        if (search.global) search.lastIndex = 0;
         return answer;
       }
     }
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 43d0ab9..e6722f7 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -774,11 +774,10 @@
 }
 
 
-Handle<Code> StubCache::ComputeCallArguments(int argc, Code::Kind kind) {
-  ASSERT(kind == Code::KEYED_CALL_IC);
+Handle<Code> StubCache::ComputeCallArguments(int argc) {
   Code::Flags flags =
-      Code::ComputeFlags(kind, MEGAMORPHIC, Code::kNoExtraICState,
-                         Code::NORMAL, argc);
+      Code::ComputeFlags(Code::KEYED_CALL_IC, MEGAMORPHIC,
+                         Code::kNoExtraICState, Code::NORMAL, argc);
   Handle<UnseededNumberDictionary> cache =
       isolate_->factory()->non_monomorphic_cache();
   int entry = cache->FindEntry(isolate_, flags);
diff --git a/src/stub-cache.h b/src/stub-cache.h
index b26519c..5becb11 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -227,7 +227,7 @@
                                  Code::Kind kind,
                                  Code::ExtraICState state);
 
-  Handle<Code> ComputeCallArguments(int argc, Code::Kind kind);
+  Handle<Code> ComputeCallArguments(int argc);
 
   Handle<Code> ComputeCallMegamorphic(int argc,
                                       Code::Kind kind,
@@ -485,8 +485,8 @@
                           Register scratch2,
                           Label* miss_label);
 
-  static void GenerateLoadMiss(MacroAssembler* masm,
-                               Code::Kind kind);
+  static void GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind);
+  static void GenerateStoreMiss(MacroAssembler* masm, Code::Kind kind);
 
   static void GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm);
 
diff --git a/src/version.cc b/src/version.cc
index d1f6a48..e9e38f2 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     16
-#define BUILD_NUMBER      9
+#define BUILD_NUMBER      10
 #define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 6bef42f..a0424ac 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -2442,6 +2442,68 @@
 }
 
 
+void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- rax    : value
+  //  -- rcx    : key
+  //  -- rdx    : receiver
+  //  -- rsp[0] : return address
+  // -----------------------------------
+  //
+  // This accepts as a receiver anything JSArray::SetElementsLength accepts
+  // (currently anything except for external arrays which means anything with
+  // elements of FixedArray type).  Value must be a number, but only smis are
+  // accepted as the most common case.
+
+  Label miss;
+
+  Register receiver = rdx;
+  Register value = rax;
+  Register scratch = rbx;
+  if (kind() == Code::KEYED_STORE_IC) {
+    __ Cmp(rcx, masm->isolate()->factory()->length_symbol());
+    __ j(not_equal, &miss);
+  }
+
+  // Check that the receiver isn't a smi.
+  __ JumpIfSmi(receiver, &miss);
+
+  // Check that the object is a JS array.
+  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
+  __ j(not_equal, &miss);
+
+  // Check that elements are FixedArray.
+  // We rely on StoreIC_ArrayLength below to deal with all types of
+  // fast elements (including COW).
+  __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
+  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
+  __ j(not_equal, &miss);
+
+  // Check that the array has fast properties, otherwise the length
+  // property might have been redefined.
+  __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
+  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
+                 Heap::kHashTableMapRootIndex);
+  __ j(equal, &miss);
+
+  // Check that value is a smi.
+  __ JumpIfNotSmi(value, &miss);
+
+  // Prepare tail call to StoreIC_ArrayLength.
+  __ pop(scratch);
+  __ push(receiver);
+  __ push(value);
+  __ push(scratch);  // return address
+
+  ExternalReference ref =
+      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
+  __ TailCallExternalReference(ref, 2, 1);
+
+  __ bind(&miss);
+
+  StubCompiler::GenerateStoreMiss(masm, kind());
+}
+
+
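
The StoreArrayLengthStub added above replaces the x64 StoreIC::GenerateArrayLength routine removed from ic-x64.cc below. By branching on kind(), one stub body serves both STORE_IC and KEYED_STORE_IC; the keyed case additionally has to verify that the key really is "length" before taking the fast path. A sketch of that guard, mirroring the added code:

  if (kind() == Code::KEYED_STORE_IC) {
    // Anything other than a store to "length" falls through to the miss path.
    __ Cmp(rcx, masm->isolate()->factory()->length_symbol());
    __ j(not_equal, &miss);
  }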
 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   // The key is in rdx and the parameter count is in rax.
 
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index fc7f9b1..3911438 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -1455,65 +1455,6 @@
 }
 
 
-void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- rax    : value
-  //  -- rcx    : name
-  //  -- rdx    : receiver
-  //  -- rsp[0] : return address
-  // -----------------------------------
-  //
-  // This accepts as a receiver anything JSArray::SetElementsLength accepts
-  // (currently anything except for external arrays which means anything with
-  // elements of FixedArray type).  Value must be a number, but only smis are
-  // accepted as the most common case.
-
-  Label miss;
-
-  Register receiver = rdx;
-  Register value = rax;
-  Register scratch = rbx;
-
-  // Check that the receiver isn't a smi.
-  __ JumpIfSmi(receiver, &miss);
-
-  // Check that the object is a JS array.
-  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
-  __ j(not_equal, &miss);
-
-  // Check that elements are FixedArray.
-  // We rely on StoreIC_ArrayLength below to deal with all types of
-  // fast elements (including COW).
-  __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
-  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
-  __ j(not_equal, &miss);
-
-  // Check that the array has fast properties, otherwise the length
-  // property might have been redefined.
-  __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
-  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
-                 Heap::kHashTableMapRootIndex);
-  __ j(equal, &miss);
-
-  // Check that value is a smi.
-  __ JumpIfNotSmi(value, &miss);
-
-  // Prepare tail call to StoreIC_ArrayLength.
-  __ pop(scratch);
-  __ push(receiver);
-  __ push(value);
-  __ push(scratch);  // return address
-
-  ExternalReference ref =
-      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
-  __ TailCallExternalReference(ref, 2, 1);
-
-  __ bind(&miss);
-
-  GenerateMiss(masm);
-}
-
-
 void StoreIC::GenerateNormal(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rax    : value
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 384e5c0..ba6321a 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -723,6 +723,15 @@
 }
 
 
+void StubCompiler::GenerateStoreMiss(MacroAssembler* masm, Code::Kind kind) {
+  ASSERT(kind == Code::STORE_IC || kind == Code::KEYED_STORE_IC);
+  Handle<Code> code = (kind == Code::STORE_IC)
+      ? masm->isolate()->builtins()->StoreIC_Miss()
+      : masm->isolate()->builtins()->KeyedStoreIC_Miss();
+  __ Jump(code, RelocInfo::CODE_TARGET);
+}
+
+
 void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
   Handle<Code> code =
       masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();