Push version 2.3.8 to trunk.

Fixed build with strict aliasing on GCC 4.4 (issue 463).

Fixed issue with incorrect handling of custom valueOf methods on string wrappers (issue 760).

Fixed compilation for ARMv4 (issue 590).

Improved performance.


git-svn-id: http://v8.googlecode.com/svn/trunk@5276 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
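
The strict-aliasing fix for GCC 4.4 (issue 463) is noted above; as a minimal sketch, with hypothetical names rather than V8 code, of the kind of type punning GCC 4.4 diagnoses under -fstrict-aliasing and its well-defined replacement:

    #include <string.h>

    // GCC 4.4 at -O2 warns "dereferencing type-punned pointer will break
    // strict-aliasing rules" for the commented-out cast below.
    static unsigned BitsOfFloat(float f) {
      // unsigned bad = *reinterpret_cast<unsigned*>(&f);  // aliasing violation
      unsigned bits;
      memcpy(&bits, &f, sizeof bits);  // well-defined alternative
      return bits;
    }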
diff --git a/ChangeLog b/ChangeLog
index caa63fe..4c96de0 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,15 @@
+2010-08-16: Version 2.3.8
+
+        Fixed build with strict aliasing on GCC 4.4 (issue 463).
+
+        Fixed issue with incorrect handling of custom valueOf methods on
+        string wrappers (issue 760).
+
+        Fixed compilation for ARMv4 (issue 590).
+
+        Improved performance.
+
+
 2010-08-11: Version 2.3.7
 
         Reduced size of heap snapshots produced by heap profiler (issue 783).
diff --git a/include/v8.h b/include/v8.h
index 20cef79..ff73226 100644
--- a/include/v8.h
+++ b/include/v8.h
@@ -1824,19 +1824,10 @@
 
 /**
  * Returns a non-empty handle if the interceptor intercepts the request.
- * The result is true if either a boolean (true if property exists and false
- * otherwise) or an integer encoding property attributes.
+ * The result is an integer encoding property attributes.
  */
-#ifdef USE_NEW_QUERY_CALLBACKS
 typedef Handle<Integer> (*IndexedPropertyQuery)(uint32_t index,
                                                 const AccessorInfo& info);
-#else
-typedef Handle<Boolean> (*IndexedPropertyQuery)(uint32_t index,
-                                                const AccessorInfo& info);
-#endif
-
-typedef Handle<Value> (*IndexedPropertyQueryImpl)(uint32_t index,
-                                                  const AccessorInfo& info);
 
 /**
  * Returns a non-empty handle if the deleter intercepts the request.
@@ -2054,23 +2045,7 @@
                                          IndexedPropertyQuery query,
                                          IndexedPropertyDeleter remover,
                                          IndexedPropertyEnumerator enumerator,
-                                         Handle<Value> data) {
-    IndexedPropertyQueryImpl casted =
-        reinterpret_cast<IndexedPropertyQueryImpl>(query);
-    SetIndexedInstancePropertyHandlerImpl(getter,
-                                          setter,
-                                          casted,
-                                          remover,
-                                          enumerator,
-                                          data);
-  }
-  void SetIndexedInstancePropertyHandlerImpl(
-      IndexedPropertyGetter getter,
-      IndexedPropertySetter setter,
-      IndexedPropertyQueryImpl query,
-      IndexedPropertyDeleter remover,
-      IndexedPropertyEnumerator enumerator,
-      Handle<Value> data);
+                                         Handle<Value> data);
   void SetInstanceCallAsFunctionHandler(InvocationCallback callback,
                                         Handle<Value> data);
 
@@ -2169,24 +2144,7 @@
                                  IndexedPropertyQuery query = 0,
                                  IndexedPropertyDeleter deleter = 0,
                                  IndexedPropertyEnumerator enumerator = 0,
-                                 Handle<Value> data = Handle<Value>()) {
-    IndexedPropertyQueryImpl casted =
-        reinterpret_cast<IndexedPropertyQueryImpl>(query);
-    SetIndexedPropertyHandlerImpl(getter,
-                                  setter,
-                                  casted,
-                                  deleter,
-                                  enumerator,
-                                  data);
-  }
- private:
-  void SetIndexedPropertyHandlerImpl(IndexedPropertyGetter getter,
-                                     IndexedPropertySetter setter,
-                                     IndexedPropertyQueryImpl query,
-                                     IndexedPropertyDeleter deleter,
-                                     IndexedPropertyEnumerator enumerator,
-                                     Handle<Value> data);
- public:
+                                 Handle<Value> data = Handle<Value>());
 
   /**
    * Sets the callback to be used when calling instances created from
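
With the old boolean query callback removed, an indexed interceptor's query callback always returns a Handle<Integer> encoding property attributes, or an empty handle to decline. A hedged embedder-side sketch against the declarations above; the names and attribute choice are illustrative:

    static v8::Handle<v8::Integer> QueryIndexed(uint32_t index,
                                                const v8::AccessorInfo& info) {
      if (index < 10) {
        // Intercept: report the property as present and non-deletable.
        return v8::Integer::New(v8::DontDelete);
      }
      return v8::Handle<v8::Integer>();  // empty handle: not intercepted
    }

    // Registered through the handler declared above, for example:
    //   templ->SetIndexedPropertyHandler(Getter, Setter, QueryIndexed,
    //                                    Deleter, Enumerator);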
diff --git a/src/SConscript b/src/SConscript
index 8466a0c..e6b4e38 100755
--- a/src/SConscript
+++ b/src/SConscript
@@ -84,6 +84,7 @@
     mark-compact.cc
     messages.cc
     objects.cc
+    objects-visiting.cc
     oprofile-agent.cc
     parser.cc
     profile-generator.cc
@@ -117,7 +118,6 @@
     zone.cc
     """),
   'arch:arm': Split("""
-    fast-codegen.cc
     jump-target-light.cc
     virtual-frame-light.cc
     arm/builtins-arm.cc
@@ -126,7 +126,6 @@
     arm/cpu-arm.cc
     arm/debug-arm.cc
     arm/disasm-arm.cc
-    arm/fast-codegen-arm.cc
     arm/frames-arm.cc
     arm/full-codegen-arm.cc
     arm/ic-arm.cc
@@ -139,7 +138,6 @@
     arm/assembler-arm.cc
     """),
   'arch:mips': Split("""
-    fast-codegen.cc
     mips/assembler-mips.cc
     mips/builtins-mips.cc
     mips/codegen-mips.cc
@@ -147,7 +145,6 @@
     mips/cpu-mips.cc
     mips/debug-mips.cc
     mips/disasm-mips.cc
-    mips/fast-codegen-mips.cc
     mips/full-codegen-mips.cc
     mips/frames-mips.cc
     mips/ic-mips.cc
@@ -166,7 +163,6 @@
     ia32/cpu-ia32.cc
     ia32/debug-ia32.cc
     ia32/disasm-ia32.cc
-    ia32/fast-codegen-ia32.cc
     ia32/frames-ia32.cc
     ia32/full-codegen-ia32.cc
     ia32/ic-ia32.cc
@@ -178,7 +174,6 @@
     ia32/virtual-frame-ia32.cc
     """),
   'arch:x64': Split("""
-    fast-codegen.cc
     jump-target-heavy.cc
     virtual-frame-heavy.cc
     x64/assembler-x64.cc
@@ -187,7 +182,6 @@
     x64/cpu-x64.cc
     x64/debug-x64.cc
     x64/disasm-x64.cc
-    x64/fast-codegen-x64.cc
     x64/frames-x64.cc
     x64/full-codegen-x64.cc
     x64/ic-x64.cc
diff --git a/src/api.cc b/src/api.cc
index b3164dd..7a967db 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -174,6 +174,8 @@
   heap_stats.objects_per_type = objects_per_type;
   int size_per_type[LAST_TYPE + 1] = {0};
   heap_stats.size_per_type = size_per_type;
+  int os_error;
+  heap_stats.os_error = &os_error;
   int end_marker;
   heap_stats.end_marker = &end_marker;
   i::Heap::RecordStats(&heap_stats, take_snapshot);
@@ -886,10 +888,10 @@
 }
 
 
-void FunctionTemplate::SetIndexedInstancePropertyHandlerImpl(
+void FunctionTemplate::SetIndexedInstancePropertyHandler(
       IndexedPropertyGetter getter,
       IndexedPropertySetter setter,
-      IndexedPropertyQueryImpl query,
+      IndexedPropertyQuery query,
       IndexedPropertyDeleter remover,
       IndexedPropertyEnumerator enumerator,
       Handle<Value> data) {
@@ -1054,10 +1056,10 @@
 }
 
 
-void ObjectTemplate::SetIndexedPropertyHandlerImpl(
+void ObjectTemplate::SetIndexedPropertyHandler(
       IndexedPropertyGetter getter,
       IndexedPropertySetter setter,
-      IndexedPropertyQueryImpl query,
+      IndexedPropertyQuery query,
       IndexedPropertyDeleter remover,
       IndexedPropertyEnumerator enumerator,
       Handle<Value> data) {
@@ -1068,12 +1070,12 @@
   i::FunctionTemplateInfo* constructor =
       i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
   i::Handle<i::FunctionTemplateInfo> cons(constructor);
-  Utils::ToLocal(cons)->SetIndexedInstancePropertyHandlerImpl(getter,
-                                                              setter,
-                                                              query,
-                                                              remover,
-                                                              enumerator,
-                                                              data);
+  Utils::ToLocal(cons)->SetIndexedInstancePropertyHandler(getter,
+                                                          setter,
+                                                          query,
+                                                          remover,
+                                                          enumerator,
+                                                          data);
 }
 
 
diff --git a/src/arm/assembler-arm-inl.h b/src/arm/assembler-arm-inl.h
index 5be5770..f72ad76 100644
--- a/src/arm/assembler-arm-inl.h
+++ b/src/arm/assembler-arm-inl.h
@@ -190,6 +190,29 @@
 }
 
 
+template<typename StaticVisitor>
+void RelocInfo::Visit() {
+  RelocInfo::Mode mode = rmode();
+  if (mode == RelocInfo::EMBEDDED_OBJECT) {
+    StaticVisitor::VisitPointer(target_object_address());
+  } else if (RelocInfo::IsCodeTarget(mode)) {
+    StaticVisitor::VisitCodeTarget(this);
+  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
+    StaticVisitor::VisitExternalReference(target_reference_address());
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  } else if (Debug::has_break_points() &&
+             ((RelocInfo::IsJSReturn(mode) &&
+              IsPatchedReturnSequence()) ||
+             (RelocInfo::IsDebugBreakSlot(mode) &&
+              IsPatchedDebugBreakSlotSequence()))) {
+    StaticVisitor::VisitDebugTarget(this);
+#endif
+  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
+    StaticVisitor::VisitRuntimeEntry(this);
+  }
+}
+
+
 Operand::Operand(int32_t immediate, RelocInfo::Mode rmode)  {
   rm_ = no_reg;
   imm32_ = immediate;
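
The templated RelocInfo::Visit added above dispatches to static hooks on the visitor type. Inferred from those calls, a StaticVisitor is expected to expose the following shape (a sketch of the implied interface, not an actual V8 visitor):

    class ExampleStaticVisitor {
     public:
      static void VisitPointer(Object** p);             // embedded object slots
      static void VisitCodeTarget(RelocInfo* rinfo);    // code targets
      static void VisitExternalReference(Address* p);   // external references
      static void VisitDebugTarget(RelocInfo* rinfo);   // patched debug targets
      static void VisitRuntimeEntry(RelocInfo* rinfo);  // runtime entries
    };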
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index b1705df..136c82e 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -2276,6 +2276,21 @@
 }
 
 
+void Assembler::vcmp(const DwVfpRegister src1,
+                     const double src2,
+                     const SBit s,
+                     const Condition cond) {
+  // vcmp(Dd, #0.0) double precision floating point comparison.
+  // Instruction details available in ARM DDI 0406A, A8-570.
+  // cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0101 (19-16) |
+  // Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=? | 1(6) | M(5)=? | 0(4) | 0000(3-0)
+  ASSERT(CpuFeatures::IsEnabled(VFP3));
+  ASSERT(src2 == 0.0);
+  emit(cond | 0xE*B24 | B23 | 0x3*B20 | B18 | B16 |
+       src1.code()*B12 | 0x5*B9 | B8 | B6);
+}
+
+
 void Assembler::vmrs(Register dst, Condition cond) {
   // Instruction details available in ARM DDI 0406A, A8-652.
   // cond(31-28) | 1110 (27-24) | 1111(23-20)| 0001 (19-16) |
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 16e69e2..218eb97 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -1031,6 +1031,10 @@
             const DwVfpRegister src2,
             const SBit s = LeaveCC,
             const Condition cond = al);
+  void vcmp(const DwVfpRegister src1,
+            const double src2,
+            const SBit s = LeaveCC,
+            const Condition cond = al);
   void vmrs(const Register dst,
             const Condition cond = al);
   void vsqrt(const DwVfpRegister dst,
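
The new vcmp overload accepts only an immediate of 0.0 (asserted in assembler-arm.cc above) and emits the compare-with-zero encoding. A usage sketch mirroring the ToBooleanStub code later in this patch:

      __ vldr(d1, ip, HeapNumber::kValueOffset);  // load the double value
      __ vcmp(d1, 0.0);                           // compare against +0.0
      __ vmrs(pc);                                // move FPSCR flags into CPSR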
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 37768e8..7e7e358 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -911,6 +911,29 @@
 }
 
 
+void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
+  // Enter an internal frame.
+  __ EnterInternalFrame();
+
+  // Preserve the function.
+  __ push(r1);
+
+  // Push the function on the stack as the argument to the runtime function.
+  __ push(r1);
+  __ CallRuntime(Runtime::kLazyCompile, 1);
+  // Calculate the entry point.
+  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
+  // Restore saved function.
+  __ pop(r1);
+
+  // Tear down temporary frame.
+  __ LeaveInternalFrame();
+
+  // Do a tail-call of the compiled function.
+  __ Jump(r2);
+}
+
+
 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   // 1. Make sure we have at least one argument.
   // r0: actual number of arguments
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index aec80d7..df17b6f 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -217,93 +217,80 @@
     }
 #endif
 
-    if (info->mode() == CompilationInfo::PRIMARY) {
-      frame_->Enter();
-      // tos: code slot
+    frame_->Enter();
+    // tos: code slot
 
-      // Allocate space for locals and initialize them.  This also checks
-      // for stack overflow.
-      frame_->AllocateStackSlots();
+    // Allocate space for locals and initialize them.  This also checks
+    // for stack overflow.
+    frame_->AllocateStackSlots();
 
-      frame_->AssertIsSpilled();
-      int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
-      if (heap_slots > 0) {
-        // Allocate local context.
-        // Get outer context and create a new context based on it.
-        __ ldr(r0, frame_->Function());
-        frame_->EmitPush(r0);
-        if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-          FastNewContextStub stub(heap_slots);
-          frame_->CallStub(&stub, 1);
-        } else {
-          frame_->CallRuntime(Runtime::kNewContext, 1);
-        }
+    frame_->AssertIsSpilled();
+    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+    if (heap_slots > 0) {
+      // Allocate local context.
+      // Get outer context and create a new context based on it.
+      __ ldr(r0, frame_->Function());
+      frame_->EmitPush(r0);
+      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+        FastNewContextStub stub(heap_slots);
+        frame_->CallStub(&stub, 1);
+      } else {
+        frame_->CallRuntime(Runtime::kNewContext, 1);
+      }
 
 #ifdef DEBUG
-        JumpTarget verified_true;
-        __ cmp(r0, cp);
-        verified_true.Branch(eq);
-        __ stop("NewContext: r0 is expected to be the same as cp");
-        verified_true.Bind();
+      JumpTarget verified_true;
+      __ cmp(r0, cp);
+      verified_true.Branch(eq);
+      __ stop("NewContext: r0 is expected to be the same as cp");
+      verified_true.Bind();
 #endif
-        // Update context local.
-        __ str(cp, frame_->Context());
-      }
+      // Update context local.
+      __ str(cp, frame_->Context());
+    }
 
-      // TODO(1241774): Improve this code:
-      // 1) only needed if we have a context
-      // 2) no need to recompute context ptr every single time
-      // 3) don't copy parameter operand code from SlotOperand!
-      {
-        Comment cmnt2(masm_, "[ copy context parameters into .context");
-        // Note that iteration order is relevant here! If we have the same
-        // parameter twice (e.g., function (x, y, x)), and that parameter
-        // needs to be copied into the context, it must be the last argument
-        // passed to the parameter that needs to be copied. This is a rare
-        // case so we don't check for it, instead we rely on the copying
-        // order: such a parameter is copied repeatedly into the same
-        // context location and thus the last value is what is seen inside
-        // the function.
-        frame_->AssertIsSpilled();
-        for (int i = 0; i < scope()->num_parameters(); i++) {
-          Variable* par = scope()->parameter(i);
-          Slot* slot = par->slot();
-          if (slot != NULL && slot->type() == Slot::CONTEXT) {
-            ASSERT(!scope()->is_global_scope());  // No params in global scope.
-            __ ldr(r1, frame_->ParameterAt(i));
-            // Loads r2 with context; used below in RecordWrite.
-            __ str(r1, SlotOperand(slot, r2));
-            // Load the offset into r3.
-            int slot_offset =
-                FixedArray::kHeaderSize + slot->index() * kPointerSize;
-            __ RecordWrite(r2, Operand(slot_offset), r3, r1);
-          }
+    // TODO(1241774): Improve this code:
+    // 1) only needed if we have a context
+    // 2) no need to recompute context ptr every single time
+    // 3) don't copy parameter operand code from SlotOperand!
+    {
+      Comment cmnt2(masm_, "[ copy context parameters into .context");
+      // Note that iteration order is relevant here! If we have the same
+      // parameter twice (e.g., function (x, y, x)), and that parameter
+      // needs to be copied into the context, it must be the last argument
+      // passed to the parameter that needs to be copied. This is a rare
+      // case so we don't check for it, instead we rely on the copying
+      // order: such a parameter is copied repeatedly into the same
+      // context location and thus the last value is what is seen inside
+      // the function.
+      frame_->AssertIsSpilled();
+      for (int i = 0; i < scope()->num_parameters(); i++) {
+        Variable* par = scope()->parameter(i);
+        Slot* slot = par->slot();
+        if (slot != NULL && slot->type() == Slot::CONTEXT) {
+          ASSERT(!scope()->is_global_scope());  // No params in global scope.
+          __ ldr(r1, frame_->ParameterAt(i));
+          // Loads r2 with context; used below in RecordWrite.
+          __ str(r1, SlotOperand(slot, r2));
+          // Load the offset into r3.
+          int slot_offset =
+              FixedArray::kHeaderSize + slot->index() * kPointerSize;
+          __ RecordWrite(r2, Operand(slot_offset), r3, r1);
         }
       }
+    }
 
-      // Store the arguments object.  This must happen after context
-      // initialization because the arguments object may be stored in
-      // the context.
-      if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
-        StoreArgumentsObject(true);
-      }
+    // Store the arguments object.  This must happen after context
+    // initialization because the arguments object may be stored in
+    // the context.
+    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
+      StoreArgumentsObject(true);
+    }
 
-      // Initialize ThisFunction reference if present.
-      if (scope()->is_function_scope() && scope()->function() != NULL) {
-        frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
-        StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
-      }
-    } else {
-      // When used as the secondary compiler for splitting, r1, cp,
-      // fp, and lr have been pushed on the stack.  Adjust the virtual
-      // frame to match this state.
-      frame_->Adjust(4);
-
-      // Bind all the bailout labels to the beginning of the function.
-      List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
-      for (int i = 0; i < bailouts->length(); i++) {
-        __ bind(bailouts->at(i)->label());
-      }
+    // Initialize ThisFunction reference if present.
+    if (scope()->is_function_scope() && scope()->function() != NULL) {
+      frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
+      StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
     }
 
     // Initialize the function return target after the locals are set
@@ -532,6 +519,10 @@
 
 
 void CodeGenerator::Load(Expression* expr) {
+  // We generally assume that we are not in a spilled scope for most
+  // of the code generator.  A failure to ensure this caused issue 815
+  // and this assert is designed to catch similar issues.
+  frame_->AssertIsNotSpilled();
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
@@ -688,6 +679,10 @@
       expression_(expression),
       type_(ILLEGAL),
       persist_after_get_(persist_after_get) {
+  // We generally assume that we are not in a spilled scope for most
+  // of the code generator.  A failure to ensure this caused issue 815
+  // and this assert is designed to catch similar issues.
+  cgen->frame()->AssertIsNotSpilled();
   cgen->LoadReference(this);
 }
 
@@ -784,12 +779,26 @@
     __ tst(tos, Operand(kSmiTagMask));
     true_target->Branch(eq);
 
-    // Slow case: call the runtime.
-    frame_->EmitPush(tos);
-    frame_->CallRuntime(Runtime::kToBool, 1);
-    // Convert the result (r0) to a condition code.
-    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
-    __ cmp(r0, ip);
+    // Slow case.
+    if (CpuFeatures::IsSupported(VFP3)) {
+      CpuFeatures::Scope scope(VFP3);
+      // Implements the slow case by using ToBooleanStub.
+      // The ToBooleanStub takes a single argument, and
+      // returns a non-zero value for true, or zero for false.
+      // Both the argument value and the return value use the
+      // register assigned to tos_.
+      ToBooleanStub stub(tos);
+      frame_->CallStub(&stub, 0);
+      // Convert the result in "tos" to a condition code.
+      __ cmp(tos, Operand(0));
+    } else {
+      // Implements slow case by calling the runtime.
+      frame_->EmitPush(tos);
+      frame_->CallRuntime(Runtime::kToBool, 1);
+      // Convert the result (r0) to a condition code.
+      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
+      __ cmp(r0, ip);
+    }
   }
 
   cc_reg_ = ne;
@@ -1213,7 +1222,21 @@
     case Token::SHR:
     case Token::SAR: {
       ASSERT(!reversed);
-      TypeInfo result = TypeInfo::Integer32();
+      TypeInfo result =
+          (op == Token::SAR) ? TypeInfo::Integer32() : TypeInfo::Number();
+      if (!reversed) {
+        if (op == Token::SHR) {
+          if (int_value >= 2) {
+            result = TypeInfo::Smi();
+          } else if (int_value >= 1) {
+            result = TypeInfo::Integer32();
+          }
+        } else {
+          if (int_value >= 1) {
+            result = TypeInfo::Smi();
+          }
+        }
+      }
       Register scratch = VirtualFrame::scratch0();
       Register scratch2 = VirtualFrame::scratch1();
       int shift_value = int_value & 0x1f;  // least significant 5 bits
@@ -1898,19 +1921,17 @@
 
 
 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
-  frame_->SpillAll();
   Comment cmnt(masm_, "[ ReturnStatement");
 
   CodeForStatementPosition(node);
   Load(node->expression());
+  frame_->PopToR0();
+  frame_->PrepareForReturn();
   if (function_return_is_shadowed_) {
-    frame_->EmitPop(r0);
     function_return_.Jump();
   } else {
     // Pop the result from the frame and prepare the frame for
     // returning thus making it easier to merge.
-    frame_->PopToR0();
-    frame_->PrepareForReturn();
     if (function_return_.is_bound()) {
       // If the function return label is already bound we reuse the
       // code by jumping to the return site.
@@ -2306,7 +2327,6 @@
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ ForInStatement");
   CodeForStatementPosition(node);
 
@@ -2320,6 +2340,7 @@
   // Get the object to enumerate over (converted to JSObject).
   Load(node->enumerable());
 
+  VirtualFrame::SpilledScope spilled_scope(frame_);
   // Both SpiderMonkey and kjs ignore null and undefined in contrast
   // to the specification.  12.6.4 mandates a call to ToObject.
   frame_->EmitPop(r0);
@@ -2481,36 +2502,39 @@
   frame_->EmitPush(r0);
   frame_->EmitPush(r3);  // push entry
   frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, 2);
-  __ mov(r3, Operand(r0));
-
+  __ mov(r3, Operand(r0), SetCC);
   // If the property has been removed while iterating, we just skip it.
-  __ LoadRoot(ip, Heap::kNullValueRootIndex);
-  __ cmp(r3, ip);
   node->continue_target()->Branch(eq);
 
   end_del_check.Bind();
   // Store the entry in the 'each' expression and take another spin in the
   // loop.  r3: i'th entry of the enum cache (or string there of)
   frame_->EmitPush(r3);  // push entry
-  { Reference each(this, node->each());
+  { VirtualFrame::RegisterAllocationScope scope(this);
+    Reference each(this, node->each());
     if (!each.is_illegal()) {
       if (each.size() > 0) {
+        // Loading a reference may leave the frame in an unspilled state.
+        frame_->SpillAll();  // Sync stack to memory.
+        // Get the value (under the reference on the stack) from memory.
         __ ldr(r0, frame_->ElementAt(each.size()));
         frame_->EmitPush(r0);
         each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
-        frame_->Drop(2);
+        frame_->Drop(2);  // The result of the set and the extra pushed value.
       } else {
         // If the reference was to a slot we rely on the convenient property
-        // that it doesn't matter whether a value (eg, r3 pushed above) is
+        // that it doesn't matter whether a value (eg, r3 pushed above) is
         // right on top of or right underneath a zero-sized reference.
         each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
-        frame_->Drop();
+        frame_->Drop(1);  // Drop the result of the set operation.
       }
     }
   }
   // Body.
   CheckStack();  // TODO(1222600): ignore if body contains calls.
-  Visit(node->body());
+  { VirtualFrame::RegisterAllocationScope scope(this);
+    Visit(node->body());
+  }
 
   // Next.  Reestablish a spilled frame in case we are coming here via
   // a continue in the body.
@@ -2557,7 +2581,9 @@
   // Remove the exception from the stack.
   frame_->Drop();
 
-  VisitStatements(node->catch_block()->statements());
+  { VirtualFrame::RegisterAllocationScope scope(this);
+    VisitStatements(node->catch_block()->statements());
+  }
   if (frame_ != NULL) {
     exit.Jump();
   }
@@ -2592,7 +2618,9 @@
   }
 
   // Generate code for the statements in the try block.
-  VisitStatements(node->try_block()->statements());
+  { VirtualFrame::RegisterAllocationScope scope(this);
+    VisitStatements(node->try_block()->statements());
+  }
 
   // Stop the introduced shadowing and count the number of required unlinks.
   // After shadowing stops, the original labels are unshadowed and the
@@ -2613,7 +2641,7 @@
     // the handler list and drop the rest of this handler from the
     // frame.
     STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
-    frame_->EmitPop(r1);
+    frame_->EmitPop(r1);  // r0 can contain the return value.
     __ mov(r3, Operand(handler_address));
     __ str(r1, MemOperand(r3));
     frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
@@ -2639,7 +2667,7 @@
       frame_->Forget(frame_->height() - handler_height);
 
       STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
-      frame_->EmitPop(r1);
+      frame_->EmitPop(r1);  // r0 can contain the return value.
       __ str(r1, MemOperand(r3));
       frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
 
@@ -2706,7 +2734,9 @@
   }
 
   // Generate code for the statements in the try block.
-  VisitStatements(node->try_block()->statements());
+  { VirtualFrame::RegisterAllocationScope scope(this);
+    VisitStatements(node->try_block()->statements());
+  }
 
   // Stop the introduced shadowing and count the number of required unlinks.
   // After shadowing stops, the original labels are unshadowed and the
@@ -2796,7 +2826,9 @@
   // and the state - while evaluating the finally block.
   //
   // Generate code for the statements in the finally block.
-  VisitStatements(node->finally_block()->statements());
+  { VirtualFrame::RegisterAllocationScope scope(this);
+    VisitStatements(node->finally_block()->statements());
+  }
 
   if (has_valid_frame()) {
     // Restore state and return value or faked TOS.
@@ -3976,7 +4008,6 @@
 
   } else if (var != NULL && var->slot() != NULL &&
              var->slot()->type() == Slot::LOOKUP) {
-    VirtualFrame::SpilledScope spilled_scope(frame_);
     // ----------------------------------
     // JavaScript examples:
     //
@@ -3989,8 +4020,6 @@
     //  }
     // ----------------------------------
 
-    // JumpTargets do not yet support merging frames so the frame must be
-    // spilled when jumping to these targets.
     JumpTarget slow, done;
 
     // Generate fast case for loading functions from slots that
@@ -4004,8 +4033,7 @@
     slow.Bind();
     // Load the function
     frame_->EmitPush(cp);
-    __ mov(r0, Operand(var->name()));
-    frame_->EmitPush(r0);
+    frame_->EmitPush(Operand(var->name()));
     frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
     // r0: slot value; r1: receiver
 
@@ -4021,7 +4049,7 @@
       call.Jump();
       done.Bind();
       frame_->EmitPush(r0);  // function
-      LoadGlobalReceiver(r1);  // receiver
+      LoadGlobalReceiver(VirtualFrame::scratch0());  // receiver
       call.Bind();
     }
 
@@ -4076,8 +4104,6 @@
       // -------------------------------------------
       // JavaScript example: 'array[index](1, 2, 3)'
       // -------------------------------------------
-      VirtualFrame::SpilledScope spilled_scope(frame_);
-
       Load(property->obj());
       if (property->is_synthetic()) {
         Load(property->key());
@@ -4085,7 +4111,7 @@
         // Put the function below the receiver.
         // Use the global receiver.
         frame_->EmitPush(r0);  // Function.
-        LoadGlobalReceiver(r0);
+        LoadGlobalReceiver(VirtualFrame::scratch0());
         // Call the function.
         CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
         frame_->EmitPush(r0);
@@ -4098,6 +4124,7 @@
 
         // Set the name register and call the IC initialization code.
         Load(property->key());
+        frame_->SpillAll();
         frame_->EmitPop(r2);  // Function name.
 
         InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
@@ -4117,10 +4144,8 @@
     // Load the function.
     Load(function);
 
-    VirtualFrame::SpilledScope spilled_scope(frame_);
-
     // Pass the global proxy as the receiver.
-    LoadGlobalReceiver(r0);
+    LoadGlobalReceiver(VirtualFrame::scratch0());
 
     // Call the function.
     CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
@@ -4175,8 +4200,8 @@
 
 
 void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
-  JumpTarget leave, null, function, non_function_constructor;
   Register scratch = VirtualFrame::scratch0();
+  JumpTarget null, function, leave, non_function_constructor;
 
   // Load the object into register.
   ASSERT(args->length() == 1);
@@ -4789,6 +4814,152 @@
 }
 
 
+// Deferred code to check whether a String wrapper object is safe to use the
+// default valueOf behavior. This code is called after checking the bit that
+// caches this information in the map of the object, which is held in the
+// map_result_ register. On return the register map_result_ contains 1 for
+// true and 0 for false.
+class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
+ public:
+  DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
+                                               Register map_result,
+                                               Register scratch1,
+                                               Register scratch2)
+      : object_(object),
+        map_result_(map_result),
+        scratch1_(scratch1),
+        scratch2_(scratch2) { }
+
+  virtual void Generate() {
+    Label false_result;
+
+    // Check that map is loaded as expected.
+    if (FLAG_debug_code) {
+      __ ldr(ip, FieldMemOperand(object_, HeapObject::kMapOffset));
+      __ cmp(map_result_, ip);
+      __ Assert(eq, "Map not in expected register");
+    }
+
+    // Check for fast case object. Generate false result for slow case object.
+    __ ldr(scratch1_, FieldMemOperand(object_, JSObject::kPropertiesOffset));
+    __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
+    __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
+    __ cmp(scratch1_, ip);
+    __ b(eq, &false_result);
+
+    // Look for the valueOf symbol in the descriptor array, and indicate false
+    // if it is found. The type is not checked, so if the entry is a transition
+    // this is a false negative.
+    __ ldr(map_result_,
+           FieldMemOperand(map_result_, Map::kInstanceDescriptorsOffset));
+    __ ldr(scratch2_, FieldMemOperand(map_result_, FixedArray::kLengthOffset));
+    // map_result_: descriptor array
+    // scratch2_: length of descriptor array
+    // Calculate the end of the descriptor array.
+    STATIC_ASSERT(kSmiTag == 0);
+    STATIC_ASSERT(kSmiTagSize == 1);
+    STATIC_ASSERT(kPointerSize == 4);
+    __ add(scratch1_,
+           map_result_,
+           Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+    __ add(scratch1_,
+           scratch1_,
+           Operand(scratch2_, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+    // Calculate location of the first key name.
+    __ add(map_result_,
+           map_result_,
+           Operand(FixedArray::kHeaderSize - kHeapObjectTag +
+                   DescriptorArray::kFirstIndex * kPointerSize));
+    // Loop through all the keys in the descriptor array. If one of them is
+    // the valueOf symbol, the result is false.
+    Label entry, loop;
+    // The use of ip to store the valueOf symbol assumes that it is not
+    // otherwise used in the loop below.
+    __ mov(ip, Operand(Factory::value_of_symbol()));
+    __ jmp(&entry);
+    __ bind(&loop);
+    __ ldr(scratch2_, MemOperand(map_result_, 0));
+    __ cmp(scratch2_, ip);
+    __ b(eq, &false_result);
+    __ add(map_result_, map_result_, Operand(kPointerSize));
+    __ bind(&entry);
+    __ cmp(map_result_, Operand(scratch1_));
+    __ b(ne, &loop);
+
+    // Reload map as register map_result_ was used as temporary above.
+    __ ldr(map_result_, FieldMemOperand(object_, HeapObject::kMapOffset));
+
+    // If a valueOf property is not found on the object, check that its
+    // prototype is the unmodified String prototype. If not, the result is false.
+    __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kPrototypeOffset));
+    __ tst(scratch1_, Operand(kSmiTagMask));
+    __ b(eq, &false_result);
+    __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
+    __ ldr(scratch2_,
+           CodeGenerator::ContextOperand(cp, Context::GLOBAL_INDEX));
+    __ ldr(scratch2_,
+           FieldMemOperand(scratch2_, GlobalObject::kGlobalContextOffset));
+    __ ldr(scratch2_,
+           CodeGenerator::ContextOperand(
+               scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
+    __ cmp(scratch1_, scratch2_);
+    __ b(ne, &false_result);
+
+    // Set the bit in the map to indicate that it has been checked as safe for
+    // the default valueOf, and set the result to true.
+    __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
+    __ orr(scratch1_,
+           scratch1_,
+           Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
+    __ str(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
+    __ mov(map_result_, Operand(1));
+    __ jmp(exit_label());
+    __ bind(&false_result);
+    // Set false result.
+    __ mov(map_result_, Operand(0));
+  }
+
+ private:
+  Register object_;
+  Register map_result_;
+  Register scratch1_;
+  Register scratch2_;
+};
+
+
+void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
+    ZoneList<Expression*>* args) {
+  ASSERT(args->length() == 1);
+  Load(args->at(0));
+  Register obj = frame_->PopToRegister();  // Pop the string wrapper.
+  if (FLAG_debug_code) {
+    __ AbortIfSmi(obj);
+  }
+
+  // Check whether this map has already been checked to be safe for default
+  // valueOf.
+  Register map_result = VirtualFrame::scratch0();
+  __ ldr(map_result, FieldMemOperand(obj, HeapObject::kMapOffset));
+  __ ldrb(ip, FieldMemOperand(map_result, Map::kBitField2Offset));
+  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
+  true_target()->Branch(ne);
+
+  // We need an additional two scratch registers for the deferred code.
+  Register scratch1 = VirtualFrame::scratch1();
+  // Use r6 without notifying the virtual frame.
+  Register scratch2 = r6;
+
+  DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
+      new DeferredIsStringWrapperSafeForDefaultValueOf(
+          obj, map_result, scratch1, scratch2);
+  deferred->Branch(eq);
+  deferred->BindExit();
+  __ tst(map_result, Operand(map_result));
+  cc_reg_ = ne;
+}
+
+
 void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
   // This generates a fast version of:
   // (%_ClassOf(arg) === 'Function')
@@ -7809,6 +7980,77 @@
 }
 
 
+// This stub does not handle the inlined cases (Smis, Booleans, undefined).
+// The stub returns zero for false, and a non-zero value for true.
+void ToBooleanStub::Generate(MacroAssembler* masm) {
+  Label false_result;
+  Label not_heap_number;
+  Register scratch0 = VirtualFrame::scratch0();
+
+  // HeapNumber => false iff +0, -0, or NaN.
+  __ ldr(scratch0, FieldMemOperand(tos_, HeapObject::kMapOffset));
+  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
+  __ cmp(scratch0, ip);
+  __ b(&not_heap_number, ne);
+
+  __ sub(ip, tos_, Operand(kHeapObjectTag));
+  __ vldr(d1, ip, HeapNumber::kValueOffset);
+  __ vcmp(d1, 0.0);
+  __ vmrs(pc);
+  // "tos_" is a register, and contains a non zero value by default.
+  // Hence we only need to overwrite "tos_" with zero to return false for
+  // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
+  __ mov(tos_, Operand(0), LeaveCC, eq);  // for FP_ZERO
+  __ mov(tos_, Operand(0), LeaveCC, vs);  // for FP_NAN
+  __ Ret();
+
+  __ bind(&not_heap_number);
+
+  // Check if the value is 'null'.
+  // 'null' => false.
+  __ LoadRoot(ip, Heap::kNullValueRootIndex);
+  __ cmp(tos_, ip);
+  __ b(&false_result, eq);
+
+  // It can be an undetectable object.
+  // Undetectable => false.
+  __ ldr(ip, FieldMemOperand(tos_, HeapObject::kMapOffset));
+  __ ldrb(scratch0, FieldMemOperand(ip, Map::kBitFieldOffset));
+  __ and_(scratch0, scratch0, Operand(1 << Map::kIsUndetectable));
+  __ cmp(scratch0, Operand(1 << Map::kIsUndetectable));
+  __ b(&false_result, eq);
+
+  // JavaScript object => true.
+  __ ldr(scratch0, FieldMemOperand(tos_, HeapObject::kMapOffset));
+  __ ldrb(scratch0, FieldMemOperand(scratch0, Map::kInstanceTypeOffset));
+  __ cmp(scratch0, Operand(FIRST_JS_OBJECT_TYPE));
+  // "tos_" is a register and contains a non-zero value.
+  // Hence we implicitly return true if the greater than
+  // condition is satisfied.
+  __ Ret(gt);
+
+  // Check for string
+  __ ldr(scratch0, FieldMemOperand(tos_, HeapObject::kMapOffset));
+  __ ldrb(scratch0, FieldMemOperand(scratch0, Map::kInstanceTypeOffset));
+  __ cmp(scratch0, Operand(FIRST_NONSTRING_TYPE));
+  // "tos_" is a register and contains a non-zero value.
+  // Hence we implicitly return true if the greater than
+  // condition is satisfied.
+  __ Ret(gt);
+
+  // String value => false iff empty, i.e., length is zero
+  __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset));
+  // If length is zero, "tos_" contains zero ==> false.
+  // If length is not zero, "tos_" contains a non-zero value ==> true.
+  __ Ret();
+
+  // Return 0 in "tos_" for false .
+  __ bind(&false_result);
+  __ mov(tos_, Operand(0));
+  __ Ret();
+}
+
+
 // We fall into this code if the operands were Smis, but the result was
 // not (eg. overflow).  We branch into this code (to the not_smi label) if
 // the operands were not both Smi.  The operands are in r0 and r1.  In order
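
For reference, the slow-case truth rules that ToBooleanStub::Generate encodes can be written out in plain C++. The toy value model below is a stand-in for V8's heap types, not real V8 code:

    #include <string>

    // Smis, Booleans and undefined are handled inline before the stub runs.
    struct ToyValue {
      enum Kind { HEAP_NUMBER, NULL_VALUE, UNDETECTABLE, JS_OBJECT, STRING };
      Kind kind;
      double number;       // valid for HEAP_NUMBER
      std::string string;  // valid for STRING
    };

    static bool ToBooleanSlowCase(const ToyValue& v) {
      switch (v.kind) {
        case ToyValue::HEAP_NUMBER:
          // false iff +0, -0 or NaN (NaN compares unequal to itself).
          return v.number == v.number && v.number != 0.0;
        case ToyValue::NULL_VALUE:
        case ToyValue::UNDETECTABLE:
          return false;
        case ToyValue::JS_OBJECT:
          return true;
        case ToyValue::STRING:
          return !v.string.empty();  // false iff the string is empty
      }
      return false;
    }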
diff --git a/src/arm/codegen-arm.h b/src/arm/codegen-arm.h
index bfe2080..029d599 100644
--- a/src/arm/codegen-arm.h
+++ b/src/arm/codegen-arm.h
@@ -286,6 +286,10 @@
     return inlined_write_barrier_size_ + 4;
   }
 
+  static MemOperand ContextOperand(Register context, int index) {
+    return MemOperand(context, Context::SlotOffset(index));
+  }
+
  private:
   // Construction/Destruction
   explicit CodeGenerator(MacroAssembler* masm);
@@ -338,10 +342,6 @@
   void LoadReference(Reference* ref);
   void UnloadReference(Reference* ref);
 
-  static MemOperand ContextOperand(Register context, int index) {
-    return MemOperand(context, Context::SlotOffset(index));
-  }
-
   MemOperand SlotOperand(Slot* slot, Register tmp);
 
   MemOperand ContextSlotOperandCheckExtensions(Slot* slot,
@@ -482,6 +482,8 @@
   void GenerateIsSpecObject(ZoneList<Expression*>* args);
   void GenerateIsFunction(ZoneList<Expression*>* args);
   void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
+  void GenerateIsStringWrapperSafeForDefaultValueOf(
+      ZoneList<Expression*>* args);
 
   // Support for construct call checks.
   void GenerateIsConstructCall(ZoneList<Expression*>* args);
@@ -623,6 +625,19 @@
 };
 
 
+class ToBooleanStub: public CodeStub {
+ public:
+  explicit ToBooleanStub(Register tos) : tos_(tos) { }
+
+  void Generate(MacroAssembler* masm);
+
+ private:
+  Register tos_;
+  Major MajorKey() { return ToBoolean; }
+  int MinorKey() { return tos_.code(); }
+};
+
+
 class GenericBinaryOpStub : public CodeStub {
  public:
   GenericBinaryOpStub(Token::Value op,
diff --git a/src/arm/debug-arm.cc b/src/arm/debug-arm.cc
index e87d265..3a94845 100644
--- a/src/arm/debug-arm.cc
+++ b/src/arm/debug-arm.cc
@@ -293,15 +293,11 @@
   masm->Abort("LiveEdit frame dropping is not supported on arm");
 }
 
+const bool Debug::kFrameDropperSupported = false;
+
 #undef __
 
 
-Object** Debug::SetUpFrameDropperFrame(StackFrame* bottom_js_frame,
-                                       Handle<Code> code) {
-  UNREACHABLE();
-  return NULL;
-}
-const int Debug::kFrameDropperFrameSize = -1;
 
 #endif  // ENABLE_DEBUGGER_SUPPORT
 
diff --git a/src/arm/disasm-arm.cc b/src/arm/disasm-arm.cc
index fd142bd..0029ed1 100644
--- a/src/arm/disasm-arm.cc
+++ b/src/arm/disasm-arm.cc
@@ -1188,7 +1188,13 @@
   bool raise_exception_for_qnan = (instr->Bit(7) == 0x1);
 
   if (dp_operation && !raise_exception_for_qnan) {
-    Format(instr, "vcmp.f64'cond 'Dd, 'Dm");
+    if (instr->Opc2Field() == 0x4) {
+      Format(instr, "vcmp.f64'cond 'Dd, 'Dm");
+    } else if (instr->Opc2Field() == 0x5) {
+      Format(instr, "vcmp.f64'cond 'Dd, #0.0");
+    } else {
+      Unknown(instr);  // invalid
+    }
   } else {
     Unknown(instr);  // Not used by V8.
   }
diff --git a/src/arm/fast-codegen-arm.cc b/src/arm/fast-codegen-arm.cc
deleted file mode 100644
index 36ac2aa..0000000
--- a/src/arm/fast-codegen-arm.cc
+++ /dev/null
@@ -1,241 +0,0 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "v8.h"
-
-#if defined(V8_TARGET_ARCH_ARM)
-
-#include "codegen-inl.h"
-#include "fast-codegen.h"
-#include "scopes.h"
-
-namespace v8 {
-namespace internal {
-
-#define __ ACCESS_MASM(masm())
-
-Register FastCodeGenerator::accumulator0() { return r0; }
-Register FastCodeGenerator::accumulator1() { return r1; }
-Register FastCodeGenerator::scratch0() { return r3; }
-Register FastCodeGenerator::scratch1() { return r4; }
-Register FastCodeGenerator::scratch2() { return r5; }
-Register FastCodeGenerator::receiver_reg() { return r2; }
-Register FastCodeGenerator::context_reg() { return cp; }
-
-
-void FastCodeGenerator::EmitLoadReceiver() {
-  // Offset 2 is due to return address and saved frame pointer.
-  int index = 2 + scope()->num_parameters();
-  __ ldr(receiver_reg(), MemOperand(sp, index * kPointerSize));
-}
-
-
-void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
-  ASSERT(!destination().is(no_reg));
-  ASSERT(cell->IsJSGlobalPropertyCell());
-
-  __ mov(destination(), Operand(cell));
-  __ ldr(destination(),
-         FieldMemOperand(destination(), JSGlobalPropertyCell::kValueOffset));
-  if (FLAG_debug_code) {
-    __ mov(ip, Operand(Factory::the_hole_value()));
-    __ cmp(destination(), ip);
-    __ Check(ne, "DontDelete cells can't contain the hole");
-  }
-
-  // The loaded value is not known to be a smi.
-  clear_as_smi(destination());
-}
-
-
-void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
-  LookupResult lookup;
-  info()->receiver()->Lookup(*name, &lookup);
-
-  ASSERT(lookup.holder() == *info()->receiver());
-  ASSERT(lookup.type() == FIELD);
-  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
-  int index = lookup.GetFieldIndex() - map->inobject_properties();
-  int offset = index * kPointerSize;
-
-  // We will emit the write barrier unless the stored value is statically
-  // known to be a smi.
-  bool needs_write_barrier = !is_smi(accumulator0());
-
-  // Negative offsets are inobject properties.
-  if (offset < 0) {
-    offset += map->instance_size();
-    __ str(accumulator0(), FieldMemOperand(receiver_reg(), offset));
-    if (needs_write_barrier) {
-      // Preserve receiver from write barrier.
-      __ mov(scratch0(), receiver_reg());
-    }
-  } else {
-    offset += FixedArray::kHeaderSize;
-    __ ldr(scratch0(),
-           FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
-    __ str(accumulator0(), FieldMemOperand(scratch0(), offset));
-  }
-
-  if (needs_write_barrier) {
-    __ RecordWrite(scratch0(), Operand(offset), scratch1(), scratch2());
-  }
-
-  if (destination().is(accumulator1())) {
-    __ mov(accumulator1(), accumulator0());
-    if (is_smi(accumulator0())) {
-      set_as_smi(accumulator1());
-    } else {
-      clear_as_smi(accumulator1());
-    }
-  }
-}
-
-
-void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
-  ASSERT(!destination().is(no_reg));
-  LookupResult lookup;
-  info()->receiver()->Lookup(*name, &lookup);
-
-  ASSERT(lookup.holder() == *info()->receiver());
-  ASSERT(lookup.type() == FIELD);
-  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
-  int index = lookup.GetFieldIndex() - map->inobject_properties();
-  int offset = index * kPointerSize;
-
-  // Perform the load.  Negative offsets are inobject properties.
-  if (offset < 0) {
-    offset += map->instance_size();
-    __ ldr(destination(), FieldMemOperand(receiver_reg(), offset));
-  } else {
-    offset += FixedArray::kHeaderSize;
-    __ ldr(scratch0(),
-           FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
-    __ ldr(destination(), FieldMemOperand(scratch0(), offset));
-  }
-
-  // The loaded value is not known to be a smi.
-  clear_as_smi(destination());
-}
-
-
-void FastCodeGenerator::EmitBitOr() {
-  if (is_smi(accumulator0()) && is_smi(accumulator1())) {
-    // If both operands are known to be a smi then there is no need to check
-    // the operands or result.  There is no need to perform the operation in
-    // an effect context.
-    if (!destination().is(no_reg)) {
-      __ orr(destination(), accumulator1(), Operand(accumulator0()));
-    }
-  } else {
-    // Left is in accumulator1, right in accumulator0.
-    if (destination().is(accumulator0())) {
-      __ mov(scratch0(), accumulator0());
-      __ orr(destination(), accumulator1(), Operand(accumulator1()));
-      Label* bailout =
-          info()->AddBailout(accumulator1(), scratch0());  // Left, right.
-      __ BranchOnNotSmi(destination(), bailout);
-    } else if (destination().is(accumulator1())) {
-      __ mov(scratch0(), accumulator1());
-      __ orr(destination(), accumulator1(), Operand(accumulator0()));
-      Label* bailout = info()->AddBailout(scratch0(), accumulator0());
-      __ BranchOnNotSmi(destination(), bailout);
-    } else {
-      ASSERT(destination().is(no_reg));
-      __ orr(scratch0(), accumulator1(), Operand(accumulator0()));
-      Label* bailout = info()->AddBailout(accumulator1(), accumulator0());
-      __ BranchOnNotSmi(scratch0(), bailout);
-    }
-  }
-
-  // If we didn't bailout, the result (in fact, both inputs too) is known to
-  // be a smi.
-  set_as_smi(accumulator0());
-  set_as_smi(accumulator1());
-}
-
-
-void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
-  ASSERT(info_ == NULL);
-  info_ = compilation_info;
-  Comment cmnt(masm_, "[ function compiled by fast code generator");
-
-  // Save the caller's frame pointer and set up our own.
-  Comment prologue_cmnt(masm(), ";; Prologue");
-  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
-  __ add(fp, sp, Operand(2 * kPointerSize));
-  // Note that we keep a live register reference to cp (context) at
-  // this point.
-
-  Label* bailout_to_beginning = info()->AddBailout();
-  // Receiver (this) is allocated to a fixed register.
-  if (info()->has_this_properties()) {
-    Comment cmnt(masm(), ";; MapCheck(this)");
-    if (FLAG_print_ir) {
-      PrintF("MapCheck(this)\n");
-    }
-    ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
-    Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
-    Handle<Map> map(object->map());
-    EmitLoadReceiver();
-    __ CheckMap(receiver_reg(), scratch0(), map, bailout_to_beginning, false);
-  }
-
-  // If there is a global variable access check if the global object is the
-  // same as at lazy-compilation time.
-  if (info()->has_globals()) {
-    Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
-    if (FLAG_print_ir) {
-      PrintF("MapCheck(GLOBAL)\n");
-    }
-    ASSERT(info()->has_global_object());
-    Handle<Map> map(info()->global_object()->map());
-    __ ldr(scratch0(), CodeGenerator::GlobalObject());
-    __ CheckMap(scratch0(), scratch1(), map, bailout_to_beginning, true);
-  }
-
-  VisitStatements(function()->body());
-
-  Comment return_cmnt(masm(), ";; Return(<undefined>)");
-  if (FLAG_print_ir) {
-    PrintF("Return(<undefined>)\n");
-  }
-  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
-  __ mov(sp, fp);
-  __ ldm(ia_w, sp, fp.bit() | lr.bit());
-  int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
-  __ add(sp, sp, Operand(sp_delta));
-  __ Jump(lr);
-}
-
-
-#undef __
-
-
-} }  // namespace v8::internal
-
-#endif  // V8_TARGET_ARCH_ARM
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index ea5a8f2..b58a4a5 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -55,99 +55,97 @@
 //
 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
 // frames-arm.h for its layout.
-void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) {
+void FullCodeGenerator::Generate(CompilationInfo* info) {
   ASSERT(info_ == NULL);
   info_ = info;
   SetFunctionPosition(function());
   Comment cmnt(masm_, "[ function compiled by full code generator");
 
-  if (mode == PRIMARY) {
-    int locals_count = scope()->num_stack_slots();
+  int locals_count = scope()->num_stack_slots();
 
-    __ Push(lr, fp, cp, r1);
-    if (locals_count > 0) {
-      // Load undefined value here, so the value is ready for the loop
-      // below.
-      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+  __ Push(lr, fp, cp, r1);
+  if (locals_count > 0) {
+    // Load undefined value here, so the value is ready for the loop
+    // below.
+    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+  }
+  // Adjust fp to point to caller's fp.
+  __ add(fp, sp, Operand(2 * kPointerSize));
+
+  { Comment cmnt(masm_, "[ Allocate locals");
+    for (int i = 0; i < locals_count; i++) {
+      __ push(ip);
     }
-    // Adjust fp to point to caller's fp.
-    __ add(fp, sp, Operand(2 * kPointerSize));
+  }
 
-    { Comment cmnt(masm_, "[ Allocate locals");
-      for (int i = 0; i < locals_count; i++) {
-        __ push(ip);
-      }
-    }
+  bool function_in_register = true;
 
-    bool function_in_register = true;
-
-    // Possibly allocate a local context.
-    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
-    if (heap_slots > 0) {
-      Comment cmnt(masm_, "[ Allocate local context");
-      // Argument to NewContext is the function, which is in r1.
-      __ push(r1);
-      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-        FastNewContextStub stub(heap_slots);
-        __ CallStub(&stub);
-      } else {
-        __ CallRuntime(Runtime::kNewContext, 1);
-      }
-      function_in_register = false;
-      // Context is returned in both r0 and cp.  It replaces the context
-      // passed to us.  It's saved in the stack and kept live in cp.
-      __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-      // Copy any necessary parameters into the context.
-      int num_parameters = scope()->num_parameters();
-      for (int i = 0; i < num_parameters; i++) {
-        Slot* slot = scope()->parameter(i)->slot();
-        if (slot != NULL && slot->type() == Slot::CONTEXT) {
-          int parameter_offset = StandardFrameConstants::kCallerSPOffset +
-                                   (num_parameters - 1 - i) * kPointerSize;
-          // Load parameter from stack.
-          __ ldr(r0, MemOperand(fp, parameter_offset));
-          // Store it in the context.
-          __ mov(r1, Operand(Context::SlotOffset(slot->index())));
-          __ str(r0, MemOperand(cp, r1));
-          // Update the write barrier. This clobbers all involved
-          // registers, so we have to use two more registers to avoid
-          // clobbering cp.
-          __ mov(r2, Operand(cp));
-          __ RecordWrite(r2, Operand(r1), r3, r0);
-        }
-      }
-    }
-
-    Variable* arguments = scope()->arguments()->AsVariable();
-    if (arguments != NULL) {
-      // Function uses arguments object.
-      Comment cmnt(masm_, "[ Allocate arguments object");
-      if (!function_in_register) {
-        // Load this again, if it's used by the local context below.
-        __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-      } else {
-        __ mov(r3, r1);
-      }
-      // Receiver is just before the parameters on the caller's stack.
-      int offset = scope()->num_parameters() * kPointerSize;
-      __ add(r2, fp,
-             Operand(StandardFrameConstants::kCallerSPOffset + offset));
-      __ mov(r1, Operand(Smi::FromInt(scope()->num_parameters())));
-      __ Push(r3, r2, r1);
-
-      // Arguments to ArgumentsAccessStub:
-      //   function, receiver address, parameter count.
-      // The stub will rewrite receiever and parameter count if the previous
-      // stack frame was an arguments adapter frame.
-      ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+  // Possibly allocate a local context.
+  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+  if (heap_slots > 0) {
+    Comment cmnt(masm_, "[ Allocate local context");
+    // Argument to NewContext is the function, which is in r1.
+    __ push(r1);
+    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+      FastNewContextStub stub(heap_slots);
       __ CallStub(&stub);
-      // Duplicate the value; move-to-slot operation might clobber registers.
-      __ mov(r3, r0);
-      Move(arguments->slot(), r0, r1, r2);
-      Slot* dot_arguments_slot =
-          scope()->arguments_shadow()->AsVariable()->slot();
-      Move(dot_arguments_slot, r3, r1, r2);
+    } else {
+      __ CallRuntime(Runtime::kNewContext, 1);
     }
+    function_in_register = false;
+    // Context is returned in both r0 and cp.  It replaces the context
+    // passed to us.  It's saved in the stack and kept live in cp.
+    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+    // Copy any necessary parameters into the context.
+    int num_parameters = scope()->num_parameters();
+    for (int i = 0; i < num_parameters; i++) {
+      Slot* slot = scope()->parameter(i)->slot();
+      if (slot != NULL && slot->type() == Slot::CONTEXT) {
+        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
+            (num_parameters - 1 - i) * kPointerSize;
+        // Load parameter from stack.
+        __ ldr(r0, MemOperand(fp, parameter_offset));
+        // Store it in the context.
+        __ mov(r1, Operand(Context::SlotOffset(slot->index())));
+        __ str(r0, MemOperand(cp, r1));
+        // Update the write barrier. This clobbers all involved
+        // registers, so we have to use two more registers to avoid
+        // clobbering cp.
+        __ mov(r2, Operand(cp));
+        __ RecordWrite(r2, Operand(r1), r3, r0);
+      }
+    }
+  }
+
+  Variable* arguments = scope()->arguments()->AsVariable();
+  if (arguments != NULL) {
+    // Function uses arguments object.
+    Comment cmnt(masm_, "[ Allocate arguments object");
+    if (!function_in_register) {
+      // Load this again if it's used by the local context below.
+      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+    } else {
+      __ mov(r3, r1);
+    }
+    // Receiver is just before the parameters on the caller's stack.
+    int offset = scope()->num_parameters() * kPointerSize;
+    __ add(r2, fp,
+           Operand(StandardFrameConstants::kCallerSPOffset + offset));
+    __ mov(r1, Operand(Smi::FromInt(scope()->num_parameters())));
+    __ Push(r3, r2, r1);
+
+    // Arguments to ArgumentsAccessStub:
+    //   function, receiver address, parameter count.
+    // The stub will rewrite receiver and parameter count if the previous
+    // stack frame was an arguments adapter frame.
+    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+    __ CallStub(&stub);
+    // Duplicate the value; move-to-slot operation might clobber registers.
+    __ mov(r3, r0);
+    Move(arguments->slot(), r0, r1, r2);
+    Slot* dot_arguments_slot =
+        scope()->arguments_shadow()->AsVariable()->slot();
+    Move(dot_arguments_slot, r3, r1, r2);
   }
 
   { Comment cmnt(masm_, "[ Declarations");
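
The prologue above picks between FastNewContextStub and a runtime call depending on how many heap slots the function needs. A minimal C++ sketch of that decision; kMaximumSlots stands in for FastNewContextStub::kMaximumSlots, and the value 64 is purely illustrative:

    // Sketch only: mirrors the stub-vs-runtime split in the prologue above.
    enum ContextAllocPath { kNoContext, kFastStub, kRuntimeCall };

    ContextAllocPath ChooseContextAllocation(int heap_slots) {
      const int kMaximumSlots = 64;  // illustrative, not the real constant
      if (heap_slots <= 0) return kNoContext;
      return (heap_slots <= kMaximumSlots) ? kFastStub : kRuntimeCall;
    }
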
@@ -956,15 +954,13 @@
   __ cmp(r4, Operand(r2));
   __ b(eq, &update_each);
 
-  // Convert the entry to a string or null if it isn't a property
-  // anymore. If the property has been removed while iterating, we
+  // Convert the entry to a string or (smi) 0 if it isn't a property
+  // any more. If the property has been removed while iterating, we
   // just skip it.
   __ push(r1);  // Enumerable.
   __ push(r3);  // Current entry.
   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS);
-  __ mov(r3, Operand(r0));
-  __ LoadRoot(ip, Heap::kNullValueRootIndex);
-  __ cmp(r3, ip);
+  __ mov(r3, Operand(r0), SetCC);
   __ b(eq, loop_statement.continue_target());
 
   // Update the 'each' property or variable from the possibly filtered
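
The FILTER_KEY change above relies on the builtin now returning smi 0 (an all-zero bit pattern) rather than null for a vanished property, so a single flag-setting move (mov r3, r0, SetCC) can replace the explicit null comparison. A hedged sketch of the equivalent check:

    // Sketch of the test the SetCC move performs: the branch on 'eq' fires
    // exactly when the moved value is zero, and smi 0 is the all-zero word
    // under V8's pointer tagging.
    #include <cstdint>

    bool EntryWasFiltered(intptr_t filter_result) {
      return filter_result == 0;  // smi 0 => property was removed, skip it
    }
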
@@ -1959,6 +1955,26 @@
 }
 
 
+void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
+    ZoneList<Expression*>* args) {
+
+  ASSERT(args->length() == 1);
+
+  VisitForValue(args->at(0), kAccumulator);
+
+  Label materialize_true, materialize_false;
+  Label* if_true = NULL;
+  Label* if_false = NULL;
+  PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+  // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
+  // used in a few functions in runtime.js, which should not normally be hit
+  // by this compiler.
+  __ jmp(if_false);
+  Apply(context_, if_true, if_false);
+}
+
+
 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 1);
 
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 7a03641..38c7c28 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -1655,6 +1655,13 @@
 }
 
 
+void MacroAssembler::AbortIfSmi(Register object) {
+  ASSERT_EQ(0, kSmiTag);
+  tst(object, Operand(kSmiTagMask));
+  Assert(ne, "Operand is a smi");
+}
+
+
 void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
     Register first,
     Register second,
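
AbortIfSmi leans on V8's pointer tagging: with kSmiTag == 0, a tst against kSmiTagMask sets the Z flag exactly when the low bit is clear, i.e. when the operand is a smi. A self-contained sketch of that tag check, assuming the usual 32-bit tagging layout:

    #include <cassert>
    #include <cstdint>

    const intptr_t kSmiTag = 0;
    const intptr_t kSmiTagMask = 1;

    // tst object, #kSmiTagMask sets Z exactly when this predicate holds.
    bool IsSmi(intptr_t value) {
      return (value & kSmiTagMask) == kSmiTag;
    }

    int main() {
      assert(IsSmi(42 << 1));   // smis carry their payload shifted left
      assert(!IsSmi(0x1001));   // heap pointers have the low tag bit set
      return 0;
    }
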
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index 37a1b1c..836ed74 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -618,6 +618,9 @@
   // Jump if either of the registers contain a smi.
   void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);
 
+  // Abort execution if argument is a smi. Used in debug code.
+  void AbortIfSmi(Register object);
+
   // ---------------------------------------------------------------------------
   // String utilities
 
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index 04635e3..c4cc8d4 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -2431,11 +2431,17 @@
   }
 
   int d = GlueRegCode(!dp_operation, instr->VdField(), instr->DField());
-  int m = GlueRegCode(!dp_operation, instr->VmField(), instr->MField());
+  int m = 0;
+  if (instr->Opc2Field() == 0x4) {
+    m = GlueRegCode(!dp_operation, instr->VmField(), instr->MField());
+  }
 
   if (dp_operation) {
     double dd_value = get_double_from_d_register(d);
-    double dm_value = get_double_from_d_register(m);
+    double dm_value = 0.0;
+    if (instr->Opc2Field() == 0x4) {
+      dm_value = get_double_from_d_register(m);
+    }
 
     Compute_FPSCR_Flags(dd_value, dm_value);
   } else {
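
The simulator fix above distinguishes the two VCMP encodings: opc2 == 0x4 compares two registers (vcmp Dd, Dm), while the compare-with-zero form, opc2 == 0x5 in the standard VFP encoding, carries no Vm operand, so decoding one was bogus. A hedged sketch of the operand selection:

    // Sketch of the second-operand selection after the fix; assumes the
    // standard VFP encodings (0x4: vcmp Dd, Dm; 0x5: vcmp Dd, #0.0).
    double SecondVcmpOperand(int opc2_field, const double* d_registers, int m) {
      if (opc2_field == 0x4) return d_registers[m];  // register form
      return 0.0;  // compare-with-zero form encodes no Vm register
    }
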
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 8c8e702..fa90ca7 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -1212,38 +1212,6 @@
 }
 
 
-Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
-  // ----------- S t a t e -------------
-  //  -- r1: function
-  //  -- lr: return address
-  // -----------------------------------
-
-  // Enter an internal frame.
-  __ EnterInternalFrame();
-
-  // Preserve the function.
-  __ push(r1);
-
-  // Push the function on the stack as the argument to the runtime function.
-  __ push(r1);
-  __ CallRuntime(Runtime::kLazyCompile, 1);
-
-  // Calculate the entry point.
-  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-
-  // Restore saved function.
-  __ pop(r1);
-
-  // Tear down temporary frame.
-  __ LeaveInternalFrame();
-
-  // Do a tail-call of the compiled function.
-  __ Jump(r2);
-
-  return GetCodeWithFlags(flags, "LazyCompileStub");
-}
-
-
 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
     __ cmp(r2, Operand(Handle<String>(name)));
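
The per-stub-compiler lazy-compile code above is deleted in favor of a single shared Builtins::LazyCompile (see the builtins.h hunk below). A hedged sketch of what the new builtin's ARM generator presumably emits, mirroring the deleted stub body rather than quoting builtins-arm.cc:

    // Assumption: Generate_LazyCompile reproduces the deleted stub body.
    void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
      __ EnterInternalFrame();
      __ push(r1);   // Preserve the function.
      __ push(r1);   // Pass the function to the runtime.
      __ CallRuntime(Runtime::kLazyCompile, 1);
      // Compute the entry point of the freshly compiled code.
      __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
      __ pop(r1);    // Restore the function.
      __ LeaveInternalFrame();
      __ Jump(r2);   // Tail-call the compiled function.
    }
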
diff --git a/src/assembler.h b/src/assembler.h
index cf7020e..1577433 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -235,6 +235,7 @@
   INLINE(void set_call_object(Object* target));
   INLINE(Object** call_object_address());
 
+  template<typename StaticVisitor> inline void Visit();
   inline void Visit(ObjectVisitor* v);
 
   // Patch the code with some other code.
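
The new templated Visit<StaticVisitor>() complements the virtual Visit(ObjectVisitor*): binding the visitor type at compile time lets the per-pointer callbacks inline instead of going through a vtable. A minimal sketch of the pattern; CountingVisitor and CountSlots are hypothetical names:

    // Sketch only: static dispatch in place of a virtual visitor.
    struct CountingVisitor {
      static int count;
      static void VisitPointer(void** p) { ++count; }  // inlinable call
    };
    int CountingVisitor::count = 0;

    template <typename StaticVisitor>
    inline void CountSlots(void** slots, int n) {
      for (int i = 0; i < n; i++) StaticVisitor::VisitPointer(&slots[i]);
    }
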
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 0d59505..ce8e98d 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -36,6 +36,7 @@
 #include "global-handles.h"
 #include "macro-assembler.h"
 #include "natives.h"
+#include "objects-visiting.h"
 #include "snapshot.h"
 #include "stub-cache.h"
 
@@ -813,9 +814,7 @@
     initial_map->set_instance_size(
         initial_map->instance_size() + 5 * kPointerSize);
     initial_map->set_instance_descriptors(*descriptors);
-    initial_map->set_scavenger(
-        Heap::GetScavenger(initial_map->instance_type(),
-                           initial_map->instance_size()));
+    initial_map->set_visitor_id(StaticVisitorBase::GetVisitorId(*initial_map));
   }
 
   {  // -- J S O N
@@ -1235,6 +1234,14 @@
 
   InstallNativeFunctions();
 
+  // Store the map for the string prototype after the natives have been
+  // compiled and the String function has been set up.
+  Handle<JSFunction> string_function(global_context()->string_function());
+  ASSERT(JSObject::cast(
+      string_function->initial_map()->prototype())->HasFastProperties());
+  global_context()->set_string_function_prototype_map(
+      HeapObject::cast(string_function->initial_map()->prototype())->map());
+
   InstallCustomCallGenerators();
 
   // Install Function.prototype.call and apply.
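
Caching String.prototype's map in the global context is what backs the custom-valueOf fix: installing a custom valueOf on String.prototype forces a map transition, so a later map comparison cheaply detects whether the default-valueOf fast path is still safe. A hedged sketch, assuming this is how the runtime consumes the cached map:

    // Assumption: the runtime compares the wrapper prototype's current map
    // against the cached one; a mismatch means String.prototype was
    // modified (e.g. a custom valueOf) and the fast path must be skipped.
    struct Map;  // stand-in for v8::internal::Map

    bool StringWrapperSafeForDefaultValueOf(Map* prototype_map,
                                            Map* cached_prototype_map) {
      return prototype_map == cached_prototype_map;
    }
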
diff --git a/src/builtins.h b/src/builtins.h
index 3dcab62..375e8f3 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -69,6 +69,7 @@
   V(JSConstructStubApi,         BUILTIN, UNINITIALIZED)                   \
   V(JSEntryTrampoline,          BUILTIN, UNINITIALIZED)                   \
   V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED)                   \
+  V(LazyCompile,                BUILTIN, UNINITIALIZED)                   \
                                                                           \
   V(LoadIC_Miss,                BUILTIN, UNINITIALIZED)                   \
   V(KeyedLoadIC_Miss,           BUILTIN, UNINITIALIZED)                   \
@@ -249,6 +250,7 @@
   static void Generate_JSConstructStubApi(MacroAssembler* masm);
   static void Generate_JSEntryTrampoline(MacroAssembler* masm);
   static void Generate_JSConstructEntryTrampoline(MacroAssembler* masm);
+  static void Generate_LazyCompile(MacroAssembler* masm);
   static void Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm);
 
   static void Generate_FunctionCall(MacroAssembler* masm);
diff --git a/src/codegen.h b/src/codegen.h
index 588468f..3b31c04 100644
--- a/src/codegen.h
+++ b/src/codegen.h
@@ -101,7 +101,8 @@
   F(IsObject, 1, 1)                                                          \
   F(IsFunction, 1, 1)                                                        \
   F(IsUndetectableObject, 1, 1)                                              \
-  F(IsSpecObject, 1, 1)                                            \
+  F(IsSpecObject, 1, 1)                                                      \
+  F(IsStringWrapperSafeForDefaultValueOf, 1, 1)                              \
   F(StringAdd, 2, 1)                                                         \
   F(SubString, 3, 1)                                                         \
   F(StringCompare, 2, 1)                                                     \
@@ -730,18 +731,6 @@
 };
 
 
-class ToBooleanStub: public CodeStub {
- public:
-  ToBooleanStub() { }
-
-  void Generate(MacroAssembler* masm);
-
- private:
-  Major MajorKey() { return ToBoolean; }
-  int MinorKey() { return 0; }
-};
-
-
 enum StringIndexFlags {
   // Accepts smis or heap numbers.
   STRING_INDEX_IS_NUMBER,
diff --git a/src/compiler.cc b/src/compiler.cc
index d87d9da..9f0162e 100755
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -33,7 +33,6 @@
 #include "compiler.h"
 #include "data-flow.h"
 #include "debug.h"
-#include "fast-codegen.h"
 #include "flow-graph.h"
 #include "full-codegen.h"
 #include "liveedit.h"
@@ -120,14 +119,9 @@
   //
   //  --full-compiler enables the dedicated backend for code we expect to be
   //    run once
-  //  --fast-compiler enables a speculative optimizing backend (for
-  //    non-run-once code)
   //
   // The normal choice of backend can be overridden with the flags
-  // --always-full-compiler and --always-fast-compiler, which are mutually
-  // incompatible.
-  CHECK(!FLAG_always_full_compiler || !FLAG_always_fast_compiler);
-
+  // --always-full-compiler.
   Handle<SharedFunctionInfo> shared = info->shared_info();
   bool is_run_once = (shared.is_null())
       ? info->scope()->is_global_scope()
@@ -141,13 +135,6 @@
     if (checker.has_supported_syntax()) {
       return FullCodeGenerator::MakeCode(info);
     }
-  } else if (FLAG_always_fast_compiler ||
-             (FLAG_fast_compiler && !is_run_once)) {
-    FastCodeGenSyntaxChecker checker;
-    checker.Check(info);
-    if (checker.has_supported_syntax()) {
-      return FastCodeGenerator::MakeCode(info);
-    }
   }
 
   return CodeGenerator::MakeCode(info);
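
With the speculative fast compiler gone, backend selection reduces to the full compiler for run-once code (or when forced) and the classic backend otherwise. A compact sketch of the remaining decision, with names simplified from the code above:

    // Sketch of MakeCode's backend choice after this change.
    enum Backend { kClassic, kFullCodegen };

    Backend ChooseBackend(bool always_full, bool full_compiler,
                          bool is_run_once, bool syntax_supported) {
      bool prefer_full = always_full || (full_compiler && is_run_once);
      return (prefer_full && syntax_supported) ? kFullCodegen : kClassic;
    }
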
@@ -494,7 +481,7 @@
   // Generate code
   Handle<Code> code;
   if (FLAG_lazy && allow_lazy) {
-    code = ComputeLazyCompile(literal->num_parameters());
+    code = Handle<Code>(Builtins::builtin(Builtins::LazyCompile));
   } else {
     // The bodies of function literals have not yet been visited by
     // the AST optimizer/analyzer.
@@ -528,7 +515,6 @@
     // the static helper function MakeCode.
     CompilationInfo info(literal, script, false);
 
-    CHECK(!FLAG_always_full_compiler || !FLAG_always_fast_compiler);
     bool is_run_once = literal->try_full_codegen();
     bool is_compiled = false;
 
@@ -542,16 +528,6 @@
         code = FullCodeGenerator::MakeCode(&info);
         is_compiled = true;
       }
-    } else if (FLAG_always_fast_compiler ||
-               (FLAG_fast_compiler && !is_run_once)) {
-      // Since we are not lazily compiling we do not have a receiver to
-      // specialize for.
-      FastCodeGenSyntaxChecker checker;
-      checker.Check(&info);
-      if (checker.has_supported_syntax()) {
-        code = FastCodeGenerator::MakeCode(&info);
-        is_compiled = true;
-      }
     }
 
     if (!is_compiled) {
diff --git a/src/compiler.h b/src/compiler.h
index ade21f5..ed26603 100644
--- a/src/compiler.h
+++ b/src/compiler.h
@@ -41,37 +41,6 @@
 // is constructed based on the resources available at compile-time.
 class CompilationInfo BASE_EMBEDDED {
  public:
-  // Compilation mode.  Either the compiler is used as the primary
-  // compiler and needs to setup everything or the compiler is used as
-  // the secondary compiler for split compilation and has to handle
-  // bailouts.
-  enum Mode {
-    PRIMARY,
-    SECONDARY
-  };
-
-  // A description of the compilation state at a bailout to the secondary
-  // code generator.
-  //
-  // The state is currently simple: there are no parameters or local
-  // variables to worry about ('this' can be found in the stack frame).
-  // There are at most two live values.
-  //
-  // There is a label that should be bound to the beginning of the bailout
-  // stub code.
-  class Bailout : public ZoneObject {
-   public:
-    Bailout(Register left, Register right) : left_(left), right_(right) {}
-
-    Label* label() { return &label_; }
-
-   private:
-    Register left_;
-    Register right_;
-    Label label_;
-  };
-
-
   // Lazy compilation of a JSFunction.
   CompilationInfo(Handle<JSFunction> closure,
                   int loop_nesting,
@@ -145,12 +114,6 @@
   int loop_nesting() { return loop_nesting_; }
   bool has_receiver() { return !receiver_.is_null(); }
   Handle<Object> receiver() { return receiver_; }
-  List<Bailout*>* bailouts() { return &bailouts_; }
-
-  // Accessors for mutable fields (possibly set by analysis passes) with
-  // default values given by Initialize.
-  Mode mode() { return mode_; }
-  void set_mode(Mode mode) { mode_ = mode; }
 
   bool has_this_properties() { return has_this_properties_; }
   void set_has_this_properties(bool flag) { has_this_properties_ = flag; }
@@ -169,19 +132,8 @@
   // Derived accessors.
   Scope* scope() { return function()->scope(); }
 
-  // Add a bailout with two live values.
-  Label* AddBailout(Register left, Register right) {
-    Bailout* bailout = new Bailout(left, right);
-    bailouts_.Add(bailout);
-    return bailout->label();
-  }
-
-  // Add a bailout with no live values.
-  Label* AddBailout() { return AddBailout(no_reg, no_reg); }
-
  private:
   void Initialize() {
-    mode_ = PRIMARY;
     has_this_properties_ = false;
     has_globals_ = false;
   }
@@ -191,7 +143,6 @@
   Handle<Script> script_;
 
   FunctionLiteral* function_;
-  Mode mode_;
 
   bool is_eval_;
   int loop_nesting_;
@@ -201,10 +152,6 @@
   bool has_this_properties_;
   bool has_globals_;
 
-  // An ordered list of bailout points encountered during fast-path
-  // compilation.
-  List<Bailout*> bailouts_;
-
   DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
 };
 
diff --git a/src/contexts.h b/src/contexts.h
index 01bb21b..d1c98bd 100644
--- a/src/contexts.h
+++ b/src/contexts.h
@@ -56,6 +56,7 @@
   V(BOOLEAN_FUNCTION_INDEX, JSFunction, boolean_function) \
   V(NUMBER_FUNCTION_INDEX, JSFunction, number_function) \
   V(STRING_FUNCTION_INDEX, JSFunction, string_function) \
+  V(STRING_FUNCTION_PROTOTYPE_MAP_INDEX, Map, string_function_prototype_map) \
   V(OBJECT_FUNCTION_INDEX, JSFunction, object_function) \
   V(ARRAY_FUNCTION_INDEX, JSFunction, array_function) \
   V(DATE_FUNCTION_INDEX, JSFunction, date_function) \
@@ -186,6 +187,7 @@
     BOOLEAN_FUNCTION_INDEX,
     NUMBER_FUNCTION_INDEX,
     STRING_FUNCTION_INDEX,
+    STRING_FUNCTION_PROTOTYPE_MAP_INDEX,
     OBJECT_FUNCTION_INDEX,
     ARRAY_FUNCTION_INDEX,
     DATE_FUNCTION_INDEX,
diff --git a/src/debug.cc b/src/debug.cc
index dbf9df9..c13c8c9 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -582,6 +582,35 @@
 }
 
 
+// Frame structure (conforms to the InternalFrame structure):
+//   -- code
+//   -- SMI marker
+//   -- function (slot is called "context")
+//   -- frame base
+Object** Debug::SetUpFrameDropperFrame(StackFrame* bottom_js_frame,
+                                       Handle<Code> code) {
+  ASSERT(bottom_js_frame->is_java_script());
+
+  Address fp = bottom_js_frame->fp();
+
+  // Move function pointer into "context" slot.
+  Memory::Object_at(fp + StandardFrameConstants::kContextOffset) =
+      Memory::Object_at(fp + JavaScriptFrameConstants::kFunctionOffset);
+
+  Memory::Object_at(fp + InternalFrameConstants::kCodeOffset) = *code;
+  Memory::Object_at(fp + StandardFrameConstants::kMarkerOffset) =
+      Smi::FromInt(StackFrame::INTERNAL);
+
+  return reinterpret_cast<Object**>(&Memory::Object_at(
+      fp + StandardFrameConstants::kContextOffset));
+}
+
+const int Debug::kFrameDropperFrameSize = 4;
+
+
 // Default break enabled.
 bool Debug::disable_break_ = false;
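
SetUpFrameDropperFrame rewrites three slots of the bottom JavaScript frame in place, turning it into an INTERNAL frame that holds the function where the context normally lives. A self-contained toy of those writes; the slot indices and values are illustrative stand-ins for V8's frame constants, not the real offsets:

    #include <cassert>
    #include <cstdint>

    int main() {
      // Toy frame: [code, marker, context, function]; indices illustrative.
      intptr_t slot[4] = {0, 0, 0, 0x1234};  // 0x1234: the function
      const int kCode = 0, kMarker = 1, kContext = 2, kFunction = 3;

      slot[kContext] = slot[kFunction];  // function moves into "context"
      slot[kCode] = 0x5678;              // frame-dropper code object
      slot[kMarker] = 7;                 // stands in for the INTERNAL marker

      assert(slot[kContext] == 0x1234);
      return 0;
    }
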
 
diff --git a/src/debug.h b/src/debug.h
index b6aba5a..98d1919 100644
--- a/src/debug.h
+++ b/src/debug.h
@@ -400,6 +400,11 @@
   static void GenerateStubNoRegistersDebugBreak(MacroAssembler* masm);
   static void GenerateSlotDebugBreak(MacroAssembler* masm);
   static void GeneratePlainReturnLiveEdit(MacroAssembler* masm);
+
+  // FrameDropper is a code replacement for a JavaScript frame, possibly
+  // with several frames above it.
+  // There are no calling conventions here, because it never actually gets
+  // called; it only gets returned to.
   static void GenerateFrameDropperLiveEdit(MacroAssembler* masm);
 
   // Called from stub-cache.cc.
@@ -431,13 +436,14 @@
   // the value that is called 'restarter_frame_function_pointer'. The value
   // at this address (possibly updated by GC) may be used later when preparing
   // 'step in' operation.
-  // The implementation is architecture-specific.
-  // TODO(LiveEdit): consider reviewing it as architecture-independent.
   static Object** SetUpFrameDropperFrame(StackFrame* bottom_js_frame,
                                          Handle<Code> code);
 
   static const int kFrameDropperFrameSize;
 
+  // Architecture-specific constant.
+  static const bool kFrameDropperSupported;
+
  private:
   static bool CompileDebuggerScript(int index);
   static void ClearOneShot();
diff --git a/src/factory.cc b/src/factory.cc
index 14042e8..7c8c934 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -32,6 +32,7 @@
 #include "execution.h"
 #include "factory.h"
 #include "macro-assembler.h"
+#include "objects-visiting.h"
 
 namespace v8 {
 namespace internal {
@@ -277,8 +278,7 @@
   copy->set_inobject_properties(inobject_properties);
   copy->set_unused_property_fields(inobject_properties);
   copy->set_instance_size(copy->instance_size() + instance_size_delta);
-  copy->set_scavenger(Heap::GetScavenger(copy->instance_type(),
-                                         copy->instance_size()));
+  copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy));
   return copy;
 }
 
diff --git a/src/fast-codegen.cc b/src/fast-codegen.cc
deleted file mode 100644
index 832cf74..0000000
--- a/src/fast-codegen.cc
+++ /dev/null
@@ -1,746 +0,0 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "v8.h"
-
-#include "codegen-inl.h"
-#include "data-flow.h"
-#include "fast-codegen.h"
-#include "scopes.h"
-
-namespace v8 {
-namespace internal {
-
-#define BAILOUT(reason)                         \
-  do {                                          \
-    if (FLAG_trace_bailout) {                   \
-      PrintF("%s\n", reason);                   \
-    }                                           \
-    has_supported_syntax_ = false;              \
-    return;                                     \
-  } while (false)
-
-
-#define CHECK_BAILOUT                           \
-  do {                                          \
-    if (!has_supported_syntax_) return;         \
-  } while (false)
-
-
-void FastCodeGenSyntaxChecker::Check(CompilationInfo* info) {
-  info_ = info;
-
-  // We do not specialize if we do not have a receiver or if it is not a
-  // JS object with fast mode properties.
-  if (!info->has_receiver()) BAILOUT("No receiver");
-  if (!info->receiver()->IsJSObject()) BAILOUT("Receiver is not an object");
-  Handle<JSObject> object = Handle<JSObject>::cast(info->receiver());
-  if (!object->HasFastProperties()) BAILOUT("Receiver is in dictionary mode");
-
-  // We do not support stack or heap slots (both of which require
-  // allocation).
-  Scope* scope = info->scope();
-  if (scope->num_stack_slots() > 0) {
-    BAILOUT("Function has stack-allocated locals");
-  }
-  if (scope->num_heap_slots() > 0) {
-    BAILOUT("Function has context-allocated locals");
-  }
-
-  VisitDeclarations(scope->declarations());
-  CHECK_BAILOUT;
-
-  // We do not support empty function bodies.
-  if (info->function()->body()->is_empty()) {
-    BAILOUT("Function has an empty body");
-  }
-  VisitStatements(info->function()->body());
-}
-
-
-void FastCodeGenSyntaxChecker::VisitDeclarations(
-    ZoneList<Declaration*>* decls) {
-  if (!decls->is_empty()) BAILOUT("Function has declarations");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitStatements(ZoneList<Statement*>* stmts) {
-  if (stmts->length() != 1) {
-    BAILOUT("Function body is not a singleton statement.");
-  }
-  Visit(stmts->at(0));
-}
-
-
-void FastCodeGenSyntaxChecker::VisitDeclaration(Declaration* decl) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenSyntaxChecker::VisitBlock(Block* stmt) {
-  VisitStatements(stmt->statements());
-}
-
-
-void FastCodeGenSyntaxChecker::VisitExpressionStatement(
-    ExpressionStatement* stmt) {
-  Visit(stmt->expression());
-}
-
-
-void FastCodeGenSyntaxChecker::VisitEmptyStatement(EmptyStatement* stmt) {
-  // Supported.
-}
-
-
-void FastCodeGenSyntaxChecker::VisitIfStatement(IfStatement* stmt) {
-  BAILOUT("IfStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitContinueStatement(ContinueStatement* stmt) {
-  BAILOUT("Continuestatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitBreakStatement(BreakStatement* stmt) {
-  BAILOUT("BreakStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitReturnStatement(ReturnStatement* stmt) {
-  BAILOUT("ReturnStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitWithEnterStatement(
-    WithEnterStatement* stmt) {
-  BAILOUT("WithEnterStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitWithExitStatement(WithExitStatement* stmt) {
-  BAILOUT("WithExitStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitSwitchStatement(SwitchStatement* stmt) {
-  BAILOUT("SwitchStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
-  BAILOUT("DoWhileStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitWhileStatement(WhileStatement* stmt) {
-  BAILOUT("WhileStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitForStatement(ForStatement* stmt) {
-  BAILOUT("ForStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitForInStatement(ForInStatement* stmt) {
-  BAILOUT("ForInStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitTryCatchStatement(TryCatchStatement* stmt) {
-  BAILOUT("TryCatchStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitTryFinallyStatement(
-    TryFinallyStatement* stmt) {
-  BAILOUT("TryFinallyStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitDebuggerStatement(
-    DebuggerStatement* stmt) {
-  BAILOUT("DebuggerStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
-  BAILOUT("FunctionLiteral");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitSharedFunctionInfoLiteral(
-    SharedFunctionInfoLiteral* expr) {
-  BAILOUT("SharedFunctionInfoLiteral");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitConditional(Conditional* expr) {
-  BAILOUT("Conditional");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitSlot(Slot* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenSyntaxChecker::VisitVariableProxy(VariableProxy* expr) {
-  // Only global variable references are supported.
-  Variable* var = expr->var();
-  if (!var->is_global() || var->is_this()) BAILOUT("Non-global variable");
-
-  // Check if the global variable is existing and non-deletable.
-  if (info()->has_global_object()) {
-    LookupResult lookup;
-    info()->global_object()->Lookup(*expr->name(), &lookup);
-    if (!lookup.IsProperty()) {
-      BAILOUT("Non-existing global variable");
-    }
-    // We do not handle global variables with accessors or interceptors.
-    if (lookup.type() != NORMAL) {
-      BAILOUT("Global variable with accessors or interceptors.");
-    }
-    // We do not handle deletable global variables.
-    if (!lookup.IsDontDelete()) {
-      BAILOUT("Deletable global variable");
-    }
-  }
-}
-
-
-void FastCodeGenSyntaxChecker::VisitLiteral(Literal* expr) {
-  BAILOUT("Literal");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
-  BAILOUT("RegExpLiteral");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitObjectLiteral(ObjectLiteral* expr) {
-  BAILOUT("ObjectLiteral");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitArrayLiteral(ArrayLiteral* expr) {
-  BAILOUT("ArrayLiteral");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCatchExtensionObject(
-    CatchExtensionObject* expr) {
-  BAILOUT("CatchExtensionObject");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitAssignment(Assignment* expr) {
-  // Simple assignments to (named) this properties are supported.
-  if (expr->op() != Token::ASSIGN) BAILOUT("Non-simple assignment");
-
-  Property* prop = expr->target()->AsProperty();
-  if (prop == NULL) BAILOUT("Non-property assignment");
-  VariableProxy* proxy = prop->obj()->AsVariableProxy();
-  if (proxy == NULL || !proxy->var()->is_this()) {
-    BAILOUT("Non-this-property assignment");
-  }
-  if (!prop->key()->IsPropertyName()) {
-    BAILOUT("Non-named-property assignment");
-  }
-
-  // We will only specialize for fields on the object itself.
-  // Expression::IsPropertyName implies that the name is a literal
-  // symbol but we do not assume that.
-  Literal* key = prop->key()->AsLiteral();
-  if (key != NULL && key->handle()->IsString()) {
-    Handle<Object> receiver = info()->receiver();
-    Handle<String> name = Handle<String>::cast(key->handle());
-    LookupResult lookup;
-    receiver->Lookup(*name, &lookup);
-    if (!lookup.IsProperty()) {
-      BAILOUT("Assigned property not found at compile time");
-    }
-    if (lookup.holder() != *receiver) BAILOUT("Non-own property assignment");
-    if (!lookup.type() == FIELD) BAILOUT("Non-field property assignment");
-  } else {
-    UNREACHABLE();
-    BAILOUT("Unexpected non-string-literal property key");
-  }
-
-  Visit(expr->value());
-}
-
-
-void FastCodeGenSyntaxChecker::VisitThrow(Throw* expr) {
-  BAILOUT("Throw");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitProperty(Property* expr) {
-  // We support named this property references.
-  VariableProxy* proxy = expr->obj()->AsVariableProxy();
-  if (proxy == NULL || !proxy->var()->is_this()) {
-    BAILOUT("Non-this-property reference");
-  }
-  if (!expr->key()->IsPropertyName()) {
-    BAILOUT("Non-named-property reference");
-  }
-
-  // We will only specialize for fields on the object itself.
-  // Expression::IsPropertyName implies that the name is a literal
-  // symbol but we do not assume that.
-  Literal* key = expr->key()->AsLiteral();
-  if (key != NULL && key->handle()->IsString()) {
-    Handle<Object> receiver = info()->receiver();
-    Handle<String> name = Handle<String>::cast(key->handle());
-    LookupResult lookup;
-    receiver->Lookup(*name, &lookup);
-    if (!lookup.IsProperty()) {
-      BAILOUT("Referenced property not found at compile time");
-    }
-    if (lookup.holder() != *receiver) BAILOUT("Non-own property reference");
-    if (!lookup.type() == FIELD) BAILOUT("Non-field property reference");
-  } else {
-    UNREACHABLE();
-    BAILOUT("Unexpected non-string-literal property key");
-  }
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCall(Call* expr) {
-  BAILOUT("Call");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCallNew(CallNew* expr) {
-  BAILOUT("CallNew");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCallRuntime(CallRuntime* expr) {
-  BAILOUT("CallRuntime");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitUnaryOperation(UnaryOperation* expr) {
-  BAILOUT("UnaryOperation");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCountOperation(CountOperation* expr) {
-  BAILOUT("CountOperation");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitBinaryOperation(BinaryOperation* expr) {
-  // We support bitwise OR.
-  switch (expr->op()) {
-    case Token::COMMA:
-      BAILOUT("BinaryOperation COMMA");
-    case Token::OR:
-      BAILOUT("BinaryOperation OR");
-    case Token::AND:
-      BAILOUT("BinaryOperation AND");
-
-    case Token::BIT_OR:
-      // We support expressions nested on the left because they only require
-      // a pair of registers to keep all intermediate values in registers
-      // (i.e., the expression stack has height no more than two).
-      if (!expr->right()->IsLeaf()) BAILOUT("expression nested on right");
-
-      // We do not allow subexpressions with side effects because we
-      // (currently) bail out to the beginning of the full function.  The
-      // only expressions with side effects that we would otherwise handle
-      // are assignments.
-      if (expr->left()->AsAssignment() != NULL ||
-          expr->right()->AsAssignment() != NULL) {
-        BAILOUT("subexpression of binary operation has side effects");
-      }
-
-      Visit(expr->left());
-      CHECK_BAILOUT;
-      Visit(expr->right());
-      break;
-
-    case Token::BIT_XOR:
-      BAILOUT("BinaryOperation BIT_XOR");
-    case Token::BIT_AND:
-      BAILOUT("BinaryOperation BIT_AND");
-    case Token::SHL:
-      BAILOUT("BinaryOperation SHL");
-    case Token::SAR:
-      BAILOUT("BinaryOperation SAR");
-    case Token::SHR:
-      BAILOUT("BinaryOperation SHR");
-    case Token::ADD:
-      BAILOUT("BinaryOperation ADD");
-    case Token::SUB:
-      BAILOUT("BinaryOperation SUB");
-    case Token::MUL:
-      BAILOUT("BinaryOperation MUL");
-    case Token::DIV:
-      BAILOUT("BinaryOperation DIV");
-    case Token::MOD:
-      BAILOUT("BinaryOperation MOD");
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCompareOperation(CompareOperation* expr) {
-  BAILOUT("CompareOperation");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitThisFunction(ThisFunction* expr) {
-  BAILOUT("ThisFunction");
-}
-
-#undef BAILOUT
-#undef CHECK_BAILOUT
-
-
-#define __ ACCESS_MASM(masm())
-
-Handle<Code> FastCodeGenerator::MakeCode(CompilationInfo* info) {
-  // Label the AST before calling MakeCodePrologue, so AST node numbers are
-  // printed with the AST.
-  AstLabeler labeler;
-  labeler.Label(info);
-
-  CodeGenerator::MakeCodePrologue(info);
-
-  const int kInitialBufferSize = 4 * KB;
-  MacroAssembler masm(NULL, kInitialBufferSize);
-
-  // Generate the fast-path code.
-  FastCodeGenerator fast_cgen(&masm);
-  fast_cgen.Generate(info);
-  if (fast_cgen.HasStackOverflow()) {
-    ASSERT(!Top::has_pending_exception());
-    return Handle<Code>::null();
-  }
-
-  // Generate the full code for the function in bailout mode, using the same
-  // macro assembler.
-  CodeGenerator cgen(&masm);
-  CodeGeneratorScope scope(&cgen);
-  info->set_mode(CompilationInfo::SECONDARY);
-  cgen.Generate(info);
-  if (cgen.HasStackOverflow()) {
-    ASSERT(!Top::has_pending_exception());
-    return Handle<Code>::null();
-  }
-
-  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION, NOT_IN_LOOP);
-  return CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
-}
-
-
-void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitBlock(Block* stmt) {
-  VisitStatements(stmt->statements());
-}
-
-
-void FastCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
-  Visit(stmt->expression());
-}
-
-
-void FastCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
-  // Nothing to do.
-}
-
-
-void FastCodeGenerator::VisitIfStatement(IfStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitWithEnterStatement(WithEnterStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitWithExitStatement(WithExitStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitForStatement(ForStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitSharedFunctionInfoLiteral(
-    SharedFunctionInfoLiteral* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitConditional(Conditional* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitSlot(Slot* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
-  ASSERT(expr->var()->is_global() && !expr->var()->is_this());
-  // Check if we can compile a global variable load directly from the cell.
-  ASSERT(info()->has_global_object());
-  LookupResult lookup;
-  info()->global_object()->Lookup(*expr->name(), &lookup);
-  // We only support normal (non-accessor/interceptor) DontDelete properties
-  // for now.
-  ASSERT(lookup.IsProperty());
-  ASSERT_EQ(NORMAL, lookup.type());
-  ASSERT(lookup.IsDontDelete());
-  Handle<Object> cell(info()->global_object()->GetPropertyCell(&lookup));
-
-  // Global variable lookups do not have side effects, so we do not need to
-  // emit code if we are in an effect context.
-  if (!destination().is(no_reg)) {
-    Comment cmnt(masm(), ";; Global");
-    if (FLAG_print_ir) {
-      SmartPointer<char> name = expr->name()->ToCString();
-      PrintF("%d: t%d = Global(%s)\n", expr->num(),
-             expr->num(), *name);
-    }
-    EmitGlobalVariableLoad(cell);
-  }
-}
-
-
-void FastCodeGenerator::VisitLiteral(Literal* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitAssignment(Assignment* expr) {
-  // Known to be a simple this property assignment.  Effectively a unary
-  // operation.
-  { Register my_destination = destination();
-    set_destination(accumulator0());
-    Visit(expr->value());
-    set_destination(my_destination);
-  }
-
-  Property* prop = expr->target()->AsProperty();
-  ASSERT_NOT_NULL(prop);
-  ASSERT_NOT_NULL(prop->obj()->AsVariableProxy());
-  ASSERT(prop->obj()->AsVariableProxy()->var()->is_this());
-  ASSERT(prop->key()->IsPropertyName());
-  Handle<String> name =
-      Handle<String>::cast(prop->key()->AsLiteral()->handle());
-
-  Comment cmnt(masm(), ";; Store to this");
-  if (FLAG_print_ir) {
-    SmartPointer<char> name_string = name->ToCString();
-    PrintF("%d: ", expr->num());
-    if (!destination().is(no_reg)) PrintF("t%d = ", expr->num());
-    PrintF("Store(this, \"%s\", t%d)\n", *name_string,
-           expr->value()->num());
-  }
-
-  EmitThisPropertyStore(name);
-}
-
-
-void FastCodeGenerator::VisitThrow(Throw* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitProperty(Property* expr) {
-  ASSERT_NOT_NULL(expr->obj()->AsVariableProxy());
-  ASSERT(expr->obj()->AsVariableProxy()->var()->is_this());
-  ASSERT(expr->key()->IsPropertyName());
-  if (!destination().is(no_reg)) {
-    Handle<String> name =
-        Handle<String>::cast(expr->key()->AsLiteral()->handle());
-
-    Comment cmnt(masm(), ";; Load from this");
-    if (FLAG_print_ir) {
-      SmartPointer<char> name_string = name->ToCString();
-      PrintF("%d: t%d = Load(this, \"%s\")\n",
-             expr->num(), expr->num(), *name_string);
-    }
-    EmitThisPropertyLoad(name);
-  }
-}
-
-
-void FastCodeGenerator::VisitCall(Call* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitCallNew(CallNew* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitCountOperation(CountOperation* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
-  // We support limited binary operations: bitwise OR only allowed to be
-  // nested on the left.
-  ASSERT(expr->op() == Token::BIT_OR);
-  ASSERT(expr->right()->IsLeaf());
-
-  { Register my_destination = destination();
-    set_destination(accumulator1());
-    Visit(expr->left());
-    set_destination(accumulator0());
-    Visit(expr->right());
-    set_destination(my_destination);
-  }
-
-  Comment cmnt(masm(), ";; BIT_OR");
-  if (FLAG_print_ir) {
-    PrintF("%d: ", expr->num());
-    if (!destination().is(no_reg)) PrintF("t%d = ", expr->num());
-    PrintF("BIT_OR(t%d, t%d)\n", expr->left()->num(), expr->right()->num());
-  }
-  EmitBitOr();
-}
-
-
-void FastCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitThisFunction(ThisFunction* expr) {
-  UNREACHABLE();
-}
-
-#undef __
-
-
-} }  // namespace v8::internal
diff --git a/src/fast-codegen.h b/src/fast-codegen.h
deleted file mode 100644
index a0282bb..0000000
--- a/src/fast-codegen.h
+++ /dev/null
@@ -1,161 +0,0 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_FAST_CODEGEN_H_
-#define V8_FAST_CODEGEN_H_
-
-#if V8_TARGET_ARCH_IA32
-#include "ia32/fast-codegen-ia32.h"
-#else
-
-#include "v8.h"
-
-#include "ast.h"
-#include "compiler.h"
-#include "list.h"
-
-namespace v8 {
-namespace internal {
-
-class FastCodeGenSyntaxChecker: public AstVisitor {
- public:
-  explicit FastCodeGenSyntaxChecker()
-      : info_(NULL), has_supported_syntax_(true) {
-  }
-
-  void Check(CompilationInfo* info);
-
-  CompilationInfo* info() { return info_; }
-  bool has_supported_syntax() { return has_supported_syntax_; }
-
- private:
-  void VisitDeclarations(ZoneList<Declaration*>* decls);
-  void VisitStatements(ZoneList<Statement*>* stmts);
-
-  // AST node visit functions.
-#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
-  AST_NODE_LIST(DECLARE_VISIT)
-#undef DECLARE_VISIT
-
-  CompilationInfo* info_;
-  bool has_supported_syntax_;
-
-  DISALLOW_COPY_AND_ASSIGN(FastCodeGenSyntaxChecker);
-};
-
-
-class FastCodeGenerator: public AstVisitor {
- public:
-  explicit FastCodeGenerator(MacroAssembler* masm)
-      : masm_(masm), info_(NULL), destination_(no_reg), smi_bits_(0) {
-  }
-
-  static Handle<Code> MakeCode(CompilationInfo* info);
-
-  void Generate(CompilationInfo* compilation_info);
-
- private:
-  MacroAssembler* masm() { return masm_; }
-  CompilationInfo* info() { return info_; }
-
-  Register destination() { return destination_; }
-  void set_destination(Register reg) { destination_ = reg; }
-
-  FunctionLiteral* function() { return info_->function(); }
-  Scope* scope() { return info_->scope(); }
-
-  // Platform-specific fixed registers, all guaranteed distinct.
-  Register accumulator0();
-  Register accumulator1();
-  Register scratch0();
-  Register scratch1();
-  Register scratch2();
-  Register receiver_reg();
-  Register context_reg();
-
-  Register other_accumulator(Register reg) {
-    ASSERT(reg.is(accumulator0()) || reg.is(accumulator1()));
-    return (reg.is(accumulator0())) ? accumulator1() : accumulator0();
-  }
-
-  // Flags are true if the respective register is statically known to hold a
-  // smi.  We do not track every register, only the accumulator registers.
-  bool is_smi(Register reg) {
-    ASSERT(!reg.is(no_reg));
-    return (smi_bits_ & reg.bit()) != 0;
-  }
-  void set_as_smi(Register reg) {
-    ASSERT(!reg.is(no_reg));
-    smi_bits_ = smi_bits_ | reg.bit();
-  }
-  void clear_as_smi(Register reg) {
-    ASSERT(!reg.is(no_reg));
-    smi_bits_ = smi_bits_ & ~reg.bit();
-  }
-
-  // AST node visit functions.
-#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
-  AST_NODE_LIST(DECLARE_VISIT)
-#undef DECLARE_VISIT
-
-  // Emit code to load the receiver from the stack into receiver_reg.
-  void EmitLoadReceiver();
-
-  // Emit code to load a global variable directly from a global property
-  // cell into the destination register.
-  void EmitGlobalVariableLoad(Handle<Object> cell);
-
-  // Emit a store to an own property of this.  The stored value is expected
-  // in accumulator0 and the receiver in receiver_reg.  The receiver
-  // register is preserved and the result (the stored value) is left in the
-  // destination register.
-  void EmitThisPropertyStore(Handle<String> name);
-
-  // Emit a load from an own property of this.  The receiver is expected in
-  // receiver_reg.  The receiver register is preserved and the result is
-  // left in the destination register.
-  void EmitThisPropertyLoad(Handle<String> name);
-
-  // Emit a bitwise or operation.  The left operand is in accumulator1 and
-  // the right is in accumulator0.  The result should be left in the
-  // destination register.
-  void EmitBitOr();
-
-  MacroAssembler* masm_;
-  CompilationInfo* info_;
-  Register destination_;
-  uint32_t smi_bits_;
-
-  DISALLOW_COPY_AND_ASSIGN(FastCodeGenerator);
-};
-
-
-} }  // namespace v8::internal
-
-#endif  // V8_TARGET_ARCH_IA32
-
-#endif  // V8_FAST_CODEGEN_H_
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 02e8f16..a143bcd 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -148,11 +148,8 @@
 DEFINE_int(min_preparse_length, 1024,
            "minimum length for automatic enable preparsing")
 DEFINE_bool(full_compiler, true, "enable dedicated backend for run-once code")
-DEFINE_bool(fast_compiler, false, "enable speculative optimizing backend")
 DEFINE_bool(always_full_compiler, false,
             "try to use the dedicated run-once backend for all code")
-DEFINE_bool(always_fast_compiler, false,
-            "try to use the speculative optimizing backend for all code")
 DEFINE_bool(trace_bailout, false,
             "print reasons for falling back to using the classic V8 backend")
 DEFINE_bool(safe_int32_compiler, true,
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index a468f14..e97ed76 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -677,7 +677,7 @@
   MacroAssembler masm(NULL, kInitialBufferSize);
 
   FullCodeGenerator cgen(&masm);
-  cgen.Generate(info, PRIMARY);
+  cgen.Generate(info);
   if (cgen.HasStackOverflow()) {
     ASSERT(!Top::has_pending_exception());
     return Handle<Code>::null();
@@ -919,6 +919,9 @@
     EmitGetFromCache(expr->arguments());
   } else if (strcmp("_IsRegExpEquivalent", *name->ToCString()) == 0) {
     EmitIsRegExpEquivalent(expr->arguments());
+  } else if (strcmp("_IsStringWrapperSafeForDefaultValueOf",
+                    *name->ToCString()) == 0) {
+    EmitIsStringWrapperSafeForDefaultValueOf(expr->arguments());
   } else {
     UNREACHABLE();
   }
diff --git a/src/full-codegen.h b/src/full-codegen.h
index 6e2fecb..00f4c06 100644
--- a/src/full-codegen.h
+++ b/src/full-codegen.h
@@ -89,11 +89,6 @@
 
 class FullCodeGenerator: public AstVisitor {
  public:
-  enum Mode {
-    PRIMARY,
-    SECONDARY
-  };
-
   explicit FullCodeGenerator(MacroAssembler* masm)
       : masm_(masm),
         info_(NULL),
@@ -106,7 +101,7 @@
 
   static Handle<Code> MakeCode(CompilationInfo* info);
 
-  void Generate(CompilationInfo* info, Mode mode);
+  void Generate(CompilationInfo* info);
 
  private:
   class Breakable;
@@ -408,6 +403,8 @@
   void EmitIsArray(ZoneList<Expression*>* arguments);
   void EmitIsRegExp(ZoneList<Expression*>* arguments);
   void EmitIsConstructCall(ZoneList<Expression*>* arguments);
+  void EmitIsStringWrapperSafeForDefaultValueOf(
+      ZoneList<Expression*>* arguments);
   void EmitObjectEquals(ZoneList<Expression*>* arguments);
   void EmitArguments(ZoneList<Expression*>* arguments);
   void EmitArgumentsLength(ZoneList<Expression*>* arguments);
diff --git a/src/globals.h b/src/globals.h
index 030af7c..3fe9e24 100644
--- a/src/globals.h
+++ b/src/globals.h
@@ -507,6 +507,31 @@
 };
 
 
+// Unions used for customized checking of the IEEE double type, inlined
+// within the V8 runtime rather than going to the underlying platform
+// headers and libraries.
+union IeeeDoubleLittleEndianArchType {
+  double d;
+  struct {
+    unsigned int man_low  :32;
+    unsigned int man_high :20;
+    unsigned int exp      :11;
+    unsigned int sign     :1;
+  } bits;
+};
+
+
+union IeeeDoubleBigEndianArchType {
+  double d;
+  struct {
+    unsigned int sign     :1;
+    unsigned int exp      :11;
+    unsigned int man_high :20;
+    unsigned int man_low  :32;
+  } bits;
+};
+
+
 // AccessorCallback
 struct AccessorDescriptor {
   Object* (*getter)(Object* object, void* data);
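
The unions make the IEEE-754 fields directly addressable without platform math headers; reading the inactive member is the usual GCC-sanctioned type pun. A small usage sketch for the little-endian variant, noting that 1.0 has biased exponent 1023 and an all-zero mantissa; the union definition is copied from the hunk above so the sketch stands alone:

    #include <cstdio>

    union IeeeDoubleLittleEndianArchType {
      double d;
      struct {
        unsigned int man_low  :32;
        unsigned int man_high :20;
        unsigned int exp      :11;
        unsigned int sign     :1;
      } bits;
    };

    int main() {
      IeeeDoubleLittleEndianArchType u;
      u.d = 1.0;
      // On a little-endian IEEE-754 target: sign=0 exp=1023 man_*=0.
      std::printf("sign=%u exp=%u man_high=%u man_low=%u\n",
                  u.bits.sign, u.bits.exp, u.bits.man_high, u.bits.man_low);
      return 0;
    }
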
diff --git a/src/handles.cc b/src/handles.cc
index 927cfd9..7b76e92 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -637,8 +637,8 @@
 
     // Check access rights if required.
     if (current->IsAccessCheckNeeded() &&
-      !Top::MayNamedAccess(*current, Heap::undefined_value(),
-                           v8::ACCESS_KEYS)) {
+        !Top::MayNamedAccess(*current, Heap::undefined_value(),
+                             v8::ACCESS_KEYS)) {
       Top::ReportFailedAccessCheck(*current, v8::ACCESS_KEYS);
       break;
     }
@@ -819,11 +819,6 @@
 }
 
 
-Handle<Code> ComputeLazyCompile(int argc) {
-  CALL_HEAP_FUNCTION(StubCache::ComputeLazyCompile(argc), Code);
-}
-
-
 OptimizedObjectForAddingMultipleProperties::
 ~OptimizedObjectForAddingMultipleProperties() {
   // Reoptimize the object to allow fast property access.
diff --git a/src/handles.h b/src/handles.h
index 1e14daf..135dbfb 100644
--- a/src/handles.h
+++ b/src/handles.h
@@ -353,9 +353,6 @@
                        Handle<Object> receiver,
                        ClearExceptionFlag flag);
 
-// Returns the lazy compilation stub for argc arguments.
-Handle<Code> ComputeLazyCompile(int argc);
-
 class NoHandleAllocation BASE_EMBEDDED {
  public:
 #ifndef DEBUG
diff --git a/src/heap.cc b/src/heap.cc
index dfc18cc..1d696c7 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -37,6 +37,7 @@
 #include "global-handles.h"
 #include "mark-compact.h"
 #include "natives.h"
+#include "objects-visiting.h"
 #include "scanner.h"
 #include "scopeinfo.h"
 #include "snapshot.h"
@@ -1032,6 +1033,17 @@
 }
 
 
+class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
+ public:
+  static inline void VisitPointer(Object** p) {
+    Object* object = *p;
+    if (!Heap::InNewSpace(object)) return;
+    Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
+                         reinterpret_cast<HeapObject*>(object));
+  }
+};
+
+
 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
                          Address new_space_front) {
   do {
@@ -1042,10 +1054,7 @@
     // queue is empty.
     while (new_space_front < new_space_.top()) {
       HeapObject* object = HeapObject::FromAddress(new_space_front);
-      Map* map = object->map();
-      int size = object->SizeFromMap(map);
-      object->IterateBody(map->instance_type(), size, scavenge_visitor);
-      new_space_front += size;
+      new_space_front += NewSpaceScavenger::IterateBody(object->map(), object);
     }
 
     // Promote and process all the to-be-promoted objects.
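
The hunk below replaces the family of size-specialized scavenger functions with a visitor table: each map caches a visitor id, and scavenging becomes one table lookup plus an indirect call. A stripped-down sketch of that dispatch-table pattern, with types reduced to the essentials rather than quoting objects-visiting.h:

    struct Map;
    struct HeapObject;
    typedef void (*ScavengeCallback)(Map* map, HeapObject** slot,
                                     HeapObject* obj);

    enum VisitorId { kVisitByteArray, kVisitFixedArray, kVisitorIdCount };

    class VisitorDispatchTable {
     public:
      void Register(VisitorId id, ScavengeCallback cb) { callbacks_[id] = cb; }
      ScavengeCallback GetVisitor(VisitorId id) { return callbacks_[id]; }
     private:
      ScavengeCallback callbacks_[kVisitorIdCount];
    };
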
@@ -1072,315 +1081,231 @@
 }
 
 
+class ScavengingVisitor : public StaticVisitorBase {
+ public:
+  static void Initialize() {
+    table_.Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString);
+    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
+    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
+    table_.Register(kVisitByteArray, &EvacuateByteArray);
+    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
+
+    typedef ObjectEvacuationStrategy<POINTER_OBJECT> PointerObject;
+
+    table_.Register(kVisitConsString,
+                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
+                        VisitSpecialized<ConsString::kSize>);
+
+    table_.Register(kVisitSharedFunctionInfo,
+                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
+                        VisitSpecialized<SharedFunctionInfo::kSize>);
+
+    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
+                                   kVisitDataObject,
+                                   kVisitDataObjectGeneric>();
+
+    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
+                                   kVisitJSObject,
+                                   kVisitJSObjectGeneric>();
+
+    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
+                                   kVisitStruct,
+                                   kVisitStructGeneric>();
+  }
+
+
+  static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
+    table_.GetVisitor(map)(map, slot, obj);
+  }
+
+
+ private:
+  enum ObjectContents  { DATA_OBJECT, POINTER_OBJECT };
+  enum SizeRestriction { SMALL, UNKNOWN_SIZE };
+
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-static void RecordCopiedObject(HeapObject* obj) {
-  bool should_record = false;
+  static void RecordCopiedObject(HeapObject* obj) {
+    bool should_record = false;
 #ifdef DEBUG
-  should_record = FLAG_heap_stats;
+    should_record = FLAG_heap_stats;
 #endif
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  should_record = should_record || FLAG_log_gc;
+    should_record = should_record || FLAG_log_gc;
 #endif
-  if (should_record) {
-    if (Heap::new_space()->Contains(obj)) {
-      Heap::new_space()->RecordAllocation(obj);
-    } else {
-      Heap::new_space()->RecordPromotion(obj);
+    if (should_record) {
+      if (Heap::new_space()->Contains(obj)) {
+        Heap::new_space()->RecordAllocation(obj);
+      } else {
+        Heap::new_space()->RecordPromotion(obj);
+      }
     }
   }
-}
 #endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
 
+  // Helper function used by CopyObject to copy a source object to an
+  // allocated target object and update the forwarding pointer in the source
+  // object.  Returns the target object.
+  INLINE(static HeapObject* MigrateObject(HeapObject* source,
+                                          HeapObject* target,
+                                          int size)) {
+    // Copy the content of source to target.
+    Heap::CopyBlock(target->address(), source->address(), size);
 
-// Helper function used by CopyObject to copy a source object to an
-// allocated target object and update the forwarding pointer in the source
-// object.  Returns the target object.
-inline static HeapObject* MigrateObject(HeapObject* source,
-                                        HeapObject* target,
-                                        int size) {
-  // Copy the content of source to target.
-  Heap::CopyBlock(target->address(), source->address(), size);
-
-  // Set the forwarding address.
-  source->set_map_word(MapWord::FromForwardingAddress(target));
+    // Set the forwarding address.
+    source->set_map_word(MapWord::FromForwardingAddress(target));
 
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-  // Update NewSpace stats if necessary.
-  RecordCopiedObject(target);
+    // Update NewSpace stats if necessary.
+    RecordCopiedObject(target);
 #endif
-  HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));
+    HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));
 
-  return target;
-}
-
-
-enum ObjectContents  { DATA_OBJECT, POINTER_OBJECT };
-enum SizeRestriction { SMALL, UNKNOWN_SIZE };
-
-
-template<ObjectContents object_contents, SizeRestriction size_restriction>
-static inline void EvacuateObject(Map* map,
-                                  HeapObject** slot,
-                                  HeapObject* object,
-                                  int object_size) {
-  ASSERT((size_restriction != SMALL) ||
-         (object_size <= Page::kMaxHeapObjectSize));
-  ASSERT(object->Size() == object_size);
-
-  if (Heap::ShouldBePromoted(object->address(), object_size)) {
-    Object* result;
-
-    if ((size_restriction != SMALL) &&
-        (object_size > Page::kMaxHeapObjectSize)) {
-      result = Heap::lo_space()->AllocateRawFixedArray(object_size);
-    } else {
-      if (object_contents == DATA_OBJECT) {
-        result = Heap::old_data_space()->AllocateRaw(object_size);
-      } else {
-        result = Heap::old_pointer_space()->AllocateRaw(object_size);
-      }
-    }
-
-    if (!result->IsFailure()) {
-      HeapObject* target = HeapObject::cast(result);
-      *slot = MigrateObject(object, target, object_size);
-
-      if (object_contents == POINTER_OBJECT) {
-        promotion_queue.insert(target, object_size);
-      }
-
-      Heap::tracer()->increment_promoted_objects_size(object_size);
-      return;
-    }
+    return target;
   }
-  Object* result = Heap::new_space()->AllocateRaw(object_size);
-  ASSERT(!result->IsFailure());
-  *slot = MigrateObject(object, HeapObject::cast(result), object_size);
-  return;
-}
 
 
-template<int object_size_in_words, ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  const int object_size = object_size_in_words << kPointerSizeLog2;
-  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
+  template<ObjectContents object_contents, SizeRestriction size_restriction>
+  static inline void EvacuateObject(Map* map,
+                                    HeapObject** slot,
+                                    HeapObject* object,
+                                    int object_size) {
+    ASSERT((size_restriction != SMALL) ||
+           (object_size <= Page::kMaxHeapObjectSize));
+    ASSERT(object->Size() == object_size);
 
+    if (Heap::ShouldBePromoted(object->address(), object_size)) {
+      Object* result;
 
-template<ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  int object_size = map->instance_size();
-  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateFixedArray(Map* map,
-                                      HeapObject** slot,
-                                      HeapObject* object) {
-  int object_size = FixedArray::cast(object)->FixedArraySize();
-  EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateByteArray(Map* map,
-                                     HeapObject** slot,
-                                     HeapObject* object) {
-  int object_size = ByteArray::cast(object)->ByteArraySize();
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static Scavenger GetScavengerForSize(int object_size,
-                                     ObjectContents object_contents) {
-  ASSERT(IsAligned(object_size, kPointerSize));
-  ASSERT(object_size < Page::kMaxHeapObjectSize);
-
-  switch (object_size >> kPointerSizeLog2) {
-#define CASE(n)                                           \
-    case n:                                               \
-      if (object_contents == DATA_OBJECT) {               \
-        return static_cast<Scavenger>(                    \
-          &EvacuateObjectOfFixedSize<n, DATA_OBJECT>);    \
-      } else {                                            \
-        return static_cast<Scavenger>(                    \
-          &EvacuateObjectOfFixedSize<n, POINTER_OBJECT>); \
-      }
-
-    CASE(1);
-    CASE(2);
-    CASE(3);
-    CASE(4);
-    CASE(5);
-    CASE(6);
-    CASE(7);
-    CASE(8);
-    CASE(9);
-    CASE(10);
-    CASE(11);
-    CASE(12);
-    CASE(13);
-    CASE(14);
-    CASE(15);
-    CASE(16);
-    default:
-      if (object_contents == DATA_OBJECT) {
-        return static_cast<Scavenger>(&EvacuateObjectOfFixedSize<DATA_OBJECT>);
+      if ((size_restriction != SMALL) &&
+          (object_size > Page::kMaxHeapObjectSize)) {
+        result = Heap::lo_space()->AllocateRawFixedArray(object_size);
       } else {
-        return static_cast<Scavenger>(
-            &EvacuateObjectOfFixedSize<POINTER_OBJECT>);
+        if (object_contents == DATA_OBJECT) {
+          result = Heap::old_data_space()->AllocateRaw(object_size);
+        } else {
+          result = Heap::old_pointer_space()->AllocateRaw(object_size);
+        }
       }
 
-#undef CASE
-  }
-}
+      if (!result->IsFailure()) {
+        HeapObject* target = HeapObject::cast(result);
+        *slot = MigrateObject(object, target, object_size);
 
+        if (object_contents == POINTER_OBJECT) {
+          promotion_queue.insert(target, object_size);
+        }
 
-static inline void EvacuateSeqAsciiString(Map* map,
-                                          HeapObject** slot,
-                                          HeapObject* object) {
-  int object_size = SeqAsciiString::cast(object)->
-      SeqAsciiStringSize(map->instance_type());
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateSeqTwoByteString(Map* map,
-                                            HeapObject** slot,
-                                            HeapObject* object) {
-  int object_size = SeqTwoByteString::cast(object)->
-      SeqTwoByteStringSize(map->instance_type());
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline bool IsShortcutCandidate(int type) {
-  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
-}
-
-
-static inline void EvacuateShortcutCandidate(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  ASSERT(IsShortcutCandidate(map->instance_type()));
-
-  if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
-    HeapObject* first =
-        HeapObject::cast(ConsString::cast(object)->unchecked_first());
-
-    *slot = first;
-
-    if (!Heap::InNewSpace(first)) {
-      object->set_map_word(MapWord::FromForwardingAddress(first));
-      return;
+        Heap::tracer()->increment_promoted_objects_size(object_size);
+        return;
+      }
     }
-
-    MapWord first_word = first->map_word();
-    if (first_word.IsForwardingAddress()) {
-      HeapObject* target = first_word.ToForwardingAddress();
-
-      *slot = target;
-      object->set_map_word(MapWord::FromForwardingAddress(target));
-      return;
-    }
-
-    first->map()->Scavenge(slot, first);
-    object->set_map_word(MapWord::FromForwardingAddress(*slot));
+    Object* result = Heap::new_space()->AllocateRaw(object_size);
+    ASSERT(!result->IsFailure());
+    *slot = MigrateObject(object, HeapObject::cast(result), object_size);
     return;
   }
 
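
The EvacuateObject template above decides where a survivor lands: oversized objects go to the large-object space, small data-only objects to old data space, small pointer-bearing objects to old pointer space (plus the promotion queue so their slots get rescanned), and anything not ripe for promotion is reallocated in new space. A condensed, illustrative sketch of that policy, with spaces reduced to labels and allocation assumed to succeed:

    #include <cstdio>

    // Condensed restatement of the promotion policy in EvacuateObject above.
    enum Space { NEW_SPACE, OLD_DATA_SPACE, OLD_POINTER_SPACE, LO_SPACE };
    enum Contents { DATA_OBJECT, POINTER_OBJECT };

    Space ChooseTargetSpace(bool should_be_promoted, int object_size,
                            int max_small_object_size, Contents contents) {
      if (!should_be_promoted) return NEW_SPACE;  // stay in to-space
      if (object_size > max_small_object_size) return LO_SPACE;
      // Pointer-bearing survivors also go on the promotion queue so their
      // slots are rescanned; data-only objects skip that step.
      return (contents == DATA_OBJECT) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
    }

    int main() {
      std::printf("%d\n",
                  ChooseTargetSpace(true, 64, 8192, POINTER_OBJECT));
      return 0;
    }
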
-  int object_size = ConsString::kSize;
-  EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
-}
+
+  static inline void EvacuateFixedArray(Map* map,
+                                        HeapObject** slot,
+                                        HeapObject* object) {
+    int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
+    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
+                                                 slot,
+                                                 object,
+                                                 object_size);
+  }
 
 
-Scavenger Heap::GetScavenger(int instance_type, int instance_size) {
-  if (instance_type < FIRST_NONSTRING_TYPE) {
-    switch (instance_type & kStringRepresentationMask) {
-      case kSeqStringTag:
-        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
-          return &EvacuateSeqAsciiString;
-        } else {
-          return &EvacuateSeqTwoByteString;
-        }
+  static inline void EvacuateByteArray(Map* map,
+                                       HeapObject** slot,
+                                       HeapObject* object) {
+    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+  }
 
-      case kConsStringTag:
-        if (IsShortcutCandidate(instance_type)) {
-          return &EvacuateShortcutCandidate;
-        } else {
-          ASSERT(instance_size == ConsString::kSize);
-          return GetScavengerForSize(ConsString::kSize, POINTER_OBJECT);
-        }
 
-      case kExternalStringTag:
-        ASSERT(instance_size == ExternalString::kSize);
-        return GetScavengerForSize(ExternalString::kSize, DATA_OBJECT);
+  static inline void EvacuateSeqAsciiString(Map* map,
+                                            HeapObject** slot,
+                                            HeapObject* object) {
+    int object_size = SeqAsciiString::cast(object)->
+        SeqAsciiStringSize(map->instance_type());
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+  }
+
+
+  static inline void EvacuateSeqTwoByteString(Map* map,
+                                              HeapObject** slot,
+                                              HeapObject* object) {
+    int object_size = SeqTwoByteString::cast(object)->
+        SeqTwoByteStringSize(map->instance_type());
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+  }
+
+
+  static inline bool IsShortcutCandidate(int type) {
+    return ((type & kShortcutTypeMask) == kShortcutTypeTag);
+  }
+
+  static inline void EvacuateShortcutCandidate(Map* map,
+                                               HeapObject** slot,
+                                               HeapObject* object) {
+    ASSERT(IsShortcutCandidate(map->instance_type()));
+
+    if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
+      HeapObject* first =
+          HeapObject::cast(ConsString::cast(object)->unchecked_first());
+
+      *slot = first;
+
+      if (!Heap::InNewSpace(first)) {
+        object->set_map_word(MapWord::FromForwardingAddress(first));
+        return;
+      }
+
+      MapWord first_word = first->map_word();
+      if (first_word.IsForwardingAddress()) {
+        HeapObject* target = first_word.ToForwardingAddress();
+
+        *slot = target;
+        object->set_map_word(MapWord::FromForwardingAddress(target));
+        return;
+      }
+
+      Scavenge(first->map(), slot, first);
+      object->set_map_word(MapWord::FromForwardingAddress(*slot));
+      return;
     }
-    UNREACHABLE();
+
+    int object_size = ConsString::kSize;
+    EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
   }
 
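
EvacuateShortcutCandidate above short-circuits a cons string whose second half is the empty string: the slot is pointed at the first half and the cons wrapper itself is never copied. A toy restatement of that decision with stand-in structs:

    #include <cassert>

    // Toy cons string (stand-in structs): a cons whose second part is the
    // empty string is equivalent to its first part, so the slot can point
    // directly at the first part and the wrapper is dropped.
    struct Str {
      const char* flat;  // non-null for flat strings
      Str* first;        // non-null for cons strings
      Str* second;
    };

    Str* ShortcutIfPossible(Str* cons, Str* empty_string) {
      if (cons->second == empty_string) return cons->first;
      return cons;  // no shortcut: evacuate the cons cell as usual
    }

    int main() {
      Str empty = { "", 0, 0 };
      Str hello = { "hello", 0, 0 };
      Str cons  = { 0, &hello, &empty };
      assert(ShortcutIfPossible(&cons, &empty) == &hello);
      return 0;
    }
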
-  switch (instance_type) {
-    case BYTE_ARRAY_TYPE:
-      return reinterpret_cast<Scavenger>(&EvacuateByteArray);
+  template<ObjectContents object_contents>
+  class ObjectEvacuationStrategy {
+   public:
+    template<int object_size>
+    static inline void VisitSpecialized(Map* map,
+                                        HeapObject** slot,
+                                        HeapObject* object) {
+      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+    }
 
-    case FIXED_ARRAY_TYPE:
-      return reinterpret_cast<Scavenger>(&EvacuateFixedArray);
+    static inline void Visit(Map* map,
+                             HeapObject** slot,
+                             HeapObject* object) {
+      int object_size = map->instance_size();
+      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+    }
+  };
 
-    case JS_OBJECT_TYPE:
-    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
-    case JS_VALUE_TYPE:
-    case JS_ARRAY_TYPE:
-    case JS_REGEXP_TYPE:
-    case JS_FUNCTION_TYPE:
-    case JS_GLOBAL_PROXY_TYPE:
-    case JS_GLOBAL_OBJECT_TYPE:
-    case JS_BUILTINS_OBJECT_TYPE:
-      return GetScavengerForSize(instance_size, POINTER_OBJECT);
+  typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);
 
-    case ODDBALL_TYPE:
-      return NULL;
+  static VisitorDispatchTable<Callback> table_;
+};
 
-    case PROXY_TYPE:
-      return GetScavengerForSize(Proxy::kSize, DATA_OBJECT);
 
-    case MAP_TYPE:
-      return NULL;
-
-    case CODE_TYPE:
-      return NULL;
-
-    case JS_GLOBAL_PROPERTY_CELL_TYPE:
-      return NULL;
-
-    case HEAP_NUMBER_TYPE:
-    case FILLER_TYPE:
-    case PIXEL_ARRAY_TYPE:
-    case EXTERNAL_BYTE_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
-    case EXTERNAL_SHORT_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
-    case EXTERNAL_INT_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
-    case EXTERNAL_FLOAT_ARRAY_TYPE:
-      return GetScavengerForSize(instance_size, DATA_OBJECT);
-
-    case SHARED_FUNCTION_INFO_TYPE:
-      return GetScavengerForSize(SharedFunctionInfo::kAlignedSize,
-                                 POINTER_OBJECT);
-
-#define MAKE_STRUCT_CASE(NAME, Name, name) \
-        case NAME##_TYPE:
-      STRUCT_LIST(MAKE_STRUCT_CASE)
-#undef MAKE_STRUCT_CASE
-          return GetScavengerForSize(instance_size, POINTER_OBJECT);
-    default:
-      UNREACHABLE();
-      return NULL;
-  }
-}
+VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
 
 
 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
@@ -1388,7 +1313,7 @@
   MapWord first_word = object->map_word();
   ASSERT(!first_word.IsForwardingAddress());
   Map* map = first_word.ToMap();
-  map->Scavenge(p, object);
+  ScavengingVisitor::Scavenge(map, p, object);
 }
 
 
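
ScavengeObjectSlow now routes through ScavengingVisitor::Scavenge, which looks the callback up in a static VisitorDispatchTable keyed by the visitor id cached in each map, instead of chasing a per-map scavenger function pointer. A minimal sketch of that dispatch pattern, using simplified stand-in types rather than the real V8 declarations:

    #include <cstdio>

    struct HeapObject;               // stand-in; opaque here
    struct Map { int visitor_id; };  // the id the patch caches per map

    typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);

    enum VisitorId { kVisitByteArray, kVisitFixedArray, kVisitorIdCount };

    // Callbacks indexed by the visitor id, so the hot scavenging loop does
    // one load and one indirect call per object.
    template<typename T>
    class VisitorDispatchTable {
     public:
      void Register(VisitorId id, T callback) { callbacks_[id] = callback; }
      T GetVisitor(Map* map) { return callbacks_[map->visitor_id]; }
     private:
      T callbacks_[kVisitorIdCount];
    };

    static VisitorDispatchTable<Callback> table;

    static void EvacuateByteArray(Map*, HeapObject**, HeapObject*) {
      std::printf("byte array scavenged\n");
    }

    int main() {
      table.Register(kVisitByteArray, &EvacuateByteArray);
      Map byte_array_map = { kVisitByteArray };
      table.GetVisitor(&byte_array_map)(&byte_array_map, 0, 0);
      return 0;
    }
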
@@ -1407,7 +1332,8 @@
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
   reinterpret_cast<Map*>(result)->
-      set_scavenger(GetScavenger(instance_type, instance_size));
+      set_visitor_id(
+          StaticVisitorBase::GetVisitorId(instance_type, instance_size));
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1424,7 +1350,8 @@
   Map* map = reinterpret_cast<Map*>(result);
   map->set_map(meta_map());
   map->set_instance_type(instance_type);
-  map->set_scavenger(GetScavenger(instance_type, instance_size));
+  map->set_visitor_id(
+      StaticVisitorBase::GetVisitorId(instance_type, instance_size));
   map->set_prototype(null_value());
   map->set_constructor(null_value());
   map->set_instance_size(instance_size);
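
Both allocation paths now derive the stored visitor id from the instance type and size via StaticVisitorBase::GetVisitorId. Roughly, small fixed-size kinds get an id specialized for their word size while variably sized kinds fall back to a generic id; the sketch below mirrors only that shape (the names and the cutoff are stand-ins, not the real constants):

    // Illustrative only: mirrors the shape of StaticVisitorBase::GetVisitorId.
    enum IdKind { kVisitGeneric, kVisitSpecialized };

    struct VisitorChoice {
      IdKind kind;
      int size_in_words;  // meaningful only for specialized ids
    };

    VisitorChoice GetVisitorIdSketch(int instance_size, int pointer_size,
                                     int max_specialized_words) {
      VisitorChoice choice;
      int words = instance_size / pointer_size;
      if (words <= max_specialized_words) {
        choice.kind = kVisitSpecialized;   // one id per small object size
        choice.size_in_words = words;
      } else {
        choice.kind = kVisitGeneric;       // size recomputed at visit time
        choice.size_in_words = 0;
      }
      return choice;
    }

    int main() {
      VisitorChoice c = GetVisitorIdSketch(64, 4, 16);  // 16 words: specialized
      return c.kind == kVisitSpecialized ? 0 : 1;
    }
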
@@ -2504,8 +2431,7 @@
   if (CodeIsActive(shared_info->code())) return;
 
   // Compute the lazy compilable version of the code.
-  HandleScope scope;
-  Code* code = *ComputeLazyCompile(shared_info->length());
+  Code* code = Builtins::builtin(Builtins::LazyCompile);
   shared_info->set_code(code);
   function->set_code(code);
 }
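
Flushing a function's code now means pointing both the SharedFunctionInfo and the JSFunction at the preexisting LazyCompile builtin, instead of materializing a fresh stub via ComputeLazyCompile under a HandleScope. A sketch of the idea with stand-in types:

    #include <cstdio>

    // Stand-in types; in V8 the players are SharedFunctionInfo, JSFunction
    // and the Builtins::LazyCompile code object.
    struct Code { const char* name; };

    struct SharedFunctionInfo {
      Code* code;
      void set_code(Code* c) { code = c; }
    };

    struct JSFunction {
      Code* code;
      void set_code(Code* c) { code = c; }
    };

    // One shared builtin replaces the per-flush stub that used to be built.
    static Code lazy_compile_builtin = { "LazyCompile" };

    void FlushCodeSketch(SharedFunctionInfo* shared, JSFunction* function) {
      shared->set_code(&lazy_compile_builtin);
      function->set_code(&lazy_compile_builtin);
    }

    int main() {
      SharedFunctionInfo shared = { 0 };
      JSFunction function = { 0 };
      FlushCodeSketch(&shared, &function);
      std::printf("%s\n", function.code->name);
      return 0;
    }
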
@@ -4150,6 +4076,7 @@
   *stats->memory_allocator_size = MemoryAllocator::Size();
   *stats->memory_allocator_capacity =
       MemoryAllocator::Size() + MemoryAllocator::Available();
+  *stats->os_error = OS::GetLastError();
   if (take_snapshot) {
     HeapIterator iterator;
     for (HeapObject* obj = iterator.next();
@@ -4198,6 +4125,10 @@
     if (!ConfigureHeapDefault()) return false;
   }
 
+  ScavengingVisitor::Initialize();
+  NewSpaceScavenger::Initialize();
+  MarkCompactCollector::Initialize();
+
   // Setup memory allocator and reserve a chunk of memory for new
   // space.  The chunk is double the size of the requested reserved
   // new space size to ensure that we can find a pair of semispaces that
@@ -4882,6 +4813,7 @@
     PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
     PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
     PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
+    PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
     PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
     PrintF("flushcode=%d ", static_cast<int>(scopes_[Scope::MC_FLUSH_CODE]));
 
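
The tracer accumulates per-phase times in the scopes_ array and prints one column per phase, which is why the new MC_SWEEP_NEWSPACE scope gets its own "sweepns=" field. A hypothetical scoped timer in the same spirit as GCTracer::Scope, using a plain clock for illustration:

    #include <cstdio>
    #include <ctime>

    // Hypothetical scoped timer: accumulates elapsed milliseconds for one GC
    // phase into a per-phase array that the tracer later prints.
    enum Phase { MC_MARK, MC_SWEEP, MC_SWEEP_NEWSPACE, MC_COMPACT, kNumPhases };

    static double scopes_[kNumPhases];

    class PhaseScope {
     public:
      explicit PhaseScope(Phase phase) : phase_(phase), start_(std::clock()) {}
      ~PhaseScope() {
        scopes_[phase_] += 1000.0 * (std::clock() - start_) / CLOCKS_PER_SEC;
      }
     private:
      Phase phase_;
      std::clock_t start_;
    };

    int main() {
      { PhaseScope scope(MC_SWEEP_NEWSPACE); /* sweep new space here */ }
      std::printf("sweepns=%d ", static_cast<int>(scopes_[MC_SWEEP_NEWSPACE]));
      return 0;
    }
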
diff --git a/src/heap.h b/src/heap.h
index a0b2157..93b90b1 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -983,8 +983,6 @@
 
   static void RecordStats(HeapStats* stats, bool take_snapshot = false);
 
-  static Scavenger GetScavenger(int instance_type, int instance_size);
-
   // Copy block of memory from src to dst. Size of block should be aligned
   // by pointer size.
   static inline void CopyBlock(Address dst, Address src, int byte_size);
@@ -1347,7 +1345,8 @@
   int* memory_allocator_capacity;       // 20
   int* objects_per_type;                // 21
   int* size_per_type;                   // 22
-  int* end_marker;                      // 23
+  int* os_error;                        // 23
+  int* end_marker;                      // 24
 };
 
 
@@ -1725,6 +1724,7 @@
       EXTERNAL,
       MC_MARK,
       MC_SWEEP,
+      MC_SWEEP_NEWSPACE,
       MC_COMPACT,
       MC_FLUSH_CODE,
       kNumberOfScopes
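
HeapStats is a flat sequence of numbered slots with end_marker last, so inserting os_error at slot 23 pushes end_marker to slot 24. A small illustration of the end-marker convention (the sentinel value here is made up; the real consumers define their own):

    #include <cassert>

    // Illustration of the end-marker convention: the struct is a numbered
    // sequence of slots with a sentinel in the final one, so a writer that
    // disagrees about the layout trips the check below.
    struct HeapStatsSketch {
      int os_error;    // 23 in the patched layout
      int end_marker;  // 24
    };

    const int kEndMarker = 0x0DECADE;  // hypothetical sentinel value

    int main() {
      HeapStatsSketch stats;
      stats.end_marker = kEndMarker;
      stats.os_error = 0;  // would come from OS::GetLastError() in RecordStats
      assert(stats.end_marker == kEndMarker);  // layout still intact
      return 0;
    }
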
diff --git a/src/ia32/assembler-ia32-inl.h b/src/ia32/assembler-ia32-inl.h
index 7fa151e..ecbdfdc 100644
--- a/src/ia32/assembler-ia32-inl.h
+++ b/src/ia32/assembler-ia32-inl.h
@@ -183,6 +183,30 @@
 }
 
 
+template<typename StaticVisitor>
+void RelocInfo::Visit() {
+  RelocInfo::Mode mode = rmode();
+  if (mode == RelocInfo::EMBEDDED_OBJECT) {
+    StaticVisitor::VisitPointer(target_object_address());
+  } else if (RelocInfo::IsCodeTarget(mode)) {
+    StaticVisitor::VisitCodeTarget(this);
+  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
+    StaticVisitor::VisitExternalReference(target_reference_address());
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  } else if (Debug::has_break_points() &&
+             ((RelocInfo::IsJSReturn(mode) &&
+              IsPatchedReturnSequence()) ||
+             (RelocInfo::IsDebugBreakSlot(mode) &&
+              IsPatchedDebugBreakSlotSequence()))) {
+    StaticVisitor::VisitDebugTarget(this);
+#endif
+  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
+    StaticVisitor::VisitRuntimeEntry(this);
+  }
+}
+
+
+
 Immediate::Immediate(int x)  {
   x_ = x;
   rmode_ = RelocInfo::NONE;
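
RelocInfo::Visit above is templated over a StaticVisitor, so each relocation mode dispatches to a static method resolved at compile time rather than through virtual calls. A cut-down model of the pattern with a toy mode enum:

    #include <cstdio>

    // Cut-down model of the static-visitor dispatch in RelocInfo::Visit: the
    // visitor is a type parameter, so every call resolves at compile time.
    enum Mode { EMBEDDED_OBJECT, CODE_TARGET, RUNTIME_ENTRY };

    struct RelocInfoSketch {
      Mode mode;

      template<typename StaticVisitor>
      void Visit() {
        if (mode == EMBEDDED_OBJECT) {
          StaticVisitor::VisitPointer(this);
        } else if (mode == CODE_TARGET) {
          StaticVisitor::VisitCodeTarget(this);
        } else if (mode == RUNTIME_ENTRY) {
          StaticVisitor::VisitRuntimeEntry(this);
        }
      }
    };

    struct LoggingVisitor {
      static void VisitPointer(RelocInfoSketch*) { std::printf("pointer\n"); }
      static void VisitCodeTarget(RelocInfoSketch*) { std::printf("target\n"); }
      static void VisitRuntimeEntry(RelocInfoSketch*) { std::printf("entry\n"); }
    };

    int main() {
      RelocInfoSketch info = { CODE_TARGET };
      info.Visit<LoggingVisitor>();
      return 0;
    }
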
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index 31f5041..35a90a4 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -429,6 +429,26 @@
 }
 
 
+void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
+  // Enter an internal frame.
+  __ EnterInternalFrame();
+
+  // Push a copy of the function onto the stack.
+  __ push(edi);
+
+  __ push(edi);  // Function is also the parameter to the runtime call.
+  __ CallRuntime(Runtime::kLazyCompile, 1);
+  __ pop(edi);
+
+  // Tear down temporary frame.
+  __ LeaveInternalFrame();
+
+  // Do a tail-call of the compiled function.
+  __ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
+  __ jmp(Operand(ecx));
+}
+
+
 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   // 1. Make sure we have at least one argument.
   { Label done;
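
Generate_LazyCompile above emits a trampoline that preserves the function across a runtime call to the compiler and then tail-calls the code that call returns. A hedged C++ restatement of that control flow (the real builtin is machine code, and these types merely stand in for JSFunction and Code):

    #include <cstdio>

    typedef void (*EntryPoint)();

    struct FunctionSketch {
      EntryPoint compiled_entry;
    };

    static void CompiledBody() { std::printf("running compiled code\n"); }

    // Plays the role of Runtime::kLazyCompile: compile, then return the code.
    EntryPoint RuntimeLazyCompile(FunctionSketch* function) {
      function->compiled_entry = &CompiledBody;
      return function->compiled_entry;
    }

    // Preserve the function across the runtime call (the push/pop of edi),
    // then tail-call the returned code (the jmp past Code::kHeaderSize).
    void LazyCompileTrampoline(FunctionSketch* function) {
      EntryPoint code = RuntimeLazyCompile(function);
      code();
    }

    int main() {
      FunctionSketch f = { 0 };
      LazyCompileTrampoline(&f);
      return 0;
    }
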
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index 3c22def..cc89cc7 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -202,105 +202,92 @@
     // esi: callee's context
     allocator_->Initialize();
 
-    if (info->mode() == CompilationInfo::PRIMARY) {
-      frame_->Enter();
+    frame_->Enter();
 
-      // Allocate space for locals and initialize them.
-      frame_->AllocateStackSlots();
+    // Allocate space for locals and initialize them.
+    frame_->AllocateStackSlots();
 
-      // Allocate the local context if needed.
-      int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
-      if (heap_slots > 0) {
-        Comment cmnt(masm_, "[ allocate local context");
-        // Allocate local context.
-        // Get outer context and create a new context based on it.
-        frame_->PushFunction();
-        Result context;
-        if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-          FastNewContextStub stub(heap_slots);
-          context = frame_->CallStub(&stub, 1);
-        } else {
-          context = frame_->CallRuntime(Runtime::kNewContext, 1);
-        }
-
-        // Update context local.
-        frame_->SaveContextRegister();
-
-        // Verify that the runtime call result and esi agree.
-        if (FLAG_debug_code) {
-          __ cmp(context.reg(), Operand(esi));
-          __ Assert(equal, "Runtime::NewContext should end up in esi");
-        }
+    // Allocate the local context if needed.
+    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+    if (heap_slots > 0) {
+      Comment cmnt(masm_, "[ allocate local context");
+      // Allocate local context.
+      // Get outer context and create a new context based on it.
+      frame_->PushFunction();
+      Result context;
+      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+        FastNewContextStub stub(heap_slots);
+        context = frame_->CallStub(&stub, 1);
+      } else {
+        context = frame_->CallRuntime(Runtime::kNewContext, 1);
       }
 
-      // TODO(1241774): Improve this code:
-      // 1) only needed if we have a context
-      // 2) no need to recompute context ptr every single time
-      // 3) don't copy parameter operand code from SlotOperand!
-      {
-        Comment cmnt2(masm_, "[ copy context parameters into .context");
-        // Note that iteration order is relevant here! If we have the same
-        // parameter twice (e.g., function (x, y, x)), and that parameter
-        // needs to be copied into the context, it must be the last argument
-        // passed to the parameter that needs to be copied. This is a rare
-        // case so we don't check for it, instead we rely on the copying
-        // order: such a parameter is copied repeatedly into the same
-        // context location and thus the last value is what is seen inside
-        // the function.
-        for (int i = 0; i < scope()->num_parameters(); i++) {
-          Variable* par = scope()->parameter(i);
-          Slot* slot = par->slot();
-          if (slot != NULL && slot->type() == Slot::CONTEXT) {
-            // The use of SlotOperand below is safe in unspilled code
-            // because the slot is guaranteed to be a context slot.
-            //
-            // There are no parameters in the global scope.
-            ASSERT(!scope()->is_global_scope());
-            frame_->PushParameterAt(i);
-            Result value = frame_->Pop();
-            value.ToRegister();
+      // Update context local.
+      frame_->SaveContextRegister();
 
-            // SlotOperand loads context.reg() with the context object
-            // stored to, used below in RecordWrite.
-            Result context = allocator_->Allocate();
-            ASSERT(context.is_valid());
-            __ mov(SlotOperand(slot, context.reg()), value.reg());
-            int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
-            Result scratch = allocator_->Allocate();
-            ASSERT(scratch.is_valid());
-            frame_->Spill(context.reg());
-            frame_->Spill(value.reg());
-            __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
-          }
-        }
-      }
-
-      // Store the arguments object.  This must happen after context
-      // initialization because the arguments object may be stored in
-      // the context.
-      if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
-        StoreArgumentsObject(true);
-      }
-
-      // Initialize ThisFunction reference if present.
-      if (scope()->is_function_scope() && scope()->function() != NULL) {
-        frame_->Push(Factory::the_hole_value());
-        StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
-      }
-    } else {
-      // When used as the secondary compiler for splitting, ebp, esi,
-      // and edi have been pushed on the stack.  Adjust the virtual
-      // frame to match this state.
-      frame_->Adjust(3);
-      allocator_->Unuse(edi);
-
-      // Bind all the bailout labels to the beginning of the function.
-      List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
-      for (int i = 0; i < bailouts->length(); i++) {
-        __ bind(bailouts->at(i)->label());
+      // Verify that the runtime call result and esi agree.
+      if (FLAG_debug_code) {
+        __ cmp(context.reg(), Operand(esi));
+        __ Assert(equal, "Runtime::NewContext should end up in esi");
       }
     }
 
+    // TODO(1241774): Improve this code:
+    // 1) only needed if we have a context
+    // 2) no need to recompute context ptr every single time
+    // 3) don't copy parameter operand code from SlotOperand!
+    {
+      Comment cmnt2(masm_, "[ copy context parameters into .context");
+      // Note that iteration order is relevant here! If we have the same
+      // parameter twice (e.g., function (x, y, x)), and that parameter
+      // needs to be copied into the context, it must be the last argument
+      // passed to the parameter that needs to be copied. This is a rare
+      // case so we don't check for it, instead we rely on the copying
+      // order: such a parameter is copied repeatedly into the same
+      // context location and thus the last value is what is seen inside
+      // the function.
+      for (int i = 0; i < scope()->num_parameters(); i++) {
+        Variable* par = scope()->parameter(i);
+        Slot* slot = par->slot();
+        if (slot != NULL && slot->type() == Slot::CONTEXT) {
+          // The use of SlotOperand below is safe in unspilled code
+          // because the slot is guaranteed to be a context slot.
+          //
+          // There are no parameters in the global scope.
+          ASSERT(!scope()->is_global_scope());
+          frame_->PushParameterAt(i);
+          Result value = frame_->Pop();
+          value.ToRegister();
+
+          // SlotOperand loads context.reg() with the context object
+          // stored to, used below in RecordWrite.
+          Result context = allocator_->Allocate();
+          ASSERT(context.is_valid());
+          __ mov(SlotOperand(slot, context.reg()), value.reg());
+          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
+          Result scratch = allocator_->Allocate();
+          ASSERT(scratch.is_valid());
+          frame_->Spill(context.reg());
+          frame_->Spill(value.reg());
+          __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
+        }
+      }
+    }
+
+    // Store the arguments object.  This must happen after context
+    // initialization because the arguments object may be stored in
+    // the context.
+    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
+      StoreArgumentsObject(true);
+    }
+
+    // Initialize ThisFunction reference if present.
+    if (scope()->is_function_scope() && scope()->function() != NULL) {
+      frame_->Push(Factory::the_hole_value());
+      StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
+    }
+
+
     // Initialize the function return target after the locals are set
     // up, because it needs the expected frame height from the frame.
     function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
@@ -1263,7 +1250,7 @@
     if (left_info_.IsSmi()) {
       // Right is a heap object.
       __ JumpIfNotNumber(right_, right_info_, entry_label());
-      __ ConvertToInt32(right_, right_, dst_, left_info_, entry_label());
+      __ ConvertToInt32(right_, right_, dst_, right_info_, entry_label());
       __ mov(dst_, Operand(left_));
       __ SmiUntag(dst_);
     } else if (right_info_.IsSmi()) {
@@ -1283,11 +1270,11 @@
       // Both were heap objects.
       __ rcl(right_, 1);  // Put tag back.
       __ JumpIfNotNumber(right_, right_info_, entry_label());
-      __ ConvertToInt32(right_, right_, no_reg, left_info_, entry_label());
+      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
       __ jmp(&got_both);
       __ bind(&only_right_is_heap_object);
       __ JumpIfNotNumber(right_, right_info_, entry_label());
-      __ ConvertToInt32(right_, right_, no_reg, left_info_, entry_label());
+      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
       __ bind(&got_both);
     }
   }
@@ -1953,6 +1940,7 @@
     // Use a fresh answer register to avoid spilling the left operand.
     answer = allocator_->Allocate();
     ASSERT(answer.is_valid());
+
     DeferredInlineBinaryOperation* deferred =
         new DeferredInlineBinaryOperation(op,
                                           answer.reg(),
@@ -4610,7 +4598,7 @@
   __ mov(ebx, Operand(eax));
 
   // If the property has been removed while iterating, we just skip it.
-  __ cmp(ebx, Factory::null_value());
+  __ test(ebx, Operand(ebx));
   node->continue_target()->Branch(equal);
 
   end_del_check.Bind();
@@ -4618,10 +4606,11 @@
   // loop.  edx: i'th entry of the enum cache (or string thereof)
   frame_->EmitPush(ebx);
   { Reference each(this, node->each());
-    // Loading a reference may leave the frame in an unspilled state.
-    frame_->SpillAll();
     if (!each.is_illegal()) {
       if (each.size() > 0) {
+        // Loading a reference may leave the frame in an unspilled state.
+        frame_->SpillAll();
+        // Get the value (under the reference on the stack) from memory.
         frame_->EmitPush(frame_->ElementAt(each.size()));
         each.SetValue(NOT_CONST_INIT);
         frame_->Drop(2);
@@ -6760,7 +6749,7 @@
 }
 
 
-  void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
+void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
   // This generates a fast version of:
   // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
   // typeof(arg) == function).
@@ -6781,6 +6770,143 @@
 }
 
 
+// Deferred code to check whether a String wrapper object is safe to use with
+// the default valueOf behavior. This code is called after the bit caching
+// this information in the map has been checked against the map of the object
+// in the map_result_ register. On return the register map_result_ contains 1
+// for true and 0 for false.
+class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
+ public:
+  DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
+                                               Register map_result,
+                                               Register scratch1,
+                                               Register scratch2)
+      : object_(object),
+        map_result_(map_result),
+        scratch1_(scratch1),
+        scratch2_(scratch2) { }
+
+  virtual void Generate() {
+    Label false_result;
+
+    // Check that map is loaded as expected.
+    if (FLAG_debug_code) {
+      __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
+      __ Assert(equal, "Map not in expected register");
+    }
+
+    // Check for fast case object. Generate false result for slow case object.
+    __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
+    __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
+    __ cmp(scratch1_, Factory::hash_table_map());
+    __ j(equal, &false_result);
+
+    // Look for the valueOf symbol in the descriptor array, and indicate
+    // false if it is found. The type is not checked, so an entry that is a
+    // transition yields a false negative.
+    __ mov(map_result_,
+           FieldOperand(map_result_, Map::kInstanceDescriptorsOffset));
+    __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset));
+    // map_result_: descriptor array
+    // scratch1_: length of descriptor array
+    // Calculate the end of the descriptor array.
+    STATIC_ASSERT(kSmiTag == 0);
+    STATIC_ASSERT(kSmiTagSize == 1);
+    STATIC_ASSERT(kPointerSize == 4);
+    __ lea(scratch1_,
+           Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize));
+    // Calculate location of the first key name.
+    __ add(Operand(map_result_),
+           Immediate(FixedArray::kHeaderSize +
+                     DescriptorArray::kFirstIndex * kPointerSize));
+    // Loop through all the keys in the descriptor array. If one of them is
+    // the symbol valueOf, the result is false.
+    Label entry, loop;
+    __ jmp(&entry);
+    __ bind(&loop);
+    __ mov(scratch2_, FieldOperand(map_result_, 0));
+    __ cmp(scratch2_, Factory::value_of_symbol());
+    __ j(equal, &false_result);
+    __ add(Operand(map_result_), Immediate(kPointerSize));
+    __ bind(&entry);
+    __ cmp(map_result_, Operand(scratch1_));
+    __ j(not_equal, &loop);
+
+    // Reload map as register map_result_ was used as temporary above.
+    __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
+
+    // If a valueOf property is not found on the object, check that its
+    // prototype is the unmodified String prototype. If not, the result is
+    // false.
+    __ mov(scratch1_, FieldOperand(map_result_, Map::kPrototypeOffset));
+    __ test(scratch1_, Immediate(kSmiTagMask));
+    __ j(zero, &false_result);
+    __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
+    __ mov(scratch2_, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+    __ mov(scratch2_,
+           FieldOperand(scratch2_, GlobalObject::kGlobalContextOffset));
+    __ cmp(scratch1_,
+           CodeGenerator::ContextOperand(
+               scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
+    __ j(not_equal, &false_result);
+    // Set the bit in the map to indicate that the map has been checked as
+    // safe for the default valueOf, and set the true result.
+    __ or_(FieldOperand(map_result_, Map::kBitField2Offset),
+           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
+    __ Set(map_result_, Immediate(1));
+    __ jmp(exit_label());
+    __ bind(&false_result);
+    // Set false result.
+    __ Set(map_result_, Immediate(0));
+  }
+
+ private:
+  Register object_;
+  Register map_result_;
+  Register scratch1_;
+  Register scratch2_;
+};
+
+
+void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
+    ZoneList<Expression*>* args) {
+  ASSERT(args->length() == 1);
+  Load(args->at(0));
+  Result obj = frame_->Pop();  // Pop the string wrapper.
+  obj.ToRegister();
+  ASSERT(obj.is_valid());
+  if (FLAG_debug_code) {
+    __ AbortIfSmi(obj.reg());
+  }
+
+  // Check whether this map has already been checked to be safe for default
+  // valueOf.
+  Result map_result = allocator()->Allocate();
+  ASSERT(map_result.is_valid());
+  __ mov(map_result.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
+  __ test_b(FieldOperand(map_result.reg(), Map::kBitField2Offset),
+            1 << Map::kStringWrapperSafeForDefaultValueOf);
+  destination()->true_target()->Branch(not_zero);
+
+  // We need an additional two scratch registers for the deferred code.
+  Result temp1 = allocator()->Allocate();
+  ASSERT(temp1.is_valid());
+  Result temp2 = allocator()->Allocate();
+  ASSERT(temp2.is_valid());
+
+  DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
+      new DeferredIsStringWrapperSafeForDefaultValueOf(
+          obj.reg(), map_result.reg(), temp1.reg(), temp2.reg());
+  deferred->Branch(zero);
+  deferred->BindExit();
+  __ test(map_result.reg(), Operand(map_result.reg()));
+  obj.Unuse();
+  map_result.Unuse();
+  temp1.Unuse();
+  temp2.Unuse();
+  destination()->Split(not_equal);
+}
+
+
 void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
   // This generates a fast version of:
   // (%_ClassOf(arg) === 'Function')
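
The new intrinsic above walks the wrapper map's descriptor array for a valueOf key, verifies the prototype is the pristine String prototype, and caches a positive answer in a map bit. A plain-C++ restatement of that decision (names are stand-ins, not the V8 API):

    #include <cstddef>
    #include <string>
    #include <vector>

    // A String wrapper is safe for the default valueOf only if its own map
    // carries no "valueOf" key and its prototype map is the pristine String
    // prototype map; a positive result is cached in the map.
    struct MapSketch {
      std::vector<std::string> descriptor_keys;
      const MapSketch* prototype_map;
      bool checked_safe;  // models Map::kStringWrapperSafeForDefaultValueOf
    };

    bool IsStringWrapperSafe(MapSketch* map,
                             const MapSketch* pristine_proto_map) {
      if (map->checked_safe) return true;  // fast path: bit already cached
      for (size_t i = 0; i < map->descriptor_keys.size(); i++) {
        if (map->descriptor_keys[i] == "valueOf") return false;  // shadowed
      }
      if (map->prototype_map != pristine_proto_map) return false;
      map->checked_safe = true;  // cache the positive result in the map
      return true;
    }
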
diff --git a/src/ia32/codegen-ia32.h b/src/ia32/codegen-ia32.h
index 81a5da1..37b7011 100644
--- a/src/ia32/codegen-ia32.h
+++ b/src/ia32/codegen-ia32.h
@@ -358,6 +358,10 @@
     return FieldOperand(array, index_as_smi, times_half_pointer_size, offset);
   }
 
+  static Operand ContextOperand(Register context, int index) {
+    return Operand(context, Context::SlotOffset(index));
+  }
+
  private:
   // Construction/Destruction
   explicit CodeGenerator(MacroAssembler* masm);
@@ -430,10 +434,6 @@
   // The following are used by class Reference.
   void LoadReference(Reference* ref);
 
-  static Operand ContextOperand(Register context, int index) {
-    return Operand(context, Context::SlotOffset(index));
-  }
-
   Operand SlotOperand(Slot* slot, Register tmp);
 
   Operand ContextSlotOperandCheckExtensions(Slot* slot,
@@ -653,6 +653,8 @@
   void GenerateIsSpecObject(ZoneList<Expression*>* args);
   void GenerateIsFunction(ZoneList<Expression*>* args);
   void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
+  void GenerateIsStringWrapperSafeForDefaultValueOf(
+      ZoneList<Expression*>* args);
 
   // Support for construct call checks.
   void GenerateIsConstructCall(ZoneList<Expression*>* args);
@@ -811,6 +813,18 @@
 };
 
 
+class ToBooleanStub: public CodeStub {
+ public:
+  ToBooleanStub() { }
+
+  void Generate(MacroAssembler* masm);
+
+ private:
+  Major MajorKey() { return ToBoolean; }
+  int MinorKey() { return 0; }
+};
+
+
 // Flag that indicates how to generate code for the stub GenericBinaryOpStub.
 enum GenericBinaryFlags {
   NO_GENERIC_BINARY_FLAGS = 0,
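
Code stubs are cached under a key built from MajorKey and MinorKey; ToBooleanStub takes no parameters, so its minor key is always zero and at most one instance ever needs compiling. A sketch of how such a key might be packed (both the bit split and the enum value are illustrative, not the real encoding):

    // Illustrative packing of a stub cache key: the major key identifies the
    // stub class, the minor key encodes its parameters.
    enum MajorKeySketch { ToBoolean = 7 };  // the value 7 is made up here

    unsigned StubKey(unsigned major, unsigned minor) {
      return (minor << 6) | major;  // assumes a 6-bit major-key field
    }

    unsigned ToBooleanStubKey() { return StubKey(ToBoolean, 0); }

    int main() {
      return ToBooleanStubKey() == 7u ? 0 : 1;  // minor key 0: key == major
    }
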
diff --git a/src/ia32/debug-ia32.cc b/src/ia32/debug-ia32.cc
index dfa6634..b57cf3d 100644
--- a/src/ia32/debug-ia32.cc
+++ b/src/ia32/debug-ia32.cc
@@ -254,32 +254,20 @@
 }
 
 
-// FrameDropper is a code replacement for a JavaScript frame with possibly
-// several frames above.
-// There is no calling conventions here, because it never actually gets called,
-// it only gets returned to.
-// Frame structure (conforms InternalFrame structure):
-//   -- JSFunction
-//   -- code
-//   -- SMI maker
-//   -- context
-//   -- frame base
 void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
   ExternalReference restarter_frame_function_slot =
       ExternalReference(Debug_Address::RestarterFrameFunctionPointer());
   __ mov(Operand::StaticVariable(restarter_frame_function_slot), Immediate(0));
 
   // We do not know our frame height, but set esp based on ebp.
-  __ lea(esp, Operand(ebp, -4 * kPointerSize));
+  __ lea(esp, Operand(ebp, -1 * kPointerSize));
 
-  __ pop(edi);  // function
-
-  // Skip code self-reference and marker.
-  __ add(Operand(esp), Immediate(2 * kPointerSize));
-
-  __ pop(esi);  // Context.
+  __ pop(edi);  // Function.
   __ pop(ebp);
 
+  // Load context from the function.
+  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+
   // Get function code.
   __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
@@ -289,28 +277,10 @@
   __ jmp(Operand(edx));
 }
 
+const bool Debug::kFrameDropperSupported = true;
+
 #undef __
 
-
-// TODO(LiveEdit): consider making it platform-independent.
-// TODO(LiveEdit): use more named constants instead of numbers.
-Object** Debug::SetUpFrameDropperFrame(StackFrame* bottom_js_frame,
-                                       Handle<Code> code) {
-  ASSERT(bottom_js_frame->is_java_script());
-
-  Address fp = bottom_js_frame->fp();
-  Memory::Object_at(fp - 4 * kPointerSize) =
-      Memory::Object_at(fp - 2 * kPointerSize);  // Move edi (function).
-
-  Memory::Object_at(fp - 3 * kPointerSize) = *code;
-  Memory::Object_at(fp - 2 * kPointerSize) = Smi::FromInt(StackFrame::INTERNAL);
-
-  return reinterpret_cast<Object**>(&Memory::Object_at(fp - 4 * kPointerSize));
-}
-
-const int Debug::kFrameDropperFrameSize = 5;
-
-
 #endif  // ENABLE_DEBUGGER_SUPPORT
 
 } }  // namespace v8::internal
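
The rewritten frame dropper keeps only the function slot below ebp, pops the function and the saved frame pointer, and reloads the context from the function instead of a dedicated frame slot; that is what lets SetUpFrameDropperFrame and kFrameDropperFrameSize disappear. A small illustration of the simplified frame math (the addresses are pretend values, not a model of the real stack contents):

    #include <cstdio>

    // With only the function slot kept below ebp, esp = ebp - 1*kPointerSize
    // leaves exactly {function, saved ebp} to pop; the context comes from
    // JSFunction::kContextOffset afterwards.
    const int kPointerSize = 4;  // ia32

    int main() {
      unsigned frame_pointer = 0x1000;  // pretend value of ebp
      unsigned stack_pointer = frame_pointer - 1 * kPointerSize;
      std::printf("function slot at 0x%x, saved ebp at 0x%x\n",
                  stack_pointer, frame_pointer);
      return 0;
    }
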
diff --git a/src/ia32/disasm-ia32.cc b/src/ia32/disasm-ia32.cc
index dc4c27e..64305ef 100644
--- a/src/ia32/disasm-ia32.cc
+++ b/src/ia32/disasm-ia32.cc
@@ -560,6 +560,7 @@
       case kROL: mnem = "rol"; break;
       case kROR: mnem = "ror"; break;
       case kRCL: mnem = "rcl"; break;
+      case kRCR: mnem = "rcr"; break;
       case kSHL: mnem = "shl"; break;
       case KSHR: mnem = "shr"; break;
       case kSAR: mnem = "sar"; break;
diff --git a/src/ia32/fast-codegen-ia32.cc b/src/ia32/fast-codegen-ia32.cc
deleted file mode 100644
index b749e59..0000000
--- a/src/ia32/fast-codegen-ia32.cc
+++ /dev/null
@@ -1,954 +0,0 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "v8.h"
-
-#if defined(V8_TARGET_ARCH_IA32)
-
-#include "codegen-inl.h"
-#include "fast-codegen.h"
-#include "data-flow.h"
-#include "scopes.h"
-
-namespace v8 {
-namespace internal {
-
-#define BAILOUT(reason)                         \
-  do {                                          \
-    if (FLAG_trace_bailout) {                   \
-      PrintF("%s\n", reason);                   \
-    }                                           \
-    has_supported_syntax_ = false;              \
-    return;                                     \
-  } while (false)
-
-
-#define CHECK_BAILOUT                           \
-  do {                                          \
-    if (!has_supported_syntax_) return;         \
-  } while (false)
-
-
-void FastCodeGenSyntaxChecker::Check(CompilationInfo* info) {
-  info_ = info;
-
-  // We do not specialize if we do not have a receiver or if it is not a
-  // JS object with fast mode properties.
-  if (!info->has_receiver()) BAILOUT("No receiver");
-  if (!info->receiver()->IsJSObject()) BAILOUT("Receiver is not an object");
-  Handle<JSObject> object = Handle<JSObject>::cast(info->receiver());
-  if (!object->HasFastProperties()) BAILOUT("Receiver is in dictionary mode");
-
-  // We do not support stack or heap slots (both of which require
-  // allocation).
-  Scope* scope = info->scope();
-  if (scope->num_stack_slots() > 0) {
-    BAILOUT("Function has stack-allocated locals");
-  }
-  if (scope->num_heap_slots() > 0) {
-    BAILOUT("Function has context-allocated locals");
-  }
-
-  VisitDeclarations(scope->declarations());
-  CHECK_BAILOUT;
-
-  // We do not support empty function bodies.
-  if (info->function()->body()->is_empty()) {
-    BAILOUT("Function has an empty body");
-  }
-  VisitStatements(info->function()->body());
-}
-
-
-void FastCodeGenSyntaxChecker::VisitDeclarations(
-    ZoneList<Declaration*>* decls) {
-  if (!decls->is_empty()) BAILOUT("Function has declarations");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitStatements(ZoneList<Statement*>* stmts) {
-  if (stmts->length() != 1) {
-    BAILOUT("Function body is not a singleton statement.");
-  }
-  Visit(stmts->at(0));
-}
-
-
-void FastCodeGenSyntaxChecker::VisitDeclaration(Declaration* decl) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenSyntaxChecker::VisitBlock(Block* stmt) {
-  VisitStatements(stmt->statements());
-}
-
-
-void FastCodeGenSyntaxChecker::VisitExpressionStatement(
-    ExpressionStatement* stmt) {
-  Visit(stmt->expression());
-}
-
-
-void FastCodeGenSyntaxChecker::VisitEmptyStatement(EmptyStatement* stmt) {
-  // Supported.
-}
-
-
-void FastCodeGenSyntaxChecker::VisitIfStatement(IfStatement* stmt) {
-  BAILOUT("IfStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitContinueStatement(ContinueStatement* stmt) {
-  BAILOUT("Continuestatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitBreakStatement(BreakStatement* stmt) {
-  BAILOUT("BreakStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitReturnStatement(ReturnStatement* stmt) {
-  BAILOUT("ReturnStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitWithEnterStatement(
-    WithEnterStatement* stmt) {
-  BAILOUT("WithEnterStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitWithExitStatement(WithExitStatement* stmt) {
-  BAILOUT("WithExitStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitSwitchStatement(SwitchStatement* stmt) {
-  BAILOUT("SwitchStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
-  BAILOUT("DoWhileStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitWhileStatement(WhileStatement* stmt) {
-  BAILOUT("WhileStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitForStatement(ForStatement* stmt) {
-  BAILOUT("ForStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitForInStatement(ForInStatement* stmt) {
-  BAILOUT("ForInStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitTryCatchStatement(TryCatchStatement* stmt) {
-  BAILOUT("TryCatchStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitTryFinallyStatement(
-    TryFinallyStatement* stmt) {
-  BAILOUT("TryFinallyStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitDebuggerStatement(
-    DebuggerStatement* stmt) {
-  BAILOUT("DebuggerStatement");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
-  BAILOUT("FunctionLiteral");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitSharedFunctionInfoLiteral(
-    SharedFunctionInfoLiteral* expr) {
-  BAILOUT("SharedFunctionInfoLiteral");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitConditional(Conditional* expr) {
-  BAILOUT("Conditional");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitSlot(Slot* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenSyntaxChecker::VisitVariableProxy(VariableProxy* expr) {
-  // Only global variable references are supported.
-  Variable* var = expr->var();
-  if (!var->is_global() || var->is_this()) BAILOUT("Non-global variable");
-
-  // Check if the global variable is existing and non-deletable.
-  if (info()->has_global_object()) {
-    LookupResult lookup;
-    info()->global_object()->Lookup(*expr->name(), &lookup);
-    if (!lookup.IsProperty()) {
-      BAILOUT("Non-existing global variable");
-    }
-    // We do not handle global variables with accessors or interceptors.
-    if (lookup.type() != NORMAL) {
-      BAILOUT("Global variable with accessors or interceptors.");
-    }
-    // We do not handle deletable global variables.
-    if (!lookup.IsDontDelete()) {
-      BAILOUT("Deletable global variable");
-    }
-  }
-}
-
-
-void FastCodeGenSyntaxChecker::VisitLiteral(Literal* expr) {
-  BAILOUT("Literal");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
-  BAILOUT("RegExpLiteral");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitObjectLiteral(ObjectLiteral* expr) {
-  BAILOUT("ObjectLiteral");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitArrayLiteral(ArrayLiteral* expr) {
-  BAILOUT("ArrayLiteral");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCatchExtensionObject(
-    CatchExtensionObject* expr) {
-  BAILOUT("CatchExtensionObject");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitAssignment(Assignment* expr) {
-  // Simple assignments to (named) this properties are supported.
-  if (expr->op() != Token::ASSIGN) BAILOUT("Non-simple assignment");
-
-  Property* prop = expr->target()->AsProperty();
-  if (prop == NULL) BAILOUT("Non-property assignment");
-  VariableProxy* proxy = prop->obj()->AsVariableProxy();
-  if (proxy == NULL || !proxy->var()->is_this()) {
-    BAILOUT("Non-this-property assignment");
-  }
-  if (!prop->key()->IsPropertyName()) {
-    BAILOUT("Non-named-property assignment");
-  }
-
-  // We will only specialize for fields on the object itself.
-  // Expression::IsPropertyName implies that the name is a literal
-  // symbol but we do not assume that.
-  Literal* key = prop->key()->AsLiteral();
-  if (key != NULL && key->handle()->IsString()) {
-    Handle<Object> receiver = info()->receiver();
-    Handle<String> name = Handle<String>::cast(key->handle());
-    LookupResult lookup;
-    receiver->Lookup(*name, &lookup);
-    if (!lookup.IsProperty()) {
-      BAILOUT("Assigned property not found at compile time");
-    }
-    if (lookup.holder() != *receiver) BAILOUT("Non-own property assignment");
-    if (!lookup.type() == FIELD) BAILOUT("Non-field property assignment");
-  } else {
-    UNREACHABLE();
-    BAILOUT("Unexpected non-string-literal property key");
-  }
-
-  Visit(expr->value());
-}
-
-
-void FastCodeGenSyntaxChecker::VisitThrow(Throw* expr) {
-  BAILOUT("Throw");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitProperty(Property* expr) {
-  // We support named this property references.
-  VariableProxy* proxy = expr->obj()->AsVariableProxy();
-  if (proxy == NULL || !proxy->var()->is_this()) {
-    BAILOUT("Non-this-property reference");
-  }
-  if (!expr->key()->IsPropertyName()) {
-    BAILOUT("Non-named-property reference");
-  }
-
-  // We will only specialize for fields on the object itself.
-  // Expression::IsPropertyName implies that the name is a literal
-  // symbol but we do not assume that.
-  Literal* key = expr->key()->AsLiteral();
-  if (key != NULL && key->handle()->IsString()) {
-    Handle<Object> receiver = info()->receiver();
-    Handle<String> name = Handle<String>::cast(key->handle());
-    LookupResult lookup;
-    receiver->Lookup(*name, &lookup);
-    if (!lookup.IsProperty()) {
-      BAILOUT("Referenced property not found at compile time");
-    }
-    if (lookup.holder() != *receiver) BAILOUT("Non-own property reference");
-    if (!lookup.type() == FIELD) BAILOUT("Non-field property reference");
-  } else {
-    UNREACHABLE();
-    BAILOUT("Unexpected non-string-literal property key");
-  }
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCall(Call* expr) {
-  BAILOUT("Call");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCallNew(CallNew* expr) {
-  BAILOUT("CallNew");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCallRuntime(CallRuntime* expr) {
-  BAILOUT("CallRuntime");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitUnaryOperation(UnaryOperation* expr) {
-  BAILOUT("UnaryOperation");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCountOperation(CountOperation* expr) {
-  BAILOUT("CountOperation");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitBinaryOperation(BinaryOperation* expr) {
-  // We support bitwise OR.
-  switch (expr->op()) {
-    case Token::COMMA:
-      BAILOUT("BinaryOperation COMMA");
-    case Token::OR:
-      BAILOUT("BinaryOperation OR");
-    case Token::AND:
-      BAILOUT("BinaryOperation AND");
-
-    case Token::BIT_OR:
-      // We support expressions nested on the left because they only require
-      // a pair of registers to keep all intermediate values in registers
-      // (i.e., the expression stack has height no more than two).
-      if (!expr->right()->IsLeaf()) BAILOUT("expression nested on right");
-
-      // We do not allow subexpressions with side effects because we
-      // (currently) bail out to the beginning of the full function.  The
-      // only expressions with side effects that we would otherwise handle
-      // are assignments.
-      if (expr->left()->AsAssignment() != NULL ||
-          expr->right()->AsAssignment() != NULL) {
-        BAILOUT("subexpression of binary operation has side effects");
-      }
-
-      Visit(expr->left());
-      CHECK_BAILOUT;
-      Visit(expr->right());
-      break;
-
-    case Token::BIT_XOR:
-      BAILOUT("BinaryOperation BIT_XOR");
-    case Token::BIT_AND:
-      BAILOUT("BinaryOperation BIT_AND");
-    case Token::SHL:
-      BAILOUT("BinaryOperation SHL");
-    case Token::SAR:
-      BAILOUT("BinaryOperation SAR");
-    case Token::SHR:
-      BAILOUT("BinaryOperation SHR");
-    case Token::ADD:
-      BAILOUT("BinaryOperation ADD");
-    case Token::SUB:
-      BAILOUT("BinaryOperation SUB");
-    case Token::MUL:
-      BAILOUT("BinaryOperation MUL");
-    case Token::DIV:
-      BAILOUT("BinaryOperation DIV");
-    case Token::MOD:
-      BAILOUT("BinaryOperation MOD");
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void FastCodeGenSyntaxChecker::VisitCompareOperation(CompareOperation* expr) {
-  BAILOUT("CompareOperation");
-}
-
-
-void FastCodeGenSyntaxChecker::VisitThisFunction(ThisFunction* expr) {
-  BAILOUT("ThisFunction");
-}
-
-#undef BAILOUT
-#undef CHECK_BAILOUT
-
-
-#define __ ACCESS_MASM(masm())
-
-Handle<Code> FastCodeGenerator::MakeCode(CompilationInfo* info) {
-  // Label the AST before calling MakeCodePrologue, so AST node numbers are
-  // printed with the AST.
-  AstLabeler labeler;
-  labeler.Label(info);
-
-  CodeGenerator::MakeCodePrologue(info);
-
-  const int kInitialBufferSize = 4 * KB;
-  MacroAssembler masm(NULL, kInitialBufferSize);
-
-  // Generate the fast-path code.
-  FastCodeGenerator fast_cgen(&masm);
-  fast_cgen.Generate(info);
-  if (fast_cgen.HasStackOverflow()) {
-    ASSERT(!Top::has_pending_exception());
-    return Handle<Code>::null();
-  }
-
-  // Generate the full code for the function in bailout mode, using the same
-  // macro assembler.
-  CodeGenerator cgen(&masm);
-  CodeGeneratorScope scope(&cgen);
-  info->set_mode(CompilationInfo::SECONDARY);
-  cgen.Generate(info);
-  if (cgen.HasStackOverflow()) {
-    ASSERT(!Top::has_pending_exception());
-    return Handle<Code>::null();
-  }
-
-  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION, NOT_IN_LOOP);
-  return CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
-}
-
-
-Register FastCodeGenerator::accumulator0() { return eax; }
-Register FastCodeGenerator::accumulator1() { return edx; }
-Register FastCodeGenerator::scratch0() { return ecx; }
-Register FastCodeGenerator::scratch1() { return edi; }
-Register FastCodeGenerator::receiver_reg() { return ebx; }
-Register FastCodeGenerator::context_reg() { return esi; }
-
-
-void FastCodeGenerator::EmitLoadReceiver() {
-  // Offset 2 is due to return address and saved frame pointer.
-  int index = 2 + function()->scope()->num_parameters();
-  __ mov(receiver_reg(), Operand(ebp, index * kPointerSize));
-}
-
-
-void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
-  ASSERT(!destination().is(no_reg));
-  ASSERT(cell->IsJSGlobalPropertyCell());
-
-  __ mov(destination(), Immediate(cell));
-  __ mov(destination(),
-         FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset));
-  if (FLAG_debug_code) {
-    __ cmp(destination(), Factory::the_hole_value());
-    __ Check(not_equal, "DontDelete cells can't contain the hole");
-  }
-
-  // The loaded value is not known to be a smi.
-  clear_as_smi(destination());
-}
-
-
-void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
-  LookupResult lookup;
-  info()->receiver()->Lookup(*name, &lookup);
-
-  ASSERT(lookup.holder() == *info()->receiver());
-  ASSERT(lookup.type() == FIELD);
-  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
-  int index = lookup.GetFieldIndex() - map->inobject_properties();
-  int offset = index * kPointerSize;
-
-  // We will emit the write barrier unless the stored value is statically
-  // known to be a smi.
-  bool needs_write_barrier = !is_smi(accumulator0());
-
-  // Perform the store.  Negative offsets are inobject properties.
-  if (offset < 0) {
-    offset += map->instance_size();
-    __ mov(FieldOperand(receiver_reg(), offset), accumulator0());
-    if (needs_write_barrier) {
-      // Preserve receiver from write barrier.
-      __ mov(scratch0(), receiver_reg());
-    }
-  } else {
-    offset += FixedArray::kHeaderSize;
-    __ mov(scratch0(),
-           FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
-    __ mov(FieldOperand(scratch0(), offset), accumulator0());
-  }
-
-  if (needs_write_barrier) {
-    if (destination().is(no_reg)) {
-      // After RecordWrite accumulator0 is only accidently a smi, but it is
-      // already marked as not known to be one.
-      __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
-    } else {
-      // Copy the value to the other accumulator so that a copy survives the
-      // write barrier.  One of the accumulators is available as a scratch
-      // register.  Neither is known to be a smi.
-      __ mov(accumulator1(), accumulator0());
-      clear_as_smi(accumulator1());
-      Register value_scratch = other_accumulator(destination());
-      __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
-    }
-  } else if (destination().is(accumulator1())) {
-    __ mov(accumulator1(), accumulator0());
-    // The value is known to be a smi because no write barrier was needed.
-    set_as_smi(accumulator1());
-  }
-}
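The store depends on the field-index convention visible above: after subtracting the map's in-object property count, a negative index selects a slot inside the object body (addressed backwards from the instance size), and a non-negative one selects a slot in the external properties array. A small sketch of just that offset computation, with illustrative constants:

#include <cstdio>

const int kPointerSize = 4;           // ia32
const int kFixedArrayHeaderSize = 8;  // properties array header (assumption)

// Returns true and a byte offset into the object body for in-object fields,
// or false and a byte offset into the properties backing store.
bool FieldLocation(int field_index, int inobject_properties,
                   int instance_size, int* offset) {
  int index = field_index - inobject_properties;
  *offset = index * kPointerSize;
  if (*offset < 0) {                  // in-object: counted from the end
    *offset += instance_size;
    return true;
  }
  *offset += kFixedArrayHeaderSize;   // out-of-object: array slot
  return false;
}

int main() {
  int offset;
  bool inobject = FieldLocation(1, 4, 8 * kPointerSize, &offset);
  std::printf("inobject=%d offset=%d\n", inobject, offset);  // inobject=1 offset=20
  return 0;
}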
-
-
-void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
-  ASSERT(!destination().is(no_reg));
-  LookupResult lookup;
-  info()->receiver()->Lookup(*name, &lookup);
-
-  ASSERT(lookup.holder() == *info()->receiver());
-  ASSERT(lookup.type() == FIELD);
-  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
-  int index = lookup.GetFieldIndex() - map->inobject_properties();
-  int offset = index * kPointerSize;
-
-  // Perform the load.  Negative offsets are inobject properties.
-  if (offset < 0) {
-    offset += map->instance_size();
-    __ mov(destination(), FieldOperand(receiver_reg(), offset));
-  } else {
-    offset += FixedArray::kHeaderSize;
-    __ mov(scratch0(),
-           FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
-    __ mov(destination(), FieldOperand(scratch0(), offset));
-  }
-
-  // The loaded value is not known to be a smi.
-  clear_as_smi(destination());
-}
-
-
-void FastCodeGenerator::EmitBitOr() {
-  if (is_smi(accumulator0()) && is_smi(accumulator1())) {
-    // If both operands are known to be smis, there is no need to check the
-    // operands or the result, and in an effect context the operation can be
-    // skipped entirely.
-    if (!destination().is(no_reg)) {
-      // Leave the result in the destination register.  Bitwise or is
-      // commutative.
-      __ or_(destination(), Operand(other_accumulator(destination())));
-    }
-  } else {
-    // Left is in accumulator1, right in accumulator0.
-    Label* bailout = NULL;
-    if (destination().is(accumulator0())) {
-      __ mov(scratch0(), accumulator0());
-      __ or_(destination(), Operand(accumulator1()));  // Or is commutative.
-      __ test(destination(), Immediate(kSmiTagMask));
-      bailout = info()->AddBailout(accumulator1(), scratch0());  // Left, right.
-    } else if (destination().is(accumulator1())) {
-      __ mov(scratch0(), accumulator1());
-      __ or_(destination(), Operand(accumulator0()));
-      __ test(destination(), Immediate(kSmiTagMask));
-      bailout = info()->AddBailout(scratch0(), accumulator0());
-    } else {
-      ASSERT(destination().is(no_reg));
-      __ mov(scratch0(), accumulator1());
-      __ or_(scratch0(), Operand(accumulator0()));
-      __ test(scratch0(), Immediate(kSmiTagMask));
-      bailout = info()->AddBailout(accumulator1(), accumulator0());
-    }
-    __ j(not_zero, bailout, not_taken);
-  }
-
-  // If we didn't bail out, the result (in fact, both inputs too) is known to
-  // be a smi.
-  set_as_smi(accumulator0());
-  set_as_smi(accumulator1());
-}
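The checks above rest on V8's 32-bit smi encoding: a small integer is stored as value << 1 with tag bit 0 clear, so OR-ing two words and testing kSmiTagMask verifies both operands with a single instruction, and bitwise OR of two valid smis is itself a valid smi. A self-contained sketch of that encoding:

#include <cassert>
#include <cstdint>

const int32_t kSmiTagMask = 1;  // low bit: 0 = smi, 1 = heap object pointer

int32_t SmiFromInt(int32_t v) { return v << 1; }
int32_t SmiToInt(int32_t s)   { return s >> 1; }

bool BothSmis(int32_t a, int32_t b) {
  // One test covers both operands: any set tag bit survives the OR.
  return ((a | b) & kSmiTagMask) == 0;
}

int main() {
  int32_t a = SmiFromInt(6), b = SmiFromInt(3);
  assert(BothSmis(a, b));
  int32_t r = a | b;  // BIT_OR applied to tagged values stays tagged
  assert(SmiToInt(r) == (6 | 3));
  return 0;
}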
-
-
-void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
-  ASSERT(info_ == NULL);
-  info_ = compilation_info;
-  Comment cmnt(masm_, "[ function compiled by fast code generator");
-
-  // Save the caller's frame pointer and set up our own.
-  Comment prologue_cmnt(masm(), ";; Prologue");
-  __ push(ebp);
-  __ mov(ebp, esp);
-  __ push(esi);  // Context.
-  __ push(edi);  // Closure.
-  // Note that we keep a live register reference to esi (context) at this
-  // point.
-
-  Label* bailout_to_beginning = info()->AddBailout();
-  // Receiver (this) is allocated to a fixed register.
-  if (info()->has_this_properties()) {
-    Comment cmnt(masm(), ";; MapCheck(this)");
-    if (FLAG_print_ir) {
-      PrintF("#: MapCheck(this)\n");
-    }
-    ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
-    Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
-    Handle<Map> map(object->map());
-    EmitLoadReceiver();
-    __ CheckMap(receiver_reg(), map, bailout_to_beginning, false);
-  }
-
-  // If there is a global variable access, check that the global object is
-  // the same as it was at lazy-compilation time.
-  if (info()->has_globals()) {
-    Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
-    if (FLAG_print_ir) {
-      PrintF("#: MapCheck(GLOBAL)\n");
-    }
-    ASSERT(info()->has_global_object());
-    Handle<Map> map(info()->global_object()->map());
-    __ mov(scratch0(), CodeGenerator::GlobalObject());
-    __ CheckMap(scratch0(), map, bailout_to_beginning, true);
-  }
-
-  VisitStatements(function()->body());
-
-  Comment return_cmnt(masm(), ";; Return(<undefined>)");
-  if (FLAG_print_ir) {
-    PrintF("#: Return(<undefined>)\n");
-  }
-  __ mov(eax, Factory::undefined_value());
-  __ mov(esp, ebp);
-  __ pop(ebp);
-  __ ret((scope()->num_parameters() + 1) * kPointerSize);
-}
-
-
-void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitBlock(Block* stmt) {
-  VisitStatements(stmt->statements());
-}
-
-
-void FastCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
-  Visit(stmt->expression());
-}
-
-
-void FastCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
-  // Nothing to do.
-}
-
-
-void FastCodeGenerator::VisitIfStatement(IfStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitWithEnterStatement(WithEnterStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitWithExitStatement(WithExitStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitForStatement(ForStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitSharedFunctionInfoLiteral(
-    SharedFunctionInfoLiteral* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitConditional(Conditional* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitSlot(Slot* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
-  ASSERT(expr->var()->is_global() && !expr->var()->is_this());
-  // Check if we can compile a global variable load directly from the cell.
-  ASSERT(info()->has_global_object());
-  LookupResult lookup;
-  info()->global_object()->Lookup(*expr->name(), &lookup);
-  // We only support normal (non-accessor/interceptor) DontDelete properties
-  // for now.
-  ASSERT(lookup.IsProperty());
-  ASSERT_EQ(NORMAL, lookup.type());
-  ASSERT(lookup.IsDontDelete());
-  Handle<Object> cell(info()->global_object()->GetPropertyCell(&lookup));
-
-  // Global variable lookups do not have side effects, so we do not need to
-  // emit code if we are in an effect context.
-  if (!destination().is(no_reg)) {
-    Comment cmnt(masm(), ";; Global");
-    if (FLAG_print_ir) {
-      SmartPointer<char> name = expr->name()->ToCString();
-      PrintF("%d: t%d = Global(%s)\n", expr->num(),
-             expr->num(), *name);
-    }
-    EmitGlobalVariableLoad(cell);
-  }
-}
-
-
-void FastCodeGenerator::VisitLiteral(Literal* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitAssignment(Assignment* expr) {
-  // Known to be a simple this property assignment.  Effectively a unary
-  // operation.
-  { Register my_destination = destination();
-    set_destination(accumulator0());
-    Visit(expr->value());
-    set_destination(my_destination);
-  }
-
-  Property* prop = expr->target()->AsProperty();
-  ASSERT_NOT_NULL(prop);
-  ASSERT_NOT_NULL(prop->obj()->AsVariableProxy());
-  ASSERT(prop->obj()->AsVariableProxy()->var()->is_this());
-  ASSERT(prop->key()->IsPropertyName());
-  Handle<String> name =
-      Handle<String>::cast(prop->key()->AsLiteral()->handle());
-
-  Comment cmnt(masm(), ";; Store to this");
-  if (FLAG_print_ir) {
-    SmartPointer<char> name_string = name->ToCString();
-    PrintF("%d: ", expr->num());
-    if (!destination().is(no_reg)) PrintF("t%d = ", expr->num());
-    PrintF("Store(this, \"%s\", t%d)\n", *name_string,
-           expr->value()->num());
-  }
-
-  EmitThisPropertyStore(name);
-}
-
-
-void FastCodeGenerator::VisitThrow(Throw* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitProperty(Property* expr) {
-  ASSERT_NOT_NULL(expr->obj()->AsVariableProxy());
-  ASSERT(expr->obj()->AsVariableProxy()->var()->is_this());
-  ASSERT(expr->key()->IsPropertyName());
-  if (!destination().is(no_reg)) {
-    Handle<String> name =
-        Handle<String>::cast(expr->key()->AsLiteral()->handle());
-
-    Comment cmnt(masm(), ";; Load from this");
-    if (FLAG_print_ir) {
-      SmartPointer<char> name_string = name->ToCString();
-      PrintF("%d: t%d = Load(this, \"%s\")\n",
-             expr->num(), expr->num(), *name_string);
-    }
-    EmitThisPropertyLoad(name);
-  }
-}
-
-
-void FastCodeGenerator::VisitCall(Call* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitCallNew(CallNew* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitCountOperation(CountOperation* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
-  // We support a limited set of binary operations: currently only bitwise
-  // OR, which may only be nested on the left.
-  ASSERT(expr->op() == Token::BIT_OR);
-  ASSERT(expr->right()->IsLeaf());
-
-  { Register my_destination = destination();
-    set_destination(accumulator1());
-    Visit(expr->left());
-    set_destination(accumulator0());
-    Visit(expr->right());
-    set_destination(my_destination);
-  }
-
-  Comment cmnt(masm(), ";; BIT_OR");
-  if (FLAG_print_ir) {
-    PrintF("%d: ", expr->num());
-    if (!destination().is(no_reg)) PrintF("t%d = ", expr->num());
-    PrintF("BIT_OR(t%d, t%d)\n", expr->left()->num(), expr->right()->num());
-  }
-  EmitBitOr();
-}
-
-
-void FastCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
-  UNREACHABLE();
-}
-
-
-void FastCodeGenerator::VisitThisFunction(ThisFunction* expr) {
-  UNREACHABLE();
-}
-
-#undef __
-
-
-} }  // namespace v8::internal
-
-#endif  // V8_TARGET_ARCH_IA32
diff --git a/src/ia32/fast-codegen-ia32.h b/src/ia32/fast-codegen-ia32.h
deleted file mode 100644
index e0851af..0000000
--- a/src/ia32/fast-codegen-ia32.h
+++ /dev/null
@@ -1,155 +0,0 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_FAST_CODEGEN_IA32_H_
-#define V8_FAST_CODEGEN_IA32_H_
-
-#include "v8.h"
-
-#include "ast.h"
-#include "compiler.h"
-#include "list.h"
-
-namespace v8 {
-namespace internal {
-
-class FastCodeGenSyntaxChecker: public AstVisitor {
- public:
-  explicit FastCodeGenSyntaxChecker()
-      : info_(NULL), has_supported_syntax_(true) {
-  }
-
-  void Check(CompilationInfo* info);
-
-  CompilationInfo* info() { return info_; }
-  bool has_supported_syntax() { return has_supported_syntax_; }
-
- private:
-  void VisitDeclarations(ZoneList<Declaration*>* decls);
-  void VisitStatements(ZoneList<Statement*>* stmts);
-
-  // AST node visit functions.
-#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
-  AST_NODE_LIST(DECLARE_VISIT)
-#undef DECLARE_VISIT
-
-  CompilationInfo* info_;
-  bool has_supported_syntax_;
-
-  DISALLOW_COPY_AND_ASSIGN(FastCodeGenSyntaxChecker);
-};
-
-
-class FastCodeGenerator: public AstVisitor {
- public:
-  explicit FastCodeGenerator(MacroAssembler* masm)
-      : masm_(masm), info_(NULL), destination_(no_reg), smi_bits_(0) {
-  }
-
-  static Handle<Code> MakeCode(CompilationInfo* info);
-
-  void Generate(CompilationInfo* compilation_info);
-
- private:
-  MacroAssembler* masm() { return masm_; }
-  CompilationInfo* info() { return info_; }
-
-  Register destination() { return destination_; }
-  void set_destination(Register reg) { destination_ = reg; }
-
-  FunctionLiteral* function() { return info_->function(); }
-  Scope* scope() { return info_->scope(); }
-
-  // Platform-specific fixed registers, all guaranteed distinct.
-  Register accumulator0();
-  Register accumulator1();
-  Register scratch0();
-  Register scratch1();
-  Register receiver_reg();
-  Register context_reg();
-
-  Register other_accumulator(Register reg) {
-    ASSERT(reg.is(accumulator0()) || reg.is(accumulator1()));
-    return (reg.is(accumulator0())) ? accumulator1() : accumulator0();
-  }
-
-  // Flags are true if the respective register is statically known to hold a
-  // smi.  We do not track every register, only the accumulator registers.
-  bool is_smi(Register reg) {
-    ASSERT(!reg.is(no_reg));
-    return (smi_bits_ & reg.bit()) != 0;
-  }
-  void set_as_smi(Register reg) {
-    ASSERT(!reg.is(no_reg));
-    smi_bits_ = smi_bits_ | reg.bit();
-  }
-  void clear_as_smi(Register reg) {
-    ASSERT(!reg.is(no_reg));
-    smi_bits_ = smi_bits_ & ~reg.bit();
-  }
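smi_bits_ above is a one-word register set: each register contributes one bit, so recording, clearing, and testing the "statically known to be a smi" fact are single mask operations. The same bookkeeping in miniature:

#include <cassert>
#include <cstdint>

struct Reg { uint32_t bit; };  // each register owns one bit of the set

uint32_t smi_bits = 0;

bool is_smi(Reg r)       { return (smi_bits & r.bit) != 0; }
void set_as_smi(Reg r)   { smi_bits |= r.bit; }
void clear_as_smi(Reg r) { smi_bits &= ~r.bit; }

int main() {
  Reg eax = { 1u << 0 }, edx = { 1u << 2 };
  set_as_smi(eax);
  assert(is_smi(eax) && !is_smi(edx));
  clear_as_smi(eax);
  assert(!is_smi(eax));
  return 0;
}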
-
-  // AST node visit functions.
-#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
-  AST_NODE_LIST(DECLARE_VISIT)
-#undef DECLARE_VISIT
-
-  // Emit code to load the receiver from the stack into receiver_reg.
-  void EmitLoadReceiver();
-
-  // Emit code to load a global variable directly from a global property
-  // cell into the destination register.
-  void EmitGlobalVariableLoad(Handle<Object> cell);
-
-  // Emit a store to an own property of this.  The stored value is expected
-  // in accumulator0 and the receiver in receiver_reg.  The receiver
-  // register is preserved and the result (the stored value) is left in the
-  // destination register.
-  void EmitThisPropertyStore(Handle<String> name);
-
-  // Emit a load from an own property of this.  The receiver is expected in
-  // receiver_reg.  The receiver register is preserved and the result is
-  // left in the destination register.
-  void EmitThisPropertyLoad(Handle<String> name);
-
-  // Emit a bitwise or operation.  The left operand is in accumulator1 and
-  // the right is in accumulator0.  The result should be left in the
-  // destination register.
-  void EmitBitOr();
-
-  MacroAssembler* masm_;
-  CompilationInfo* info_;
-
-  Register destination_;
-  uint32_t smi_bits_;
-
-  DISALLOW_COPY_AND_ASSIGN(FastCodeGenerator);
-};
-
-
-} }  // namespace v8::internal
-
-#endif  // V8_FAST_CODEGEN_IA32_H_
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index eb944e6..cb36904 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -54,97 +54,95 @@
 //
 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
 // frames-ia32.h for its layout.
-void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) {
+void FullCodeGenerator::Generate(CompilationInfo* info) {
   ASSERT(info_ == NULL);
   info_ = info;
   SetFunctionPosition(function());
   Comment cmnt(masm_, "[ function compiled by full code generator");
 
-  if (mode == PRIMARY) {
-    __ push(ebp);  // Caller's frame pointer.
-    __ mov(ebp, esp);
-    __ push(esi);  // Callee's context.
-    __ push(edi);  // Callee's JS Function.
+  __ push(ebp);  // Caller's frame pointer.
+  __ mov(ebp, esp);
+  __ push(esi);  // Callee's context.
+  __ push(edi);  // Callee's JS Function.
 
-    { Comment cmnt(masm_, "[ Allocate locals");
-      int locals_count = scope()->num_stack_slots();
-      if (locals_count == 1) {
-        __ push(Immediate(Factory::undefined_value()));
-      } else if (locals_count > 1) {
-        __ mov(eax, Immediate(Factory::undefined_value()));
-        for (int i = 0; i < locals_count; i++) {
-          __ push(eax);
-        }
+  { Comment cmnt(masm_, "[ Allocate locals");
+    int locals_count = scope()->num_stack_slots();
+    if (locals_count == 1) {
+      __ push(Immediate(Factory::undefined_value()));
+    } else if (locals_count > 1) {
+      __ mov(eax, Immediate(Factory::undefined_value()));
+      for (int i = 0; i < locals_count; i++) {
+        __ push(eax);
       }
     }
+  }
 
-    bool function_in_register = true;
+  bool function_in_register = true;
 
-    // Possibly allocate a local context.
-    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
-    if (heap_slots > 0) {
-      Comment cmnt(masm_, "[ Allocate local context");
-      // Argument to NewContext is the function, which is still in edi.
-      __ push(edi);
-      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-        FastNewContextStub stub(heap_slots);
-        __ CallStub(&stub);
-      } else {
-        __ CallRuntime(Runtime::kNewContext, 1);
-      }
-      function_in_register = false;
-      // Context is returned in both eax and esi.  It replaces the context
-      // passed to us.  It's saved in the stack and kept live in esi.
-      __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
-
-      // Copy parameters into context if necessary.
-      int num_parameters = scope()->num_parameters();
-      for (int i = 0; i < num_parameters; i++) {
-        Slot* slot = scope()->parameter(i)->slot();
-        if (slot != NULL && slot->type() == Slot::CONTEXT) {
-          int parameter_offset = StandardFrameConstants::kCallerSPOffset +
-                                     (num_parameters - 1 - i) * kPointerSize;
-          // Load parameter from stack.
-          __ mov(eax, Operand(ebp, parameter_offset));
-          // Store it in the context.
-          int context_offset = Context::SlotOffset(slot->index());
-          __ mov(Operand(esi, context_offset), eax);
-          // Update the write barrier. This clobbers all involved
-          // registers, so we have to use a third register to avoid
-          // clobbering esi.
-          __ mov(ecx, esi);
-          __ RecordWrite(ecx, context_offset, eax, ebx);
-        }
-      }
-    }
-
-    Variable* arguments = scope()->arguments()->AsVariable();
-    if (arguments != NULL) {
-      // Function uses arguments object.
-      Comment cmnt(masm_, "[ Allocate arguments object");
-      if (function_in_register) {
-        __ push(edi);
-      } else {
-        __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
-      }
-      // Receiver is just before the parameters on the caller's stack.
-      int offset = scope()->num_parameters() * kPointerSize;
-      __ lea(edx,
-             Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
-      __ push(edx);
-      __ push(Immediate(Smi::FromInt(scope()->num_parameters())));
-      // Arguments to ArgumentsAccessStub:
-      //   function, receiver address, parameter count.
-      // The stub will rewrite receiver and parameter count if the previous
-      // stack frame was an arguments adapter frame.
-      ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+  // Possibly allocate a local context.
+  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+  if (heap_slots > 0) {
+    Comment cmnt(masm_, "[ Allocate local context");
+    // Argument to NewContext is the function, which is still in edi.
+    __ push(edi);
+    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+      FastNewContextStub stub(heap_slots);
       __ CallStub(&stub);
-      __ mov(ecx, eax);  // Duplicate result.
-      Move(arguments->slot(), eax, ebx, edx);
-      Slot* dot_arguments_slot =
-          scope()->arguments_shadow()->AsVariable()->slot();
-      Move(dot_arguments_slot, ecx, ebx, edx);
+    } else {
+      __ CallRuntime(Runtime::kNewContext, 1);
     }
+    function_in_register = false;
+    // Context is returned in both eax and esi.  It replaces the context
+    // passed to us.  It's saved in the stack and kept live in esi.
+    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
+
+    // Copy parameters into context if necessary.
+    int num_parameters = scope()->num_parameters();
+    for (int i = 0; i < num_parameters; i++) {
+      Slot* slot = scope()->parameter(i)->slot();
+      if (slot != NULL && slot->type() == Slot::CONTEXT) {
+        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
+            (num_parameters - 1 - i) * kPointerSize;
+        // Load parameter from stack.
+        __ mov(eax, Operand(ebp, parameter_offset));
+        // Store it in the context.
+        int context_offset = Context::SlotOffset(slot->index());
+        __ mov(Operand(esi, context_offset), eax);
+        // Update the write barrier. This clobbers all involved
+        // registers, so we have to use a third register to avoid
+        // clobbering esi.
+        __ mov(ecx, esi);
+        __ RecordWrite(ecx, context_offset, eax, ebx);
+      }
+    }
+  }
+
+  Variable* arguments = scope()->arguments()->AsVariable();
+  if (arguments != NULL) {
+    // Function uses arguments object.
+    Comment cmnt(masm_, "[ Allocate arguments object");
+    if (function_in_register) {
+      __ push(edi);
+    } else {
+      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+    }
+    // Receiver is just before the parameters on the caller's stack.
+    int offset = scope()->num_parameters() * kPointerSize;
+    __ lea(edx,
+           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
+    __ push(edx);
+    __ push(Immediate(Smi::FromInt(scope()->num_parameters())));
+    // Arguments to ArgumentsAccessStub:
+    //   function, receiver address, parameter count.
+    // The stub will rewrite receiver and parameter count if the previous
+    // stack frame was an arguments adapter frame.
+    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+    __ CallStub(&stub);
+    __ mov(ecx, eax);  // Duplicate result.
+    Move(arguments->slot(), eax, ebx, edx);
+    Slot* dot_arguments_slot =
+        scope()->arguments_shadow()->AsVariable()->slot();
+    Move(dot_arguments_slot, ecx, ebx, edx);
   }
 
   { Comment cmnt(masm_, "[ Declarations");
@@ -1048,7 +1046,7 @@
   __ push(ecx);  // Enumerable.
   __ push(ebx);  // Current entry.
   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
-  __ cmp(eax, Factory::null_value());
+  __ test(eax, Operand(eax));
   __ j(equal, loop_statement.continue_target());
   __ mov(ebx, Operand(eax));
 
@@ -2054,6 +2052,25 @@
 }
 
 
+void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
+    ZoneList<Expression*>* args) {
+  ASSERT(args->length() == 1);
+
+  VisitForValue(args->at(0), kAccumulator);
+
+  Label materialize_true, materialize_false;
+  Label* if_true = NULL;
+  Label* if_false = NULL;
+  PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+  // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
+  // used in a few functions in runtime.js, which should not normally be hit by
+  // this compiler.
+  __ jmp(if_false);
+  Apply(context_, if_true, if_false);
+}
+
+
 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 1);
 
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 37b6436..2453846 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -373,13 +373,13 @@
 
 void MacroAssembler::AbortIfNotSmi(Register object) {
   test(object, Immediate(kSmiTagMask));
-  Assert(equal, "Operand not a smi");
+  Assert(equal, "Operand is not a smi");
 }
 
 
 void MacroAssembler::AbortIfSmi(Register object) {
   test(object, Immediate(kSmiTagMask));
-  Assert(not_equal, "Operand a smi");
+  Assert(not_equal, "Operand is a smi");
 }
 
 
@@ -1549,12 +1549,10 @@
     if (scratch.is(no_reg)) scratch = dst;
     cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset));
     cmp(scratch, 0x80000000u);
-    if (push_pop || dst.is(source)) {
+    if (push_pop) {
       j(not_equal, &done);
-      if (push_pop) {
-        pop(dst);
-        jmp(on_not_int32);
-      }
+      pop(dst);
+      jmp(on_not_int32);
     } else {
       j(equal, on_not_int32);
     }
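The simplified branch structure keeps the underlying trick intact: cvttsd2si produces the "integer indefinite" value 0x80000000 whenever the double is NaN or out of int32 range, so comparing the result against that sentinel detects a failed conversion (the legitimate INT_MIN case is conservatively rejected as well). A stand-alone demonstration using the SSE2 intrinsic, x86-specific and with illustrative names:

#include <emmintrin.h>
#include <cstdio>

bool TryConvertToInt32(double d, int* out) {
  int v = _mm_cvttsd_si32(_mm_set_sd(d));    // truncating cvttsd2si
  if (v == static_cast<int>(0x80000000u)) {  // integer indefinite
    return false;                            // NaN, out of range, or INT_MIN
  }
  *out = v;
  return true;
}

int main() {
  int v;
  std::printf("%d\n", TryConvertToInt32(41.9, &v) ? v : -1);  // 41
  std::printf("%d\n", TryConvertToInt32(4e10, &v) ? v : -1);  // -1: not an int32
  return 0;
}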
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index c21dd4f..b2c9dab 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -1255,30 +1255,6 @@
 }
 
 
-// TODO(1241006): Avoid having lazy compile stubs specialized by the
-// number of arguments. It is not needed anymore.
-Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
-  // Enter an internal frame.
-  __ EnterInternalFrame();
-
-  // Push a copy of the function onto the stack.
-  __ push(edi);
-
-  __ push(edi);  // function is also the parameter to the runtime call
-  __ CallRuntime(Runtime::kLazyCompile, 1);
-  __ pop(edi);
-
-  // Tear down temporary frame.
-  __ LeaveInternalFrame();
-
-  // Do a tail-call of the compiled function.
-  __ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
-  __ jmp(Operand(ecx));
-
-  return GetCodeWithFlags(flags, "LazyCompileStub");
-}
-
-
 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
     __ cmp(Operand(ecx), Immediate(Handle<String>(name)));
@@ -1595,6 +1571,9 @@
   //  -- esp[(argc + 1) * 4] : receiver
   // -----------------------------------
 
+  // If object is not a string, bail out to regular call.
+  if (!object->IsString()) return Heap::undefined_value();
+
   const int argc = arguments().immediate();
 
   Label miss;
@@ -1605,6 +1584,7 @@
   GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                             Context::STRING_FUNCTION_INDEX,
                                             eax);
+  ASSERT(object != holder);
   CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                   ebx, edx, edi, name, &miss);
 
@@ -1659,6 +1639,9 @@
   //  -- esp[(argc + 1) * 4] : receiver
   // -----------------------------------
 
+  // If object is not a string, bail out to regular call.
+  if (!object->IsString()) return Heap::undefined_value();
+
   const int argc = arguments().immediate();
 
   Label miss;
@@ -1670,6 +1653,7 @@
   GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                             Context::STRING_FUNCTION_INDEX,
                                             eax);
+  ASSERT(object != holder);
   CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                   ebx, edx, edi, name, &miss);
 
diff --git a/src/liveedit-debugger.js b/src/liveedit-debugger.js
index c8c6f08..be97989 100644
--- a/src/liveedit-debugger.js
+++ b/src/liveedit-debugger.js
@@ -800,9 +800,10 @@
     this.end_position = raw_array[2];
     this.param_num = raw_array[3];
     this.code = raw_array[4];
-    this.scope_info = raw_array[5];
-    this.outer_index = raw_array[6];
-    this.shared_function_info = raw_array[7];
+    this.code_scope_info = raw_array[5];
+    this.scope_info = raw_array[6];
+    this.outer_index = raw_array[7];
+    this.shared_function_info = raw_array[8];
     this.next_sibling_index = null;
     this.raw_array = raw_array;
   }
diff --git a/src/liveedit.cc b/src/liveedit.cc
index 346d9ea..769ac35 100644
--- a/src/liveedit.cc
+++ b/src/liveedit.cc
@@ -32,6 +32,7 @@
 #include "compiler.h"
 #include "oprofile-agent.h"
 #include "scopes.h"
+#include "scopeinfo.h"
 #include "global-handles.h"
 #include "debug.h"
 #include "memory.h"
@@ -500,12 +501,16 @@
     this->SetSmiValueField(kParamNumOffset_, param_num);
     this->SetSmiValueField(kParentIndexOffset_, parent_index);
   }
-  void SetFunctionCode(Handle<Code> function_code) {
-    Handle<JSValue> wrapper = WrapInJSValue(*function_code);
-    this->SetField(kCodeOffset_, wrapper);
+  void SetFunctionCode(Handle<Code> function_code,
+      Handle<Object> code_scope_info) {
+    Handle<JSValue> code_wrapper = WrapInJSValue(*function_code);
+    this->SetField(kCodeOffset_, code_wrapper);
+
+    Handle<JSValue> scope_wrapper = WrapInJSValue(*code_scope_info);
+    this->SetField(kCodeScopeInfoOffset_, scope_wrapper);
   }
-  void SetScopeInfo(Handle<Object> scope_info_array) {
-    this->SetField(kScopeInfoOffset_, scope_info_array);
+  void SetOuterScopeInfo(Handle<Object> scope_info_array) {
+    this->SetField(kOuterScopeInfoOffset_, scope_info_array);
   }
   void SetSharedFunctionInfo(Handle<SharedFunctionInfo> info) {
     Handle<JSValue> info_holder = WrapInJSValue(*info);
@@ -519,6 +524,11 @@
         JSValue::cast(this->GetField(kCodeOffset_))));
     return Handle<Code>::cast(raw_result);
   }
+  Handle<Object> GetCodeScopeInfo() {
+    Handle<Object> raw_result = UnwrapJSValue(Handle<JSValue>(
+        JSValue::cast(this->GetField(kCodeScopeInfoOffset_))));
+    return raw_result;
+  }
   int GetStartPosition() {
     return this->GetSmiValueField(kStartPositionOffset_);
   }
@@ -532,10 +542,11 @@
   static const int kEndPositionOffset_ = 2;
   static const int kParamNumOffset_ = 3;
   static const int kCodeOffset_ = 4;
-  static const int kScopeInfoOffset_ = 5;
-  static const int kParentIndexOffset_ = 6;
-  static const int kSharedFunctionInfoOffset_ = 7;
-  static const int kSize_ = 8;
+  static const int kCodeScopeInfoOffset_ = 5;
+  static const int kOuterScopeInfoOffset_ = 6;
+  static const int kParentIndexOffset_ = 7;
+  static const int kSharedFunctionInfoOffset_ = 8;
+  static const int kSize_ = 9;
 
   friend class JSArrayBasedStruct<FunctionInfoWrapper>;
 };
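These slot constants must stay in lock step with the JS-side FunctionInfo constructor patched earlier in this change (liveedit-debugger.js), which is why raw_array[5..8] were renumbered there when kCodeScopeInfoOffset_ was inserted here. The shared pattern, named accessors over fixed indices in an untyped array, in miniature (indices illustrative):

#include <cassert>
#include <vector>

struct FunctionInfoSketch {
  static const int kCode           = 4;
  static const int kCodeScopeInfo  = 5;  // newly inserted slot
  static const int kOuterScopeInfo = 6;  // was 5 before the insertion
  static const int kSize           = 9;

  std::vector<int> slots;
  FunctionInfoSketch() : slots(kSize) {}
  void SetField(int index, int value) { slots[index] = value; }
  int GetField(int index) const { return slots[index]; }
};

int main() {
  FunctionInfoSketch info;
  info.SetField(FunctionInfoSketch::kCodeScopeInfo, 42);
  assert(info.GetField(FunctionInfoSketch::kCodeScopeInfo) == 42);
  return 0;
}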
@@ -671,7 +682,7 @@
   void FunctionCode(Handle<Code> function_code) {
     FunctionInfoWrapper info =
         FunctionInfoWrapper::cast(result_->GetElement(current_parent_index_));
-    info.SetFunctionCode(function_code);
+    info.SetFunctionCode(function_code, Handle<Object>(Heap::null_value()));
   }
 
   // Saves full information about a function: its code, its scope info
@@ -682,11 +693,12 @@
     }
     FunctionInfoWrapper info =
         FunctionInfoWrapper::cast(result_->GetElement(current_parent_index_));
-    info.SetFunctionCode(Handle<Code>(shared->code()));
+    info.SetFunctionCode(Handle<Code>(shared->code()),
+        Handle<Object>(shared->scope_info()));
     info.SetSharedFunctionInfo(shared);
 
     Handle<Object> scope_info_list(SerializeFunctionScope(scope));
-    info.SetScopeInfo(scope_info_list);
+    info.SetOuterScopeInfo(scope_info_list);
   }
 
   Handle<JSArray> GetResult() {
@@ -855,6 +867,10 @@
   if (IsJSFunctionCode(shared_info->code())) {
     ReplaceCodeObject(shared_info->code(),
                       *(compile_info_wrapper.GetFunctionCode()));
+    Handle<Object> code_scope_info = compile_info_wrapper.GetCodeScopeInfo();
+    if (code_scope_info->IsFixedArray()) {
+      shared_info->set_scope_info(SerializedScopeInfo::cast(*code_scope_info));
+    }
   }
 
   if (shared_info->debug_info()->IsDebugInfo()) {
@@ -1190,7 +1206,7 @@
                               int bottom_js_frame_index,
                               Debug::FrameDropMode* mode,
                               Object*** restarter_frame_function_pointer) {
-  if (Debug::kFrameDropperFrameSize < 0) {
+  if (!Debug::kFrameDropperSupported) {
     return "Stack manipulations are not supported in this architecture.";
   }
 
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index d9b0222..1a020e5 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -32,6 +32,7 @@
 #include "global-handles.h"
 #include "ic-inl.h"
 #include "mark-compact.h"
+#include "objects-visiting.h"
 #include "stub-cache.h"
 
 namespace v8 {
@@ -63,6 +64,7 @@
 int MarkCompactCollector::live_lo_objects_size_ = 0;
 #endif
 
+
 void MarkCompactCollector::CollectGarbage() {
   // Make sure that Prepare() has been called. The individual steps below will
   // update the state as they proceed.
@@ -244,14 +246,72 @@
 }
 
 
-// Helper class for marking pointers in HeapObjects.
-class MarkingVisitor : public ObjectVisitor {
+class StaticMarkingVisitor : public StaticVisitorBase {
  public:
-  void VisitPointer(Object** p) {
+  static inline void IterateBody(Map* map, HeapObject* obj) {
+    table_.GetVisitor(map)(map, obj);
+  }
+
+  static void Initialize() {
+    table_.Register(kVisitShortcutCandidate,
+                    &FixedBodyVisitor<StaticMarkingVisitor,
+                                      ConsString::BodyDescriptor,
+                                      void>::Visit);
+
+    table_.Register(kVisitConsString,
+                    &FixedBodyVisitor<StaticMarkingVisitor,
+                                      ConsString::BodyDescriptor,
+                                      void>::Visit);
+
+
+    table_.Register(kVisitFixedArray,
+                    &FlexibleBodyVisitor<StaticMarkingVisitor,
+                                         FixedArray::BodyDescriptor,
+                                         void>::Visit);
+
+    table_.Register(kVisitSharedFunctionInfo,
+                    &FixedBodyVisitor<StaticMarkingVisitor,
+                                      SharedFunctionInfo::BodyDescriptor,
+                                      void>::Visit);
+
+    table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
+    table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
+    table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
+
+    table_.Register(kVisitOddball,
+                    &FixedBodyVisitor<StaticMarkingVisitor,
+                                      Oddball::BodyDescriptor,
+                                      void>::Visit);
+    table_.Register(kVisitMap,
+                    &FixedBodyVisitor<StaticMarkingVisitor,
+                                      Map::BodyDescriptor,
+                                      void>::Visit);
+
+    table_.Register(kVisitCode, &VisitCode);
+
+    table_.Register(kVisitPropertyCell,
+                    &FixedBodyVisitor<StaticMarkingVisitor,
+                                      JSGlobalPropertyCell::BodyDescriptor,
+                                      void>::Visit);
+
+    table_.RegisterSpecializations<DataObjectVisitor,
+                                   kVisitDataObject,
+                                   kVisitDataObjectGeneric>();
+
+    table_.RegisterSpecializations<JSObjectVisitor,
+                                   kVisitJSObject,
+                                   kVisitJSObjectGeneric>();
+
+    table_.RegisterSpecializations<StructObjectVisitor,
+                                   kVisitStruct,
+                                   kVisitStructGeneric>();
+  }
+
+  INLINE(static void VisitPointer(Object** p)) {
     MarkObjectByPointer(p);
   }
 
-  void VisitPointers(Object** start, Object** end) {
+  INLINE(static void VisitPointers(Object** start, Object** end)) {
     // Mark all objects pointed to in [start, end).
     const int kMinRangeForMarkingRecursion = 64;
     if (end - start >= kMinRangeForMarkingRecursion) {
@@ -261,7 +321,7 @@
     for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
   }
 
-  void VisitCodeTarget(RelocInfo* rinfo) {
+  static inline void VisitCodeTarget(RelocInfo* rinfo) {
     ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
     Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
     if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
@@ -273,7 +333,7 @@
     }
   }
 
-  void VisitDebugTarget(RelocInfo* rinfo) {
+  static inline void VisitDebugTarget(RelocInfo* rinfo) {
     ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
             rinfo->IsPatchedReturnSequence()) ||
            (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
@@ -282,19 +342,15 @@
     MarkCompactCollector::MarkObject(code);
   }
 
- private:
   // Mark object pointed to by p.
-  void MarkObjectByPointer(Object** p) {
+  INLINE(static void MarkObjectByPointer(Object** p)) {
     if (!(*p)->IsHeapObject()) return;
     HeapObject* object = ShortCircuitConsString(p);
     MarkCompactCollector::MarkObject(object);
   }
 
-  // Tells whether the mark sweep collection will perform compaction.
-  bool IsCompacting() { return MarkCompactCollector::IsCompacting(); }
-
   // Visit an unmarked object.
-  void VisitUnmarkedObject(HeapObject* obj) {
+  static inline void VisitUnmarkedObject(HeapObject* obj) {
 #ifdef DEBUG
     ASSERT(Heap::Contains(obj));
     ASSERT(!obj->IsMarked());
@@ -303,12 +359,12 @@
     MarkCompactCollector::SetMark(obj);
     // Mark the map pointer and the body.
     MarkCompactCollector::MarkObject(map);
-    obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this);
+    IterateBody(map, obj);
   }
 
   // Visit all unmarked objects pointed to by [start, end).
   // Returns false if the operation fails (lack of stack space).
-  inline bool VisitUnmarkedObjects(Object** start, Object** end) {
+  static inline bool VisitUnmarkedObjects(Object** start, Object** end) {
     // Return false if we are close to the stack limit.
     StackLimitCheck check;
     if (check.HasOverflowed()) return false;
@@ -322,6 +378,60 @@
     }
     return true;
   }
+
+  static inline void VisitExternalReference(Address* p) { }
+  static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
+
+ private:
+  class DataObjectVisitor {
+   public:
+    template<int size>
+    static void VisitSpecialized(Map* map, HeapObject* object) {
+    }
+
+    static void Visit(Map* map, HeapObject* object) {
+    }
+  };
+
+  typedef FlexibleBodyVisitor<StaticMarkingVisitor,
+                              JSObject::BodyDescriptor,
+                              void> JSObjectVisitor;
+
+  typedef FlexibleBodyVisitor<StaticMarkingVisitor,
+                              StructBodyDescriptor,
+                              void> StructObjectVisitor;
+
+  static void VisitCode(Map* map, HeapObject* object) {
+    reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>();
+  }
+
+  typedef void (*Callback)(Map* map, HeapObject* object);
+
+  static VisitorDispatchTable<Callback> table_;
+};
+
+
+VisitorDispatchTable<StaticMarkingVisitor::Callback>
+  StaticMarkingVisitor::table_;
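The rewrite above trades virtual ObjectVisitor dispatch for a table of free functions indexed by a per-map visitor id and filled in once by Initialize(). A compact stand-alone model of the dispatch-table pattern:

#include <cstdio>

enum VisitorId { kVisitString, kVisitFixedArray, kVisitorIdCount };

struct Object { VisitorId id; };  // stands in for the Map's visitor id

typedef void (*Callback)(Object* obj);

static Callback table[kVisitorIdCount];

static void VisitString(Object*)     { std::printf("string\n"); }
static void VisitFixedArray(Object*) { std::printf("fixed array\n"); }

static void Initialize() {
  table[kVisitString]     = &VisitString;
  table[kVisitFixedArray] = &VisitFixedArray;
}

static void IterateBody(Object* obj) {
  table[obj->id](obj);  // one indirect call, no vtable lookup per object
}

int main() {
  Initialize();
  Object s = { kVisitString };
  Object f = { kVisitFixedArray };
  IterateBody(&s);
  IterateBody(&f);
  return 0;
}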
+
+
+class MarkingVisitor : public ObjectVisitor {
+ public:
+  void VisitPointer(Object** p) {
+    StaticMarkingVisitor::VisitPointer(p);
+  }
+
+  void VisitPointers(Object** start, Object** end) {
+    StaticMarkingVisitor::VisitPointers(start, end);
+  }
+
+  void VisitCodeTarget(RelocInfo* rinfo) {
+    StaticMarkingVisitor::VisitCodeTarget(rinfo);
+  }
+
+  void VisitDebugTarget(RelocInfo* rinfo) {
+    StaticMarkingVisitor::VisitDebugTarget(rinfo);
+  }
 };
 
 
@@ -336,11 +446,7 @@
     for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
   }
 
-  MarkingVisitor* stack_visitor() { return &stack_visitor_; }
-
  private:
-  MarkingVisitor stack_visitor_;
-
   void MarkObjectByPointer(Object** p) {
     if (!(*p)->IsHeapObject()) return;
 
@@ -351,14 +457,14 @@
     Map* map = object->map();
     // Mark the object.
     MarkCompactCollector::SetMark(object);
+
     // Mark the map pointer and body, and push them on the marking stack.
     MarkCompactCollector::MarkObject(map);
-    object->IterateBody(map->instance_type(), object->SizeFromMap(map),
-                        &stack_visitor_);
+    StaticMarkingVisitor::IterateBody(map, object);
 
     // Mark all the objects reachable from the map and body.  May leave
     // overflowed objects in the heap.
-    MarkCompactCollector::EmptyMarkingStack(&stack_visitor_);
+    MarkCompactCollector::EmptyMarkingStack();
   }
 };
 
@@ -425,11 +531,12 @@
   // Mark the Object* fields of the Map.
   // Since the descriptor array has been marked already, it is fine
   // that one of these fields contains a pointer to it.
-  MarkingVisitor visitor;  // Has no state or contents.
-  visitor.VisitPointers(HeapObject::RawField(map,
-                                             Map::kPointerFieldsBeginOffset),
-                        HeapObject::RawField(map,
-                                             Map::kPointerFieldsEndOffset));
+  Object** start_slot = HeapObject::RawField(map,
+                                             Map::kPointerFieldsBeginOffset);
+
+  Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
+
+  StaticMarkingVisitor::VisitPointers(start_slot, end_slot);
 }
 
 
@@ -447,10 +554,11 @@
   ASSERT(contents->IsFixedArray());
   ASSERT(contents->length() >= 2);
   SetMark(contents);
-  // Contents contains (value, details) pairs.  If the details say
-  // that the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION,
-  // or NULL_DESCRIPTOR, we don't mark the value as live.  Only for
-  // type MAP_TRANSITION is the value a Object* (a Map*).
+  // Contents contains (value, details) pairs.  If the details say that
+  // the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, or
+  // NULL_DESCRIPTOR, we don't mark the value as live.  Only for
+  // MAP_TRANSITION and CONSTANT_TRANSITION is the value an Object* (a
+  // Map*).
   for (int i = 0; i < contents->length(); i += 2) {
     // If the pair (value, details) at index i, i+1 is not
     // a transition or null descriptor, mark the value.
@@ -529,7 +637,7 @@
   // Explicitly mark the prefix.
   MarkingVisitor marker;
   symbol_table->IteratePrefix(&marker);
-  ProcessMarkingStack(&marker);
+  ProcessMarkingStack();
 }
 
 
@@ -544,7 +652,7 @@
   // There may be overflowed objects in the heap.  Visit them now.
   while (marking_stack.overflowed()) {
     RefillMarkingStack();
-    EmptyMarkingStack(visitor->stack_visitor());
+    EmptyMarkingStack();
   }
 }
 
@@ -587,7 +695,7 @@
 // Before: the marking stack contains zero or more heap object pointers.
 // After: the marking stack is empty, and all objects reachable from the
 // marking stack have been marked, or are overflowed in the heap.
-void MarkCompactCollector::EmptyMarkingStack(MarkingVisitor* visitor) {
+void MarkCompactCollector::EmptyMarkingStack() {
   while (!marking_stack.is_empty()) {
     HeapObject* object = marking_stack.Pop();
     ASSERT(object->IsHeapObject());
@@ -601,8 +709,8 @@
     map_word.ClearMark();
     Map* map = map_word.ToMap();
     MarkObject(map);
-    object->IterateBody(map->instance_type(), object->SizeFromMap(map),
-                        visitor);
+
+    StaticMarkingVisitor::IterateBody(map, object);
   }
 }
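EmptyMarkingStack is the textbook explicit-worklist form of heap marking: pop an object, visit its body, and push any unmarked children for later scanning (overflow is handled by the separate refill loop). The same loop over a toy object graph:

#include <cassert>
#include <vector>

struct Node {
  bool marked;
  std::vector<Node*> children;
};

void EmptyMarkingStack(std::vector<Node*>* stack) {
  while (!stack->empty()) {
    Node* object = stack->back();
    stack->pop_back();
    // Visit the body: mark and push each unmarked child.
    for (size_t i = 0; i < object->children.size(); i++) {
      Node* child = object->children[i];
      if (!child->marked) {
        child->marked = true;
        stack->push_back(child);
      }
    }
  }
}

int main() {
  Node a = { true }, b = { false }, c = { false };
  a.children.push_back(&b);
  b.children.push_back(&c);
  std::vector<Node*> stack;
  stack.push_back(&a);  // roots go on the stack already marked
  EmptyMarkingStack(&stack);
  assert(b.marked && c.marked);
  return 0;
}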
 
@@ -652,22 +760,22 @@
 // stack.  Before: the marking stack contains zero or more heap object
 // pointers.  After: the marking stack is empty and there are no overflowed
 // objects in the heap.
-void MarkCompactCollector::ProcessMarkingStack(MarkingVisitor* visitor) {
-  EmptyMarkingStack(visitor);
+void MarkCompactCollector::ProcessMarkingStack() {
+  EmptyMarkingStack();
   while (marking_stack.overflowed()) {
     RefillMarkingStack();
-    EmptyMarkingStack(visitor);
+    EmptyMarkingStack();
   }
 }
 
 
-void MarkCompactCollector::ProcessObjectGroups(MarkingVisitor* visitor) {
+void MarkCompactCollector::ProcessObjectGroups() {
   bool work_to_do = true;
   ASSERT(marking_stack.is_empty());
   while (work_to_do) {
     MarkObjectGroups();
     work_to_do = !marking_stack.is_empty();
-    ProcessMarkingStack(visitor);
+    ProcessMarkingStack();
   }
 }
 
@@ -692,7 +800,7 @@
   // objects are unmarked.  Mark objects reachable from object groups
   // containing at least one marked object, and continue until no new
   // objects are reachable from the object groups.
-  ProcessObjectGroups(root_visitor.stack_visitor());
+  ProcessObjectGroups();
 
   // The objects reachable from the roots or object groups are marked,
   // yet unreachable objects are unmarked.  Mark objects reachable
@@ -705,12 +813,12 @@
   GlobalHandles::IterateWeakRoots(&root_visitor);
   while (marking_stack.overflowed()) {
     RefillMarkingStack();
-    EmptyMarkingStack(root_visitor.stack_visitor());
+    EmptyMarkingStack();
   }
 
   // Repeat the object groups to mark unmarked groups reachable from the
   // weak roots.
-  ProcessObjectGroups(root_visitor.stack_visitor());
+  ProcessObjectGroups();
 
   // Prune the symbol table removing all symbols only pointed to by the
   // symbol table.  Cannot use symbol_table() here because the symbol
@@ -1091,16 +1199,35 @@
 }
 
 
+class StaticPointersToNewGenUpdatingVisitor : public
+  StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
+ public:
+  static inline void VisitPointer(Object** p) {
+    if (!(*p)->IsHeapObject()) return;
+
+    HeapObject* obj = HeapObject::cast(*p);
+    Address old_addr = obj->address();
+
+    if (Heap::new_space()->Contains(obj)) {
+      ASSERT(Heap::InFromSpace(*p));
+      *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
+    }
+  }
+};
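The updating visitor relies on the scavenger's forwarding convention: when a live new-space object is evacuated, its new address is written at the old object's address, so a stale slot is repaired with one extra load, which is what the Memory::Address_at(old_addr) read above does. A minimal model of forwarding-pointer fix-up (layout illustrative):

#include <cassert>

struct Obj { Obj* forwarding; int payload; };

// Evacuate: copy the object, then leave its new address at the old location.
Obj* Evacuate(Obj* from, Obj* to) {
  to->payload = from->payload;
  from->forwarding = to;
  return to;
}

void UpdateSlot(Obj** slot, Obj* from_begin, Obj* from_end) {
  Obj* obj = *slot;
  if (obj >= from_begin && obj < from_end) {  // still points into from-space?
    *slot = obj->forwarding;                  // follow the forwarding address
  }
}

int main() {
  Obj from_space[1] = { { 0, 7 } };
  Obj to_space[1];
  Obj* slot = &from_space[0];
  Evacuate(&from_space[0], &to_space[0]);
  UpdateSlot(&slot, from_space, from_space + 1);
  assert(slot == &to_space[0] && slot->payload == 7);
  return 0;
}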
+
+
 // Visitor for updating pointers from live objects in old spaces to new space.
 // It does not expect to encounter pointers to dead objects.
 class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
  public:
   void VisitPointer(Object** p) {
-    UpdatePointer(p);
+    StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
   }
 
   void VisitPointers(Object** start, Object** end) {
-    for (Object** p = start; p < end; p++) UpdatePointer(p);
+    for (Object** p = start; p < end; p++) {
+      StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
+    }
   }
 
   void VisitCodeTarget(RelocInfo* rinfo) {
@@ -1119,19 +1246,6 @@
     VisitPointer(&target);
     rinfo->set_call_address(Code::cast(target)->instruction_start());
   }
-
- private:
-  void UpdatePointer(Object** p) {
-    if (!(*p)->IsHeapObject()) return;
-
-    HeapObject* obj = HeapObject::cast(*p);
-    Address old_addr = obj->address();
-
-    if (Heap::new_space()->Contains(obj)) {
-      ASSERT(Heap::InFromSpace(*p));
-      *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
-    }
-  }
 };
 
 
@@ -1248,15 +1362,12 @@
   PointersToNewGenUpdatingVisitor updating_visitor;
 
   // Update pointers in to space.
-  HeapObject* object;
-  for (Address current = space->bottom();
-       current < space->top();
-       current += object->Size()) {
-    object = HeapObject::FromAddress(current);
-
-    object->IterateBody(object->map()->instance_type(),
-                        object->Size(),
-                        &updating_visitor);
+  Address current = space->bottom();
+  while (current < space->top()) {
+    HeapObject* object = HeapObject::FromAddress(current);
+    current +=
+        StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(),
+                                                           object);
   }
 
   // Update roots.
@@ -1758,7 +1869,9 @@
   SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock);
   SweepSpace(Heap::code_space(), &DeallocateCodeBlock);
   SweepSpace(Heap::cell_space(), &DeallocateCellBlock);
-  SweepNewSpace(Heap::new_space());
+  { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
+    SweepNewSpace(Heap::new_space());
+  }
   SweepSpace(Heap::map_space(), &DeallocateMapBlock);
 
   Heap::IterateDirtyRegions(Heap::map_space(),
@@ -2327,4 +2440,11 @@
 #endif
 }
 
+
+void MarkCompactCollector::Initialize() {
+  StaticPointersToNewGenUpdatingVisitor::Initialize();
+  StaticMarkingVisitor::Initialize();
+}
+
+
 } }  // namespace v8::internal
diff --git a/src/mark-compact.h b/src/mark-compact.h
index 1d289a7..ad63586 100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -86,6 +86,9 @@
     force_compaction_ = value;
   }
 
+
+  static void Initialize();
+
   // Prepares for GC by resetting relocation info in old and map spaces and
   // choosing spaces to compact.
   static void Prepare(GCTracer* tracer);
@@ -171,6 +174,7 @@
 
   friend class RootMarkingVisitor;
   friend class MarkingVisitor;
+  friend class StaticMarkingVisitor;
 
   // Marking operations for objects reachable from roots.
   static void MarkLiveObjects();
@@ -214,17 +218,17 @@
   // Mark all objects in an object group with at least one marked
   // object, then all objects reachable from marked objects in object
   // groups, and repeat.
-  static void ProcessObjectGroups(MarkingVisitor* visitor);
+  static void ProcessObjectGroups();
 
   // Mark objects reachable (transitively) from objects in the marking stack
   // or overflowed in the heap.
-  static void ProcessMarkingStack(MarkingVisitor* visitor);
+  static void ProcessMarkingStack();
 
   // Mark objects reachable (transitively) from objects in the marking
   // stack.  This function empties the marking stack, but may leave
   // overflowed objects in the heap, in which case the marking stack's
   // overflow flag will be set.
-  static void EmptyMarkingStack(MarkingVisitor* visitor);
+  static void EmptyMarkingStack();
 
   // Refill the marking stack with overflowed objects from the heap.  This
   // function either leaves the marking stack full or clears the overflow
diff --git a/src/messages.js b/src/messages.js
index b0f8aa1..0375e8a 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -707,14 +707,20 @@
   // See if we can find a unique property on the receiver that holds
   // this function.
   var ownName = this.fun.name;
-  if (ownName && this.receiver && this.receiver[ownName] === this.fun)
+  if (ownName && this.receiver &&
+      (ObjectLookupGetter.call(this.receiver, ownName) === this.fun ||
+       ObjectLookupSetter.call(this.receiver, ownName) === this.fun ||
+       this.receiver[ownName] === this.fun)) {
     // To handle DontEnum properties we guess that the method has
     // the same name as the function.
     return ownName;
+  }
   var name = null;
   for (var prop in this.receiver) {
-    if (this.receiver[prop] === this.fun) {
-      // If we find more than one match bail out to avoid confusion
+    if (this.receiver.__lookupGetter__(prop) === this.fun ||
+        this.receiver.__lookupSetter__(prop) === this.fun ||
+        (!this.receiver.__lookupGetter__(prop) && this.receiver[prop] === this.fun)) {
+      // If we find more than one match bail out to avoid confusion.
       if (name)
         return null;
       name = prop;
diff --git a/src/mips/debug-mips.cc b/src/mips/debug-mips.cc
index 47961fa..b8ae68e 100644
--- a/src/mips/debug-mips.cc
+++ b/src/mips/debug-mips.cc
@@ -114,17 +114,12 @@
   masm->Abort("LiveEdit frame dropping is not supported on mips");
 }
 
+
+const bool Debug::kFrameDropperSupported = false;
+
 #undef __
 
 
-Object** Debug::SetUpFrameDropperFrame(StackFrame* bottom_js_frame,
-                                       Handle<Code> code) {
-  UNREACHABLE();
-  return NULL;
-}
-const int Debug::kFrameDropperFrameSize = -1;
-
-
 #endif  // ENABLE_DEBUGGER_SUPPORT
 
 } }  // namespace v8::internal
diff --git a/src/objects-inl.h b/src/objects-inl.h
index c81f4ab..5e8022e 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -1493,6 +1493,16 @@
 }
 
 
+int DescriptorArray::SearchWithCache(String* name) {
+  int number = DescriptorLookupCache::Lookup(this, name);
+  if (number == DescriptorLookupCache::kAbsent) {
+    number = Search(name);
+    DescriptorLookupCache::Update(this, name, number);
+  }
+  return number;
+}
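SearchWithCache is a plain cache-aside lookup: consult the DescriptorLookupCache first, fall back to the real Search on a miss, and record the answer, including the kAbsent sentinel so repeated failed lookups stay cheap. The pattern in a generic, stand-alone form:

#include <cassert>
#include <map>
#include <string>
#include <utility>

const int kAbsent = -1;

typedef std::pair<const void*, std::string> Key;
static std::map<Key, int> cache;

static int Search(const std::string& name) {   // the slow path
  return name == "x" ? 0 : kAbsent;
}

static int SearchWithCache(const void* array, const std::string& name) {
  Key key(array, name);
  std::map<Key, int>::iterator it = cache.find(key);
  if (it != cache.end()) return it->second;    // hit
  int number = Search(name);                   // miss: do the real lookup
  cache[key] = number;                         // remember even kAbsent
  return number;
}

int main() {
  int dummy;
  assert(SearchWithCache(&dummy, "x") == 0);
  assert(SearchWithCache(&dummy, "y") == kAbsent);
  assert(SearchWithCache(&dummy, "x") == 0);   // served from the cache
  return 0;
}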
+
+
 String* DescriptorArray::GetKey(int descriptor_number) {
   ASSERT(descriptor_number < number_of_descriptors());
   return String::cast(get(ToKeyIndex(descriptor_number)));
@@ -2060,21 +2070,8 @@
   ptr[index] = value;
 }
 
-inline Scavenger Map::scavenger() {
-  Scavenger callback = reinterpret_cast<Scavenger>(
-      READ_INTPTR_FIELD(this, kScavengerCallbackOffset));
 
-  ASSERT(callback == Heap::GetScavenger(instance_type(),
-                                        instance_size()));
-
-  return callback;
-}
-
-inline void Map::set_scavenger(Scavenger callback) {
-  WRITE_INTPTR_FIELD(this,
-                     kScavengerCallbackOffset,
-                     reinterpret_cast<intptr_t>(callback));
-}
+INT_ACCESSORS(Map, visitor_id, kScavengerCallbackOffset)
 
 int Map::instance_size() {
   return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
@@ -2099,7 +2096,7 @@
       (kStringTag | kConsStringTag) ||
       instance_type == JS_ARRAY_TYPE) return map->instance_size();
   if (instance_type == FIXED_ARRAY_TYPE) {
-    return reinterpret_cast<FixedArray*>(this)->FixedArraySize();
+    return FixedArray::BodyDescriptor::SizeOf(map, this);
   }
   if (instance_type == BYTE_ARRAY_TYPE) {
     return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
@@ -2661,8 +2658,7 @@
 
 
 bool SharedFunctionInfo::is_compiled() {
-  // TODO(1242782): Create a code kind for uncompiled code.
-  return code()->kind() != Code::STUB;
+  return code() != Builtins::builtin(Builtins::LazyCompile);
 }
 
 
@@ -2773,7 +2769,7 @@
 
 
 bool JSFunction::is_compiled() {
-  return code()->kind() != Code::STUB;
+  return code() != Builtins::builtin(Builtins::LazyCompile);
 }
 
 
@@ -2820,12 +2816,6 @@
 }
 
 
-void Proxy::ProxyIterateBody(ObjectVisitor* visitor) {
-  visitor->VisitExternalReference(
-      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
-}
-
-
 ACCESSORS(JSValue, value, Object, kValueOffset)
 
 
@@ -3309,6 +3299,74 @@
 }
 
 
+int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
+  return map->instance_size();
+}
+
+
+void Proxy::ProxyIterateBody(ObjectVisitor* v) {
+  v->VisitExternalReference(
+      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
+}
+
+
+template<typename StaticVisitor>
+void Proxy::ProxyIterateBody() {
+  StaticVisitor::VisitExternalReference(
+      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
+}
+
+
+void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
+  typedef v8::String::ExternalAsciiStringResource Resource;
+  v->VisitExternalAsciiString(
+      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
+}
+
+
+template<typename StaticVisitor>
+void ExternalAsciiString::ExternalAsciiStringIterateBody() {
+  typedef v8::String::ExternalAsciiStringResource Resource;
+  StaticVisitor::VisitExternalAsciiString(
+      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
+}
+
+
+void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
+  typedef v8::String::ExternalStringResource Resource;
+  v->VisitExternalTwoByteString(
+      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
+}
+
+
+template<typename StaticVisitor>
+void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
+  typedef v8::String::ExternalStringResource Resource;
+  StaticVisitor::VisitExternalTwoByteString(
+      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
+}
+
+#define SLOT_ADDR(obj, offset) \
+  reinterpret_cast<Object**>((obj)->address() + offset)
+
+template<int start_offset, int end_offset, int size>
+void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
+    HeapObject* obj,
+    ObjectVisitor* v) {
+  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
+}
+
+
+template<int start_offset>
+void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
+                                                       int object_size,
+                                                       ObjectVisitor* v) {
+  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
+}
+
+#undef SLOT_ADDR
+
+
 #undef CAST_ACCESSOR
 #undef INT_ACCESSORS
 #undef SMI_ACCESSORS
diff --git a/src/objects-visiting.cc b/src/objects-visiting.cc
new file mode 100644
index 0000000..293c9bf
--- /dev/null
+++ b/src/objects-visiting.cc
@@ -0,0 +1,139 @@
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "ic-inl.h"
+#include "objects-visiting.h"
+
+namespace v8 {
+namespace internal {
+
+
+static inline bool IsShortcutCandidate(int type) {
+  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
+}
+
+
+StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
+    int instance_type,
+    int instance_size) {
+  if (instance_type < FIRST_NONSTRING_TYPE) {
+    switch (instance_type & kStringRepresentationMask) {
+      case kSeqStringTag:
+        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
+          return kVisitSeqAsciiString;
+        } else {
+          return kVisitSeqTwoByteString;
+        }
+
+      case kConsStringTag:
+        if (IsShortcutCandidate(instance_type)) {
+          return kVisitShortcutCandidate;
+        } else {
+          return kVisitConsString;
+        }
+
+      case kExternalStringTag:
+        return GetVisitorIdForSize(kVisitDataObject,
+                                   kVisitDataObjectGeneric,
+                                   ExternalString::kSize);
+    }
+    UNREACHABLE();
+  }
+
+  switch (instance_type) {
+    case BYTE_ARRAY_TYPE:
+      return kVisitByteArray;
+
+    case FIXED_ARRAY_TYPE:
+      return kVisitFixedArray;
+
+    case ODDBALL_TYPE:
+      return kVisitOddball;
+
+    case MAP_TYPE:
+      return kVisitMap;
+
+    case CODE_TYPE:
+      return kVisitCode;
+
+    case JS_GLOBAL_PROPERTY_CELL_TYPE:
+      return kVisitPropertyCell;
+
+    case SHARED_FUNCTION_INFO_TYPE:
+      return kVisitSharedFunctionInfo;
+
+    case PROXY_TYPE:
+      return GetVisitorIdForSize(kVisitDataObject,
+                                 kVisitDataObjectGeneric,
+                                 Proxy::kSize);
+
+    case FILLER_TYPE:
+      return kVisitDataObjectGeneric;
+
+    case JS_OBJECT_TYPE:
+    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
+    case JS_VALUE_TYPE:
+    case JS_ARRAY_TYPE:
+    case JS_REGEXP_TYPE:
+    case JS_FUNCTION_TYPE:
+    case JS_GLOBAL_PROXY_TYPE:
+    case JS_GLOBAL_OBJECT_TYPE:
+    case JS_BUILTINS_OBJECT_TYPE:
+      return GetVisitorIdForSize(kVisitJSObject,
+                                 kVisitJSObjectGeneric,
+                                 instance_size);
+
+    case HEAP_NUMBER_TYPE:
+    case PIXEL_ARRAY_TYPE:
+    case EXTERNAL_BYTE_ARRAY_TYPE:
+    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
+    case EXTERNAL_SHORT_ARRAY_TYPE:
+    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
+    case EXTERNAL_INT_ARRAY_TYPE:
+    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
+    case EXTERNAL_FLOAT_ARRAY_TYPE:
+      return GetVisitorIdForSize(kVisitDataObject,
+                                 kVisitDataObjectGeneric,
+                                 instance_size);
+
+#define MAKE_STRUCT_CASE(NAME, Name, name) \
+        case NAME##_TYPE:
+      STRUCT_LIST(MAKE_STRUCT_CASE)
+#undef MAKE_STRUCT_CASE
+          return GetVisitorIdForSize(kVisitStruct,
+                                     kVisitStructGeneric,
+                                     instance_size);
+
+    default:
+      UNREACHABLE();
+      return kVisitorIdCount;
+  }
+}
+
+} }  // namespace v8::internal
diff --git a/src/objects-visiting.h b/src/objects-visiting.h
new file mode 100644
index 0000000..6280bac
--- /dev/null
+++ b/src/objects-visiting.h
@@ -0,0 +1,382 @@
+// Copyright 2006-2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_OBJECTS_ITERATION_H_
+#define V8_OBJECTS_ITERATION_H_
+
+// This file provides base classes and auxiliary methods for defining
+// static object visitors used during GC.
+// Visiting a HeapObject body with a normal ObjectVisitor requires two
+// switches on the object's instance type to determine object size and
+// layout, plus one or more virtual method calls on the visitor itself.
+// A static visitor is different: it provides a dispatch table containing
+// pointers to specialized visit functions. Each map has a visitor_id
+// field holding the index of the specialized visitor to use.
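+//
+// A sketch of the intended dispatch (assuming a visitor class derived
+// from StaticNewSpaceVisitor below and already Initialize()d):
+//
+//   Map* map = object->map();
+//   int size = SomeVisitor::IterateBody(map, object);
+//
+// IterateBody fetches the callback registered for map->visitor_id()
+// from the dispatch table and calls it directly: no switch on the
+// instance type and no virtual calls.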
+
+namespace v8 {
+namespace internal {
+
+
+// Base class for all static visitors.
+class StaticVisitorBase : public AllStatic {
+ public:
+  enum VisitorId {
+    kVisitSeqAsciiString = 0,
+    kVisitSeqTwoByteString,
+    kVisitShortcutCandidate,
+    kVisitByteArray,
+    kVisitFixedArray,
+
+    // For data objects, JS objects and structs we provide, in addition
+    // to a generic visitor that can visit an object of any size, a set
+    // of visitors specialized by object size in words.
+    // Ids of the specialized visitors are declared in a linear order
+    // (without holes), starting from the id of the visitor specialized
+    // for 2-word objects (the base visitor id) and ending with the id
+    // of the generic visitor.
+    // GetVisitorIdForSize depends on this ordering to calculate the id
+    // of the specialized visitor from a given instance size, the base
+    // visitor id and the generic visitor id.
+
+    kVisitDataObject,
+    kVisitDataObject2 = kVisitDataObject,
+    kVisitDataObject3,
+    kVisitDataObject4,
+    kVisitDataObject5,
+    kVisitDataObject6,
+    kVisitDataObject7,
+    kVisitDataObject8,
+    kVisitDataObject9,
+    kVisitDataObjectGeneric,
+
+    kVisitJSObject,
+    kVisitJSObject2 = kVisitJSObject,
+    kVisitJSObject3,
+    kVisitJSObject4,
+    kVisitJSObject5,
+    kVisitJSObject6,
+    kVisitJSObject7,
+    kVisitJSObject8,
+    kVisitJSObject9,
+    kVisitJSObjectGeneric,
+
+    kVisitStruct,
+    kVisitStruct2 = kVisitStruct,
+    kVisitStruct3,
+    kVisitStruct4,
+    kVisitStruct5,
+    kVisitStruct6,
+    kVisitStruct7,
+    kVisitStruct8,
+    kVisitStruct9,
+    kVisitStructGeneric,
+
+    kVisitConsString,
+    kVisitOddball,
+    kVisitCode,
+    kVisitMap,
+    kVisitPropertyCell,
+    kVisitSharedFunctionInfo,
+
+    kVisitorIdCount,
+    kMinObjectSizeInWords = 2
+  };
+
+  // Determine which specialized visitor should be used for the given
+  // instance type and instance size.
+  static VisitorId GetVisitorId(int instance_type, int instance_size);
+
+  static VisitorId GetVisitorId(Map* map) {
+    return GetVisitorId(map->instance_type(), map->instance_size());
+  }
+
+  // For visitors that allow specialization by size calculate VisitorId based
+  // on size, base visitor id and generic visitor id.
+  static VisitorId GetVisitorIdForSize(VisitorId base,
+                                       VisitorId generic,
+                                       int object_size) {
+    ASSERT((base == kVisitDataObject) ||
+           (base == kVisitStruct) ||
+           (base == kVisitJSObject));
+    ASSERT(IsAligned(object_size, kPointerSize));
+    ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
+    ASSERT(object_size < Page::kMaxHeapObjectSize);
+
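+    // Example (assuming 32-bit pointers, so kPointerSizeLog2 == 2):
+    // a 16-byte object is 4 words, giving base + 4 - 2, e.g.
+    // kVisitJSObject4 when base is kVisitJSObject. Sizes of 10 words
+    // or more clamp to the generic visitor via Min() below.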
+    const VisitorId specialization = static_cast<VisitorId>(
+        base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);
+
+    return Min(specialization, generic);
+  }
+};
+
+
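+// Table of visit callbacks indexed by visitor id; GetVisitor maps a
+// heap object's map to its callback with a single array load.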
+template<typename Callback>
+class VisitorDispatchTable {
+ public:
+  inline Callback GetVisitor(Map* map) {
+    return callbacks_[map->visitor_id()];
+  }
+
+  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
+    ASSERT((0 <= id) && (id < StaticVisitorBase::kVisitorIdCount));
+    callbacks_[id] = callback;
+  }
+
+  template<typename Visitor,
+           StaticVisitorBase::VisitorId base,
+           StaticVisitorBase::VisitorId generic,
+           int object_size_in_words>
+  void RegisterSpecialization() {
+    static const int size = object_size_in_words * kPointerSize;
+    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
+             &Visitor::template VisitSpecialized<size>);
+  }
+
+
+  template<typename Visitor,
+           StaticVisitorBase::VisitorId base,
+           StaticVisitorBase::VisitorId generic>
+  void RegisterSpecializations() {
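+    // The base..generic range must cover exactly the specializations
+    // for 2..9 word objects plus the generic id (see VisitorId above).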
+    STATIC_ASSERT(
+        (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
+    RegisterSpecialization<Visitor, base, generic, 2>();
+    RegisterSpecialization<Visitor, base, generic, 3>();
+    RegisterSpecialization<Visitor, base, generic, 4>();
+    RegisterSpecialization<Visitor, base, generic, 5>();
+    RegisterSpecialization<Visitor, base, generic, 6>();
+    RegisterSpecialization<Visitor, base, generic, 7>();
+    RegisterSpecialization<Visitor, base, generic, 8>();
+    RegisterSpecialization<Visitor, base, generic, 9>();
+    Register(generic, &Visitor::Visit);
+  }
+
+ private:
+  Callback callbacks_[StaticVisitorBase::kVisitorIdCount];
+};
+
+
+template<typename StaticVisitor>
+class BodyVisitorBase : public AllStatic {
+ public:
+  static inline void IteratePointers(HeapObject* object,
+                                     int start_offset,
+                                     int end_offset) {
+    Object** start_slot = reinterpret_cast<Object**>(object->address() +
+                                                     start_offset);
+    Object** end_slot = reinterpret_cast<Object**>(object->address() +
+                                                   end_offset);
+    StaticVisitor::VisitPointers(start_slot, end_slot);
+  }
+};
+
+
+template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
+class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
+ public:
+  static inline ReturnType Visit(Map* map, HeapObject* object) {
+    int object_size = BodyDescriptor::SizeOf(map, object);
+    BodyVisitorBase<StaticVisitor>::IteratePointers(
+        object, BodyDescriptor::kStartOffset, object_size);
+    return static_cast<ReturnType>(object_size);
+  }
+
+  template<int object_size>
+  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
+    BodyVisitorBase<StaticVisitor>::IteratePointers(
+        object, BodyDescriptor::kStartOffset, object_size);
+    return static_cast<ReturnType>(object_size);
+  }
+};
+
+
+template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
+class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
+ public:
+  static inline ReturnType Visit(Map* map, HeapObject* object) {
+    BodyVisitorBase<StaticVisitor>::IteratePointers(
+        object, BodyDescriptor::kStartOffset, BodyDescriptor::kEndOffset);
+    return static_cast<ReturnType>(BodyDescriptor::kSize);
+  }
+};
+
+
+// Base class for visitors used for a linear new space iteration.
+// IterateBody returns the size of the visited object.
+// Certain types of objects (e.g. Code objects) are not handled
+// by the dispatch table of this visitor because they cannot appear
+// in new space.
+//
+// This class is intended to be used in the following way:
+//
+//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
+//     ...
+//   };
+//
+// This is an example of the curiously recurring template pattern (see
+// http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
+// We use CRTP to guarantee aggressive compile-time optimizations (i.e.
+// inlining and specialization of StaticVisitor::VisitPointers methods).
+template<typename StaticVisitor>
+class StaticNewSpaceVisitor : public StaticVisitorBase {
+ public:
+  static void Initialize() {
+    table_.Register(kVisitShortcutCandidate,
+                    &FixedBodyVisitor<StaticVisitor,
+                                      ConsString::BodyDescriptor,
+                                      int>::Visit);
+
+    table_.Register(kVisitConsString,
+                    &FixedBodyVisitor<StaticVisitor,
+                                      ConsString::BodyDescriptor,
+                                      int>::Visit);
+
+    table_.Register(kVisitFixedArray,
+                    &FlexibleBodyVisitor<StaticVisitor,
+                                         FixedArray::BodyDescriptor,
+                                         int>::Visit);
+
+    table_.Register(kVisitByteArray, &VisitByteArray);
+
+    table_.Register(kVisitSharedFunctionInfo,
+                    &FixedBodyVisitor<StaticVisitor,
+                                      SharedFunctionInfo::BodyDescriptor,
+                                      int>::Visit);
+
+    table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);
+
+    table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
+
+    table_.RegisterSpecializations<DataObjectVisitor,
+                                   kVisitDataObject,
+                                   kVisitDataObjectGeneric>();
+    table_.RegisterSpecializations<JSObjectVisitor,
+                                   kVisitJSObject,
+                                   kVisitJSObjectGeneric>();
+    table_.RegisterSpecializations<StructVisitor,
+                                   kVisitStruct,
+                                   kVisitStructGeneric>();
+  }
+
+  static inline int IterateBody(Map* map, HeapObject* obj) {
+    return table_.GetVisitor(map)(map, obj);
+  }
+
+  static inline void VisitPointers(Object** start, Object** end) {
+    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(p);
+  }
+
+ private:
+  static inline int VisitByteArray(Map* map, HeapObject* object) {
+    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
+  }
+
+  static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
+    return SeqAsciiString::cast(object)->
+        SeqAsciiStringSize(map->instance_type());
+  }
+
+  static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
+    return SeqTwoByteString::cast(object)->
+        SeqTwoByteStringSize(map->instance_type());
+  }
+
+  class DataObjectVisitor {
+   public:
+    template<int object_size>
+    static inline int VisitSpecialized(Map* map, HeapObject* object) {
+      return object_size;
+    }
+
+    static inline int Visit(Map* map, HeapObject* object) {
+      return map->instance_size();
+    }
+  };
+
+  typedef FlexibleBodyVisitor<StaticVisitor,
+                              StructBodyDescriptor,
+                              int> StructVisitor;
+
+  typedef FlexibleBodyVisitor<StaticVisitor,
+                              JSObject::BodyDescriptor,
+                              int> JSObjectVisitor;
+
+  typedef int (*Callback)(Map* map, HeapObject* object);
+
+  static VisitorDispatchTable<Callback> table_;
+};
+
+
+template<typename StaticVisitor>
+VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
+  StaticNewSpaceVisitor<StaticVisitor>::table_;
+
+
+void Code::CodeIterateBody(ObjectVisitor* v) {
+  int mode_mask = RelocInfo::kCodeTargetMask |
+                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
+                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
+                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
+                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
+                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
+
+  // Construct the RelocIterator from the relocation info pointer
+  // before the slot holding that pointer is visited (and possibly
+  // updated) by the heap compaction in the next statement.
+  RelocIterator it(this, mode_mask);
+
+  IteratePointers(v,
+                  kRelocationInfoOffset,
+                  kRelocationInfoOffset + kPointerSize);
+
+  for (; !it.done(); it.next()) {
+    it.rinfo()->Visit(v);
+  }
+}
+
+
+template<typename StaticVisitor>
+void Code::CodeIterateBody() {
+  int mode_mask = RelocInfo::kCodeTargetMask |
+                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
+                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
+                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
+                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
+                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
+
+  // Construct the RelocIterator from the relocation info pointer
+  // before the slot holding that pointer is visited (and possibly
+  // updated) by the heap compaction in the next statement.
+  RelocIterator it(this, mode_mask);
+
+  StaticVisitor::VisitPointer(
+      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
+
+  for (; !it.done(); it.next()) {
+    it.rinfo()->template Visit<StaticVisitor>();
+  }
+}
+
+
+} }  // namespace v8::internal
+
+#endif  // V8_OBJECTS_ITERATION_H_
diff --git a/src/objects.cc b/src/objects.cc
index aabb041..5687a3a 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -33,6 +33,7 @@
 #include "debug.h"
 #include "execution.h"
 #include "objects-inl.h"
+#include "objects-visiting.h"
 #include "macro-assembler.h"
 #include "scanner.h"
 #include "scopeinfo.h"
@@ -1042,7 +1043,7 @@
 
   switch (instance_type) {
     case FIXED_ARRAY_TYPE:
-      return reinterpret_cast<FixedArray*>(this)->FixedArraySize();
+      return FixedArray::BodyDescriptor::SizeOf(map, this);
     case BYTE_ARRAY_TYPE:
       return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
     case CODE_TYPE:
@@ -1073,7 +1074,7 @@
       case kSeqStringTag:
         break;
       case kConsStringTag:
-        reinterpret_cast<ConsString*>(this)->ConsStringIterateBody(v);
+        ConsString::BodyDescriptor::IterateBody(this, v);
         break;
       case kExternalStringTag:
         if ((type & kStringEncodingMask) == kAsciiStringTag) {
@@ -1090,7 +1091,7 @@
 
   switch (type) {
     case FIXED_ARRAY_TYPE:
-      reinterpret_cast<FixedArray*>(this)->FixedArrayIterateBody(v);
+      FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
       break;
     case JS_OBJECT_TYPE:
     case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
@@ -1101,23 +1102,22 @@
     case JS_GLOBAL_PROXY_TYPE:
     case JS_GLOBAL_OBJECT_TYPE:
     case JS_BUILTINS_OBJECT_TYPE:
-      reinterpret_cast<JSObject*>(this)->JSObjectIterateBody(object_size, v);
+      JSObject::BodyDescriptor::IterateBody(this, object_size, v);
       break;
     case ODDBALL_TYPE:
-      reinterpret_cast<Oddball*>(this)->OddballIterateBody(v);
+      Oddball::BodyDescriptor::IterateBody(this, v);
       break;
     case PROXY_TYPE:
       reinterpret_cast<Proxy*>(this)->ProxyIterateBody(v);
       break;
     case MAP_TYPE:
-      reinterpret_cast<Map*>(this)->MapIterateBody(v);
+      Map::BodyDescriptor::IterateBody(this, v);
       break;
     case CODE_TYPE:
       reinterpret_cast<Code*>(this)->CodeIterateBody(v);
       break;
     case JS_GLOBAL_PROPERTY_CELL_TYPE:
-      reinterpret_cast<JSGlobalPropertyCell*>(this)
-          ->JSGlobalPropertyCellIterateBody(v);
+      JSGlobalPropertyCell::BodyDescriptor::IterateBody(this, v);
       break;
     case HEAP_NUMBER_TYPE:
     case FILLER_TYPE:
@@ -1131,16 +1131,15 @@
     case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
     case EXTERNAL_FLOAT_ARRAY_TYPE:
       break;
-    case SHARED_FUNCTION_INFO_TYPE: {
-      SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(this);
-      shared->SharedFunctionInfoIterateBody(v);
+    case SHARED_FUNCTION_INFO_TYPE:
+      SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
       break;
-    }
+
 #define MAKE_STRUCT_CASE(NAME, Name, name) \
         case NAME##_TYPE:
       STRUCT_LIST(MAKE_STRUCT_CASE)
 #undef MAKE_STRUCT_CASE
-      IterateStructBody(object_size, v);
+      StructBodyDescriptor::IterateBody(this, object_size, v);
       break;
     default:
       PrintF("Unknown type: %d\n", type);
@@ -1156,11 +1155,23 @@
 
 Object* HeapNumber::HeapNumberToBoolean() {
   // NaN, +0, and -0 should return the false object
-  switch (fpclassify(value())) {
-    case FP_NAN:  // fall through
-    case FP_ZERO: return Heap::false_value();
-    default: return Heap::true_value();
+#if __BYTE_ORDER == __LITTLE_ENDIAN
+  union IeeeDoubleLittleEndianArchType u;
+#elif __BYTE_ORDER == __BIG_ENDIAN
+  union IeeeDoubleBigEndianArchType u;
+#endif
+  u.d = value();
+  if (u.bits.exp == 2047) {
+    // Detect NaN for IEEE double precision floating point.
+    if ((u.bits.man_low | u.bits.man_high) != 0)
+      return Heap::false_value();
   }
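+  // (An exponent of 2047 with a zero mantissa is an infinity, which
+  // falls through to true; a zero exponent with a nonzero mantissa is
+  // a denormal, which is also true.)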
+  if (u.bits.exp == 0) {
+    // Detect +0, and -0 for IEEE double precision floating point.
+    if ((u.bits.man_low | u.bits.man_high) == 0)
+      return Heap::false_value();
+  }
+  return Heap::true_value();
 }
 
 
@@ -1209,12 +1220,6 @@
 }
 
 
-void JSObject::JSObjectIterateBody(int object_size, ObjectVisitor* v) {
-  // Iterate over all fields in the body. Assumes all are Object*.
-  IteratePointers(v, kPropertiesOffset, object_size);
-}
-
-
 Object* JSObject::AddFastPropertyUsingMap(Map* new_map,
                                           String* name,
                                           Object* value) {
@@ -1337,7 +1342,7 @@
   if (attributes != NONE) {
     return function;
   }
-  ConstTransitionDescriptor mark(name);
+  ConstTransitionDescriptor mark(name, Map::cast(new_map));
   new_descriptors =
       old_map->instance_descriptors()->CopyInsert(&mark, KEEP_TRANSITIONS);
   if (new_descriptors->IsFailure()) {
@@ -1695,11 +1700,7 @@
 
 void JSObject::LookupInDescriptor(String* name, LookupResult* result) {
   DescriptorArray* descriptors = map()->instance_descriptors();
-  int number = DescriptorLookupCache::Lookup(descriptors, name);
-  if (number == DescriptorLookupCache::kAbsent) {
-    number = descriptors->Search(name);
-    DescriptorLookupCache::Update(descriptors, name, number);
-  }
+  int number = descriptors->SearchWithCache(name);
   if (number != DescriptorArray::kNotFound) {
     result->DescriptorResult(this, descriptors->GetDetails(number), number);
   } else {
@@ -1817,8 +1818,10 @@
     }
   }
 
+  HandleScope scope;
+  Handle<Object> value_handle(value);
   Top::ReportFailedAccessCheck(this, v8::ACCESS_SET);
-  return value;
+  return *value_handle;
 }
 
 
@@ -1896,10 +1899,25 @@
                                      result->holder());
     case INTERCEPTOR:
       return SetPropertyWithInterceptor(name, value, attributes);
-    case CONSTANT_TRANSITION:
-      // Replace with a MAP_TRANSITION to a new map with a FIELD, even
-      // if the value is a function.
+    case CONSTANT_TRANSITION: {
+      // If the same constant function is being added we can simply
+      // transition to the target map.
+      Map* target_map = result->GetTransitionMap();
+      DescriptorArray* target_descriptors = target_map->instance_descriptors();
+      int number = target_descriptors->SearchWithCache(name);
+      ASSERT(number != DescriptorArray::kNotFound);
+      ASSERT(target_descriptors->GetType(number) == CONSTANT_FUNCTION);
+      JSFunction* function =
+          JSFunction::cast(target_descriptors->GetValue(number));
+      ASSERT(!Heap::InNewSpace(function));
+      if (value == function) {
+        set_map(target_map);
+        return value;
+      }
+      // Otherwise, replace with a MAP_TRANSITION to a new map with a
+      // FIELD, even if the value is a constant function.
       return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
+    }
     case NULL_DESCRIPTOR:
       return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
     default:
@@ -2190,8 +2208,7 @@
     int new_instance_size = map()->instance_size() - instance_size_delta;
     new_map->set_inobject_properties(0);
     new_map->set_instance_size(new_instance_size);
-    new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
-                                              new_map->instance_size()));
+    new_map->set_visitor_id(StaticVisitorBase::GetVisitorId(new_map));
     Heap::CreateFillerObjectAt(this->address() + new_instance_size,
                                instance_size_delta);
   }
@@ -3407,11 +3424,6 @@
 }
 
 
-void FixedArray::FixedArrayIterateBody(ObjectVisitor* v) {
-  IteratePointers(v, kHeaderSize, kHeaderSize + length() * kPointerSize);
-}
-
-
 static bool HasKey(FixedArray* array, Object* key) {
   int len0 = array->length();
   for (int i = 0; i < len0; i++) {
@@ -4501,16 +4513,6 @@
 }
 
 
-void ConsString::ConsStringIterateBody(ObjectVisitor* v) {
-  IteratePointers(v, kFirstOffset, kSecondOffset + kPointerSize);
-}
-
-
-void JSGlobalPropertyCell::JSGlobalPropertyCellIterateBody(ObjectVisitor* v) {
-  IteratePointers(v, kValueOffset, kValueOffset + kPointerSize);
-}
-
-
 uint16_t ConsString::ConsStringGet(int index) {
   ASSERT(index >= 0 && index < this->length());
 
@@ -4614,24 +4616,6 @@
 }
 
 
-#define FIELD_ADDR(p, offset) \
-  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
-
-void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
-  typedef v8::String::ExternalAsciiStringResource Resource;
-  v->VisitExternalAsciiString(
-      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
-}
-
-
-void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
-  typedef v8::String::ExternalStringResource Resource;
-  v->VisitExternalTwoByteString(
-      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
-}
-
-#undef FIELD_ADDR
-
 template <typename IteratorA, typename IteratorB>
 static inline bool CompareStringContents(IteratorA* ia, IteratorB* ib) {
   // General slow case check.  We know that the ia and ib iterators
@@ -4977,7 +4961,8 @@
 void Map::CreateBackPointers() {
   DescriptorArray* descriptors = instance_descriptors();
   for (int i = 0; i < descriptors->number_of_descriptors(); i++) {
-    if (descriptors->GetType(i) == MAP_TRANSITION) {
+    if (descriptors->GetType(i) == MAP_TRANSITION ||
+        descriptors->GetType(i) == CONSTANT_TRANSITION) {
       // Get target.
       Map* target = Map::cast(descriptors->GetValue(i));
 #ifdef DEBUG
@@ -5018,7 +5003,8 @@
     // map is not reached again by following a back pointer from a
     // non-live object.
     PropertyDetails details(Smi::cast(contents->get(i + 1)));
-    if (details.type() == MAP_TRANSITION) {
+    if (details.type() == MAP_TRANSITION ||
+        details.type() == CONSTANT_TRANSITION) {
       Map* target = reinterpret_cast<Map*>(contents->get(i));
       ASSERT(target->IsHeapObject());
       if (!target->IsMarked()) {
@@ -5035,12 +5021,6 @@
 }
 
 
-void Map::MapIterateBody(ObjectVisitor* v) {
-  // Assumes all Object* members are contiguously allocated!
-  IteratePointers(v, kPointerFieldsBeginOffset, kPointerFieldsEndOffset);
-}
-
-
 Object* JSFunction::SetInstancePrototype(Object* value) {
   ASSERT(value->IsJSObject());
 
@@ -5104,12 +5084,6 @@
 }
 
 
-void Oddball::OddballIterateBody(ObjectVisitor* v) {
-  // Assumes all Object* members are contiguously allocated!
-  IteratePointers(v, kToStringOffset, kToNumberOffset + kPointerSize);
-}
-
-
 Object* Oddball::Initialize(const char* to_string, Object* to_number) {
   Object* symbol = Heap::LookupAsciiSymbol(to_string);
   if (symbol->IsFailure()) return symbol;
@@ -5282,13 +5256,6 @@
 }
 
 
-void SharedFunctionInfo::SharedFunctionInfoIterateBody(ObjectVisitor* v) {
-  IteratePointers(v,
-                  kNameOffset,
-                  kThisPropertyAssignmentsOffset + kPointerSize);
-}
-
-
 void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
   ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
   Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
@@ -5310,28 +5277,6 @@
 }
 
 
-void Code::CodeIterateBody(ObjectVisitor* v) {
-  int mode_mask = RelocInfo::kCodeTargetMask |
-                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
-                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
-                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
-                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
-                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
-
-  // Use the relocation info pointer before it is visited by
-  // the heap compaction in the next statement.
-  RelocIterator it(this, mode_mask);
-
-  IteratePointers(v,
-                  kRelocationInfoOffset,
-                  kRelocationInfoOffset + kPointerSize);
-
-  for (; !it.done(); it.next()) {
-    it.rinfo()->Visit(v);
-  }
-}
-
-
 void Code::Relocate(intptr_t delta) {
   for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
     it.rinfo()->apply(delta);
@@ -5823,23 +5768,18 @@
   CustomArguments args(interceptor->data(), receiver, this);
   v8::AccessorInfo info(args.end());
   if (!interceptor->query()->IsUndefined()) {
-    v8::IndexedPropertyQueryImpl query =
-        v8::ToCData<v8::IndexedPropertyQueryImpl>(interceptor->query());
+    v8::IndexedPropertyQuery query =
+        v8::ToCData<v8::IndexedPropertyQuery>(interceptor->query());
     LOG(ApiIndexedPropertyAccess("interceptor-indexed-has", this, index));
-    v8::Handle<v8::Value> result;
+    v8::Handle<v8::Integer> result;
     {
       // Leaving JavaScript.
       VMState state(EXTERNAL);
       result = query(index, info);
     }
     if (!result.IsEmpty()) {
-      // IsBoolean check would be removed when transition to new API is over.
-      if (result->IsBoolean()) {
-        return result->IsTrue() ? true : false;
-      } else {
-        ASSERT(result->IsInt32());
-        return true;  // absence of property is signaled by empty handle.
-      }
+      ASSERT(result->IsInt32());
+      return true;  // Absence of a property is signaled by an empty handle.
     }
   } else if (!interceptor->getter()->IsUndefined()) {
     v8::IndexedPropertyGetter getter =
@@ -6159,8 +6099,10 @@
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
       !Top::MayIndexedAccess(this, index, v8::ACCESS_SET)) {
+    HandleScope scope;
+    Handle<Object> value_handle(value);
     Top::ReportFailedAccessCheck(this, v8::ACCESS_SET);
-    return value;
+    return *value_handle;
   }
 
   if (IsJSGlobalProxy()) {
diff --git a/src/objects.h b/src/objects.h
index 1ca3003..d2f6d35 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -1106,6 +1106,51 @@
 };
 
 
+#define SLOT_ADDR(obj, offset) \
+  reinterpret_cast<Object**>((obj)->address() + offset)
+
+// This class describes the body of a fixed-size object whose pointer
+// fields are all located in the [start_offset, end_offset) interval.
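+// (ConsString::BodyDescriptor below is a typical instantiation.)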
+template<int start_offset, int end_offset, int size>
+class FixedBodyDescriptor {
+ public:
+  static const int kStartOffset = start_offset;
+  static const int kEndOffset = end_offset;
+  static const int kSize = size;
+
+  static inline void IterateBody(HeapObject* obj, ObjectVisitor* v);
+
+  template<typename StaticVisitor>
+  static inline void IterateBody(HeapObject* obj) {
+    StaticVisitor::VisitPointers(SLOT_ADDR(obj, start_offset),
+                                 SLOT_ADDR(obj, end_offset));
+  }
+};
+
+
+// This class describes the body of a variable-size object whose
+// pointer fields are all located in the [start_offset, object_size)
+// interval.
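+// (FixedArray::BodyDescriptor below is a typical instantiation.)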
+template<int start_offset>
+class FlexibleBodyDescriptor {
+ public:
+  static const int kStartOffset = start_offset;
+
+  static inline void IterateBody(HeapObject* obj,
+                                 int object_size,
+                                 ObjectVisitor* v);
+
+  template<typename StaticVisitor>
+  static inline void IterateBody(HeapObject* obj, int object_size) {
+    StaticVisitor::VisitPointers(SLOT_ADDR(obj, start_offset),
+                                 SLOT_ADDR(obj, object_size));
+  }
+};
+
+#undef SLOT_ADDR
+
+
 // The HeapNumber class describes heap allocated numbers that cannot be
 // represented in a Smi (small integer)
 class HeapNumber: public HeapObject {
@@ -1522,7 +1567,6 @@
 
 
   // Dispatched behavior.
-  void JSObjectIterateBody(int object_size, ObjectVisitor* v);
   void JSObjectShortPrint(StringStream* accumulator);
 #ifdef DEBUG
   void JSObjectPrint();
@@ -1578,6 +1622,11 @@
 
   STATIC_CHECK(kHeaderSize == Internals::kJSObjectHeaderSize);
 
+  class BodyDescriptor : public FlexibleBodyDescriptor<kPropertiesOffset> {
+   public:
+    static inline int SizeOf(Map* map, HeapObject* object);
+  };
+
  private:
   Object* GetElementWithCallback(Object* receiver,
                                  Object* structure,
@@ -1692,8 +1741,6 @@
   static const int kMaxLength = (kMaxSize - kHeaderSize) / kPointerSize;
 
   // Dispatched behavior.
-  int FixedArraySize() { return SizeFor(length()); }
-  void FixedArrayIterateBody(ObjectVisitor* v);
 #ifdef DEBUG
   void FixedArrayPrint();
   void FixedArrayVerify();
@@ -1711,6 +1758,13 @@
   // object, the prefix of this array is sorted.
   void SortPairs(FixedArray* numbers, uint32_t len);
 
+  class BodyDescriptor : public FlexibleBodyDescriptor<kHeaderSize> {
+   public:
+    static inline int SizeOf(Map* map, HeapObject* object) {
+      return SizeFor(reinterpret_cast<FixedArray*>(object)->length());
+    }
+  };
+
  protected:
   // Set operation on FixedArray without using write barriers. Can
   // only be used for storing old space objects or smis.
@@ -1811,6 +1865,10 @@
   // Search the instance descriptors for given name.
   inline int Search(String* name);
 
+  // As the above, but uses DescriptorLookupCache and updates it when
+  // necessary.
+  inline int SearchWithCache(String* name);
+
   // Tells whether the name is present in the array.
   bool Contains(String* name) { return kNotFound != Search(name); }
 
@@ -2426,7 +2484,9 @@
   static inline ByteArray* cast(Object* obj);
 
   // Dispatched behavior.
-  int ByteArraySize() { return SizeFor(length()); }
+  inline int ByteArraySize() {
+    return SizeFor(this->length());
+  }
 #ifdef DEBUG
   void ByteArrayPrint();
   void ByteArrayVerify();
@@ -2847,7 +2907,10 @@
 
   // Dispatched behavior.
   int CodeSize() { return SizeFor(body_size()); }
-  void CodeIterateBody(ObjectVisitor* v);
+  inline void CodeIterateBody(ObjectVisitor* v);
+
+  template<typename StaticVisitor>
+  inline void CodeIterateBody();
 #ifdef DEBUG
   void CodePrint();
   void CodeVerify();
@@ -2893,7 +2956,6 @@
   DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
 };
 
-typedef void (*Scavenger)(Map* map, HeapObject** slot, HeapObject* object);
 
 // All heap objects have a Map that describes their structure.
 //  A Map contains information about:
@@ -3089,18 +3151,13 @@
   void ClearNonLiveTransitions(Object* real_prototype);
 
   // Dispatched behavior.
-  void MapIterateBody(ObjectVisitor* v);
 #ifdef DEBUG
   void MapPrint();
   void MapVerify();
 #endif
 
-  inline Scavenger scavenger();
-  inline void set_scavenger(Scavenger callback);
-
-  inline void Scavenge(HeapObject** slot, HeapObject* obj) {
-    scavenger()(this, slot, obj);
-  }
+  inline int visitor_id();
+  inline void set_visitor_id(int visitor_id);
 
   static const int kMaxPreAllocatedPropertyFields = 255;
 
@@ -3154,12 +3211,17 @@
   static const int kIsExtensible = 0;
   static const int kFunctionWithPrototype = 1;
   static const int kHasFastElements = 2;
+  static const int kStringWrapperSafeForDefaultValueOf = 3;
 
   // Layout of the default cache. It holds alternating name and code objects.
   static const int kCodeCacheEntrySize = 2;
   static const int kCodeCacheEntryNameOffset = 0;
   static const int kCodeCacheEntryCodeOffset = 1;
 
+  typedef FixedBodyDescriptor<kPointerFieldsBeginOffset,
+                              kPointerFieldsEndOffset,
+                              kSize> BodyDescriptor;
+
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(Map);
 };
@@ -3414,7 +3476,6 @@
   int CalculateInObjectProperties();
 
   // Dispatched behavior.
-  void SharedFunctionInfoIterateBody(ObjectVisitor* v);
   // Set max_length to -1 for unlimited length.
   void SourceCodePrint(StringStream* accumulator, int max_length);
 #ifdef DEBUG
@@ -3503,6 +3564,10 @@
 #endif
   static const int kAlignedSize = POINTER_SIZE_ALIGN(kSize);
 
+  typedef FixedBodyDescriptor<kNameOffset,
+                              kThisPropertyAssignmentsOffset + kPointerSize,
+                              kSize> BodyDescriptor;
+
  private:
   // Bit positions in start_position_and_type.
   // The source code start position is in the 30 most significant bits of
@@ -4553,11 +4618,6 @@
   // Casting.
   static inline ConsString* cast(Object* obj);
 
-  // Garbage collection support.  This method is called during garbage
-  // collection to iterate through the heap pointers in the body of
-  // the ConsString.
-  void ConsStringIterateBody(ObjectVisitor* v);
-
   // Layout description.
   static const int kFirstOffset = POINTER_SIZE_ALIGN(String::kSize);
   static const int kSecondOffset = kFirstOffset + kPointerSize;
@@ -4574,6 +4634,9 @@
   // Minimum length for a cons string.
   static const int kMinLength = 13;
 
+  typedef FixedBodyDescriptor<kFirstOffset, kSecondOffset + kPointerSize, kSize>
+          BodyDescriptor;
+
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(ConsString);
 };
@@ -4623,7 +4686,10 @@
   static inline ExternalAsciiString* cast(Object* obj);
 
   // Garbage collection support.
-  void ExternalAsciiStringIterateBody(ObjectVisitor* v);
+  inline void ExternalAsciiStringIterateBody(ObjectVisitor* v);
+
+  template<typename StaticVisitor>
+  inline void ExternalAsciiStringIterateBody();
 
   // Support for StringInputBuffer.
   const unibrow::byte* ExternalAsciiStringReadBlock(unsigned* remaining,
@@ -4660,7 +4726,11 @@
   static inline ExternalTwoByteString* cast(Object* obj);
 
   // Garbage collection support.
-  void ExternalTwoByteStringIterateBody(ObjectVisitor* v);
+  inline void ExternalTwoByteStringIterateBody(ObjectVisitor* v);
+
+  template<typename StaticVisitor>
+  inline void ExternalTwoByteStringIterateBody();
+
 
   // Support for StringInputBuffer.
   void ExternalTwoByteStringReadBlockIntoBuffer(ReadBlockBuffer* buffer,
@@ -4771,7 +4841,6 @@
   static inline Oddball* cast(Object* obj);
 
   // Dispatched behavior.
-  void OddballIterateBody(ObjectVisitor* v);
 #ifdef DEBUG
   void OddballVerify();
 #endif
@@ -4784,6 +4853,10 @@
   static const int kToNumberOffset = kToStringOffset + kPointerSize;
   static const int kSize = kToNumberOffset + kPointerSize;
 
+  typedef FixedBodyDescriptor<kToStringOffset,
+                              kToNumberOffset + kPointerSize,
+                              kSize> BodyDescriptor;
+
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(Oddball);
 };
@@ -4797,8 +4870,6 @@
   // Casting.
   static inline JSGlobalPropertyCell* cast(Object* obj);
 
-  // Dispatched behavior.
-  void JSGlobalPropertyCellIterateBody(ObjectVisitor* v);
 #ifdef DEBUG
   void JSGlobalPropertyCellVerify();
   void JSGlobalPropertyCellPrint();
@@ -4808,6 +4879,10 @@
   static const int kValueOffset = HeapObject::kHeaderSize;
   static const int kSize = kValueOffset + kPointerSize;
 
+  typedef FixedBodyDescriptor<kValueOffset,
+                              kValueOffset + kPointerSize,
+                              kSize> BodyDescriptor;
+
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSGlobalPropertyCell);
 };
@@ -4828,6 +4903,10 @@
 
   // Dispatched behavior.
   inline void ProxyIterateBody(ObjectVisitor* v);
+
+  template<typename StaticVisitor>
+  inline void ProxyIterateBody();
+
 #ifdef DEBUG
   void ProxyPrint();
   void ProxyVerify();
@@ -5345,6 +5424,15 @@
 };
 
 
+class StructBodyDescriptor : public
+  FlexibleBodyDescriptor<HeapObject::kHeaderSize> {
+ public:
+  static inline int SizeOf(Map* map, HeapObject* object) {
+    return map->instance_size();
+  }
+};
+
+
 // BooleanBit is a helper class for setting and getting a bit in an
 // integer or Smi.
 class BooleanBit : public AllStatic {
diff --git a/src/platform-linux.cc b/src/platform-linux.cc
index d3a4498..f7d8609 100644
--- a/src/platform-linux.cc
+++ b/src/platform-linux.cc
@@ -290,9 +290,10 @@
 void OS::DebugBreak() {
 // TODO(lrn): Introduce processor define for runtime system (!= V8_ARCH_x,
 //  which is the architecture of generated code).
-#if (defined(__arm__) || defined(__thumb__)) && \
-    defined(CAN_USE_ARMV5_INSTRUCTIONS)
+#if (defined(__arm__) || defined(__thumb__))
+# if defined(CAN_USE_ARMV5_INSTRUCTIONS)
   asm("bkpt 0");
+# endif
 #elif defined(__mips__)
   asm("break");
 #else
diff --git a/src/platform-nullos.cc b/src/platform-nullos.cc
index 656c317..b8392e8 100644
--- a/src/platform-nullos.cc
+++ b/src/platform-nullos.cc
@@ -100,6 +100,12 @@
 }
 
 
+int OS::GetLastError() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
 // Returns the local time offset in milliseconds east of UTC without
 // taking daylight savings time into account.
 double OS::LocalTimeOffset() {
diff --git a/src/platform-posix.cc b/src/platform-posix.cc
index 89f4d98..c50d396 100644
--- a/src/platform-posix.cc
+++ b/src/platform-posix.cc
@@ -108,6 +108,11 @@
 }
 
 
+int OS::GetLastError() {
+  return errno;
+}
+
+
 // ----------------------------------------------------------------------------
 // POSIX stdio support.
 //
@@ -238,7 +243,7 @@
   addr.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
   addr.sin_port = htons(port);
   int status = bind(socket_,
-                    reinterpret_cast<struct sockaddr *>(&addr),
+                    BitCast<struct sockaddr *>(&addr),
                     sizeof(addr));
   return status == 0;
 }
diff --git a/src/platform-win32.cc b/src/platform-win32.cc
index af3e9b2..86314a8 100644
--- a/src/platform-win32.cc
+++ b/src/platform-win32.cc
@@ -651,6 +651,11 @@
 }
 
 
+int OS::GetLastError() {
+  return ::GetLastError();
+}
+
+
 // ----------------------------------------------------------------------------
 // Win32 console output.
 //
diff --git a/src/platform.h b/src/platform.h
index d63ca5e..7539fd2 100644
--- a/src/platform.h
+++ b/src/platform.h
@@ -165,6 +165,9 @@
   // Returns the daylight savings offset for the given time.
   static double DaylightSavingsOffset(double time);
 
+  // Returns the last OS error.
+  static int GetLastError();
+
   static FILE* FOpen(const char* path, const char* mode);
 
   // Log file open mode is platform-dependent due to line ends issues.
diff --git a/src/property.h b/src/property.h
index 15a5652..01c58de 100644
--- a/src/property.h
+++ b/src/property.h
@@ -115,8 +115,8 @@
 // the same CONSTANT_FUNCTION field.
 class ConstTransitionDescriptor: public Descriptor {
  public:
-  explicit ConstTransitionDescriptor(String* key)
-      : Descriptor(key, Smi::FromInt(0), NONE, CONSTANT_TRANSITION) { }
+  explicit ConstTransitionDescriptor(String* key, Map* map)
+      : Descriptor(key, map, NONE, CONSTANT_TRANSITION) { }
 };
 
 
@@ -260,7 +260,7 @@
 
   Map* GetTransitionMap() {
     ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
-    ASSERT(type() == MAP_TRANSITION);
+    ASSERT(type() == MAP_TRANSITION || type() == CONSTANT_TRANSITION);
     return Map::cast(GetValue());
   }
 
diff --git a/src/runtime.js b/src/runtime.js
index aca1945..4296810 100644
--- a/src/runtime.js
+++ b/src/runtime.js
@@ -175,7 +175,7 @@
 // Left operand (this) is already a string.
 function STRING_ADD_LEFT(y) {
   if (!IS_STRING(y)) {
-    if (IS_STRING_WRAPPER(y)) {
+    if (IS_STRING_WRAPPER(y) && %_IsStringWrapperSafeForDefaultValueOf(y)) {
       y = %_ValueOf(y);
     } else {
       y = IS_NUMBER(y)
@@ -191,7 +191,7 @@
 function STRING_ADD_RIGHT(y) {
   var x = this;
   if (!IS_STRING(x)) {
-    if (IS_STRING_WRAPPER(x)) {
+    if (IS_STRING_WRAPPER(x) && %_IsStringWrapperSafeForDefaultValueOf(x)) {
       x = %_ValueOf(x);
     } else {
       x = IS_NUMBER(x)
@@ -387,11 +387,11 @@
 
 // Filter a given key against an object by checking if the object
 // has a property with the given key; return the key as a string if
-// it has. Otherwise returns null. Used in for-in statements.
+// it does. Otherwise returns 0 (a smi). Used in for-in statements.
 function FILTER_KEY(key) {
   var string = %ToString(key);
   if (%HasProperty(this, string)) return string;
-  return null;
+  return 0;
 }
 
 
diff --git a/src/serialize.cc b/src/serialize.cc
index 3988b4a..0057d18 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -680,14 +680,6 @@
     LOG(SnapshotPositionEvent(address, source_->position()));
   }
   ReadChunk(current, limit, space_number, address);
-
-  if (space == Heap::map_space()) {
-    ASSERT(size == Map::kSize);
-    HeapObject* obj = HeapObject::FromAddress(address);
-    Map* map = reinterpret_cast<Map*>(obj);
-    map->set_scavenger(Heap::GetScavenger(map->instance_type(),
-                                          map->instance_size()));
-  }
 }
 
 
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 6a0c93e..54d9384 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -789,23 +789,6 @@
 #endif
 
 
-Object* StubCache::ComputeLazyCompile(int argc) {
-  Code::Flags flags =
-      Code::ComputeFlags(Code::STUB, NOT_IN_LOOP, UNINITIALIZED, NORMAL, argc);
-  Object* probe = ProbeCache(flags);
-  if (!probe->IsUndefined()) return probe;
-  StubCompiler compiler;
-  Object* result = FillCache(compiler.CompileLazyCompile(flags));
-  if (result->IsCode()) {
-    Code* code = Code::cast(result);
-    USE(code);
-    PROFILE(CodeCreateEvent(Logger::LAZY_COMPILE_TAG,
-                            code, code->arguments_count()));
-  }
-  return result;
-}
-
-
 void StubCache::Clear() {
   for (int i = 0; i < kPrimaryTableSize; i++) {
     primary_[i].key = Heap::empty_string();
diff --git a/src/stub-cache.h b/src/stub-cache.h
index 0be32f1..663201b 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -210,8 +210,6 @@
   static Object* ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind);
 #endif
 
-  static Object* ComputeLazyCompile(int argc);
-
 
   // Update cache for entry hash(name, map).
   static Code* Set(String* name, Map* map, Code* code);
@@ -357,7 +355,6 @@
   Object* CompileCallDebugBreak(Code::Flags flags);
   Object* CompileCallDebugPrepareStepIn(Code::Flags flags);
 #endif
-  Object* CompileLazyCompile(Code::Flags flags);
 
   // Static functions for generating parts of stubs.
   static void GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
diff --git a/src/third_party/dtoa/dtoa.c b/src/third_party/dtoa/dtoa.c
index 178b3d1..068ed94 100644
--- a/src/third_party/dtoa/dtoa.c
+++ b/src/third_party/dtoa/dtoa.c
@@ -270,25 +270,14 @@
 
 typedef union { double d; ULong L[2]; } U;
 
-#ifdef YES_ALIAS
-#define dval(x) x
 #ifdef IEEE_8087
-#define word0(x) ((ULong *)&x)[1]
-#define word1(x) ((ULong *)&x)[0]
+#define word0(x) (x).L[1]
+#define word1(x) (x).L[0]
 #else
-#define word0(x) ((ULong *)&x)[0]
-#define word1(x) ((ULong *)&x)[1]
+#define word0(x) (x).L[0]
+#define word1(x) (x).L[1]
 #endif
-#else
-#ifdef IEEE_8087
-#define word0(x) ((U*)&x)->L[1]
-#define word1(x) ((U*)&x)->L[0]
-#else
-#define word0(x) ((U*)&x)->L[0]
-#define word1(x) ((U*)&x)->L[1]
-#endif
-#define dval(x) ((U*)&x)->d
-#endif
+#define dval(x) (x).d
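+
+/* word0/word1/dval now go through the U union rather than casting the
+ * address of a double, so callers first copy their double into a U
+ * (see ulp, d2b and dtoa below). */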
 
 /* The following definition of Storeinc is appropriate for MIPS processors.
  * An alternative that might be better on some machines is
@@ -1108,13 +1097,15 @@
  static double
 ulp
 #ifdef KR_headers
-	(x) double x;
+	(dx) double dx;
 #else
-	(double x)
+	(double dx)
 #endif
 {
 	register Long L;
-	double a;
+	U x, a;
+
+	dval(x) = dx;
 
 	L = (word0(x) & Exp_mask) - (P-1)*Exp_msk1;
 #ifndef Avoid_Underflow
@@ -1157,7 +1148,7 @@
 {
 	ULong *xa, *xa0, w, y, z;
 	int k;
-	double d;
+	U d;
 #ifdef VAX
 	ULong d0, d1;
 #else
@@ -1220,9 +1211,9 @@
  static Bigint *
 d2b
 #ifdef KR_headers
-	(d, e, bits) double d; int *e, *bits;
+	(dd, e, bits) double dd; int *e, *bits;
 #else
-	(double d, int *e, int *bits)
+	(double dd, int *e, int *bits)
 #endif
 {
 	Bigint *b;
@@ -1236,6 +1227,8 @@
 	d0 = word0(d) >> 16 | word0(d) << 16;
 	d1 = word1(d) >> 16 | word1(d) << 16;
 #else
+	U d;
+	dval(d) = dd;
 #define d0 word0(d)
 #define d1 word1(d)
 #endif
@@ -1368,7 +1361,7 @@
 	(Bigint *a, Bigint *b)
 #endif
 {
-	double da, db;
+	U da, db;
 	int k, ka, kb;
 
 	dval(da) = b2d(a, &ka);
@@ -1542,7 +1535,8 @@
 	int bb2, bb5, bbe, bd2, bd5, bbbits, bs2, c, dsign,
 		 e, e1, esign, i, j, k, nd, nd0, nf, nz, nz0, sign;
 	CONST char *s, *s0, *s1;
-	double aadj, aadj1, adj, rv, rv0;
+	double aadj;
+	U aadj1, adj, rv, rv0;
 	Long L;
 	ULong y, z;
 	Bigint *bb = NULL, *bb1, *bd = NULL, *bd0, *bs = NULL, *delta = NULL;
@@ -2042,12 +2036,12 @@
 					}
 				if (rounding) {
 					if (dsign) {
-						adj = 1.;
+						dval(adj) = 1.;
 						goto apply_adj;
 						}
 					}
 				else if (!dsign) {
-					adj = -1.;
+					dval(adj) = -1.;
 					if (!word1(rv)
 					 && !(word0(rv) & Frac_mask)) {
 						y = word0(rv) & Exp_mask;
@@ -2059,7 +2053,7 @@
 						  {
 						  delta = lshift(delta,Log2P);
 						  if (cmp(delta, bs) <= 0)
-							adj = -0.5;
+							dval(adj) = -0.5;
 						  }
 						}
  apply_adj:
@@ -2072,26 +2066,26 @@
 					if ((word0(rv) & Exp_mask) <=
 							P*Exp_msk1) {
 						word0(rv) += P*Exp_msk1;
-						dval(rv) += adj*ulp(dval(rv));
+						dval(rv) += dval(adj)*ulp(dval(rv));
 						word0(rv) -= P*Exp_msk1;
 						}
 					else
 #endif /*Sudden_Underflow*/
 #endif /*Avoid_Underflow*/
-					dval(rv) += adj*ulp(dval(rv));
+					dval(rv) += dval(adj)*ulp(dval(rv));
 					}
 				break;
 				}
-			adj = ratio(delta, bs);
-			if (adj < 1.)
-				adj = 1.;
-			if (adj <= 0x7ffffffe) {
+			dval(adj) = ratio(delta, bs);
+			if (dval(adj) < 1.)
+				dval(adj) = 1.;
+			if (dval(adj) <= 0x7ffffffe) {
 				/* adj = rounding ? ceil(adj) : floor(adj); */
-				y = adj;
-				if (y != adj) {
+				y = dval(adj);
+				if (y != dval(adj)) {
 					if (!((rounding>>1) ^ dsign))
 						y++;
-					adj = y;
+					dval(adj) = y;
 					}
 				}
 #ifdef Avoid_Underflow
@@ -2101,21 +2095,21 @@
 #ifdef Sudden_Underflow
 			if ((word0(rv) & Exp_mask) <= P*Exp_msk1) {
 				word0(rv) += P*Exp_msk1;
-				adj *= ulp(dval(rv));
+				dval(adj) *= ulp(dval(rv));
 				if (dsign)
-					dval(rv) += adj;
+					dval(rv) += dval(adj);
 				else
-					dval(rv) -= adj;
+					dval(rv) -= dval(adj);
 				word0(rv) -= P*Exp_msk1;
 				goto cont;
 				}
 #endif /*Sudden_Underflow*/
 #endif /*Avoid_Underflow*/
-			adj *= ulp(dval(rv));
+			dval(adj) *= ulp(dval(rv));
 			if (dsign)
-				dval(rv) += adj;
+				dval(rv) += dval(adj);
 			else
-				dval(rv) -= adj;
+				dval(rv) -= dval(adj);
 			goto cont;
 			}
 #endif /*Honor_FLT_ROUNDS*/
@@ -2237,14 +2231,14 @@
 			}
 		if ((aadj = ratio(delta, bs)) <= 2.) {
 			if (dsign)
-				aadj = aadj1 = 1.;
+				aadj = dval(aadj1) = 1.;
 			else if (word1(rv) || word0(rv) & Bndry_mask) {
 #ifndef Sudden_Underflow
 				if (word1(rv) == Tiny1 && !word0(rv))
 					goto undfl;
 #endif
 				aadj = 1.;
-				aadj1 = -1.;
+				dval(aadj1) = -1.;
 				}
 			else {
 				/* special case -- power of FLT_RADIX to be */
@@ -2254,24 +2248,24 @@
 					aadj = 1./FLT_RADIX;
 				else
 					aadj *= 0.5;
-				aadj1 = -aadj;
+				dval(aadj1) = -aadj;
 				}
 			}
 		else {
 			aadj *= 0.5;
-			aadj1 = dsign ? aadj : -aadj;
+			dval(aadj1) = dsign ? aadj : -aadj;
 #ifdef Check_FLT_ROUNDS
 			switch(Rounding) {
 				case 2: /* towards +infinity */
-					aadj1 -= 0.5;
+					dval(aadj1) -= 0.5;
 					break;
 				case 0: /* towards 0 */
 				case 3: /* towards -infinity */
-					aadj1 += 0.5;
+					dval(aadj1) += 0.5;
 				}
 #else
 			if (Flt_Rounds == 0)
-				aadj1 += 0.5;
+				dval(aadj1) += 0.5;
 #endif /*Check_FLT_ROUNDS*/
 			}
 		y = word0(rv) & Exp_mask;
@@ -2281,8 +2275,8 @@
 		if (y == Exp_msk1*(DBL_MAX_EXP+Bias-1)) {
 			dval(rv0) = dval(rv);
 			word0(rv) -= P*Exp_msk1;
-			adj = aadj1 * ulp(dval(rv));
-			dval(rv) += adj;
+			dval(adj) = dval(aadj1) * ulp(dval(rv));
+			dval(rv) += dval(adj);
 			if ((word0(rv) & Exp_mask) >=
 					Exp_msk1*(DBL_MAX_EXP+Bias-P)) {
 				if (word0(rv0) == Big0 && word1(rv0) == Big1)
@@ -2301,19 +2295,19 @@
 					if ((z = aadj) <= 0)
 						z = 1;
 					aadj = z;
-					aadj1 = dsign ? aadj : -aadj;
+					dval(aadj1) = dsign ? aadj : -aadj;
 					}
 				word0(aadj1) += (2*P+1)*Exp_msk1 - y;
 				}
-			adj = aadj1 * ulp(dval(rv));
-			dval(rv) += adj;
+			dval(adj) = dval(aadj1) * ulp(dval(rv));
+			dval(rv) += dval(adj);
 #else
 #ifdef Sudden_Underflow
 			if ((word0(rv) & Exp_mask) <= P*Exp_msk1) {
 				dval(rv0) = dval(rv);
 				word0(rv) += P*Exp_msk1;
-				adj = aadj1 * ulp(dval(rv));
-				dval(rv) += adj;
+				dval(adj) = dval(aadj1) * ulp(dval(rv));
+				dval(rv) += dval(adj);
 #ifdef IBM
 				if ((word0(rv) & Exp_mask) <  P*Exp_msk1)
 #else
@@ -2331,8 +2325,8 @@
 					word0(rv) -= P*Exp_msk1;
 				}
 			else {
-				adj = aadj1 * ulp(dval(rv));
-				dval(rv) += adj;
+				dval(adj) = dval(aadj1) * ulp(dval(rv));
+				dval(rv) += dval(adj);
 				}
 #else /*Sudden_Underflow*/
 			/* Compute adj so that the IEEE rounding rules will
@@ -2343,12 +2337,12 @@
 			 * example: 1.2e-307 .
 			 */
 			if (y <= (P-1)*Exp_msk1 && aadj > 1.) {
-				aadj1 = (double)(int)(aadj + 0.5);
+				dval(aadj1) = (double)(int)(aadj + 0.5);
 				if (!dsign)
-					aadj1 = -aadj1;
+					dval(aadj1) = -dval(aadj1);
 				}
-			adj = aadj1 * ulp(dval(rv));
-			dval(rv) += adj;
+			dval(adj) = dval(aadj1) * ulp(dval(rv));
+			dval(rv) += dval(adj);
 #endif /*Sudden_Underflow*/
 #endif /*Avoid_Underflow*/
 			}
@@ -2638,10 +2632,10 @@
  char *
 dtoa
 #ifdef KR_headers
-	(d, mode, ndigits, decpt, sign, rve)
-	double d; int mode, ndigits, *decpt, *sign; char **rve;
+	(dd, mode, ndigits, decpt, sign, rve)
+	double dd; int mode, ndigits, *decpt, *sign; char **rve;
 #else
-	(double d, int mode, int ndigits, int *decpt, int *sign, char **rve)
+	(double dd, int mode, int ndigits, int *decpt, int *sign, char **rve)
 #endif
 {
  /*	Arguments ndigits, decpt, sign are similar to those
@@ -2687,7 +2681,8 @@
 	ULong x;
 #endif
 	Bigint *b, *b1, *delta, *mlo, *mhi, *S;
-	double d2, ds, eps;
+	double ds;
+	U d2, eps;
 	char *s, *s0;
 #ifdef Honor_FLT_ROUNDS
 	int rounding;
@@ -2695,6 +2690,8 @@
 #ifdef SET_INEXACT
 	int inexact, oldinexact;
 #endif
+	U d;
+	dval(d) = dd;
 
         /* In mode 2 and 3 we bias rounding up when there are ties. */
         bias_round_up = mode == 2 || mode == 3;
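
The variables adj, aadj1, d2, eps, and d above now have type U instead of
double, so word-level access goes through a union rather than pointer casts;
this is the strict-aliasing fix for GCC 4.4 (issue 463). A minimal sketch of
the representation these hunks assume (the real macros in dtoa.c also handle
byte order and non-IEEE formats):

    typedef unsigned int ULong;           /* 32 bits assumed in this sketch */

    typedef union { double d; ULong L[2]; } U;

    #define dval(x)  ((x).d)
    #define word0(x) ((x).L[1])           /* high word, little-endian IEEE */
    #define word1(x) ((x).L[0])
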
diff --git a/src/top.cc b/src/top.cc
index 1a4a948..8296027 100644
--- a/src/top.cc
+++ b/src/top.cc
@@ -520,7 +520,6 @@
 
 
 void Top::SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback) {
-  ASSERT(thread_local_.failed_access_check_callback_ == NULL);
   thread_local_.failed_access_check_callback_ = callback;
 }
 
@@ -530,8 +529,6 @@
 
   ASSERT(receiver->IsAccessCheckNeeded());
   ASSERT(Top::context());
-  // The callers of this method are not expecting a GC.
-  AssertNoAllocation no_gc;
 
   // Get the data object from access check info.
   JSFunction* constructor = JSFunction::cast(receiver->map()->constructor());
diff --git a/src/type-info.h b/src/type-info.h
index 91ecab8..f588e56 100644
--- a/src/type-info.h
+++ b/src/type-info.h
@@ -54,7 +54,7 @@
   static inline TypeInfo Primitive();
   // We know it's a number of some sort.
   static inline TypeInfo Number();
-  // We know it's signed or unsigned 32 bit integer.
+  // We know it's signed 32 bit integer.
   static inline TypeInfo Integer32();
   // We know it's a Smi.
   static inline TypeInfo Smi();
@@ -113,19 +113,15 @@
   }
 
 
-  // Integer32 is an integer that can be represented as either a signed
-  // 32-bit integer or as an unsigned 32-bit integer. It has to be
-  // in the range [-2^31, 2^32 - 1]. We also have to check for negative 0
-  // as it is not an Integer32.
+  // Integer32 is an integer that can be represented as a signed
+  // 32-bit integer. It has to be in the range [-2^31, 2^31 - 1].
+  // We also have to check for negative 0 as it is not an Integer32.
   static inline bool IsInt32Double(double value) {
     const DoubleRepresentation minus_zero(-0.0);
     DoubleRepresentation rep(value);
     if (rep.bits == minus_zero.bits) return false;
-    if (value >= kMinInt && value <= kMaxUInt32) {
-      if (value <= kMaxInt && value == static_cast<int32_t>(value)) {
-        return true;
-      }
-      if (value == static_cast<uint32_t>(value)) return true;
+    if (value >= kMinInt && value <= kMaxInt) {
+      if (value == static_cast<int32_t>(value)) return true;
     }
     return false;
   }
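
The narrowed predicate accepts exactly the doubles representable as a signed
32-bit integer and rejects -0. A standalone mirror of the check above,
assuming IEEE doubles (INT32_MIN/INT32_MAX play the role of kMinInt/kMaxInt):

    #include <cstdint>
    #include <cstring>

    static bool IsInt32Double(double value) {
      // Compare bit patterns to reject -0.0; a plain == 0.0 would accept it.
      uint64_t bits, minus_zero_bits;
      const double minus_zero = -0.0;
      std::memcpy(&bits, &value, sizeof(bits));
      std::memcpy(&minus_zero_bits, &minus_zero, sizeof(minus_zero_bits));
      if (bits == minus_zero_bits) return false;
      if (value >= INT32_MIN && value <= INT32_MAX) {
        return value == static_cast<int32_t>(value);
      }
      return false;  // NaN, out of range, or fractional.
    }
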
diff --git a/src/version.cc b/src/version.cc
index e501a7c..61c0a0e 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     2
 #define MINOR_VERSION     3
-#define BUILD_NUMBER      7
+#define BUILD_NUMBER      8
 #define PATCH_LEVEL       0
 #define CANDIDATE_VERSION false
 
diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h
index c8abd22..44159e0 100644
--- a/src/x64/assembler-x64-inl.h
+++ b/src/x64/assembler-x64-inl.h
@@ -350,6 +350,29 @@
 }
 
 
+template<typename StaticVisitor>
+void RelocInfo::Visit() {
+  RelocInfo::Mode mode = rmode();
+  if (mode == RelocInfo::EMBEDDED_OBJECT) {
+    StaticVisitor::VisitPointer(target_object_address());
+  } else if (RelocInfo::IsCodeTarget(mode)) {
+    StaticVisitor::VisitCodeTarget(this);
+  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
+    StaticVisitor::VisitExternalReference(target_reference_address());
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  } else if (Debug::has_break_points() &&
+             ((RelocInfo::IsJSReturn(mode) &&
+              IsPatchedReturnSequence()) ||
+             (RelocInfo::IsDebugBreakSlot(mode) &&
+              IsPatchedDebugBreakSlotSequence()))) {
+    StaticVisitor::VisitDebugTarget(this);
+#endif
+  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
+    StaticVisitor::VisitRuntimeEntry(this);
+  }
+}
+
+
 // -----------------------------------------------------------------------------
 // Implementation of Operand
 
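RelocInfo::Visit is parameterized over a StaticVisitor with static member
functions instead of a virtual interface, so the dispatch is resolved at
compile time. A hypothetical visitor showing the interface the template
expects (the names match the calls above; the parameter types are V8's):

    class ExampleStaticVisitor {
     public:
      static void VisitPointer(Object** p);
      static void VisitCodeTarget(RelocInfo* rinfo);
      static void VisitExternalReference(Address* p);
      static void VisitDebugTarget(RelocInfo* rinfo);
      static void VisitRuntimeEntry(RelocInfo* rinfo);
    };
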
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 6b34a4f..4f2d2b9 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -1291,6 +1291,26 @@
   Generate_JSEntryTrampolineHelper(masm, true);
 }
 
+
+void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
+  // Enter an internal frame.
+  __ EnterInternalFrame();
+
+  // Push a copy of the function onto the stack.
+  __ push(rdi);
+
+  __ push(rdi);  // Function is also the parameter to the runtime call.
+  __ CallRuntime(Runtime::kLazyCompile, 1);
+  __ pop(rdi);
+
+  // Tear down temporary frame.
+  __ LeaveInternalFrame();
+
+  // Do a tail-call of the compiled function.
+  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
+  __ jmp(rcx);
+}
+
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64
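
The lazy-compile builtin moved here from stub-cache-x64.cc (the specialized
copy is deleted further down) and is no longer keyed by argument count. The
tail call enters the Code object returned in rax at its first instruction;
a sketch of the address arithmetic behind FieldOperand(rax,
Code::kHeaderSize), with kHeapObjectTag = 1 and Address as V8's byte*:

    // A tagged Code pointer is the object address plus the heap object tag,
    // and the instructions start right after the Code header.
    Address EntryPoint(Address tagged_code_pointer) {
      return tagged_code_pointer + Code::kHeaderSize - kHeapObjectTag;
    }
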
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index 04078ef..e545ffa 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -201,103 +201,89 @@
     // rsi: callee's context
     allocator_->Initialize();
 
-    if (info->mode() == CompilationInfo::PRIMARY) {
-      frame_->Enter();
+    frame_->Enter();
 
-      // Allocate space for locals and initialize them.
-      frame_->AllocateStackSlots();
+    // Allocate space for locals and initialize them.
+    frame_->AllocateStackSlots();
 
-      // Allocate the local context if needed.
-      int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
-      if (heap_slots > 0) {
-        Comment cmnt(masm_, "[ allocate local context");
-        // Allocate local context.
-        // Get outer context and create a new context based on it.
-        frame_->PushFunction();
-        Result context;
-        if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-          FastNewContextStub stub(heap_slots);
-          context = frame_->CallStub(&stub, 1);
-        } else {
-          context = frame_->CallRuntime(Runtime::kNewContext, 1);
-        }
+    // Allocate the local context if needed.
+    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+    if (heap_slots > 0) {
+      Comment cmnt(masm_, "[ allocate local context");
+      // Allocate local context.
+      // Get outer context and create a new context based on it.
+      frame_->PushFunction();
+      Result context;
+      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+        FastNewContextStub stub(heap_slots);
+        context = frame_->CallStub(&stub, 1);
+      } else {
+        context = frame_->CallRuntime(Runtime::kNewContext, 1);
+      }
 
-        // Update context local.
-        frame_->SaveContextRegister();
+      // Update context local.
+      frame_->SaveContextRegister();
 
-        // Verify that the runtime call result and rsi agree.
-        if (FLAG_debug_code) {
-          __ cmpq(context.reg(), rsi);
-          __ Assert(equal, "Runtime::NewContext should end up in rsi");
+      // Verify that the runtime call result and rsi agree.
+      if (FLAG_debug_code) {
+        __ cmpq(context.reg(), rsi);
+        __ Assert(equal, "Runtime::NewContext should end up in rsi");
+      }
+    }
+
+    // TODO(1241774): Improve this code:
+    // 1) only needed if we have a context
+    // 2) no need to recompute context ptr every single time
+    // 3) don't copy parameter operand code from SlotOperand!
+    {
+      Comment cmnt2(masm_, "[ copy context parameters into .context");
+      // Note that iteration order is relevant here! If we have the same
+      // parameter twice (e.g., function (x, y, x)), and that parameter
+      // needs to be copied into the context, it must be the last argument
+      // passed to the parameter that needs to be copied. This is a rare
+      // case so we don't check for it, instead we rely on the copying
+      // order: such a parameter is copied repeatedly into the same
+      // context location and thus the last value is what is seen inside
+      // the function.
+      for (int i = 0; i < scope()->num_parameters(); i++) {
+        Variable* par = scope()->parameter(i);
+        Slot* slot = par->slot();
+        if (slot != NULL && slot->type() == Slot::CONTEXT) {
+          // The use of SlotOperand below is safe in unspilled code
+          // because the slot is guaranteed to be a context slot.
+          //
+          // There are no parameters in the global scope.
+          ASSERT(!scope()->is_global_scope());
+          frame_->PushParameterAt(i);
+          Result value = frame_->Pop();
+          value.ToRegister();
+
+          // SlotOperand loads context.reg() with the context object
+          // stored to, used below in RecordWrite.
+          Result context = allocator_->Allocate();
+          ASSERT(context.is_valid());
+          __ movq(SlotOperand(slot, context.reg()), value.reg());
+          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
+          Result scratch = allocator_->Allocate();
+          ASSERT(scratch.is_valid());
+          frame_->Spill(context.reg());
+          frame_->Spill(value.reg());
+          __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
         }
       }
+    }
 
-      // TODO(1241774): Improve this code:
-      // 1) only needed if we have a context
-      // 2) no need to recompute context ptr every single time
-      // 3) don't copy parameter operand code from SlotOperand!
-      {
-        Comment cmnt2(masm_, "[ copy context parameters into .context");
-        // Note that iteration order is relevant here! If we have the same
-        // parameter twice (e.g., function (x, y, x)), and that parameter
-        // needs to be copied into the context, it must be the last argument
-        // passed to the parameter that needs to be copied. This is a rare
-        // case so we don't check for it, instead we rely on the copying
-        // order: such a parameter is copied repeatedly into the same
-        // context location and thus the last value is what is seen inside
-        // the function.
-        for (int i = 0; i < scope()->num_parameters(); i++) {
-          Variable* par = scope()->parameter(i);
-          Slot* slot = par->slot();
-          if (slot != NULL && slot->type() == Slot::CONTEXT) {
-            // The use of SlotOperand below is safe in unspilled code
-            // because the slot is guaranteed to be a context slot.
-            //
-            // There are no parameters in the global scope.
-            ASSERT(!scope()->is_global_scope());
-            frame_->PushParameterAt(i);
-            Result value = frame_->Pop();
-            value.ToRegister();
+    // Store the arguments object.  This must happen after context
+    // initialization because the arguments object may be stored in
+    // the context.
+    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
+      StoreArgumentsObject(true);
+    }
 
-            // SlotOperand loads context.reg() with the context object
-            // stored to, used below in RecordWrite.
-            Result context = allocator_->Allocate();
-            ASSERT(context.is_valid());
-            __ movq(SlotOperand(slot, context.reg()), value.reg());
-            int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
-            Result scratch = allocator_->Allocate();
-            ASSERT(scratch.is_valid());
-            frame_->Spill(context.reg());
-            frame_->Spill(value.reg());
-            __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
-          }
-        }
-      }
-
-      // Store the arguments object.  This must happen after context
-      // initialization because the arguments object may be stored in
-      // the context.
-      if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
-        StoreArgumentsObject(true);
-      }
-
-      // Initialize ThisFunction reference if present.
-      if (scope()->is_function_scope() && scope()->function() != NULL) {
-        frame_->Push(Factory::the_hole_value());
-        StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
-      }
-    } else {
-      // When used as the secondary compiler for splitting, rbp, rsi,
-      // and rdi have been pushed on the stack.  Adjust the virtual
-      // frame to match this state.
-      frame_->Adjust(3);
-      allocator_->Unuse(rdi);
-
-      // Bind all the bailout labels to the beginning of the function.
-      List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
-      for (int i = 0; i < bailouts->length(); i++) {
-        __ bind(bailouts->at(i)->label());
-      }
+    // Initialize ThisFunction reference if present.
+    if (scope()->is_function_scope() && scope()->function() != NULL) {
+      frame_->Push(Factory::the_hole_value());
+      StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
     }
 
     // Initialize the function return target after the locals are set
@@ -3925,7 +3911,7 @@
   __ movq(rbx, rax);
 
   // If the property has been removed while iterating, we just skip it.
-  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
+  __ SmiCompare(rbx, Smi::FromInt(0));
   node->continue_target()->Branch(equal);
 
   end_del_check.Bind();
@@ -6040,6 +6026,143 @@
 }
 
 
+// Deferred code to check whether the String JavaScript object is safe to use
+// with the default valueOf behavior. This code is called after the bit caching
+// this information in the map has been checked, with the object's map in the
+// map_result_ register. On return the register map_result_ contains 1 for true
+// and 0 for false.
+class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
+ public:
+  DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
+                                               Register map_result,
+                                               Register scratch1,
+                                               Register scratch2)
+      : object_(object),
+        map_result_(map_result),
+        scratch1_(scratch1),
+        scratch2_(scratch2) { }
+
+  virtual void Generate() {
+    Label false_result;
+
+    // Check that map is loaded as expected.
+    if (FLAG_debug_code) {
+      __ cmpq(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
+      __ Assert(equal, "Map not in expected register");
+    }
+
+    // Check for fast case object. Generate false result for slow case object.
+    __ movq(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
+    __ movq(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
+    __ CompareRoot(scratch1_, Heap::kHashTableMapRootIndex);
+    __ j(equal, &false_result);
+
+    // Look for valueOf symbol in the descriptor array, and indicate false if
+    // found. The type is not checked, so if it is a transition it is a false
+    // negative.
+    __ movq(map_result_,
+           FieldOperand(map_result_, Map::kInstanceDescriptorsOffset));
+    __ movq(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset));
+    // map_result_: descriptor array
+    // scratch1_: length of descriptor array
+    // Calculate the end of the descriptor array.
+    SmiIndex index = masm_->SmiToIndex(scratch2_, scratch1_, kPointerSizeLog2);
+    __ lea(scratch1_,
+           Operand(
+               map_result_, index.reg, index.scale, FixedArray::kHeaderSize));
+    // Calculate location of the first key name.
+    __ addq(map_result_,
+            Immediate(FixedArray::kHeaderSize +
+                      DescriptorArray::kFirstIndex * kPointerSize));
+    // Loop through all the keys in the descriptor array. If one of these is
+    // the valueOf symbol, the result is false.
+    Label entry, loop;
+    __ jmp(&entry);
+    __ bind(&loop);
+    __ movq(scratch2_, FieldOperand(map_result_, 0));
+    __ Cmp(scratch2_, Factory::value_of_symbol());
+    __ j(equal, &false_result);
+    __ addq(map_result_, Immediate(kPointerSize));
+    __ bind(&entry);
+    __ cmpq(map_result_, scratch1_);
+    __ j(not_equal, &loop);
+
+    // Reload map as register map_result_ was used as temporary above.
+    __ movq(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
+
+    // If a valueOf property is not found on the object, check that its
+    // prototype is the unmodified String prototype. If not, the result is false.
+    __ movq(scratch1_, FieldOperand(map_result_, Map::kPrototypeOffset));
+    __ testq(scratch1_, Immediate(kSmiTagMask));
+    __ j(zero, &false_result);
+    __ movq(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
+    __ movq(scratch2_,
+            Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+    __ movq(scratch2_,
+            FieldOperand(scratch2_, GlobalObject::kGlobalContextOffset));
+    __ cmpq(scratch1_,
+            CodeGenerator::ContextOperand(
+                scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
+    __ j(not_equal, &false_result);
+    // Set the bit in the map to indicate that it has been checked safe for
+    // the default valueOf, and set the true result.
+    __ or_(FieldOperand(map_result_, Map::kBitField2Offset),
+           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
+    __ Set(map_result_, 1);
+    __ jmp(exit_label());
+    __ bind(&false_result);
+    // Set false result.
+    __ Set(map_result_, 0);
+  }
+
+ private:
+  Register object_;
+  Register map_result_;
+  Register scratch1_;
+  Register scratch2_;
+};
+
+
+void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
+    ZoneList<Expression*>* args) {
+  ASSERT(args->length() == 1);
+  Load(args->at(0));
+  Result obj = frame_->Pop();  // Pop the string wrapper.
+  obj.ToRegister();
+  ASSERT(obj.is_valid());
+  if (FLAG_debug_code) {
+    __ AbortIfSmi(obj.reg());
+  }
+
+  // Check whether this map has already been checked to be safe for default
+  // valueOf.
+  Result map_result = allocator()->Allocate();
+  ASSERT(map_result.is_valid());
+  __ movq(map_result.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
+  __ testb(FieldOperand(map_result.reg(), Map::kBitField2Offset),
+           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
+  destination()->true_target()->Branch(not_zero);
+
+  // We need an additional two scratch registers for the deferred code.
+  Result temp1 = allocator()->Allocate();
+  ASSERT(temp1.is_valid());
+  Result temp2 = allocator()->Allocate();
+  ASSERT(temp2.is_valid());
+
+  DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
+      new DeferredIsStringWrapperSafeForDefaultValueOf(
+          obj.reg(), map_result.reg(), temp1.reg(), temp2.reg());
+  deferred->Branch(zero);
+  deferred->BindExit();
+  __ testq(map_result.reg(), map_result.reg());
+  obj.Unuse();
+  map_result.Unuse();
+  temp1.Unuse();
+  temp2.Unuse();
+  destination()->Split(not_equal);
+}
+
+
 void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
   // This generates a fast version of:
   // (%_ClassOf(arg) === 'Function')
diff --git a/src/x64/codegen-x64.h b/src/x64/codegen-x64.h
index 2806f56..14f690e 100644
--- a/src/x64/codegen-x64.h
+++ b/src/x64/codegen-x64.h
@@ -347,6 +347,10 @@
   // expected arguments. Otherwise return -1.
   static int InlineRuntimeCallArgumentsCount(Handle<String> name);
 
+  static Operand ContextOperand(Register context, int index) {
+    return Operand(context, Context::SlotOffset(index));
+  }
+
  private:
   // Construction/Destruction
   explicit CodeGenerator(MacroAssembler* masm);
@@ -406,10 +410,6 @@
   void LoadReference(Reference* ref);
   void UnloadReference(Reference* ref);
 
-  static Operand ContextOperand(Register context, int index) {
-    return Operand(context, Context::SlotOffset(index));
-  }
-
   Operand SlotOperand(Slot* slot, Register tmp);
 
   Operand ContextSlotOperandCheckExtensions(Slot* slot,
@@ -611,6 +611,8 @@
   void GenerateIsSpecObject(ZoneList<Expression*>* args);
   void GenerateIsFunction(ZoneList<Expression*>* args);
   void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
+  void GenerateIsStringWrapperSafeForDefaultValueOf(
+      ZoneList<Expression*>* args);
 
   // Support for construct call checks.
   void GenerateIsConstructCall(ZoneList<Expression*>* args);
@@ -764,6 +766,18 @@
 };
 
 
+class ToBooleanStub: public CodeStub {
+ public:
+  ToBooleanStub() { }
+
+  void Generate(MacroAssembler* masm);
+
+ private:
+  Major MajorKey() { return ToBoolean; }
+  int MinorKey() { return 0; }
+};
+
+
 // Flag that indicates how to generate code for the stub GenericBinaryOpStub.
 enum GenericBinaryFlags {
   NO_GENERIC_BINARY_FLAGS = 0,
diff --git a/src/x64/debug-x64.cc b/src/x64/debug-x64.cc
index 2aa77e7..d5b7e77 100644
--- a/src/x64/debug-x64.cc
+++ b/src/x64/debug-x64.cc
@@ -202,23 +202,39 @@
 
 
 void Debug::GeneratePlainReturnLiveEdit(MacroAssembler* masm) {
-  masm->Abort("LiveEdit frame dropping is not supported on x64");
+  masm->ret(0);
 }
 
 
 void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
-  masm->Abort("LiveEdit frame dropping is not supported on x64");
+  ExternalReference restarter_frame_function_slot =
+      ExternalReference(Debug_Address::RestarterFrameFunctionPointer());
+  __ movq(rax, restarter_frame_function_slot);
+  __ movq(Operand(rax, 0), Immediate(0));
+
+  // We do not know our frame height, but set rsp based on rbp.
+  __ lea(rsp, Operand(rbp, -1 * kPointerSize));
+
+  __ pop(rdi);  // Function.
+  __ pop(rbp);
+
+  // Load context from the function.
+  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+
+  // Get function code.
+  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+  __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
+  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
+
+  // Re-run JSFunction, rdi is function, rsi is context.
+  __ jmp(rdx);
 }
 
+const bool Debug::kFrameDropperSupported = true;
+
 #undef __
 
 
-Object** Debug::SetUpFrameDropperFrame(StackFrame* bottom_js_frame,
-                                       Handle<Code> code) {
-  UNREACHABLE();
-  return NULL;
-}
-const int Debug::kFrameDropperFrameSize = -1;
 
 
 void BreakLocationIterator::ClearDebugBreakAtReturn() {
diff --git a/src/x64/fast-codegen-x64.cc b/src/x64/fast-codegen-x64.cc
deleted file mode 100644
index 13eef03..0000000
--- a/src/x64/fast-codegen-x64.cc
+++ /dev/null
@@ -1,250 +0,0 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "v8.h"
-
-#if defined(V8_TARGET_ARCH_X64)
-
-#include "codegen-inl.h"
-#include "fast-codegen.h"
-#include "scopes.h"
-
-namespace v8 {
-namespace internal {
-
-#define __ ACCESS_MASM(masm())
-
-Register FastCodeGenerator::accumulator0() { return rax; }
-Register FastCodeGenerator::accumulator1() { return rdx; }
-Register FastCodeGenerator::scratch0() { return rcx; }
-Register FastCodeGenerator::scratch1() { return rdi; }
-Register FastCodeGenerator::receiver_reg() { return rbx; }
-Register FastCodeGenerator::context_reg() { return rsi; }
-
-
-void FastCodeGenerator::EmitLoadReceiver() {
-  // Offset 2 is due to return address and saved frame pointer.
-  int index = 2 + scope()->num_parameters();
-  __ movq(receiver_reg(), Operand(rbp, index * kPointerSize));
-}
-
-
-void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
-  ASSERT(!destination().is(no_reg));
-  ASSERT(cell->IsJSGlobalPropertyCell());
-
-  __ Move(destination(), cell);
-  __ movq(destination(),
-          FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset));
-  if (FLAG_debug_code) {
-    __ Cmp(destination(), Factory::the_hole_value());
-    __ Check(not_equal, "DontDelete cells can't contain the hole");
-  }
-
-  // The loaded value is not known to be a smi.
-  clear_as_smi(destination());
-}
-
-
-void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
-  LookupResult lookup;
-  info()->receiver()->Lookup(*name, &lookup);
-
-  ASSERT(lookup.holder() == *info()->receiver());
-  ASSERT(lookup.type() == FIELD);
-  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
-  int index = lookup.GetFieldIndex() - map->inobject_properties();
-  int offset = index * kPointerSize;
-
-  // We will emit the write barrier unless the stored value is statically
-  // known to be a smi.
-  bool needs_write_barrier = !is_smi(accumulator0());
-
-  // Perform the store.  Negative offsets are inobject properties.
-  if (offset < 0) {
-    offset += map->instance_size();
-    __ movq(FieldOperand(receiver_reg(), offset), accumulator0());
-    if (needs_write_barrier) {
-      // Preserve receiver from write barrier.
-      __ movq(scratch0(), receiver_reg());
-    }
-  } else {
-    offset += FixedArray::kHeaderSize;
-    __ movq(scratch0(),
-            FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
-    __ movq(FieldOperand(scratch0(), offset), accumulator0());
-  }
-
-  if (needs_write_barrier) {
-    if (destination().is(no_reg)) {
-      // After RecordWrite accumulator0 is only accidently a smi, but it is
-      // already marked as not known to be one.
-      __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
-    } else {
-      // Copy the value to the other accumulator to preserve a copy from the
-      // write barrier. One of the accumulators is available as a scratch
-      // register.  Neither is a smi.
-      __ movq(accumulator1(), accumulator0());
-      clear_as_smi(accumulator1());
-      Register value_scratch = other_accumulator(destination());
-      __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
-    }
-  } else if (destination().is(accumulator1())) {
-    __ movq(accumulator1(), accumulator0());
-    // Is a smi because we do not need the write barrier.
-    set_as_smi(accumulator1());
-  }
-}
-
-
-void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
-  ASSERT(!destination().is(no_reg));
-  LookupResult lookup;
-  info()->receiver()->Lookup(*name, &lookup);
-
-  ASSERT(lookup.holder() == *info()->receiver());
-  ASSERT(lookup.type() == FIELD);
-  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
-  int index = lookup.GetFieldIndex() - map->inobject_properties();
-  int offset = index * kPointerSize;
-
-  // Perform the load.  Negative offsets are inobject properties.
-  if (offset < 0) {
-    offset += map->instance_size();
-    __ movq(destination(), FieldOperand(receiver_reg(), offset));
-  } else {
-    offset += FixedArray::kHeaderSize;
-    __ movq(scratch0(),
-            FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
-    __ movq(destination(), FieldOperand(scratch0(), offset));
-  }
-
-  // The loaded value is not known to be a smi.
-  clear_as_smi(destination());
-}
-
-
-void FastCodeGenerator::EmitBitOr() {
-  if (is_smi(accumulator0()) && is_smi(accumulator1())) {
-    // If both operands are known to be a smi then there is no need to check
-    // the operands or result.
-    if (destination().is(no_reg)) {
-      __ or_(accumulator1(), accumulator0());
-    } else {
-      // Leave the result in the destination register.  Bitwise or is
-      // commutative.
-      __ or_(destination(), other_accumulator(destination()));
-    }
-  } else {
-    // Left is in accumulator1, right in accumulator0.
-    if (destination().is(accumulator0())) {
-      __ movq(scratch0(), accumulator0());
-      __ or_(destination(), accumulator1());  // Or is commutative.
-      Label* bailout =
-          info()->AddBailout(accumulator1(), scratch0());  // Left, right.
-      __ JumpIfNotSmi(destination(), bailout);
-    } else if (destination().is(accumulator1())) {
-      __ movq(scratch0(), accumulator1());
-      __ or_(destination(), accumulator0());
-      Label* bailout = info()->AddBailout(scratch0(), accumulator0());
-      __ JumpIfNotSmi(destination(), bailout);
-    } else {
-      ASSERT(destination().is(no_reg));
-      __ movq(scratch0(), accumulator1());
-      __ or_(scratch0(), accumulator0());
-      Label* bailout = info()->AddBailout(accumulator1(), accumulator0());
-      __ JumpIfNotSmi(scratch0(), bailout);
-    }
-  }
-
-  // If we didn't bailout, the result (in fact, both inputs too) is known to
-  // be a smi.
-  set_as_smi(accumulator0());
-  set_as_smi(accumulator1());
-}
-
-
-void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
-  ASSERT(info_ == NULL);
-  info_ = compilation_info;
-  Comment cmnt(masm_, "[ function compiled by fast code generator");
-
-  // Save the caller's frame pointer and set up our own.
-  Comment prologue_cmnt(masm(), ";; Prologue");
-  __ push(rbp);
-  __ movq(rbp, rsp);
-  __ push(rsi);  // Context.
-  __ push(rdi);  // Closure.
-  // Note that we keep a live register reference to esi (context) at this
-  // point.
-
-  Label* bailout_to_beginning = info()->AddBailout();
-  // Receiver (this) is allocated to a fixed register.
-  if (info()->has_this_properties()) {
-    Comment cmnt(masm(), ";; MapCheck(this)");
-    if (FLAG_print_ir) {
-      PrintF("MapCheck(this)\n");
-    }
-    ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
-    Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
-    Handle<Map> map(object->map());
-    EmitLoadReceiver();
-    __ CheckMap(receiver_reg(), map, bailout_to_beginning, false);
-  }
-
-  // If there is a global variable access check if the global object is the
-  // same as at lazy-compilation time.
-  if (info()->has_globals()) {
-    Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
-    if (FLAG_print_ir) {
-      PrintF("MapCheck(GLOBAL)\n");
-    }
-    ASSERT(info()->has_global_object());
-    Handle<Map> map(info()->global_object()->map());
-    __ movq(scratch0(), CodeGenerator::GlobalObject());
-    __ CheckMap(scratch0(), map, bailout_to_beginning, true);
-  }
-
-  VisitStatements(info()->function()->body());
-
-  Comment return_cmnt(masm(), ";; Return(<undefined>)");
-  if (FLAG_print_ir) {
-    PrintF("Return(<undefined>)\n");
-  }
-  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
-  __ movq(rsp, rbp);
-  __ pop(rbp);
-  __ ret((scope()->num_parameters() + 1) * kPointerSize);
-}
-
-
-#undef __
-
-
-} }  // namespace v8::internal
-
-#endif  // V8_TARGET_ARCH_X64
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 4d74735..725cbb0 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -54,100 +54,98 @@
 //
 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
 // frames-x64.h for its layout.
-void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) {
+void FullCodeGenerator::Generate(CompilationInfo* info) {
   ASSERT(info_ == NULL);
   info_ = info;
   SetFunctionPosition(function());
   Comment cmnt(masm_, "[ function compiled by full code generator");
 
-  if (mode == PRIMARY) {
-    __ push(rbp);  // Caller's frame pointer.
-    __ movq(rbp, rsp);
-    __ push(rsi);  // Callee's context.
-    __ push(rdi);  // Callee's JS Function.
+  __ push(rbp);  // Caller's frame pointer.
+  __ movq(rbp, rsp);
+  __ push(rsi);  // Callee's context.
+  __ push(rdi);  // Callee's JS Function.
 
-    { Comment cmnt(masm_, "[ Allocate locals");
-      int locals_count = scope()->num_stack_slots();
-      if (locals_count == 1) {
-        __ PushRoot(Heap::kUndefinedValueRootIndex);
-      } else if (locals_count > 1) {
-        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
-        for (int i = 0; i < locals_count; i++) {
-          __ push(rdx);
-        }
+  { Comment cmnt(masm_, "[ Allocate locals");
+    int locals_count = scope()->num_stack_slots();
+    if (locals_count == 1) {
+      __ PushRoot(Heap::kUndefinedValueRootIndex);
+    } else if (locals_count > 1) {
+      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
+      for (int i = 0; i < locals_count; i++) {
+        __ push(rdx);
       }
     }
+  }
 
-    bool function_in_register = true;
+  bool function_in_register = true;
 
-    // Possibly allocate a local context.
-    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
-    if (heap_slots > 0) {
-      Comment cmnt(masm_, "[ Allocate local context");
-      // Argument to NewContext is the function, which is still in rdi.
-      __ push(rdi);
-      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-        FastNewContextStub stub(heap_slots);
-        __ CallStub(&stub);
-      } else {
-        __ CallRuntime(Runtime::kNewContext, 1);
-      }
-      function_in_register = false;
-      // Context is returned in both rax and rsi.  It replaces the context
-      // passed to us.  It's saved in the stack and kept live in rsi.
-      __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
-
-      // Copy any necessary parameters into the context.
-      int num_parameters = scope()->num_parameters();
-      for (int i = 0; i < num_parameters; i++) {
-        Slot* slot = scope()->parameter(i)->slot();
-        if (slot != NULL && slot->type() == Slot::CONTEXT) {
-          int parameter_offset = StandardFrameConstants::kCallerSPOffset +
-                                     (num_parameters - 1 - i) * kPointerSize;
-          // Load parameter from stack.
-          __ movq(rax, Operand(rbp, parameter_offset));
-          // Store it in the context.
-          int context_offset = Context::SlotOffset(slot->index());
-          __ movq(Operand(rsi, context_offset), rax);
-          // Update the write barrier. This clobbers all involved
-          // registers, so we have use a third register to avoid
-          // clobbering rsi.
-          __ movq(rcx, rsi);
-          __ RecordWrite(rcx, context_offset, rax, rbx);
-        }
-      }
-    }
-
-    // Possibly allocate an arguments object.
-    Variable* arguments = scope()->arguments()->AsVariable();
-    if (arguments != NULL) {
-      // Arguments object must be allocated after the context object, in
-      // case the "arguments" or ".arguments" variables are in the context.
-      Comment cmnt(masm_, "[ Allocate arguments object");
-      if (function_in_register) {
-        __ push(rdi);
-      } else {
-        __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-      }
-      // The receiver is just before the parameters on the caller's stack.
-      int offset = scope()->num_parameters() * kPointerSize;
-      __ lea(rdx,
-             Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
-      __ push(rdx);
-      __ Push(Smi::FromInt(scope()->num_parameters()));
-      // Arguments to ArgumentsAccessStub:
-      //   function, receiver address, parameter count.
-      // The stub will rewrite receiver and parameter count if the previous
-      // stack frame was an arguments adapter frame.
-      ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+  // Possibly allocate a local context.
+  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+  if (heap_slots > 0) {
+    Comment cmnt(masm_, "[ Allocate local context");
+    // Argument to NewContext is the function, which is still in rdi.
+    __ push(rdi);
+    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+      FastNewContextStub stub(heap_slots);
       __ CallStub(&stub);
-      // Store new arguments object in both "arguments" and ".arguments" slots.
-      __ movq(rcx, rax);
-      Move(arguments->slot(), rax, rbx, rdx);
-      Slot* dot_arguments_slot =
-          scope()->arguments_shadow()->AsVariable()->slot();
-      Move(dot_arguments_slot, rcx, rbx, rdx);
+    } else {
+      __ CallRuntime(Runtime::kNewContext, 1);
     }
+    function_in_register = false;
+    // Context is returned in both rax and rsi.  It replaces the context
+    // passed to us.  It's saved in the stack and kept live in rsi.
+    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
+
+    // Copy any necessary parameters into the context.
+    int num_parameters = scope()->num_parameters();
+    for (int i = 0; i < num_parameters; i++) {
+      Slot* slot = scope()->parameter(i)->slot();
+      if (slot != NULL && slot->type() == Slot::CONTEXT) {
+        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
+            (num_parameters - 1 - i) * kPointerSize;
+        // Load parameter from stack.
+        __ movq(rax, Operand(rbp, parameter_offset));
+        // Store it in the context.
+        int context_offset = Context::SlotOffset(slot->index());
+        __ movq(Operand(rsi, context_offset), rax);
+        // Update the write barrier. This clobbers all involved
+        // registers, so we have to use a third register to avoid
+        // clobbering rsi.
+        __ movq(rcx, rsi);
+        __ RecordWrite(rcx, context_offset, rax, rbx);
+      }
+    }
+  }
+
+  // Possibly allocate an arguments object.
+  Variable* arguments = scope()->arguments()->AsVariable();
+  if (arguments != NULL) {
+    // Arguments object must be allocated after the context object, in
+    // case the "arguments" or ".arguments" variables are in the context.
+    Comment cmnt(masm_, "[ Allocate arguments object");
+    if (function_in_register) {
+      __ push(rdi);
+    } else {
+      __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+    }
+    // The receiver is just before the parameters on the caller's stack.
+    int offset = scope()->num_parameters() * kPointerSize;
+    __ lea(rdx,
+           Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
+    __ push(rdx);
+    __ Push(Smi::FromInt(scope()->num_parameters()));
+    // Arguments to ArgumentsAccessStub:
+    //   function, receiver address, parameter count.
+    // The stub will rewrite receiver and parameter count if the previous
+    // stack frame was an arguments adapter frame.
+    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+    __ CallStub(&stub);
+    // Store new arguments object in both "arguments" and ".arguments" slots.
+    __ movq(rcx, rax);
+    Move(arguments->slot(), rax, rbx, rdx);
+    Slot* dot_arguments_slot =
+        scope()->arguments_shadow()->AsVariable()->slot();
+    Move(dot_arguments_slot, rcx, rbx, rdx);
   }
 
   { Comment cmnt(masm_, "[ Declarations");
@@ -1053,7 +1051,7 @@
   __ push(rcx);  // Enumerable.
   __ push(rbx);  // Current entry.
   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
-  __ CompareRoot(rax, Heap::kNullValueRootIndex);
+  __ SmiCompare(rax, Smi::FromInt(0));
   __ j(equal, loop_statement.continue_target());
   __ movq(rbx, rax);
 
@@ -2059,6 +2057,25 @@
 }
 
 
+void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
+    ZoneList<Expression*>* args) {
+  ASSERT(args->length() == 1);
+
+  VisitForValue(args->at(0), kAccumulator);
+
+  Label materialize_true, materialize_false;
+  Label* if_true = NULL;
+  Label* if_false = NULL;
+  PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+  // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
+  // used in a few functions in runtime.js which should not normally be hit by
+  // this compiler.
+  __ jmp(if_false);
+  Apply(context_, if_true, if_false);
+}
+
+
 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 1);
 
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index e744d53..c1954a8 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -782,8 +782,8 @@
   if (src->value() == 0) {
     testq(dst, dst);
   } else {
-    Move(kScratchRegister, src);
-    cmpq(dst, kScratchRegister);
+    Register constant_reg = GetSmiConstant(src);
+    cmpq(dst, constant_reg);
   }
 }
 
@@ -1977,10 +1977,16 @@
 }
 
 
+void MacroAssembler::AbortIfSmi(Register object) {
+  Condition is_smi = CheckSmi(object);
+  Assert(NegateCondition(is_smi), "Operand is a smi");
+}
+
+
 void MacroAssembler::AbortIfNotSmi(Register object) {
   Label ok;
   Condition is_smi = CheckSmi(object);
-  Assert(is_smi, "Operand not a smi");
+  Assert(is_smi, "Operand is not a smi");
 }
 
 
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index a294ad6..7083224 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -582,6 +582,9 @@
   // Abort execution if argument is not a number. Used in debug code.
   void AbortIfNotNumber(Register object);
 
+  // Abort execution if argument is a smi. Used in debug code.
+  void AbortIfSmi(Register object);
+
   // Abort execution if argument is not a smi. Used in debug code.
   void AbortIfNotSmi(Register object);
 
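Both helpers are debug-build guards; the usual pattern (see
GenerateIsStringWrapperSafeForDefaultValueOf above) is sketched below,
with map_reg as an illustrative register name:

    if (FLAG_debug_code) {
      __ AbortIfSmi(obj.reg());  // obj must hold a heap object here.
    }
    __ movq(map_reg, FieldOperand(obj.reg(), HeapObject::kMapOffset));
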
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 4c15715..7aaeab7 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -2039,30 +2039,6 @@
 }
 
 
-// TODO(1241006): Avoid having lazy compile stubs specialized by the
-// number of arguments. It is not needed anymore.
-Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
-  // Enter an internal frame.
-  __ EnterInternalFrame();
-
-  // Push a copy of the function onto the stack.
-  __ push(rdi);
-
-  __ push(rdi);  // function is also the parameter to the runtime call
-  __ CallRuntime(Runtime::kLazyCompile, 1);
-  __ pop(rdi);
-
-  // Tear down temporary frame.
-  __ LeaveInternalFrame();
-
-  // Do a tail-call of the compiled function.
-  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
-  __ jmp(rcx);
-
-  return GetCodeWithFlags(flags, "LazyCompileStub");
-}
-
-
 void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                            JSObject* interceptor_holder,
                                            LookupResult* lookup,
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index 47a55e6..8bfa51c 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -27,8 +27,6 @@
 
 #include <limits.h>
 
-#define USE_NEW_QUERY_CALLBACKS
-
 #include "v8.h"
 
 #include "api.h"
@@ -83,12 +81,23 @@
 }
 
 
+static void ExpectFalse(const char* code) {
+  ExpectBoolean(code, false);
+}
+
+
 static void ExpectObject(const char* code, Local<Value> expected) {
   Local<Value> result = CompileRun(code);
   CHECK(result->Equals(expected));
 }
 
 
+static void ExpectUndefined(const char* code) {
+  Local<Value> result = CompileRun(code);
+  CHECK(result->IsUndefined());
+}
+
+
 static int signature_callback_count;
 static v8::Handle<Value> IncrementingSignatureCallback(
     const v8::Arguments& args) {
@@ -11189,3 +11198,89 @@
   reresult = CompileRun("str2.charCodeAt(2);");
   CHECK_EQ(static_cast<int32_t>('e'), reresult->Int32Value());
 }
+
+
+// Failed access check callback that performs a GC on each invocation.
+void FailedAccessCheckCallbackGC(Local<v8::Object> target,
+                                 v8::AccessType type,
+                                 Local<v8::Value> data) {
+  i::Heap::CollectAllGarbage(true);
+}
+
+
+TEST(GCInFailedAccessCheckCallback) {
+  // Install a failed access check callback that performs a GC on each
+  // invocation, then force it to be called via accesses that fail the check.
+
+  v8::V8::Initialize();
+  v8::V8::SetFailedAccessCheckCallbackFunction(&FailedAccessCheckCallbackGC);
+
+  v8::HandleScope scope;
+
+  // Create an ObjectTemplate for global objects and install access
+  // check callbacks that will block access.
+  v8::Handle<v8::ObjectTemplate> global_template = v8::ObjectTemplate::New();
+  global_template->SetAccessCheckCallbacks(NamedGetAccessBlocker,
+                                           IndexedGetAccessBlocker,
+                                           v8::Handle<v8::Value>(),
+                                           false);
+
+  // Create a context and set an x property on its global object.
+  LocalContext context0(NULL, global_template);
+  context0->Global()->Set(v8_str("x"), v8_num(42));
+  v8::Handle<v8::Object> global0 = context0->Global();
+
+  // Create a context with a different security token so that the
+  // failed access check callback will be called on each access.
+  LocalContext context1(NULL, global_template);
+  context1->Global()->Set(v8_str("other"), global0);
+
+  // Get property with failed access check.
+  ExpectUndefined("other.x");
+
+  // Get element with failed access check.
+  ExpectUndefined("other[0]");
+
+  // Set property with failed access check.
+  v8::Handle<v8::Value> result = CompileRun("other.x = new Object()");
+  CHECK(result->IsObject());
+
+  // Set element with failed access check.
+  result = CompileRun("other[0] = new Object()");
+  CHECK(result->IsObject());
+
+  // Get property attribute with failed access check.
+  ExpectFalse("\'x\' in other");
+
+  // Get property attribute for element with failed access check.
+  ExpectFalse("0 in other");
+
+  // Delete property.
+  ExpectFalse("delete other.x");
+
+  // Delete element.
+  CHECK_EQ(false, global0->Delete(0));
+
+  // DefineAccessor.
+  CHECK_EQ(false,
+           global0->SetAccessor(v8_str("x"), GetXValue, NULL, v8_str("x")));
+
+  // Define JavaScript accessor.
+  ExpectUndefined("Object.prototype.__defineGetter__.call("
+                  "    other, \'x\', function() { return 42; })");
+
+  // LookupAccessor.
+  ExpectUndefined("Object.prototype.__lookupGetter__.call("
+                  "    other, \'x\')");
+
+  // HasLocalElement.
+  ExpectFalse("Object.prototype.hasOwnProperty.call(other, \'0\')");
+
+  CHECK_EQ(false, global0->HasRealIndexedProperty(0));
+  CHECK_EQ(false, global0->HasRealNamedProperty(v8_str("x")));
+  CHECK_EQ(false, global0->HasRealNamedCallbackProperty(v8_str("x")));
+
+  // Reset the failed access check callback so it does not influence
+  // the other tests.
+  v8::V8::SetFailedAccessCheckCallbackFunction(NULL);
+}
diff --git a/test/cctest/test-disasm-ia32.cc b/test/cctest/test-disasm-ia32.cc
index 40fadd8..25d2ec0 100644
--- a/test/cctest/test-disasm-ia32.cc
+++ b/test/cctest/test-disasm-ia32.cc
@@ -194,6 +194,8 @@
 
   __ rcl(edx, 1);
   __ rcl(edx, 7);
+  __ rcr(edx, 1);
+  __ rcr(edx, 7);
   __ sar(edx, 1);
   __ sar(edx, 6);
   __ sar_cl(edx);
diff --git a/test/mjsunit/bitops-info.js b/test/mjsunit/bitops-info.js
new file mode 100644
index 0000000..4660fdf
--- /dev/null
+++ b/test/mjsunit/bitops-info.js
@@ -0,0 +1,77 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function non_int32() {
+  return 2600822924;  // It's not a signed Int32.
+}
+
+function hidden_smi() {
+  return 46512102;  // It's a Smi.
+}
+
+function hidden_int32() {
+  return 1600822924;  // It's a signed Int32.
+}
+
+
+function f() {
+  var x = non_int32();  // Not a constant.
+  var y = hidden_smi();  // Not a constant.
+  var z = hidden_int32();
+  assertEquals(46512102 & 2600822924, 46512102 & x, "1");
+  assertEquals(1600822924 & 2600822924, 1600822924 & x, "2");
+  assertEquals(2600822924 & 2600822924, 2600822924 & x, "3");
+  assertEquals(46512102 & 46512102, 46512102 & y, "4");
+  assertEquals(1600822924 & 46512102, 1600822924 & y, "5");
+  assertEquals(2600822924 & 46512102, 2600822924 & y, "6");
+  assertEquals(46512102 & 1600822924, 46512102 & z, "7");
+  assertEquals(1600822924 & 1600822924, 1600822924 & z, "8");
+  assertEquals(2600822924 & 1600822924, 2600822924 & z, "9");
+  assertEquals(46512102 & 2600822924, y & x, "10");
+  assertEquals(1600822924 & 2600822924, z & x, "11");
+
+  assertEquals(46512102 & 2600822924, x & 46512102, "1rev");
+  assertEquals(1600822924 & 2600822924, x & 1600822924, "2rev");
+  assertEquals(2600822924 & 2600822924, x & 2600822924, "3rev");
+  assertEquals(46512102 & 46512102, y & 46512102, "4rev");
+  assertEquals(1600822924 & 46512102, y & 1600822924, "5rev");
+  assertEquals(2600822924 & 46512102, y & 2600822924, "6rev");
+  assertEquals(46512102 & 1600822924, z & 46512102, "7rev");
+  assertEquals(1600822924 & 1600822924, z & 1600822924, "8rev");
+  assertEquals(2600822924 & 1600822924, z & 2600822924, "9rev");
+  assertEquals(46512102 & 2600822924, x & y, "10rev");
+  assertEquals(1600822924 & 2600822924, x & z, "11rev");
+
+  assertEquals(2600822924 & 2600822924, x & x, "xx");
+  assertEquals(y, y & y, "yy");
+  assertEquals(z, z & z, "zz");
+}
+
+
+for (var i = 0; i < 5; i++) {
+  f();
+}
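
The constant 2600822924 exceeds kMaxInt (2^31 - 1), so it cannot be tracked
as an Integer32; bitwise operators must still observe its ECMA-262 ToInt32
truncation. A standalone C++ check of the wrap-around the test relies on
(a sketch, not V8 code):

    #include <cstdint>
    #include <cstdio>

    int main() {
      // ToInt32(2600822924) wraps modulo 2^32 into the signed range.
      uint32_t non_int32 = 2600822924u;
      int32_t truncated = static_cast<int32_t>(non_int32);  // -1694144372
      // Same bit pattern the JS expression 2600822924 & 46512102 works on.
      printf("%d\n", truncated & 46512102);
      return 0;
    }
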
diff --git a/test/mjsunit/for-in-delete.js b/test/mjsunit/for-in-delete.js
new file mode 100644
index 0000000..e9fc060
--- /dev/null
+++ b/test/mjsunit/for-in-delete.js
@@ -0,0 +1,50 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that properties deleted during a for-in iteration do not show up in
+// the for-in.
+
+function f(o, expected, del) {
+  var index = 0;
+  for (p in o) {
+    if (del) delete o[del];
+    assertEquals(expected[index], p);
+    index++;
+  }
+  assertEquals(expected.length, index);
+}
+
+var o = {};
+o.a = 1;
+o.b = 2;
+o.c = 3;
+o.d = 3;
+
+f(o, ['a', 'b', 'c', 'd']);
+f(o, ['a', 'b', 'c', 'd']);
+f(o, ['a', 'c', 'd'], 'b');
+f(o, ['a', 'c'], 'd');
diff --git a/test/mjsunit/fuzz-natives.js b/test/mjsunit/fuzz-natives.js
index 66841bb..11ac2e0 100644
--- a/test/mjsunit/fuzz-natives.js
+++ b/test/mjsunit/fuzz-natives.js
@@ -174,6 +174,9 @@
   // This function performs some checks compile time (it requires its first
   // argument to be a compile time smi).
   "_GetFromCache": true,
+
+  // This function expects its first argument to be a non-smi.
+  "_IsStringWrapperSafeForDefaultValueOf": true
 };
 
 var currentlyUncallable = {
diff --git a/test/mjsunit/mjsunit.status b/test/mjsunit/mjsunit.status
index ceb5e62..3c8cbdb 100644
--- a/test/mjsunit/mjsunit.status
+++ b/test/mjsunit/mjsunit.status
@@ -72,8 +72,4 @@
 # Skip all tests on MIPS.
 *: SKIP
 
-[ $arch == x64 ]
-# Stack manipulations in LiveEdit is implemented for ia32 only.
-debug-liveedit-check-stack: SKIP
-
 
diff --git a/test/mjsunit/regress/regress-760-1.js b/test/mjsunit/regress/regress-760-1.js
new file mode 100644
index 0000000..2e0cee5
--- /dev/null
+++ b/test/mjsunit/regress/regress-760-1.js
@@ -0,0 +1,49 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Check that when valueOf for a String object is overwritten, it is called
+// and its result is used when that object is concatenated with a string.
+
+// See: http://code.google.com/p/v8/issues/detail?id=760
+
+String.prototype.valueOf = function() { return 'y' };
+
+function test() {
+  var o = Object('x');
+  assertEquals('y', o + '');
+  assertEquals('y', '' + o);
+}
+
+for (var i = 0; i < 10; i++) {
+  var o = Object('x');
+  assertEquals('y', o + '');
+  assertEquals('y', '' + o);
+}
+
+for (var i = 0; i < 10; i++) {
+  test();
+}
diff --git a/test/mjsunit/regress/regress-760-2.js b/test/mjsunit/regress/regress-760-2.js
new file mode 100644
index 0000000..1b1cbfe
--- /dev/null
+++ b/test/mjsunit/regress/regress-760-2.js
@@ -0,0 +1,49 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Check that when valueOf for a String object is overwritten, it is called
+// and its result is used when that object is concatenated with a string.
+
+// See: http://code.google.com/p/v8/issues/detail?id=760
+
+function test() {
+  var o = Object('x');
+  o.valueOf = function() { return 'y' };
+  assertEquals('y', o + '');
+  assertEquals('y', '' + o);
+}
+
+for (var i = 0; i < 10; i++) {
+  var o = Object('x');
+  o.valueOf = function() { return 'y' };
+  assertEquals('y', o + '');
+  assertEquals('y', '' + o);
+}
+
+for (var i = 0; i < 10; i++) {
+  test();
+}
diff --git a/test/mjsunit/regress/regress-798.js b/test/mjsunit/regress/regress-798.js
new file mode 100644
index 0000000..423c883
--- /dev/null
+++ b/test/mjsunit/regress/regress-798.js
@@ -0,0 +1,113 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
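+// Test that an exception thrown while a getter or setter is running
+// produces a stack trace of the expected height (regression test for
+// issue 798).
+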
+var x = {};
+
+// Add property a with getter/setter.
+x.__defineGetter__("a", function() {
+  try {
+    y.x = 40;
+  } catch (e) {
+    assertEquals(3, e.stack.split('\n').length);
+  }
+  return 40;
+});
+
+x.__defineSetter__("a", function(val) {
+  try {
+    y.x = 40;
+  } catch(e) {
+    assertEquals(3, e.stack.split('\n').length);
+  }
+});
+
+// Add property b with getter/setter.
+function getB() {
+  try {
+    y.x = 30;
+  } catch (e) {
+    assertEquals(3, e.stack.split('\n').length);
+  }
+  return 30;
+}
+
+function setB(val) {
+  try {
+    y.x = 30;
+  } catch(e) {
+    assertEquals(3, e.stack.split('\n').length);
+  }
+}
+
+x.__defineGetter__("b", getB);
+x.__defineSetter__("b", setB);
+
+// Add property c with getter/setter.
+var descriptor = {
+  get: function() {
+    try {
+      y.x = 40;
+    } catch (e) {
+      assertEquals(3, e.stack.split('\n').length);
+    }
+    return 40;
+  },
+  set: function(val) {
+    try {
+      y.x = 40;
+    } catch(e) {
+      assertEquals(3, e.stack.split('\n').length);
+    }
+  }
+};
+
+Object.defineProperty(x, 'c', descriptor);
+
+// Check that the stack traces of exceptions thrown in the getters and
+// setters above have the expected height.
+x.a;
+x.b;
+x.c;
+x.a = 1;
+x.b = 1;
+x.c = 1;
+
+// Do the same with the getters/setters on a prototype object.
+var xx = {};
+xx.__proto__ = x;
+
+xx.a;
+xx.b;
+xx.c;
+xx.a = 1;
+xx.b = 1;
+xx.c = 1;
+
diff --git a/test/mjsunit/regress/regress-815.js b/test/mjsunit/regress/regress-815.js
new file mode 100644
index 0000000..803c0fb
--- /dev/null
+++ b/test/mjsunit/regress/regress-815.js
@@ -0,0 +1,49 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Issue 815 describes a situation in which the ARM code generator could
+// end up in a spilled scope in code that only worked in a register
+// allocated scope.  Test that this no longer happens.
+//
+// The code generated for unary + assumes that we are not in a spilled
+// scope.
+
+var o = new Object();
+
+// The code for the iterated-over object in for-in used to be emitted
+// in a spilled scope:
+for (x in +o) { }
+
+// Emitting code for the left hand side of a for-in.
+for (a[+o] in o) {}
+
+// The receiver in an obj[index](1, 2, 3) call:
+try {
+  o[+o](1, 2, 3);
+} catch(e) {
+  // It's OK as long as it does not hit an assert.
+}
diff --git a/tools/gc-nvp-trace-processor.py b/tools/gc-nvp-trace-processor.py
index 44aa0a2..f1f9dc0 100755
--- a/tools/gc-nvp-trace-processor.py
+++ b/tools/gc-nvp-trace-processor.py
@@ -38,7 +38,7 @@
 
 
 from __future__ import with_statement
-import sys, types, re, subprocess
+import sys, types, re, subprocess, math
 
 def flatten(l):
   flat = []
@@ -262,48 +262,61 @@
   ],
 ]
 
+def freduce(f, field, trace, init):
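+  # Folds f over the value of the given field across all trace records.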
+  return reduce(lambda t,r: f(t, r[field]), trace, init)
+
 def calc_total(trace, field):
-  return reduce(lambda t,r: t + r[field], trace, 0)
+  return freduce(lambda t,v: t + v, field, trace, 0)
 
 def calc_max(trace, field):
-  return reduce(lambda t,r: max(t, r[field]), trace, 0)
+  return freduce(lambda t,v: max(t, v), field, trace, 0)
+
+def count_nonzero(trace, field):
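+  # Counts the trace records whose value for the given field is nonzero.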
+  return freduce(lambda t,v: t if v == 0 else t + 1, field, trace, 0)
+
 
 def process_trace(filename):
   trace = parse_gc_trace(filename)
-  total_gc = calc_total(trace, 'pause')
-  max_gc = calc_max(trace, 'pause')
-  avg_gc = total_gc / len(trace)
 
-  total_sweep = calc_total(trace, 'sweep')
-  max_sweep = calc_max(trace, 'sweep')
-
-  total_mark = calc_total(trace, 'mark')
-  max_mark = calc_max(trace, 'mark')
-
+  marksweeps = filter(lambda r: r['gc'] == 'ms', trace)
+  markcompacts = filter(lambda r: r['gc'] == 'mc', trace)
   scavenges = filter(lambda r: r['gc'] == 's', trace)
-  total_scavenge = calc_total(scavenges, 'pause')
-  max_scavenge = calc_max(scavenges, 'pause')
-  avg_scavenge = total_scavenge / len(scavenges)
 
   charts = plot_all(plots, trace, filename)
 
+  def stats(out, prefix, trace, field):
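+    # Writes one HTML table row with the count, total, max and average of
+    # the field, plus the sample standard deviation when n > 1.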
+    n = len(trace)
+    total = calc_total(trace, field)
+    max_value = calc_max(trace, field)
+    avg = total / n if n > 0 else 0
+    if n > 1:
+      dev = math.sqrt(freduce(lambda t,r: (r - avg) ** 2, field, trace, 0) /
+                      (n - 1))
+    else:
+      dev = 0
+
+    out.write('<tr><td>%s</td><td>%d</td><td>%d</td>'
+              '<td>%d</td><td>%d [dev %f]</td></tr>' %
+              (prefix, n, total, max_value, avg, dev))
+
+
   with open(filename + '.html', 'w') as out:
     out.write('<html><body>')
-    out.write('<table><tr><td>')
-    out.write('Total in GC: <b>%d</b><br/>' % total_gc)
-    out.write('Max in GC: <b>%d</b><br/>' % max_gc)
-    out.write('Avg in GC: <b>%d</b><br/>' % avg_gc)
-    out.write('</td><td>')
-    out.write('Total in Scavenge: <b>%d</b><br/>' % total_scavenge)
-    out.write('Max in Scavenge: <b>%d</b><br/>' % max_scavenge)
-    out.write('Avg in Scavenge: <b>%d</b><br/>' % avg_scavenge)
-    out.write('</td><td>')
-    out.write('Total in Sweep: <b>%d</b><br/>' % total_sweep)
-    out.write('Max in Sweep: <b>%d</b><br/>' % max_sweep)
-    out.write('</td><td>')
-    out.write('Total in Mark: <b>%d</b><br/>' % total_mark)
-    out.write('Max in Mark: <b>%d</b><br/>' % max_mark)
-    out.write('</td></tr></table>')
+    out.write('<table>')
+    out.write('<tr><td>Phase</td><td>Count</td><td>Time (ms)</td><td>Max</td><td>Avg</td></tr>')
+    stats(out, 'Total in GC', trace, 'pause')
+    stats(out, 'Scavenge', scavenges, 'pause')
+    stats(out, 'MarkSweep', marksweeps, 'pause')
+    stats(out, 'MarkCompact', markcompacts, 'pause')
+    stats(out, 'Mark', filter(lambda r: r['mark'] != 0, trace), 'mark')
+    stats(out, 'Sweep', filter(lambda r: r['sweep'] != 0, trace), 'sweep')
+    stats(out, 'Flush Code', filter(lambda r: r['flushcode'] != 0, trace), 'flushcode')
+    stats(out, 'Compact', filter(lambda r: r['compact'] != 0, trace), 'compact')
+    out.write('</table>')
     for chart in charts:
       out.write('<img src="%s">' % chart)
       out.write('</body></html>')
diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp
index dbd94bf..47f9502 100644
--- a/tools/gyp/v8.gyp
+++ b/tools/gyp/v8.gyp
@@ -336,7 +336,6 @@
         '../../src/execution.h',
         '../../src/factory.cc',
         '../../src/factory.h',
-        '../../src/fast-codegen.h',
         '../../src/fast-dtoa.cc',
         '../../src/fast-dtoa.h',
         '../../src/flag-definitions.h',
@@ -396,6 +395,8 @@
         '../../src/natives.h',
         '../../src/objects-debug.cc',
         '../../src/objects-inl.h',
+        '../../src/objects-visiting.cc',
+        '../../src/objects-visiting.h',
         '../../src/objects.cc',
         '../../src/objects.h',
         '../../src/oprofile-agent.h',
@@ -485,7 +486,6 @@
             '../../src/arm',
           ],
           'sources': [
-            '../../src/fast-codegen.cc',
             '../../src/jump-target-light.h',
             '../../src/jump-target-light-inl.h',
             '../../src/jump-target-light.cc',
@@ -502,7 +502,6 @@
             '../../src/arm/cpu-arm.cc',
             '../../src/arm/debug-arm.cc',
             '../../src/arm/disasm-arm.cc',
-            '../../src/arm/fast-codegen-arm.cc',
             '../../src/arm/frames-arm.cc',
             '../../src/arm/frames-arm.h',
             '../../src/arm/full-codegen-arm.cc',
@@ -547,8 +546,6 @@
             '../../src/ia32/cpu-ia32.cc',
             '../../src/ia32/debug-ia32.cc',
             '../../src/ia32/disasm-ia32.cc',
-            '../../src/ia32/fast-codegen-ia32.cc',
-            '../../src/ia32/fast-codegen-ia32.h',
             '../../src/ia32/frames-ia32.cc',
             '../../src/ia32/frames-ia32.h',
             '../../src/ia32/full-codegen-ia32.cc',
@@ -569,7 +566,6 @@
             '../../src/x64',
           ],
           'sources': [
-            '../../src/fast-codegen.cc',
             '../../src/jump-target-heavy.h',
             '../../src/jump-target-heavy-inl.h',
             '../../src/jump-target-heavy.cc',
@@ -584,7 +580,6 @@
             '../../src/x64/cpu-x64.cc',
             '../../src/x64/debug-x64.cc',
             '../../src/x64/disasm-x64.cc',
-            '../../src/x64/fast-codegen-x64.cc',
             '../../src/x64/frames-x64.cc',
             '../../src/x64/frames-x64.h',
             '../../src/x64/full-codegen-x64.cc',
diff --git a/tools/oom_dump/README b/tools/oom_dump/README
new file mode 100644
index 0000000..5adbf65
--- /dev/null
+++ b/tools/oom_dump/README
@@ -0,0 +1,34 @@
+oom_dump extracts useful information from Google Chrome OOM minidumps.
+
+To build it, one needs a google-breakpad checkout
+(http://code.google.com/p/google-breakpad/).
+
+First, build and install breakpad itself.  For instructions, see the
+google-breakpad documentation; currently it is as easy as:
+
+  ./configure
+  make
+  sudo make install
+
+(One catch: breakpad installs its .so into /usr/local/lib, so you might
+need some additional tweaking to make it discoverable, for example a
+symlink in the /usr/lib directory.)
+
+The next step is to build v8.  Note: build the x64 version of v8 if
+you're on a 64-bit platform; otherwise you will get a link error when
+building oom_dump.
+
+The last step is to build oom_dump itself.  The following command should work:
+
+  cd <v8 working copy>/tools/oom_dump
+  scons BREAKPAD_DIR=<path to google-breakpad working copy>
+
+(You can also override the v8 working copy directory via V8_DIR, but the
+default of ../.. should work just fine.)
+
+If everything goes fine, oom_dump <path to minidump> should print
+some useful information about the OOM crash.
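+
+For example (assuming the minidump was saved as /tmp/chrome.dmp):
+
+  oom_dump /tmp/chrome.dmp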
diff --git a/tools/oom_dump/SConstruct b/tools/oom_dump/SConstruct
new file mode 100644
index 0000000..f228c89
--- /dev/null
+++ b/tools/oom_dump/SConstruct
@@ -0,0 +1,42 @@
+# Copyright 2010 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+#       notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+#       copyright notice, this list of conditions and the following
+#       disclaimer in the documentation and/or other materials provided
+#       with the distribution.
+#     * Neither the name of Google Inc. nor the names of its
+#       contributors may be used to endorse or promote products derived
+#       from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+vars = Variables('custom.py')
+vars.Add(PathVariable('BREAKPAD_DIR',
+                      'Path to checkout of google-breakpad project',
+                      '~/google-breakpad',
+                      PathVariable.PathIsDir))
+vars.Add(PathVariable('V8_DIR',
+                      'Path to checkout of v8 project',
+                      '../..',
+                      PathVariable.PathIsDir))
+
+env = Environment(variables = vars,
+                  CPPPATH = ['${BREAKPAD_DIR}/src', '${V8_DIR}/src'],
+                  LIBPATH = ['/usr/local/lib', '${V8_DIR}'])
+
+env.Program('oom_dump.cc', LIBS = ['breakpad', 'v8', 'pthread'])
diff --git a/tools/oom_dump/oom_dump.cc b/tools/oom_dump/oom_dump.cc
new file mode 100644
index 0000000..01f6005
--- /dev/null
+++ b/tools/oom_dump/oom_dump.cc
@@ -0,0 +1,297 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include <algorithm>
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <google_breakpad/processor/minidump.h>
+#include <processor/logging.h>
+
+#define ENABLE_DEBUGGER_SUPPORT
+
+#include <v8.h>
+
+namespace {
+
+using google_breakpad::Minidump;
+using google_breakpad::MinidumpContext;
+using google_breakpad::MinidumpThread;
+using google_breakpad::MinidumpThreadList;
+using google_breakpad::MinidumpException;
+using google_breakpad::MinidumpMemoryRegion;
+
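+// Returns the name of a v8::internal instance type, lazily filling the
+// lookup table from INSTANCE_TYPE_LIST on first use.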
+const char* InstanceTypeToString(int type) {
+  static char const* names[v8::internal::LAST_TYPE + 1] = {0};
+  if (names[v8::internal::STRING_TYPE] == NULL) {
+    using namespace v8::internal;
+#define SET(type) names[type] = #type;
+    INSTANCE_TYPE_LIST(SET)
+#undef SET
+  }
+  return names[type];
+}
+
+
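+// Reads the 32-bit pointer stored at base + 4 * offset and returns the
+// 32-bit value it points to.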
+u_int32_t ReadPointedValue(MinidumpMemoryRegion* region,
+                           u_int64_t base,
+                           int offset) {
+  u_int32_t ptr = 0;
+  CHECK(region->GetMemoryAtAddress(base + 4 * offset, &ptr));
+  u_int32_t value = 0;
+  CHECK(region->GetMemoryAtAddress(ptr, &value));
+  return value;
+}
+
+
+void ReadArray(MinidumpMemoryRegion* region,
+               u_int64_t array_ptr,
+               int size,
+               int* output) {
+  for (int i = 0; i < size; i++) {
+    u_int32_t value;
+    CHECK(region->GetMemoryAtAddress(array_ptr + 4 * i, &value));
+    output[i] = value;
+  }
+}
+
+
+void ReadArrayFrom(MinidumpMemoryRegion* region,
+                   u_int64_t base,
+                   int offset,
+                   int size,
+                   int* output) {
+  u_int32_t ptr = 0;
+  CHECK(region->GetMemoryAtAddress(base + 4 * offset, &ptr));
+  ReadArray(region, ptr, size, output);
+}
+
+
+double toM(int size) {
+  return size / (1024. * 1024.);
+}
+
+
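+// Comparator that orders indices by descending value in the underlying
+// array; used below to sort instance types by total size.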
+class IndirectSorter {
+ public:
+  explicit IndirectSorter(int* a) : a_(a) { }
+
+  bool operator() (int i0, int i1) {
+    return a_[i0] > a_[i1];
+  }
+
+ private:
+  int* a_;
+};
+
+void DumpHeapStats(const char *minidump_file) {
+  Minidump minidump(minidump_file);
+  CHECK(minidump.Read());
+
+  MinidumpException *exception = minidump.GetException();
+  CHECK(exception);
+
+  MinidumpContext* crash_context = exception->GetContext();
+  CHECK(crash_context);
+
+  u_int32_t exception_thread_id = 0;
+  CHECK(exception->GetThreadID(&exception_thread_id));
+
+  MinidumpThreadList* thread_list = minidump.GetThreadList();
+  CHECK(thread_list);
+
+  MinidumpThread* exception_thread =
+      thread_list->GetThreadByID(exception_thread_id);
+  CHECK(exception_thread);
+
+  const MDRawContextX86* contextX86 = crash_context->GetContextX86();
+  CHECK(contextX86);
+
+  const u_int32_t esp = contextX86->esp;
+
+  MinidumpMemoryRegion* memory_region = exception_thread->GetMemory();
+  CHECK(memory_region);
+
+  const u_int64_t last = memory_region->GetBase() + memory_region->GetSize();
+
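+  // Scan the crashed thread's stack for a pointer whose target starts with
+  // the 0xdecade00 marker, i.e. a pointer to the heap statistics record
+  // that V8 fills in on OOM.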
+  u_int64_t heap_stats_addr = 0;
+  for (u_int64_t addr = esp; addr < last; addr += 4) {
+    u_int32_t value = 0;
+    CHECK(memory_region->GetMemoryAtAddress(addr, &value));
+    if (value >= esp && value < last) {
+      u_int32_t value2 = 0;
+      CHECK(memory_region->GetMemoryAtAddress(value, &value2));
+      if (value2 == 0xdecade00) {
+        heap_stats_addr = addr;
+        break;
+      }
+    }
+  }
+  CHECK(heap_stats_addr);
+
+  // Read heap stats.
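+  // The offsets below mirror V8's internal heap statistics record; offsets
+  // 0 and 23 hold the begin/end sentinel markers.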
+
+#define READ_FIELD(offset) \
+  ReadPointedValue(memory_region, heap_stats_addr, offset)
+
+  CHECK(READ_FIELD(0) == 0xdecade00);
+  CHECK(READ_FIELD(23) == 0xdecade01);
+
+  const int new_space_size = READ_FIELD(1);
+  const int new_space_capacity = READ_FIELD(2);
+  const int old_pointer_space_size = READ_FIELD(3);
+  const int old_pointer_space_capacity = READ_FIELD(4);
+  const int old_data_space_size = READ_FIELD(5);
+  const int old_data_space_capacity = READ_FIELD(6);
+  const int code_space_size = READ_FIELD(7);
+  const int code_space_capacity = READ_FIELD(8);
+  const int map_space_size = READ_FIELD(9);
+  const int map_space_capacity = READ_FIELD(10);
+  const int cell_space_size = READ_FIELD(11);
+  const int cell_space_capacity = READ_FIELD(12);
+  const int lo_space_size = READ_FIELD(13);
+  const int global_handle_count = READ_FIELD(14);
+  const int weak_global_handle_count = READ_FIELD(15);
+  const int pending_global_handle_count = READ_FIELD(16);
+  const int near_death_global_handle_count = READ_FIELD(17);
+  const int destroyed_global_handle_count = READ_FIELD(18);
+  const int memory_allocator_size = READ_FIELD(19);
+  const int memory_allocator_capacity = READ_FIELD(20);
+#undef READ_FIELD
+
+  int objects_per_type[v8::internal::LAST_TYPE + 1] = {0};
+  ReadArrayFrom(memory_region, heap_stats_addr, 21,
+                v8::internal::LAST_TYPE + 1, objects_per_type);
+
+  int size_per_type[v8::internal::LAST_TYPE + 1] = {0};
+  ReadArrayFrom(memory_region, heap_stats_addr, 22, v8::internal::LAST_TYPE + 1,
+                size_per_type);
+
+  int js_global_objects =
+      objects_per_type[v8::internal::JS_GLOBAL_OBJECT_TYPE];
+  int js_builtins_objects =
+      objects_per_type[v8::internal::JS_BUILTINS_OBJECT_TYPE];
+  int js_global_proxies =
+      objects_per_type[v8::internal::JS_GLOBAL_PROXY_TYPE];
+
+  int indices[v8::internal::LAST_TYPE + 1];
+  for (int i = 0; i <= v8::internal::LAST_TYPE; i++) {
+    indices[i] = i;
+  }
+
+  std::stable_sort(indices, indices + sizeof(indices)/sizeof(indices[0]),
+                   IndirectSorter(size_per_type));
+
+  int total_size = 0;
+  for (int i = 0; i <= v8::internal::LAST_TYPE; i++) {
+    total_size += size_per_type[i];
+  }
+
+  // Print heap stats.
+
+  printf("exception thread ID: %d (%x)\n",
+         exception_thread_id, exception_thread_id);
+  printf("heap stats address: %p\n", (void*)heap_stats_addr);
+#define PRINT_INT_STAT(stat) \
+    printf("\t%-25s\t% 10d\n", #stat ":", stat);
+#define PRINT_MB_STAT(stat) \
+    printf("\t%-25s\t% 10.3f MB\n", #stat ":", toM(stat));
+  PRINT_MB_STAT(new_space_size);
+  PRINT_MB_STAT(new_space_capacity);
+  PRINT_MB_STAT(old_pointer_space_size);
+  PRINT_MB_STAT(old_pointer_space_capacity);
+  PRINT_MB_STAT(old_data_space_size);
+  PRINT_MB_STAT(old_data_space_capacity);
+  PRINT_MB_STAT(code_space_size);
+  PRINT_MB_STAT(code_space_capacity);
+  PRINT_MB_STAT(map_space_size);
+  PRINT_MB_STAT(map_space_capacity);
+  PRINT_MB_STAT(cell_space_size);
+  PRINT_MB_STAT(cell_space_capacity);
+  PRINT_MB_STAT(lo_space_size);
+  PRINT_INT_STAT(global_handle_count);
+  PRINT_INT_STAT(weak_global_handle_count);
+  PRINT_INT_STAT(pending_global_handle_count);
+  PRINT_INT_STAT(near_death_global_handle_count);
+  PRINT_INT_STAT(destroyed_global_handle_count);
+  PRINT_MB_STAT(memory_allocator_size);
+  PRINT_MB_STAT(memory_allocator_capacity);
+#undef PRINT_INT_STAT
+#undef PRINT_MB_STAT
+
+  printf("\n");
+
+  printf(
+      "\tJS_GLOBAL_OBJECT_TYPE/JS_BUILTINS_OBJECT_TYPE/JS_GLOBAL_PROXY_TYPE: "
+      "%d/%d/%d\n\n",
+      js_global_objects, js_builtins_objects, js_global_proxies);
+
+  int running_size = 0;
+  for (int i = 0; i <= v8::internal::LAST_TYPE; i++) {
+    int type = indices[i];
+    const char* name = InstanceTypeToString(type);
+    if (name == NULL) {
+      // Unknown instance type.  Check that there are no objects of that type.
+      CHECK(objects_per_type[type] == 0);
+      CHECK(size_per_type[type] == 0);
+      continue;
+    }
+    int size = size_per_type[type];
+    running_size += size;
+    printf("\t%-37s% 9d% 11.3f MB% 10.3f%%% 10.3f%%\n",
+           name, objects_per_type[type], toM(size),
+           100.*size/total_size, 100.*running_size/total_size);
+  }
+  printf("\t%-37s% 9d% 11.3f MB% 10.3f%%% 10.3f%%\n",
+         "total", 0, toM(total_size), 100., 100.);
+}
+
+}  // namespace
+
+int main(int argc, char **argv) {
+  BPLOG_INIT(&argc, &argv);
+
+  if (argc != 2) {
+    fprintf(stderr, "usage: %s <minidump>\n", argv[0]);
+    return 1;
+  }
+
+  DumpHeapStats(argv[1]);
+
+  return 0;
+}
diff --git a/tools/v8.xcodeproj/project.pbxproj b/tools/v8.xcodeproj/project.pbxproj
index b289454..0ca6a9d 100644
--- a/tools/v8.xcodeproj/project.pbxproj
+++ b/tools/v8.xcodeproj/project.pbxproj
@@ -240,15 +240,14 @@
 		9FA38BCF1175B30400C4CD55 /* full-codegen-arm.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FA38BCB1175B30400C4CD55 /* full-codegen-arm.cc */; };
 		9FA38BD01175B30400C4CD55 /* jump-target-arm.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FA38BCC1175B30400C4CD55 /* jump-target-arm.cc */; };
 		9FA38BD11175B30400C4CD55 /* virtual-frame-arm.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FA38BCD1175B30400C4CD55 /* virtual-frame-arm.cc */; };
-		9FBE03DF10BD409900F8BFBA /* fast-codegen.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FBE03DC10BD409900F8BFBA /* fast-codegen.cc */; };
-		9FBE03E210BD40EA00F8BFBA /* fast-codegen-ia32.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FBE03E110BD40EA00F8BFBA /* fast-codegen-ia32.cc */; };
-		9FBE03E510BD412600F8BFBA /* fast-codegen-arm.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FBE03E410BD412600F8BFBA /* fast-codegen-arm.cc */; };
 		9FC86ABD0F5FEDAC00F22668 /* oprofile-agent.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FC86ABB0F5FEDAC00F22668 /* oprofile-agent.cc */; };
 		9FC86ABE0F5FEDAC00F22668 /* oprofile-agent.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FC86ABB0F5FEDAC00F22668 /* oprofile-agent.cc */; };
 		C2BD4BD7120165460046BF9F /* dtoa.cc in Sources */ = {isa = PBXBuildFile; fileRef = C2BD4BD5120165460046BF9F /* dtoa.cc */; };
 		C2BD4BDB120165A70046BF9F /* fixed-dtoa.cc in Sources */ = {isa = PBXBuildFile; fileRef = C2BD4BD9120165A70046BF9F /* fixed-dtoa.cc */; };
 		C2BD4BE4120166180046BF9F /* fixed-dtoa.cc in Sources */ = {isa = PBXBuildFile; fileRef = C2BD4BD9120165A70046BF9F /* fixed-dtoa.cc */; };
 		C2BD4BE51201661F0046BF9F /* dtoa.cc in Sources */ = {isa = PBXBuildFile; fileRef = C2BD4BD5120165460046BF9F /* dtoa.cc */; };
+		C2D1E9731212F2BC00187A52 /* objects-visiting.cc in Sources */ = {isa = PBXBuildFile; fileRef = C2D1E9711212F27B00187A52 /* objects-visiting.cc */; };
+		C2D1E9741212F2CF00187A52 /* objects-visiting.cc in Sources */ = {isa = PBXBuildFile; fileRef = C2D1E9711212F27B00187A52 /* objects-visiting.cc */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXContainerItemProxy section */
@@ -613,17 +612,12 @@
 		9FA38BB01175B2D200C4CD55 /* virtual-frame-heavy-inl.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "virtual-frame-heavy-inl.h"; sourceTree = "<group>"; };
 		9FA38BB11175B2D200C4CD55 /* virtual-frame-inl.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "virtual-frame-inl.h"; sourceTree = "<group>"; };
 		9FA38BB21175B2D200C4CD55 /* virtual-frame-light-inl.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "virtual-frame-light-inl.h"; sourceTree = "<group>"; };
-		9FA38BC11175B2E500C4CD55 /* fast-codegen-ia32.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "fast-codegen-ia32.h"; path = "ia32/fast-codegen-ia32.h"; sourceTree = "<group>"; };
 		9FA38BC21175B2E500C4CD55 /* full-codegen-ia32.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "full-codegen-ia32.cc"; path = "ia32/full-codegen-ia32.cc"; sourceTree = "<group>"; };
 		9FA38BC31175B2E500C4CD55 /* jump-target-ia32.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "jump-target-ia32.cc"; path = "ia32/jump-target-ia32.cc"; sourceTree = "<group>"; };
 		9FA38BC41175B2E500C4CD55 /* virtual-frame-ia32.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "virtual-frame-ia32.cc"; path = "ia32/virtual-frame-ia32.cc"; sourceTree = "<group>"; };
 		9FA38BCB1175B30400C4CD55 /* full-codegen-arm.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "full-codegen-arm.cc"; path = "arm/full-codegen-arm.cc"; sourceTree = "<group>"; };
 		9FA38BCC1175B30400C4CD55 /* jump-target-arm.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "jump-target-arm.cc"; path = "arm/jump-target-arm.cc"; sourceTree = "<group>"; };
 		9FA38BCD1175B30400C4CD55 /* virtual-frame-arm.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "virtual-frame-arm.cc"; path = "arm/virtual-frame-arm.cc"; sourceTree = "<group>"; };
-		9FBE03DC10BD409900F8BFBA /* fast-codegen.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = "fast-codegen.cc"; sourceTree = "<group>"; };
-		9FBE03DD10BD409900F8BFBA /* fast-codegen.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "fast-codegen.h"; sourceTree = "<group>"; };
-		9FBE03E110BD40EA00F8BFBA /* fast-codegen-ia32.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "fast-codegen-ia32.cc"; path = "ia32/fast-codegen-ia32.cc"; sourceTree = "<group>"; };
-		9FBE03E410BD412600F8BFBA /* fast-codegen-arm.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "fast-codegen-arm.cc"; path = "arm/fast-codegen-arm.cc"; sourceTree = "<group>"; };
 		9FC86ABB0F5FEDAC00F22668 /* oprofile-agent.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = "oprofile-agent.cc"; sourceTree = "<group>"; };
 		9FC86ABC0F5FEDAC00F22668 /* oprofile-agent.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "oprofile-agent.h"; sourceTree = "<group>"; };
 		9FF7A28211A642EA0051B8F2 /* unbound-queue-inl.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "unbound-queue-inl.h"; sourceTree = "<group>"; };
@@ -632,6 +626,8 @@
 		C2BD4BD6120165460046BF9F /* dtoa.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = dtoa.h; sourceTree = "<group>"; };
 		C2BD4BD9120165A70046BF9F /* fixed-dtoa.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = "fixed-dtoa.cc"; sourceTree = "<group>"; };
 		C2BD4BDA120165A70046BF9F /* fixed-dtoa.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "fixed-dtoa.h"; sourceTree = "<group>"; };
+		C2D1E9711212F27B00187A52 /* objects-visiting.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = "objects-visiting.cc"; sourceTree = "<group>"; };
+		C2D1E9721212F27B00187A52 /* objects-visiting.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "objects-visiting.h"; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
@@ -726,6 +722,8 @@
 				897FF0F80E719B8F00D62E90 /* allocation.cc */,
 				897FF0F90E719B8F00D62E90 /* allocation.h */,
 				897FF0FA0E719B8F00D62E90 /* api.cc */,
+				C2D1E9711212F27B00187A52 /* objects-visiting.cc */,
+				C2D1E9721212F27B00187A52 /* objects-visiting.h */,
 				897FF0FB0E719B8F00D62E90 /* api.h */,
 				893986D40F29020C007D5254 /* apiutils.h */,
 				897FF0FC0E719B8F00D62E90 /* arguments.h */,
@@ -813,11 +811,6 @@
 				897FF1310E719B8F00D62E90 /* execution.h */,
 				897FF1320E719B8F00D62E90 /* factory.cc */,
 				897FF1330E719B8F00D62E90 /* factory.h */,
-				9FBE03E410BD412600F8BFBA /* fast-codegen-arm.cc */,
-				9FBE03E110BD40EA00F8BFBA /* fast-codegen-ia32.cc */,
-				9FA38BC11175B2E500C4CD55 /* fast-codegen-ia32.h */,
-				9FBE03DC10BD409900F8BFBA /* fast-codegen.cc */,
-				9FBE03DD10BD409900F8BFBA /* fast-codegen.h */,
 				9FA38BA11175B2D200C4CD55 /* fast-dtoa.cc */,
 				9FA38BA21175B2D200C4CD55 /* fast-dtoa.h */,
 				89471C7F0EB23EE400B6874B /* flag-definitions.h */,
@@ -1268,6 +1261,7 @@
 			isa = PBXSourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				C2D1E9731212F2BC00187A52 /* objects-visiting.cc in Sources */,
 				89A88DEC0E71A5FF0043BA31 /* accessors.cc in Sources */,
 				89A88DED0E71A6000043BA31 /* allocation.cc in Sources */,
 				89A88DEE0E71A6010043BA31 /* api.cc in Sources */,
@@ -1302,7 +1296,6 @@
 				89A88E020E71A65A0043BA31 /* dtoa-config.c in Sources */,
 				89A88E030E71A65B0043BA31 /* execution.cc in Sources */,
 				89A88E040E71A65D0043BA31 /* factory.cc in Sources */,
-				9FBE03E210BD40EA00F8BFBA /* fast-codegen-ia32.cc in Sources */,
 				9FA38BBC1175B2D200C4CD55 /* fast-dtoa.cc in Sources */,
 				89A88E050E71A65D0043BA31 /* flags.cc in Sources */,
 				9FA38BBD1175B2D200C4CD55 /* flow-graph.cc in Sources */,
@@ -1391,6 +1384,7 @@
 			isa = PBXSourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				C2D1E9741212F2CF00187A52 /* objects-visiting.cc in Sources */,
 				89F23C3F0E78D5B2006B2466 /* accessors.cc in Sources */,
 				89F23C400E78D5B2006B2466 /* allocation.cc in Sources */,
 				89F23C410E78D5B2006B2466 /* api.cc in Sources */,
@@ -1426,8 +1420,6 @@
 				89F23C550E78D5B2006B2466 /* dtoa-config.c in Sources */,
 				89F23C560E78D5B2006B2466 /* execution.cc in Sources */,
 				89F23C570E78D5B2006B2466 /* factory.cc in Sources */,
-				9FBE03E510BD412600F8BFBA /* fast-codegen-arm.cc in Sources */,
-				9FBE03DF10BD409900F8BFBA /* fast-codegen.cc in Sources */,
 				9FA38BB51175B2D200C4CD55 /* fast-dtoa.cc in Sources */,
 				89F23C580E78D5B2006B2466 /* flags.cc in Sources */,
 				9FA38BB61175B2D200C4CD55 /* flow-graph.cc in Sources */,
diff --git a/tools/visual_studio/v8_base.vcproj b/tools/visual_studio/v8_base.vcproj
index 2571b65..ef08773 100644
--- a/tools/visual_studio/v8_base.vcproj
+++ b/tools/visual_studio/v8_base.vcproj
@@ -464,18 +464,6 @@
 				RelativePath="..\..\src\factory.h"
 				>
 			</File>
-                        <File
-                                RelativePath="..\..\src\ia32\fast-codegen-ia32.cc"
-                                >
-                        </File>
-                        <File
-                                RelativePath="..\..\src\ia32\fast-codegen-ia32.h"
-                                >
-                        </File>
-                        <File
-                                RelativePath="..\..\src\fast-codegen.h"
-                                >
-                        </File>
 			<File
 				RelativePath="..\..\src\fast-dtoa.cc"
 				>
@@ -753,6 +741,14 @@
 				>
 			</File>
 			<File
+				RelativePath="..\..\src\objects-visiting.cc"
+				>
+			</File>
+			<File
+				RelativePath="..\..\src\objects-visiting.h"
+				>
+			</File>
+			<File
 				RelativePath="..\..\src\objects.cc"
 				>
 			</File>
diff --git a/tools/visual_studio/v8_base_arm.vcproj b/tools/visual_studio/v8_base_arm.vcproj
index a3c5970..aa1e822 100644
--- a/tools/visual_studio/v8_base_arm.vcproj
+++ b/tools/visual_studio/v8_base_arm.vcproj
@@ -432,18 +432,6 @@
 				RelativePath="..\..\src\factory.h"
 				>
 			</File>
-                        <File
-                                RelativePath="..\..\src\arm\fast-codegen-arm.cc"
-                                >
-                        </File>
-                        <File
-                                RelativePath="..\..\src\fast-codegen.cc"
-                                >
-                        </File>
-                        <File
-                                RelativePath="..\..\src\fast-codegen.h"
-                                >
-                        </File>
 			<File
 				RelativePath="..\..\src\flags.cc"
 				>
@@ -713,6 +701,14 @@
 				>
 			</File>
 			<File
+				RelativePath="..\..\src\objects-visiting.cc"
+				>
+			</File>
+			<File
+				RelativePath="..\..\src\objects-visiting.h"
+				>
+			</File>
+			<File
 				RelativePath="..\..\src\objects.cc"
 				>
 			</File>
diff --git a/tools/visual_studio/v8_base_x64.vcproj b/tools/visual_studio/v8_base_x64.vcproj
index 708b380..33c5394 100644
--- a/tools/visual_studio/v8_base_x64.vcproj
+++ b/tools/visual_studio/v8_base_x64.vcproj
@@ -425,18 +425,6 @@
 				>
 			</File>
 			<File
-				RelativePath="..\..\src\x64\fast-codegen-x64.cc"
-				>
-			</File>
-                        <File
-                                RelativePath="..\..\src\fast-codegen.cc"
-                                >
-                        </File>
-                        <File
-                                RelativePath="..\..\src\fast-codegen.h"
-                                >
-                        </File>
-			<File
 				RelativePath="..\..\src\flags.cc"
 				>
 			</File>
@@ -706,6 +694,14 @@
 				>
 			</File>
 			<File
+				RelativePath="..\..\src\objects-visiting.cc"
+				>
+			</File>
+			<File
+				RelativePath="..\..\src\objects-visiting.h"
+				>
+			</File>
+			<File
 				RelativePath="..\..\src\objects.cc"
 				>
 			</File>