Update V8 to r6238 as required by WebKit r75993

Change-Id: I12f638fcdd02d9102abab17d81c23cde63c08f22
diff --git a/src/SConscript b/src/SConscript
index b1f9bb6..0c8e140 100755
--- a/src/SConscript
+++ b/src/SConscript
@@ -1,4 +1,4 @@
-# Copyright 2008 the V8 project authors. All rights reserved.
+# Copyright 2011 the V8 project authors. All rights reserved.
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
@@ -85,6 +85,7 @@
     jsregexp.cc
     jump-target.cc
     lithium-allocator.cc
+    lithium.cc
     liveedit.cc
     log-utils.cc
     log.cc
@@ -211,6 +212,7 @@
     x64/full-codegen-x64.cc
     x64/ic-x64.cc
     x64/jump-target-x64.cc
+    x64/lithium-x64.cc
     x64/macro-assembler-x64.cc
     x64/regexp-macro-assembler-x64.cc
     x64/register-allocator-x64.cc
diff --git a/src/accessors.cc b/src/accessors.cc
index 43d54fe..c7d9cfe 100644
--- a/src/accessors.cc
+++ b/src/accessors.cc
@@ -126,8 +126,8 @@
       // This means one of the object's prototypes is a JSArray and
       // the object does not have a 'length' property.
       // Calling SetProperty causes an infinite loop.
-      return object->IgnoreAttributesAndSetLocalProperty(Heap::length_symbol(),
-                                                         value, NONE);
+      return object->SetLocalPropertyIgnoreAttributes(Heap::length_symbol(),
+                                                      value, NONE);
     }
   }
   return Top::Throw(*Factory::NewRangeError("invalid_array_length",
@@ -775,7 +775,7 @@
         if (index >= 0) {
           Handle<Object> arguments =
               Handle<Object>(frame->GetExpression(index));
-          if (!arguments->IsTheHole()) return *arguments;
+          if (!arguments->IsArgumentsMarker()) return *arguments;
         }
 
         // If there isn't an arguments variable in the stack, we need to
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index 8fdcf18..fbe97ad 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -2340,12 +2340,14 @@
                      const SBit s,
                      const Condition cond) {
   // vcmp(Dd, Dm) double precision floating point comparison.
+  // We set bit E, as we want any NaN to set the cumulative exception flag
+  // in the FPSCR.
   // Instruction details available in ARM DDI 0406A, A8-570.
   // cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0100 (19-16) |
-  // Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=? | 1(6) | M(5)=? | 0(4) | Vm(3-0)
+  // Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=1 | 1(6) | M(5)=? | 0(4) | Vm(3-0)
   ASSERT(CpuFeatures::IsEnabled(VFP3));
   emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 |
-       src1.code()*B12 | 0x5*B9 | B8 | B6 | src2.code());
+       src1.code()*B12 | 0x5*B9 | B8 | B7 | B6 | src2.code());
 }
 
 
@@ -2355,12 +2357,14 @@
                      const Condition cond) {
   // vcmp(Dd, Dm) double precision floating point comparison.
   // Instruction details available in ARM DDI 0406A, A8-570.
+  // We set bit E, as we want any NaN to set the cumulative exception flag
+  // in the FPSCR.
   // cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0101 (19-16) |
-  // Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=? | 1(6) | M(5)=? | 0(4) | 0000(3-0)
+  // Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=1 | 1(6) | M(5)=? | 0(4) | 0000(3-0)
   ASSERT(CpuFeatures::IsEnabled(VFP3));
   ASSERT(src2 == 0.0);
   emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 | B16 |
-       src1.code()*B12 | 0x5*B9 | B8 | B6);
+       src1.code()*B12 | 0x5*B9 | B8 | B7 | B6);
 }
 
 
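As an aside, the two hunks above hard-wire the E bit (bit 7) so that vcmp raises the Invalid Operation cumulative exception flag for any NaN operand (the simulator change further down mirrors this). A standalone sketch, assuming the B* names are single-bit masks (Bn == 1 << n) as in assembler-arm.h, of how that one bit changes the emitted instruction word:

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Bn stands in for the single-bit masks used by the assembler.
      const uint32_t B6 = 1u << 6, B7 = 1u << 7, B8 = 1u << 8, B9 = 1u << 9,
                     B12 = 1u << 12, B18 = 1u << 18, B20 = 1u << 20,
                     B23 = 1u << 23, B24 = 1u << 24;
      const uint32_t cond_al = 0xEu << 28;  // AL condition field
      const uint32_t d = 0, m = 1;          // vcmp d0, d1 (register codes)
      uint32_t quiet = cond_al | 0xE * B24 | B23 | 0x3 * B20 | B18 |
                       d * B12 | 0x5 * B9 | B8 | B6 | m;
      uint32_t signaling = quiet | B7;  // E(7)=1: NaN sets Invalid Operation
      printf("vcmp  (E=0): 0x%08x\n", static_cast<unsigned>(quiet));
      printf("vcmpe (E=1): 0x%08x\n", static_cast<unsigned>(signaling));
      return 0;
    }
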
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index cd7f07f..7e8c084 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -302,6 +302,8 @@
 static const uint32_t kVFPRoundingModeMask = 3 << 22;
 static const uint32_t kVFPFlushToZeroMask = 1 << 24;
 static const uint32_t kVFPRoundToMinusInfinityBits = 2 << 22;
+static const uint32_t kVFPZConditionFlagBit = 1 << 30;
+static const uint32_t kVFPInvalidExceptionBit = 1;
 
 // Coprocessor register
 struct CRegister {
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 577ac63..e72c5d3 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -2905,7 +2905,7 @@
   const Register prototype = r4;  // Prototype of the function.
   const Register scratch = r2;
   Label slow, loop, is_instance, is_not_instance, not_js_object;
-  if (!args_in_registers()) {
+  if (!HasArgsInRegisters()) {
     __ ldr(object, MemOperand(sp, 1 * kPointerSize));
     __ ldr(function, MemOperand(sp, 0));
   }
@@ -2923,7 +2923,7 @@
   __ cmp(map, ip);
   __ b(ne, &miss);
   __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
-  __ Ret(args_in_registers() ? 0 : 2);
+  __ Ret(HasArgsInRegisters() ? 0 : 2);
 
   __ bind(&miss);
   __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
@@ -2953,12 +2953,12 @@
   __ bind(&is_instance);
   __ mov(r0, Operand(Smi::FromInt(0)));
   __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
-  __ Ret(args_in_registers() ? 0 : 2);
+  __ Ret(HasArgsInRegisters() ? 0 : 2);
 
   __ bind(&is_not_instance);
   __ mov(r0, Operand(Smi::FromInt(1)));
   __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
-  __ Ret(args_in_registers() ? 0 : 2);
+  __ Ret(HasArgsInRegisters() ? 0 : 2);
 
   Label object_not_null, object_not_null_or_smi;
   __ bind(&not_js_object);
@@ -2972,25 +2972,25 @@
   __ cmp(scratch, Operand(Factory::null_value()));
   __ b(ne, &object_not_null);
   __ mov(r0, Operand(Smi::FromInt(1)));
-  __ Ret(args_in_registers() ? 0 : 2);
+  __ Ret(HasArgsInRegisters() ? 0 : 2);
 
   __ bind(&object_not_null);
   // Smi values are not instances of anything.
   __ BranchOnNotSmi(object, &object_not_null_or_smi);
   __ mov(r0, Operand(Smi::FromInt(1)));
-  __ Ret(args_in_registers() ? 0 : 2);
+  __ Ret(HasArgsInRegisters() ? 0 : 2);
 
   __ bind(&object_not_null_or_smi);
   // String values are not instances of anything.
   __ IsObjectJSStringType(object, scratch, &slow);
   __ mov(r0, Operand(Smi::FromInt(1)));
-  __ Ret(args_in_registers() ? 0 : 2);
+  __ Ret(HasArgsInRegisters() ? 0 : 2);
 
   // Slow-case.  Tail call builtin.
-  if (args_in_registers()) {
+  __ bind(&slow);
+  if (HasArgsInRegisters()) {
     __ Push(r0, r1);
   }
-  __ bind(&slow);
   __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
 }
 
@@ -3016,7 +3016,7 @@
   // through register r0. Use unsigned comparison to get negative
   // check for free.
   __ cmp(r1, r0);
-  __ b(cs, &slow);
+  __ b(hs, &slow);
 
   // Read the argument from the stack and return it.
   __ sub(r3, r0, r1);
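
The comparison above (cs and hs are the same ARM condition; the new spelling just matches the comment) relies on comparing Smi-tagged values unsigned, so that a negative key fails the bounds check as well. A minimal sketch of that trick on plain integers, not V8 code:

    #include <cstdint>
    #include <cstdio>

    // One unsigned comparison does both checks: a negative index,
    // reinterpreted as unsigned, is larger than every valid length.
    bool InBounds(int32_t index, int32_t length) {
      return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
    }

    int main() {
      printf("%d %d %d\n", InBounds(2, 4), InBounds(5, 4), InBounds(-1, 4));
      // Prints: 1 0 0
      return 0;
    }
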
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 4d061d2..d41c1d2 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -596,7 +596,7 @@
     // When using lazy arguments allocation, we store the hole value
     // as a sentinel indicating that the arguments object hasn't been
     // allocated yet.
-    frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
+    frame_->EmitPushRoot(Heap::kArgumentsMarkerRootIndex);
   } else {
     frame_->SpillAll();
     ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
@@ -623,7 +623,7 @@
     // has a local variable named 'arguments'.
     LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
     Register arguments = frame_->PopToRegister();
-    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+    __ LoadRoot(ip, Heap::kArgumentsMarkerRootIndex);
     __ cmp(arguments, ip);
     done.Branch(ne);
   }
@@ -1748,7 +1748,7 @@
   // named 'arguments' has been introduced.
   JumpTarget slow;
   Label done;
-  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+  __ LoadRoot(ip, Heap::kArgumentsMarkerRootIndex);
   __ cmp(ip, arguments_reg);
   slow.Branch(ne);
 
@@ -3255,7 +3255,7 @@
   // If the loaded value is the sentinel that indicates that we
   // haven't loaded the arguments object yet, we need to do it now.
   JumpTarget exit;
-  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+  __ LoadRoot(ip, Heap::kArgumentsMarkerRootIndex);
   __ cmp(tos, ip);
   exit.Branch(ne);
   frame_->Drop();
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 0275730..338e39c 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -221,10 +221,17 @@
   __ b(hs, &ok);
   StackCheckStub stub;
   __ CallStub(&stub);
+  // Record a mapping of this PC offset to the OSR id.  This is used to find
+  // the AST id from the unoptimized code in order to use it as a key into
+  // the deoptimization input data found in the optimized code.
+  RecordStackCheck(stmt->OsrEntryId());
+
   __ bind(&ok);
   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
+  // Record a mapping of the OSR id to this PC.  This is used if the OSR
+  // entry becomes the target of a bailout.  We don't expect it to be, but
+  // we want it to work if it is.
   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
-  RecordStackCheck(stmt->OsrEntryId());
 }
 
 
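The reordering above, together with the new comments, describes a table from the PC offset of each stack check to its OSR (AST) id. A hypothetical sketch of such a mapping, just to make the lookup direction concrete; the type and method names are illustrative, not V8's:

    #include <cstdint>
    #include <vector>

    struct StackCheckEntry {
      uint32_t pc_offset;  // offset of the stack check in unoptimized code
      int ast_id;          // OSR entry id recorded for that offset
    };

    class StackCheckTable {
     public:
      void Record(uint32_t pc_offset, int ast_id) {
        entries_.push_back({pc_offset, ast_id});
      }
      // The AST id found here is then used as the key into the deoptimization
      // input data of the optimized code. Returns -1 if nothing was recorded.
      int LookupAstId(uint32_t pc_offset) const {
        for (const StackCheckEntry& e : entries_) {
          if (e.pc_offset == pc_offset) return e.ast_id;
        }
        return -1;
      }

     private:
      std::vector<StackCheckEntry> entries_;
    };

    int main() {
      StackCheckTable table;
      table.Record(0x48, 7);                        // RecordStackCheck analogue
      return table.LookupAstId(0x48) == 7 ? 0 : 1;  // look up by PC offset
    }
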
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index 87efc92..e53e96d 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -472,151 +472,6 @@
 }
 
 
-class LGapNode: public ZoneObject {
- public:
-  explicit LGapNode(LOperand* operand)
-      : operand_(operand), resolved_(false), visited_id_(-1) { }
-
-  LOperand* operand() const { return operand_; }
-  bool IsResolved() const { return !IsAssigned() || resolved_; }
-  void MarkResolved() {
-    ASSERT(!IsResolved());
-    resolved_ = true;
-  }
-  int visited_id() const { return visited_id_; }
-  void set_visited_id(int id) {
-    ASSERT(id > visited_id_);
-    visited_id_ = id;
-  }
-
-  bool IsAssigned() const { return assigned_from_.is_set(); }
-  LGapNode* assigned_from() const { return assigned_from_.get(); }
-  void set_assigned_from(LGapNode* n) { assigned_from_.set(n); }
-
- private:
-  LOperand* operand_;
-  SetOncePointer<LGapNode> assigned_from_;
-  bool resolved_;
-  int visited_id_;
-};
-
-
-LGapResolver::LGapResolver(const ZoneList<LMoveOperands>* moves,
-                           LOperand* marker_operand)
-    : nodes_(4),
-      identified_cycles_(4),
-      result_(4),
-      marker_operand_(marker_operand),
-      next_visited_id_(0) {
-  for (int i = 0; i < moves->length(); ++i) {
-    LMoveOperands move = moves->at(i);
-    if (!move.IsRedundant()) RegisterMove(move);
-  }
-}
-
-
-const ZoneList<LMoveOperands>* LGapResolver::ResolveInReverseOrder() {
-  for (int i = 0; i < identified_cycles_.length(); ++i) {
-    ResolveCycle(identified_cycles_[i]);
-  }
-
-  int unresolved_nodes;
-  do {
-    unresolved_nodes = 0;
-    for (int j = 0; j < nodes_.length(); j++) {
-      LGapNode* node = nodes_[j];
-      if (!node->IsResolved() && node->assigned_from()->IsResolved()) {
-        AddResultMove(node->assigned_from(), node);
-        node->MarkResolved();
-      }
-      if (!node->IsResolved()) ++unresolved_nodes;
-    }
-  } while (unresolved_nodes > 0);
-  return &result_;
-}
-
-
-void LGapResolver::AddResultMove(LGapNode* from, LGapNode* to) {
-  AddResultMove(from->operand(), to->operand());
-}
-
-
-void LGapResolver::AddResultMove(LOperand* from, LOperand* to) {
-  result_.Add(LMoveOperands(from, to));
-}
-
-
-void LGapResolver::ResolveCycle(LGapNode* start) {
-  ZoneList<LOperand*> circle_operands(8);
-  circle_operands.Add(marker_operand_);
-  LGapNode* cur = start;
-  do {
-    cur->MarkResolved();
-    circle_operands.Add(cur->operand());
-    cur = cur->assigned_from();
-  } while (cur != start);
-  circle_operands.Add(marker_operand_);
-
-  for (int i = circle_operands.length() - 1; i > 0; --i) {
-    LOperand* from = circle_operands[i];
-    LOperand* to = circle_operands[i - 1];
-    AddResultMove(from, to);
-  }
-}
-
-
-bool LGapResolver::CanReach(LGapNode* a, LGapNode* b, int visited_id) {
-  ASSERT(a != b);
-  LGapNode* cur = a;
-  while (cur != b && cur->visited_id() != visited_id && cur->IsAssigned()) {
-    cur->set_visited_id(visited_id);
-    cur = cur->assigned_from();
-  }
-
-  return cur == b;
-}
-
-
-bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) {
-  ASSERT(a != b);
-  return CanReach(a, b, next_visited_id_++);
-}
-
-
-void LGapResolver::RegisterMove(LMoveOperands move) {
-  if (move.from()->IsConstantOperand()) {
-    // Constant moves should be last in the machine code. Therefore add them
-    // first to the result set.
-    AddResultMove(move.from(), move.to());
-  } else {
-    LGapNode* from = LookupNode(move.from());
-    LGapNode* to = LookupNode(move.to());
-    if (to->IsAssigned() && to->assigned_from() == from) {
-      move.Eliminate();
-      return;
-    }
-    ASSERT(!to->IsAssigned());
-    if (CanReach(from, to)) {
-      // This introduces a circle. Save.
-      identified_cycles_.Add(from);
-    }
-    to->set_assigned_from(from);
-  }
-}
-
-
-LGapNode* LGapResolver::LookupNode(LOperand* operand) {
-  for (int i = 0; i < nodes_.length(); ++i) {
-    if (nodes_[i]->operand()->Equals(operand)) return nodes_[i];
-  }
-
-  // No node found => create a new one.
-  LGapNode* result = new LGapNode(operand);
-  nodes_.Add(result);
-  return result;
-}
-
-
 Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
   return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
 }
@@ -1289,7 +1144,7 @@
 
 
 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
-  return DefineAsRegister(new LArgumentsLength(Use(length->value())));
+  return DefineAsRegister(new LArgumentsLength(UseRegister(length->value())));
 }
 
 
@@ -1306,6 +1161,14 @@
 }
 
 
+LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
+    HInstanceOfKnownGlobal* instr) {
+  LInstruction* result =
+      new LInstanceOfKnownGlobal(UseFixed(instr->value(), r0));
+  return MarkAsCall(DefineFixed(result, r0), instr);
+}
+
+
 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
   LOperand* function = UseFixed(instr->function(), r1);
   LOperand* receiver = UseFixed(instr->receiver(), r0);
@@ -1674,7 +1537,7 @@
 
 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
   return AssignEnvironment(new LBoundsCheck(UseRegisterAtStart(instr->index()),
-                                            Use(instr->length())));
+                                            UseRegister(instr->length())));
 }
 
 
@@ -1754,8 +1617,7 @@
 
 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
   LOperand* value = UseRegisterAtStart(instr->value());
-  LOperand* temp = TempRegister();
-  LInstruction* result = new LCheckInstanceType(value, temp);
+  LInstruction* result = new LCheckInstanceType(value);
   return AssignEnvironment(result);
 }
 
@@ -1917,7 +1779,7 @@
 
 
 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
-  bool needs_write_barrier = !instr->value()->type().IsSmi();
+  bool needs_write_barrier = instr->NeedsWriteBarrier();
 
   LOperand* obj = needs_write_barrier
       ? UseTempRegister(instr->object())
@@ -1927,17 +1789,11 @@
       ? UseTempRegister(instr->value())
       : UseRegister(instr->value());
 
-  // We only need a scratch register if we have a write barrier or we
-  // have a store into the properties array (not in-object-property).
-  LOperand* temp = (!instr->is_in_object() || needs_write_barrier)
-      ? TempRegister() : NULL;
-
   return new LStoreNamedField(obj,
                               instr->name(),
                               val,
                               instr->is_in_object(),
                               instr->offset(),
-                              temp,
                               needs_write_barrier,
                               instr->transition());
 }
@@ -2014,14 +1870,14 @@
 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
   LOperand* arguments = UseRegister(instr->arguments());
   LOperand* length = UseTempRegister(instr->length());
-  LOperand* index = Use(instr->index());
+  LOperand* index = UseRegister(instr->index());
   LInstruction* result = new LAccessArgumentsAt(arguments, length, index);
   return DefineAsRegister(AssignEnvironment(result));
 }
 
 
 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
-  LInstruction* result = new LTypeof(Use(instr->value()));
+  LInstruction* result = new LTypeof(UseRegisterAtStart(instr->value()));
   return MarkAsCall(DefineFixed(result, r0), instr);
 }
 
diff --git a/src/arm/lithium-arm.h b/src/arm/lithium-arm.h
index 2f8cc1c..4ddb281 100644
--- a/src/arm/lithium-arm.h
+++ b/src/arm/lithium-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -30,6 +30,7 @@
 
 #include "hydrogen.h"
 #include "lithium-allocator.h"
+#include "lithium.h"
 #include "safepoint-table.h"
 
 namespace v8 {
@@ -62,6 +63,7 @@
 //     LDivI
 //     LInstanceOf
 //     LInstanceOfAndBranch
+//     LInstanceOfKnownGlobal
 //     LLoadKeyedFastElement
 //     LLoadKeyedGeneric
 //     LModI
@@ -204,6 +206,7 @@
   V(Goto)                                       \
   V(InstanceOf)                                 \
   V(InstanceOfAndBranch)                        \
+  V(InstanceOfKnownGlobal)                      \
   V(Integer32ToDouble)                          \
   V(IsNull)                                     \
   V(IsNullAndBranch)                            \
@@ -329,32 +332,6 @@
 };
 
 
-class LGapNode;
-
-
-class LGapResolver BASE_EMBEDDED {
- public:
-  LGapResolver(const ZoneList<LMoveOperands>* moves, LOperand* marker_operand);
-  const ZoneList<LMoveOperands>* ResolveInReverseOrder();
-
- private:
-  LGapNode* LookupNode(LOperand* operand);
-  bool CanReach(LGapNode* a, LGapNode* b, int visited_id);
-  bool CanReach(LGapNode* a, LGapNode* b);
-  void RegisterMove(LMoveOperands move);
-  void AddResultMove(LOperand* from, LOperand* to);
-  void AddResultMove(LGapNode* from, LGapNode* to);
-  void ResolveCycle(LGapNode* start);
-
-  ZoneList<LGapNode*> nodes_;
-  ZoneList<LGapNode*> identified_cycles_;
-  ZoneList<LMoveOperands> result_;
-  LOperand* marker_operand_;
-  int next_visited_id_;
-  int bailout_after_ast_id_;
-};
-
-
 class LParallelMove : public ZoneObject {
  public:
   LParallelMove() : move_operands_(4) { }
@@ -993,6 +970,19 @@
 };
 
 
+class LInstanceOfKnownGlobal: public LUnaryOperation {
+ public:
+  explicit LInstanceOfKnownGlobal(LOperand* left)
+      : LUnaryOperation(left) { }
+
+  DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
+                               "instance-of-known-global")
+  DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
+
+  Handle<JSFunction> function() const { return hydrogen()->function(); }
+};
+
+
 class LBoundsCheck: public LBinaryOperation {
  public:
   LBoundsCheck(LOperand* index, LOperand* length)
@@ -1548,13 +1538,11 @@
                    LOperand* val,
                    bool in_object,
                    int offset,
-                   LOperand* temp,
                    bool needs_write_barrier,
                    Handle<Map> transition)
       : LStoreNamed(obj, name, val),
         is_in_object_(in_object),
         offset_(offset),
-        temp_(temp),
         needs_write_barrier_(needs_write_barrier),
         transition_(transition) { }
 
@@ -1562,7 +1550,6 @@
 
   bool is_in_object() { return is_in_object_; }
   int offset() { return offset_; }
-  LOperand* temp() { return temp_; }
   bool needs_write_barrier() { return needs_write_barrier_; }
   Handle<Map> transition() const { return transition_; }
   void set_transition(Handle<Map> map) { transition_ = map; }
@@ -1570,7 +1557,6 @@
  private:
   bool is_in_object_;
   int offset_;
-  LOperand* temp_;
   bool needs_write_barrier_;
   Handle<Map> transition_;
 };
@@ -1638,8 +1624,7 @@
 
 class LCheckInstanceType: public LUnaryOperation {
  public:
-  LCheckInstanceType(LOperand* use, LOperand* temp)
-      : LUnaryOperation(use), temp_(temp) { }
+  explicit LCheckInstanceType(LOperand* use) : LUnaryOperation(use) { }
 
   DECLARE_CONCRETE_INSTRUCTION(CheckInstanceType, "check-instance-type")
   DECLARE_HYDROGEN_ACCESSOR(CheckInstanceType)
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index bb2461c..f53cebb 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -1076,7 +1076,16 @@
     EmitBranch(true_block, false_block, nz);
   } else if (r.IsDouble()) {
     DoubleRegister reg = ToDoubleRegister(instr->input());
+    Register scratch = scratch0();
+
+    // Test for the double value. Zero and NaN are false.
+    // Clear the Invalid cumulative exception flags.
+    __ ClearFPSCRBits(kVFPInvalidExceptionBit, scratch);
     __ vcmp(reg, 0.0);
+    // Retrieve the exception and status flags and
+    // check for zero or an invalid exception.
+    __ vmrs(scratch);
+    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPInvalidExceptionBit));
     EmitBranch(true_block, false_block, ne);
   } else {
     ASSERT(r.IsTagged());
@@ -1103,7 +1112,7 @@
       __ tst(reg, Operand(kSmiTagMask));
       __ b(eq, true_label);
 
-      // Test for double values. Zero is false.
+      // Test for double values. Zero and NaN are false.
       Label call_stub;
       DoubleRegister dbl_scratch = d0;
       Register scratch = scratch0();
@@ -1113,8 +1122,14 @@
       __ b(ne, &call_stub);
       __ sub(ip, reg, Operand(kHeapObjectTag));
       __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
+      // Clear the Invalid cumulative exception flags.
+      __ ClearFPSCRBits(kVFPInvalidExceptionBit, scratch);
       __ vcmp(dbl_scratch, 0.0);
-      __ b(eq, false_label);
+      // Retrieve the exception and status flags and
+      // check for zero or an invalid exception.
+      __ vmrs(scratch);
+      __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPInvalidExceptionBit));
+      __ b(ne, false_label);
       __ b(true_label);
 
       // The conversion stub doesn't cause garbage collections so it's
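
Both branch sequences in the hunks above now treat NaN the same as zero when a double is used as a branch condition: the vcmp with the E bit set records an Invalid Operation exception for NaN, and the tst checks that flag together with the Z condition flag. The intended semantics, as a tiny sketch:

    #include <cassert>
    #include <cmath>

    // A double used as a condition is "false" if it is zero or NaN;
    // every other value (including infinities) is "true".
    static bool DoubleIsTrue(double v) { return v != 0.0 && !std::isnan(v); }

    int main() {
      assert(!DoubleIsTrue(0.0));
      assert(!DoubleIsTrue(std::nan("")));
      assert(DoubleIsTrue(-2.5));
      return 0;
    }
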
@@ -1431,6 +1446,10 @@
 }
 
 
+void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
+  Abort("DoInstanceOfKnownGlobal unimplemented.");
+}
+
 
 static Condition ComputeCompareCondition(Token::Value op) {
   switch (op) {
@@ -1577,17 +1596,76 @@
 
 
 void LCodeGen::DoLoadElements(LLoadElements* instr) {
-  Abort("DoLoadElements unimplemented.");
+  ASSERT(instr->result()->Equals(instr->input()));
+  Register reg = ToRegister(instr->input());
+  Register scratch = scratch0();
+
+  __ ldr(reg, FieldMemOperand(reg, JSObject::kElementsOffset));
+  if (FLAG_debug_code) {
+    Label done;
+    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
+    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
+    __ cmp(scratch, ip);
+    __ b(eq, &done);
+    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
+    __ cmp(scratch, ip);
+    __ Check(eq, "Check for fast elements failed.");
+    __ bind(&done);
+  }
 }
 
 
 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
-  Abort("DoAccessArgumentsAt unimplemented.");
+  Register arguments = ToRegister(instr->arguments());
+  Register length = ToRegister(instr->length());
+  Register index = ToRegister(instr->index());
+  Register result = ToRegister(instr->result());
+
+  // Bail out if index is not a valid argument index. Use an unsigned check
+  // to get the negative check for free.
+  __ sub(length, length, index, SetCC);
+  DeoptimizeIf(ls, instr->environment());
+
+  // There are two words between the frame pointer and the last argument.
+  // Subtracting from length accounts for one of them add one more.
+  __ add(length, length, Operand(1));
+  __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
 }
 
 
 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
-  Abort("DoLoadKeyedFastElement unimplemented.");
+  Register elements = ToRegister(instr->elements());
+  Register key = EmitLoadRegister(instr->key(), scratch0());
+  Register result;
+  Register scratch = scratch0();
+
+  if (instr->load_result() != NULL) {
+    result = ToRegister(instr->load_result());
+  } else {
+    result = ToRegister(instr->result());
+    ASSERT(result.is(elements));
+  }
+
+  // Load the result.
+  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
+  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));
+
+  Representation r = instr->hydrogen()->representation();
+  if (r.IsInteger32()) {
+    // Untag and check for smi.
+    __ SmiUntag(result);
+    DeoptimizeIf(cs, instr->environment());
+  } else if (r.IsDouble()) {
+    EmitNumberUntagD(result,
+                     ToDoubleRegister(instr->result()),
+                     instr->environment());
+  } else {
+    // Check for the hole value.
+    ASSERT(r.IsTagged());
+    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
+    __ cmp(result, scratch);
+    DeoptimizeIf(eq, instr->environment());
+  }
 }
 
 
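DoAccessArgumentsAt above loads the argument from (length - index + 1) pointer-size words above the arguments pointer: two words sit between the frame pointer and the last argument, and the subtraction of the index absorbs one of them. A worked example of that arithmetic (the 4-byte pointer size is only assumed for the printout):

    #include <cstdio>

    int main() {
      const int kPointerSize = 4;  // assumed 32-bit pointers, as on ARM
      const int length = 3;        // number of arguments
      for (int index = 0; index < length; ++index) {
        // Mirrors: length -= index; length += 1; load at length words.
        int words = (length - index) + 1;
        printf("argument %d loads from offset %d bytes\n",
               index, words * kPointerSize);
      }
      // The last argument (index == length - 1) comes out two words above the
      // arguments pointer, matching the two intervening words the comment in
      // the hunk describes.
      return 0;
    }
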
@@ -1601,12 +1679,41 @@
 
 
 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
-  Abort("DoArgumentsElements unimplemented.");
+  Register scratch = scratch0();
+  Register result = ToRegister(instr->result());
+
+  // Check if the calling frame is an arguments adaptor frame.
+  Label done, adapted;
+  __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
+  __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+
+  // Result is the frame pointer for the frame if not adapted and for the real
+  // frame below the adaptor frame if adapted.
+  __ mov(result, fp, LeaveCC, ne);
+  __ mov(result, scratch, LeaveCC, eq);
 }
 
 
 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
-  Abort("DoArgumentsLength unimplemented.");
+  Register elem = ToRegister(instr->input());
+  Register result = ToRegister(instr->result());
+
+  Label done;
+
+  // If no arguments adaptor frame the number of arguments is fixed.
+  __ cmp(fp, elem);
+  __ mov(result, Operand(scope()->num_parameters()));
+  __ b(eq, &done);
+
+  // Arguments adaptor frame present. Get argument length from there.
+  __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  __ ldr(result,
+         MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
+  __ SmiUntag(result);
+
+  // Argument length is in result register.
+  __ bind(&done);
 }
 
 
@@ -1718,7 +1825,12 @@
 
 
 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
-  Abort("DoCallKeyed unimplemented.");
+  ASSERT(ToRegister(instr->result()).is(r0));
+
+  int arity = instr->arity();
+  Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 }
 
 
@@ -1746,7 +1858,13 @@
 
 
 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
-  Abort("DoCallGlobal unimplemented.");
+  ASSERT(ToRegister(instr->result()).is(r0));
+
+  int arity = instr->arity();
+  Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+  __ mov(r2, Operand(instr->name()));
+  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
+  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 }
 
 
@@ -1773,7 +1891,34 @@
 
 
 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
-  Abort("DoStoreNamedField unimplemented.");
+  Register object = ToRegister(instr->object());
+  Register value = ToRegister(instr->value());
+  Register scratch = scratch0();
+  int offset = instr->offset();
+
+  ASSERT(!object.is(value));
+
+  if (!instr->transition().is_null()) {
+    __ mov(scratch, Operand(instr->transition()));
+    __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+  }
+
+  // Do the store.
+  if (instr->is_in_object()) {
+    __ str(value, FieldMemOperand(object, offset));
+    if (instr->needs_write_barrier()) {
+      // Update the write barrier for the object for in-object properties.
+      __ RecordWrite(object, Operand(offset), value, scratch);
+    }
+  } else {
+    __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
+    __ str(value, FieldMemOperand(scratch, offset));
+    if (instr->needs_write_barrier()) {
+      // Update the write barrier for the properties array.
+      // object is used as a scratch register.
+      __ RecordWrite(scratch, Operand(offset), value, object);
+    }
+  }
 }
 
 
@@ -1789,13 +1934,34 @@
 
 
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
-  __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
+  __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
   DeoptimizeIf(hs, instr->environment());
 }
 
 
 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
-  Abort("DoStoreKeyedFastElement unimplemented.");
+  Register value = ToRegister(instr->value());
+  Register elements = ToRegister(instr->object());
+  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
+  Register scratch = scratch0();
+
+  // Do the store.
+  if (instr->key()->IsConstantOperand()) {
+    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
+    int offset =
+        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
+    __ str(value, FieldMemOperand(elements, offset));
+  } else {
+    __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
+    __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
+  }
+
+  if (instr->hydrogen()->NeedsWriteBarrier()) {
+    // Compute address of modified element and store it into key register.
+    __ add(key, scratch, Operand(FixedArray::kHeaderSize));
+    __ RecordWrite(elements, key, value);
+  }
 }
 
 
@@ -1939,7 +2105,13 @@
 
 
 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
-  Abort("DoSmiUntag unimplemented.");
+  LOperand* input = instr->input();
+  ASSERT(input->IsRegister() && input->Equals(instr->result()));
+  if (instr->needs_check()) {
+    __ tst(ToRegister(input), Operand(kSmiTagMask));
+    DeoptimizeIf(ne, instr->environment());
+  }
+  __ SmiUntag(ToRegister(input));
 }
 
 
@@ -2108,7 +2280,26 @@
 
 
 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
-  Abort("DoCheckInstanceType unimplemented.");
+  Register input = ToRegister(instr->input());
+  Register scratch = scratch0();
+  InstanceType first = instr->hydrogen()->first();
+  InstanceType last = instr->hydrogen()->last();
+
+  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
+  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
+  __ cmp(scratch, Operand(first));
+
+  // If there is only one type in the interval, check for equality.
+  if (first == last) {
+    DeoptimizeIf(ne, instr->environment());
+  } else {
+    DeoptimizeIf(lo, instr->environment());
+    // Omit check for the last type.
+    if (last != LAST_TYPE) {
+      __ cmp(scratch, Operand(last));
+      DeoptimizeIf(hi, instr->environment());
+    }
+  }
 }
 
 
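DoCheckInstanceType deoptimizes unless the instance type lies in [first, last], using a single equality check for a one-element interval and skipping the upper bound when last is LAST_TYPE. The same decision logic written out as a sketch (the numeric values are stand-ins, and the generated code uses unsigned conditions):

    #include <cassert>

    // Mirrors the deopt conditions above: equality for a one-element range,
    // otherwise a lower-bound check plus an upper-bound check that is
    // omitted when the range is open-ended at LAST_TYPE.
    bool InstanceTypeInRange(int type, int first, int last, int last_type) {
      if (first == last) return type == first;
      if (type < first) return false;            // DeoptimizeIf(lo, ...)
      return last == last_type || type <= last;  // DeoptimizeIf(hi, ...)
    }

    int main() {
      const int kLastType = 0xFF;  // stand-in for LAST_TYPE
      assert(InstanceTypeInRange(5, 5, 5, kLastType));
      assert(!InstanceTypeInRange(6, 5, 5, kLastType));
      assert(InstanceTypeInRange(7, 5, 9, kLastType));
      assert(InstanceTypeInRange(200, 5, kLastType, kLastType));
      return 0;
    }
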
@@ -2218,22 +2409,108 @@
 
 
 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
-  Abort("DoRegExpLiteral unimplemented.");
+  Label materialized;
+  // Registers will be used as follows:
+  // r3 = JS function.
+  // r7 = literals array.
+  // r1 = regexp literal.
+  // r0 = regexp literal clone.
+  // r2 and r4-r6 are used as temporaries.
+  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
+  int literal_offset = FixedArray::kHeaderSize +
+      instr->hydrogen()->literal_index() * kPointerSize;
+  __ ldr(r1, FieldMemOperand(r7, literal_offset));
+  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+  __ cmp(r1, ip);
+  __ b(ne, &materialized);
+
+  // Create regexp literal using runtime function
+  // Result will be in r0.
+  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
+  __ mov(r5, Operand(instr->hydrogen()->pattern()));
+  __ mov(r4, Operand(instr->hydrogen()->flags()));
+  __ Push(r7, r6, r5, r4);
+  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
+  __ mov(r1, r0);
+
+  __ bind(&materialized);
+  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+  Label allocated, runtime_allocate;
+
+  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
+  __ jmp(&allocated);
+
+  __ bind(&runtime_allocate);
+  __ mov(r0, Operand(Smi::FromInt(size)));
+  __ Push(r1, r0);
+  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
+  __ pop(r1);
+
+  __ bind(&allocated);
+  // Copy the content into the newly allocated memory.
+  // (Unroll copy loop once for better throughput).
+  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
+    __ ldr(r3, FieldMemOperand(r1, i));
+    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
+    __ str(r3, FieldMemOperand(r0, i));
+    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
+  }
+  if ((size % (2 * kPointerSize)) != 0) {
+    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
+    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
+  }
 }
 
 
 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
-  Abort("DoFunctionLiteral unimplemented.");
+  // Use the fast case closure allocation code that allocates in new
+  // space for nested functions that don't need literals cloning.
+  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
+  bool pretenure = instr->hydrogen()->pretenure();
+  if (shared_info->num_literals() == 0 && !pretenure) {
+    FastNewClosureStub stub;
+    __ mov(r1, Operand(shared_info));
+    __ push(r1);
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  } else {
+    __ mov(r2, Operand(shared_info));
+    __ mov(r1, Operand(pretenure
+                       ? Factory::true_value()
+                       : Factory::false_value()));
+    __ Push(cp, r2, r1);
+    CallRuntime(Runtime::kNewClosure, 3, instr);
+  }
 }
 
 
 void LCodeGen::DoTypeof(LTypeof* instr) {
-  Abort("DoTypeof unimplemented.");
+  Register input = ToRegister(instr->input());
+  __ push(input);
+  CallRuntime(Runtime::kTypeof, 1, instr);
 }
 
 
 void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
-  Abort("DoTypeofIs unimplemented.");
+  Register input = ToRegister(instr->input());
+  Register result = ToRegister(instr->result());
+  Label true_label;
+  Label false_label;
+  Label done;
+
+  Condition final_branch_condition = EmitTypeofIs(&true_label,
+                                                  &false_label,
+                                                  input,
+                                                  instr->type_literal());
+  __ b(final_branch_condition, &true_label);
+  __ bind(&false_label);
+  __ LoadRoot(result, Heap::kFalseValueRootIndex);
+  __ b(&done);
+
+  __ bind(&true_label);
+  __ LoadRoot(result, Heap::kTrueValueRootIndex);
+
+  __ bind(&done);
 }
 
 
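The regexp-literal clone in the hunk above copies the boilerplate object two pointer-size words at a time, with a fix-up for an odd trailing word. A rough byte-level equivalent of that copy pattern, assuming a 32-bit pointer size and a size that is a multiple of kPointerSize:

    #include <cstdint>
    #include <cstring>

    void CopyObjectBody(uint8_t* dst, const uint8_t* src, int size) {
      const int kPointerSize = 4;  // assumed 32-bit pointer size
      int i = 0;
      // Unrolled: move two words per iteration for better throughput.
      for (; i <= size - 2 * kPointerSize; i += 2 * kPointerSize) {
        std::memcpy(dst + i, src + i, 2 * kPointerSize);
      }
      // At most one odd word remains when size is not a multiple of
      // 2 * kPointerSize.
      if (i < size) std::memcpy(dst + i, src + i, kPointerSize);
    }

    int main() {
      uint8_t src[20] = {1, 2, 3}, dst[20] = {0};
      CopyObjectBody(dst, src, 20);  // five words: two pairs plus one odd word
      return dst[0] == 1 ? 0 : 1;
    }
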
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 4a13146..5cba955 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -519,6 +519,13 @@
 }
 
 
+void MacroAssembler::ClearFPSCRBits(uint32_t bits_to_clear, Register scratch) {
+  vmrs(scratch);
+  bic(scratch, scratch, Operand(bits_to_clear));
+  vmsr(scratch);
+}
+
+
 void MacroAssembler::EnterFrame(StackFrame::Type type) {
   // r0-r3: preserved
   stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
@@ -1795,7 +1802,7 @@
   }
 #endif
   // Disable stub call restrictions to always allow calls to abort.
-  set_allow_stub_calls(true);
+  AllowStubCallsScope allow_scope(this, true);
 
   mov(r0, Operand(p0));
   push(r0);
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index 97bbb2f..02bc384 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -243,6 +243,9 @@
             const MemOperand& dst,
             Condition cond = al);
 
+  // Clear FPSCR bits.
+  void ClearFPSCRBits(uint32_t bits_to_clear, Register scratch);
+
   // ---------------------------------------------------------------------------
   // Activation frames
 
@@ -379,12 +382,13 @@
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space. The object_size is specified in words (not
-  // bytes). If the new space is exhausted control continues at the gc_required
-  // label. The allocated object is returned in result. If the flag
-  // tag_allocated_object is true the result is tagged as as a heap object. All
-  // registers are clobbered also when control continues at the gc_required
-  // label.
+  // Allocate an object in new space. The object_size is specified
+  // either in bytes or in words if the allocation flag SIZE_IN_WORDS
+  // is passed. If the new space is exhausted control continues at the
+  // gc_required label. The allocated object is returned in result. If
+  // the flag tag_allocated_object is true the result is tagged as
+  // a heap object. All registers are clobbered also when control
+  // continues at the gc_required label.
   void AllocateInNewSpace(int object_size,
                           Register result,
                           Register scratch1,
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index 143b839..0065057 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -2600,11 +2600,6 @@
     precision = kDoublePrecision;
   }
 
-  if (instr->Bit(7) != 0) {
-    // Raising exceptions for quiet NaNs are not supported.
-    UNIMPLEMENTED();  // Not used by V8.
-  }
-
   int d = instr->VFPDRegCode(precision);
   int m = 0;
   if (instr->Opc2Field() == 0x4) {
@@ -2618,6 +2613,13 @@
       dm_value = get_double_from_d_register(m);
     }
 
+    // Raise exceptions for quiet NaNs if necessary.
+    if (instr->Bit(7) == 1) {
+      if (isnan(dd_value)) {
+        inv_op_vfp_flag_ = true;
+      }
+    }
+
     Compute_FPSCR_Flags(dd_value, dm_value);
   } else {
     UNIMPLEMENTED();  // Not used by V8.
diff --git a/src/array.js b/src/array.js
index 0f1e969..56f5254 100644
--- a/src/array.js
+++ b/src/array.js
@@ -121,37 +121,49 @@
         if (IS_STRING(e)) return e;
         return convert(e);
       }
+      return '';
     }
 
     // Construct an array for the elements.
-    var elements;
+    var elements = new $Array(length);
     var elements_length = 0;
 
     // We pull the empty separator check outside the loop for speed!
     if (separator.length == 0) {
-      elements = new $Array(length);
       for (var i = 0; i < length; i++) {
         var e = array[i];
-        if (!IS_UNDEFINED(e) || (i in array)) {
+        if (!IS_UNDEFINED(e)) {
           if (!IS_STRING(e)) e = convert(e);
           elements[elements_length++] = e;
         }
       }
-    } else {
-      elements = new $Array(length << 1);
-      for (var i = 0; i < length; i++) {
-        var e = array[i];
-        if (i != 0) elements[elements_length++] = separator;
-        if (!IS_UNDEFINED(e) || (i in array)) {
-          if (!IS_STRING(e)) e = convert(e);
-          elements[elements_length++] = e;
-        }
+      elements.length = elements_length;
+      var result = %_FastAsciiArrayJoin(elements, '');
+      if (!IS_UNDEFINED(result)) return result;
+      return %StringBuilderConcat(elements, elements_length, '');
+    }
+    // Non-empty separator.
+    for (var i = 0; i < length; i++) {
+      var e = array[i];
+      if (!IS_UNDEFINED(e)) {
+        if (!IS_STRING(e)) e = convert(e);
+        elements[i] = e;
+      } else {
+        elements[i] = '';
       }
     }
-    elements.length = elements_length;
-    var result = %_FastAsciiArrayJoin(elements, "");
-    if (!IS_UNDEFINED(result)) return result;
-    return %StringBuilderConcat(elements, elements_length, '');
+    var result = %_FastAsciiArrayJoin(elements, separator);
+    if (!IS_UNDEFINED(result)) return result;
+
+    var length2 = (length << 1) - 1;
+    var j = length2;
+    var i = length;
+    elements[--j] = elements[--i];
+    while (i > 0) {
+      elements[--j] = separator;
+      elements[--j] = elements[--i];
+    }
+    return %StringBuilderConcat(elements, length2, '');
   } finally {
     // Make sure to pop the visited array no matter what happens.
     if (is_array) visited_arrays.pop();
@@ -160,7 +172,7 @@
 
 
 function ConvertToString(x) {
-  if (IS_STRING(x)) return x;
+  // Assumes x is a non-string.
   if (IS_NUMBER(x)) return %_NumberToString(x);
   if (IS_BOOLEAN(x)) return x ? 'true' : 'false';
   return (IS_NULL_OR_UNDEFINED(x)) ? '' : %ToString(%DefaultString(x));
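
The rewritten non-empty-separator path above no longer pushes a separator per element inside the conversion loop; instead it widens the elements array to 2 * length - 1 slots and fills it from the back, interleaving separators, before a single %StringBuilderConcat. The same interleaving, sketched in C++ purely for illustration:

    #include <iostream>
    #include <string>
    #include <vector>

    std::string Join(std::vector<std::string> elements,
                     const std::string& separator) {
      int length = static_cast<int>(elements.size());
      if (length == 0) return "";
      int length2 = 2 * length - 1;
      elements.resize(length2);  // extra slots for the separators
      int j = length2;
      int i = length;
      elements[--j] = elements[--i];
      while (i > 0) {
        elements[--j] = separator;
        elements[--j] = elements[--i];
      }
      std::string result;  // plays the role of %StringBuilderConcat
      for (const std::string& part : elements) result += part;
      return result;
    }

    int main() {
      std::cout << Join({"a", "b", "c"}, ", ") << std::endl;  // prints: a, b, c
      return 0;
    }
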
diff --git a/src/assembler.cc b/src/assembler.cc
index eeb8412..e8bcd91 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -647,6 +647,11 @@
 }
 
 
+ExternalReference ExternalReference::arguments_marker_location() {
+  return ExternalReference(Factory::arguments_marker().location());
+}
+
+
 ExternalReference ExternalReference::roots_address() {
   return ExternalReference(Heap::roots_address());
 }
diff --git a/src/assembler.h b/src/assembler.h
index b68ad38..0219de2 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -512,6 +512,9 @@
   // Static variable Factory::the_hole_value.location()
   static ExternalReference the_hole_value_location();
 
+  // Static variable Factory::arguments_marker.location()
+  static ExternalReference arguments_marker_location();
+
   // Static variable Heap::roots_address()
   static ExternalReference roots_address();
 
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 1b0d8b0..ba027e9 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -49,8 +49,10 @@
 void CodeStub::GenerateCode(MacroAssembler* masm) {
   // Update the static counter each time a new code stub is generated.
   Counters::code_stubs.Increment();
+
   // Nested stubs are not allowed for leafs.
-  masm->set_allow_stub_calls(AllowsStubCalls());
+  AllowStubCallsScope allow_scope(masm, AllowsStubCalls());
+
   // Generate the code for the stub.
   masm->set_generating_stub(true);
   Generate(masm);
@@ -197,4 +199,34 @@
 }
 
 
+const char* InstanceofStub::GetName() {
+  if (name_ != NULL) return name_;
+  const int kMaxNameLength = 100;
+  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  if (name_ == NULL) return "OOM";
+
+  const char* args = "";
+  if (HasArgsInRegisters()) {
+    args = "_REGS";
+  }
+
+  const char* inline_check = "";
+  if (HasCallSiteInlineCheck()) {
+    inline_check = "_INLINE";
+  }
+
+  const char* return_true_false_object = "";
+  if (ReturnTrueFalseObject()) {
+    return_true_false_object = "_TRUEFALSE";
+  }
+
+  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
+               "InstanceofStub%s%s%s",
+               args,
+               inline_check,
+               return_true_false_object);
+  return name_;
+}
+
+
 } }  // namespace v8::internal
diff --git a/src/code-stubs.h b/src/code-stubs.h
index b7804b7..76f29f0 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -34,7 +34,7 @@
 namespace internal {
 
 // List of code stubs used on all platforms. The order in this list is important
-// as only the stubs up to and including RecordWrite allows nested stub calls.
+// as only the stubs up to and including Instanceof allow nested stub calls.
 #define CODE_STUB_LIST_ALL_PLATFORMS(V)  \
   V(CallFunction)                        \
   V(GenericBinaryOp)                     \
@@ -48,7 +48,7 @@
   V(CompareIC)                           \
   V(MathPow)                             \
   V(TranscendentalCache)                 \
-  V(RecordWrite)                         \
+  V(Instanceof)                          \
   V(ConvertToDouble)                     \
   V(WriteInt32ToHeapNumber)              \
   V(IntegerMod)                          \
@@ -59,7 +59,6 @@
   V(GenericUnaryOp)                      \
   V(RevertToNumber)                      \
   V(ToBoolean)                           \
-  V(Instanceof)                          \
   V(CounterOp)                           \
   V(ArgumentsAccess)                     \
   V(RegExpExec)                          \
@@ -180,7 +179,7 @@
            MajorKeyBits::encode(MajorKey());
   }
 
-  bool AllowsStubCalls() { return MajorKey() <= RecordWrite; }
+  bool AllowsStubCalls() { return MajorKey() <= Instanceof; }
 
   class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {};
   class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {};
@@ -327,22 +326,38 @@
  public:
   enum Flags {
     kNoFlags = 0,
-    kArgsInRegisters = 1 << 0
+    kArgsInRegisters = 1 << 0,
+    kCallSiteInlineCheck = 1 << 1,
+    kReturnTrueFalseObject = 1 << 2
   };
 
-  explicit InstanceofStub(Flags flags) : flags_(flags) { }
+  explicit InstanceofStub(Flags flags) : flags_(flags), name_(NULL) { }
+
+  static Register left();
+  static Register right();
 
   void Generate(MacroAssembler* masm);
 
  private:
   Major MajorKey() { return Instanceof; }
-  int MinorKey() { return args_in_registers() ? 1 : 0; }
+  int MinorKey() { return static_cast<int>(flags_); }
 
-  bool args_in_registers() {
+  bool HasArgsInRegisters() const {
     return (flags_ & kArgsInRegisters) != 0;
   }
 
+  bool HasCallSiteInlineCheck() const {
+    return (flags_ & kCallSiteInlineCheck) != 0;
+  }
+
+  bool ReturnTrueFalseObject() const {
+    return (flags_ & kReturnTrueFalseObject) != 0;
+  }
+
+  const char* GetName();
+
   Flags flags_;
+  char* name_;
 };
 
 
@@ -707,6 +722,10 @@
 
   void Generate(MacroAssembler* masm);
 
+  static int ExtractArgcFromMinorKey(int minor_key) {
+    return ArgcBits::decode(minor_key);
+  }
+
  private:
   int argc_;
   InLoopFlag in_loop_;
@@ -738,11 +757,6 @@
   bool ReceiverMightBeValue() {
     return (flags_ & RECEIVER_MIGHT_BE_VALUE) != 0;
   }
-
- public:
-  static int ExtractArgcFromMinorKey(int minor_key) {
-    return ArgcBits::decode(minor_key);
-  }
 };
 
 
@@ -902,6 +916,24 @@
   DISALLOW_COPY_AND_ASSIGN(StringCharAtGenerator);
 };
 
+
+class AllowStubCallsScope {
+ public:
+  AllowStubCallsScope(MacroAssembler* masm, bool allow)
+       : masm_(masm), previous_allow_(masm->allow_stub_calls()) {
+    masm_->set_allow_stub_calls(allow);
+  }
+  ~AllowStubCallsScope() {
+    masm_->set_allow_stub_calls(previous_allow_);
+  }
+
+ private:
+  MacroAssembler* masm_;
+  bool previous_allow_;
+
+  DISALLOW_COPY_AND_ASSIGN(AllowStubCallsScope);
+};
+
 } }  // namespace v8::internal
 
 #endif  // V8_CODE_STUBS_H_
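
AllowStubCallsScope replaces the bare set_allow_stub_calls() calls (see the macro-assembler-arm.cc and code-stubs.cc hunks above) with a guard that restores the previous value when the scope ends. A standalone sketch of that RAII pattern using a stand-in Assembler type rather than V8's MacroAssembler:

    #include <cassert>

    struct Assembler {
      bool allow_stub_calls_ = false;
      bool allow_stub_calls() const { return allow_stub_calls_; }
      void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
    };

    class AllowStubCallsScope {
     public:
      AllowStubCallsScope(Assembler* masm, bool allow)
          : masm_(masm), previous_allow_(masm->allow_stub_calls()) {
        masm_->set_allow_stub_calls(allow);
      }
      ~AllowStubCallsScope() { masm_->set_allow_stub_calls(previous_allow_); }

     private:
      Assembler* masm_;
      bool previous_allow_;
    };

    int main() {
      Assembler masm;
      {
        AllowStubCallsScope scope(&masm, true);
        assert(masm.allow_stub_calls());
      }
      assert(!masm.allow_stub_calls());  // previous value restored on exit
      return 0;
    }
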
diff --git a/src/d8.js b/src/d8.js
index a758e09..b0edb70 100644
--- a/src/d8.js
+++ b/src/d8.js
@@ -110,17 +110,32 @@
 const kNoFrame = -1;
 Debug.State = {
   currentFrame: kNoFrame,
+  displaySourceStartLine: -1,
+  displaySourceEndLine: -1,
   currentSourceLine: -1
 }
 var trace_compile = false;  // Tracing all compile events?
+var trace_debug_json = false; // Tracing all debug json packets?
+var last_cmd_line = '';
+var repeat_cmd_line = '';
+var is_running = true;
+
+// Copied from debug-delay.js.  This is needed below:
+function ScriptTypeFlag(type) {
+  return (1 << type);
+}
 
 
 // Process a debugger JSON message into a display text and a running status.
 // This function returns an object with properties "text" and "running" holding
 // this information.
 function DebugMessageDetails(message) {
+  if (trace_debug_json) {
+    print("received: '" + message + "'");
+  }
   // Convert the JSON string to an object.
   var response = new ProtocolPackage(message);
+  is_running = response.running();
 
   if (response.type() == 'event') {
     return DebugEventDetails(response);
@@ -161,6 +176,8 @@
       result += '\n';
       result += SourceUnderline(body.sourceLineText, body.sourceColumn);
       Debug.State.currentSourceLine = body.sourceLine;
+      Debug.State.displaySourceStartLine = -1;
+      Debug.State.displaySourceEndLine = -1;
       Debug.State.currentFrame = 0;
       details.text = result;
       break;
@@ -180,10 +197,14 @@
         result += '\n';
         result += SourceUnderline(body.sourceLineText, body.sourceColumn);
         Debug.State.currentSourceLine = body.sourceLine;
+        Debug.State.displaySourceStartLine = -1;
+        Debug.State.displaySourceEndLine = -1;
         Debug.State.currentFrame = 0;
       } else {
         result += ' (empty stack)';
         Debug.State.currentSourceLine = -1;
+        Debug.State.displaySourceStartLine = -1;
+        Debug.State.displaySourceEndLine = -1;
         Debug.State.currentFrame = kNoFrame;
       }
       details.text = result;
@@ -202,6 +223,10 @@
       details.text = result;
       break;
 
+    case 'scriptCollected':
+      details.text = result;
+      break;
+
     default:
       details.text = 'Unknown debug event ' + response.event();
   }
@@ -254,7 +279,11 @@
 
 // Converts a text command to a JSON request.
 function DebugCommandToJSONRequest(cmd_line) {
-  return new DebugRequest(cmd_line).JSONRequest();
+  var result = new DebugRequest(cmd_line).JSONRequest();
+  if (trace_debug_json && result) {
+    print("sending: '" + result + "'");
+  }
+  return result;
 };
 
 
@@ -266,6 +295,20 @@
     return;
   }
 
+  // Check for a simple carriage return to repeat the last command:
+  var is_repeating = false;
+  if (cmd_line == '\n') {
+    if (is_running) {
+      cmd_line = 'break'; // Not in debugger mode, break with a frame request.
+    } else {
+      cmd_line = repeat_cmd_line; // use command to repeat.
+      is_repeating = true;
+    }
+  }
+  if (!is_running) { // Only save the command if in debugger mode.
+    repeat_cmd_line = cmd_line;   // save last command.
+  }
+
   // Trim string for leading and trailing whitespace.
   cmd_line = cmd_line.replace(/^\s+|\s+$/g, '');
 
@@ -281,6 +324,13 @@
     args = cmd_line.slice(pos).replace(/^\s+|\s+$/g, '');
   }
 
+  if ((cmd === undefined) || !cmd) {
+    this.request_ = void 0;
+    return;
+  }
+
+  last_cmd = cmd;
+
   // Switch on command.
   switch (cmd) {
     case 'continue':
@@ -290,7 +340,22 @@
 
     case 'step':
     case 's':
-      this.request_ = this.stepCommandToJSONRequest_(args);
+      this.request_ = this.stepCommandToJSONRequest_(args, 'in');
+      break;
+
+    case 'stepi':
+    case 'si':
+      this.request_ = this.stepCommandToJSONRequest_(args, 'min');
+      break;
+
+    case 'next':
+    case 'n':
+      this.request_ = this.stepCommandToJSONRequest_(args, 'next');
+      break;
+
+    case 'finish':
+    case 'fin':
+      this.request_ = this.stepCommandToJSONRequest_(args, 'out');
       break;
 
     case 'backtrace':
@@ -311,6 +376,26 @@
       this.request_ = this.scopeCommandToJSONRequest_(args);
       break;
 
+    case 'disconnect':
+    case 'exit':
+    case 'quit':
+      this.request_ = this.disconnectCommandToJSONRequest_(args);
+      break;
+
+    case 'up':
+      this.request_ =
+          this.frameCommandToJSONRequest_('' +
+                                          (Debug.State.currentFrame + 1));
+      break;
+      
+    case 'down':
+    case 'do':
+      this.request_ =
+          this.frameCommandToJSONRequest_('' +
+                                          (Debug.State.currentFrame - 1));
+      break;
+      
+    case 'set':
     case 'print':
     case 'p':
       this.request_ = this.printCommandToJSONRequest_(args);
@@ -328,11 +413,17 @@
       this.request_ = this.instancesCommandToJSONRequest_(args);
       break;
 
+    case 'list':
+    case 'l':
+      this.request_ = this.listCommandToJSONRequest_(args);
+      break;
     case 'source':
       this.request_ = this.sourceCommandToJSONRequest_(args);
       break;
 
     case 'scripts':
+    case 'script':
+    case 'scr':
       this.request_ = this.scriptsCommandToJSONRequest_(args);
       break;
 
@@ -347,6 +438,8 @@
       break;
 
     case 'clear':
+    case 'delete':
+    case 'd':
       this.request_ = this.clearCommandToJSONRequest_(args);
       break;
 
@@ -354,7 +447,42 @@
       this.request_ = this.threadsCommandToJSONRequest_(args);
       break;
 
+    case 'cond':
+      this.request_ = this.changeBreakpointCommandToJSONRequest_(args, 'cond');
+      break;
+
+    case 'enable':
+    case 'en':
+      this.request_ =
+          this.changeBreakpointCommandToJSONRequest_(args, 'enable');
+      break;
+
+    case 'disable':
+    case 'dis':
+      this.request_ =
+          this.changeBreakpointCommandToJSONRequest_(args, 'disable');
+      break;
+
+    case 'ignore':
+      this.request_ =
+          this.changeBreakpointCommandToJSONRequest_(args, 'ignore');
+      break;
+
+    case 'info':
+    case 'inf':
+      this.request_ = this.infoCommandToJSONRequest_(args);
+      break;
+
+    case 'flags':
+      this.request_ = this.v8FlagsToJSONRequest_(args);
+      break;
+
+    case 'gc':
+      this.request_ = this.gcToJSONRequest_(args);
+      break;
+
     case 'trace':
+    case 'tr':
       // Return undefined to indicate command handled internally (no JSON).
       this.request_ = void 0;
       this.traceCommand_(args);
@@ -370,8 +498,6 @@
     default:
       throw new Error('Unknown command "' + cmd + '"');
   }
-
-  last_cmd = cmd;
 }
 
 DebugRequest.prototype.JSONRequest = function() {
@@ -465,59 +591,73 @@
 
 
 // Create a JSON request for the step command.
-DebugRequest.prototype.stepCommandToJSONRequest_ = function(args) {
+DebugRequest.prototype.stepCommandToJSONRequest_ = function(args, type) {
   // Requesting a step is through the continue command with additional
   // arguments.
   var request = this.createRequest('continue');
   request.arguments = {};
 
   // Process arguments if any.
+
+  // Only process args if the command is 'step' which is indicated by type being
+  // set to 'in'.  For all other commands, ignore the args.
   if (args && args.length > 0) {
-    args = args.split(/\s*[ ]+\s*/g);
+    args = args.split(/\s+/g);
 
     if (args.length > 2) {
       throw new Error('Invalid step arguments.');
     }
 
     if (args.length > 0) {
-      // Get step count argument if any.
-      if (args.length == 2) {
-        var stepcount = parseInt(args[1]);
-        if (isNaN(stepcount) || stepcount <= 0) {
-          throw new Error('Invalid step count argument "' + args[0] + '".');
+      // Check if we have a gdb style step command.  If so, the 1st arg would
+      // be the step count.  If it's not a number, then assume that we're
+      // parsing for the legacy v8 step command.
+      var stepcount = Number(args[0]);
+      if (isNaN(stepcount)) {
+        // No step count at arg 1.  Process as legacy d8 step command:
+        if (args.length == 2) {
+          var stepcount = parseInt(args[1]);
+          if (isNaN(stepcount) || stepcount <= 0) {
+            throw new Error('Invalid step count argument "' + args[0] + '".');
+          }
+          request.arguments.stepcount = stepcount;
         }
+
+        // Get the step action.
+        switch (args[0]) {
+          case 'in':
+          case 'i':
+            request.arguments.stepaction = 'in';
+            break;
+
+          case 'min':
+          case 'm':
+            request.arguments.stepaction = 'min';
+            break;
+
+          case 'next':
+          case 'n':
+            request.arguments.stepaction = 'next';
+            break;
+
+          case 'out':
+          case 'o':
+            request.arguments.stepaction = 'out';
+            break;
+
+          default:
+            throw new Error('Invalid step argument "' + args[0] + '".');
+        }
+
+      } else {
+        // gdb style step commands:
+        request.arguments.stepaction = type;
         request.arguments.stepcount = stepcount;
       }
-
-      // Get the step action.
-      switch (args[0]) {
-        case 'in':
-        case 'i':
-          request.arguments.stepaction = 'in';
-          break;
-
-        case 'min':
-        case 'm':
-          request.arguments.stepaction = 'min';
-          break;
-
-        case 'next':
-        case 'n':
-          request.arguments.stepaction = 'next';
-          break;
-
-        case 'out':
-        case 'o':
-          request.arguments.stepaction = 'out';
-          break;
-
-        default:
-          throw new Error('Invalid step argument "' + args[0] + '".');
-      }
     }
   } else {
-    // Default is step next.
-    request.arguments.stepaction = 'next';
+    // Default is step of the specified type.
+    request.arguments.stepaction = type;
   }
 
   return request.toJSONProtocol();
@@ -648,6 +788,41 @@
 };
 
 
+// Create a JSON request for the list command.
+DebugRequest.prototype.listCommandToJSONRequest_ = function(args) {
+
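+  // For example (illustrative): 'list' shows the next ten lines, 'list -'
+  // shows the previous ten, and 'list 5,14' shows lines 5 through 14.
+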
+  // Default is ten lines starting five lines before the current location.
+  if (Debug.State.displaySourceEndLine == -1) {
+    // If we list forwards, we will start listing after the last source end
+    // line.  Set it to start from 5 lines before the current location.
+    Debug.State.displaySourceEndLine = Debug.State.currentSourceLine - 5;
+    // If we list backwards, we will start listing backwards from the last
+    // source start line.  Set it to one line past the current location so
+    // that a backwards listing ends at the current line.
+    Debug.State.displaySourceStartLine = Debug.State.currentSourceLine + 1;
+  }
+
+  var from = Debug.State.displaySourceEndLine + 1;
+  var lines = 10;
+
+  // Parse the arguments.
+  args = args.split(/\s*,\s*/g);
+  if (args == '') {
+    // No arguments: use the default range computed above.
+  } else if ((args.length == 1) && (args[0] == '-')) {
+    from = Debug.State.displaySourceStartLine - lines;
+  } else if (args.length == 2) {
+    from = parseInt(args[0]);
+    lines = parseInt(args[1]) - from + 1; // inclusive of the ending line.
+  } else {
+    throw new Error('Invalid list arguments.');
+  }
+  Debug.State.displaySourceStartLine = from;
+  Debug.State.displaySourceEndLine = from + lines - 1;
+  var sourceArgs = '' + from + ' ' + lines;
+  return this.sourceCommandToJSONRequest_(sourceArgs);
+};
+
+
 // Create a JSON request for the source command.
 DebugRequest.prototype.sourceCommandToJSONRequest_ = function(args) {
   // Build a evaluate request from the text command.
@@ -709,7 +884,10 @@
         break;
 
       default:
-        throw new Error('Invalid argument "' + args[0] + '".');
+        // If the arg is not one of the known ones above, then it must be a
+        // filter used for filtering the results:
+        request.arguments.filter = args[0];
+        break;
     }
   }
 
@@ -731,6 +909,8 @@
 
     var request = this.createRequest('setbreakpoint');
 
+    // Break the args into target spec and condition if appropriate.
+
     // Check for breakpoint condition.
     pos = args.indexOf(' ');
     if (pos > 0) {
@@ -801,6 +981,178 @@
 };
 
 
+// Create a JSON request for the change breakpoint command.
+DebugRequest.prototype.changeBreakpointCommandToJSONRequest_ =
+    function(args, command) {
+
+  var request;
+
+  // Check for exception breaks first:
+  //   en[able] exc[eptions] [all|unc[aught]]
+  //   en[able] [all|unc[aught]] exc[eptions]
+  //   dis[able] exc[eptions] [all|unc[aught]]
+  //   dis[able] [all|unc[aught]] exc[eptions]
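+  //
+  // For example (illustrative), 'en exc unc' enables breaking on uncaught
+  // exceptions, and 'dis exc' turns breaking on all exceptions off.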
+  if ((command == 'enable' || command == 'disable') &&
+      args && args.length > 1) {
+    var nextPos = args.indexOf(' ');
+    var arg1 = (nextPos > 0) ? args.substring(0, nextPos) : args;
+    var excType = null;
+
+    // Check for:
+    //   en[able] exc[eptions] [all|unc[aught]]
+    //   dis[able] exc[eptions] [all|unc[aught]]
+    if (arg1 == 'exc' || arg1 == 'exception' || arg1 == 'exceptions') {
+
+      var arg2 = (nextPos > 0) ?
+          args.substring(nextPos + 1, args.length) : 'all';
+      if (!arg2) {
+        arg2 = 'all';  // If unspecified, set for all.
+      } else if (arg2 == 'unc') {  // Check for the shortcut.
+        arg2 = 'uncaught';
+      }
+      excType = arg2;
+
+    // Check for:
+    //   en[able] [all|unc[aught]] exc[eptions]
+    //   dis[able] [all|unc[aught]] exc[eptions]
+    } else if (arg1 == 'all' || arg1 == 'unc' || arg1 == 'uncaught') {
+
+      var arg2 = (nextPos > 0) ?
+          args.substring(nextPos + 1, args.length) : null;
+      if (arg2 == 'exc' || arg2 == 'exception' || arg2 == 'exceptions') {
+        excType = arg1;
+        if (excType == 'unc') {
+          excType = 'uncaught';
+        }
+      }
+    }
+
+    // If we matched one of the command formats, then excType will be non-null:
+    if (excType) {
+      // Build a setexceptionbreak request.
+      request = this.createRequest('setexceptionbreak');
+
+      request.arguments = {};
+      request.arguments.type = excType;
+      request.arguments.enabled = (command == 'enable');
+
+      return request.toJSONProtocol();
+    }
+  }
+
+  // Build a changebreakpoint request from the text command.
+  request = this.createRequest('changebreakpoint');
+
+  // Process arguments if any.
+  if (args && args.length > 0) {
+    request.arguments = {};
+    var pos = args.indexOf(' ');
+    var breakpointArg = args;
+    var otherArgs;
+    if (pos > 0) {
+      breakpointArg = args.substring(0, pos);
+      otherArgs = args.substring(pos + 1, args.length);
+    }
+
+    request.arguments.breakpoint = parseInt(breakpointArg);
+
+    switch (command) {
+      case 'cond':
+        request.arguments.condition = otherArgs ? otherArgs : null;
+        break;
+      case 'enable':
+        request.arguments.enabled = true;
+        break;
+      case 'disable':
+        request.arguments.enabled = false;
+        break;
+      case 'ignore':
+        request.arguments.ignoreCount = parseInt(otherArgs);
+        break;
+      default:
+        throw new Error('Invalid arguments.');
+    }
+  } else {
+    throw new Error('Invalid arguments.');
+  }
+
+  return request.toJSONProtocol();
+};
+
+
+// Create a JSON request for the disconnect command.
+DebugRequest.prototype.disconnectCommandToJSONRequest_ = function(args) {
+  var request;
+  request = this.createRequest('disconnect');
+  return request.toJSONProtocol();
+};
+
+
+// Create a JSON request for the info command.
+DebugRequest.prototype.infoCommandToJSONRequest_ = function(args) {
+  var request;
+  if (args && (args == 'break' || args == 'br')) {
+    // Build a listbreakpoints request.
+    request = this.createRequest('listbreakpoints');
+    last_cmd = 'info break';
+  } else if (args && (args == 'locals' || args == 'lo')) {
+    // Build a frame request; the locals are read from the frame response.
+    request = this.createRequest('frame');
+    last_cmd = 'info locals';
+  } else if (args && (args == 'args' || args == 'ar')) {
+    // Build a frame request; the arguments are read from the frame response.
+    request = this.createRequest('frame');
+    last_cmd = 'info args';
+  } else {
+    throw new Error('Invalid info arguments.');
+  }
+
+  return request.toJSONProtocol();
+};
+
+
+DebugRequest.prototype.v8FlagsToJSONRequest_ = function(args) {
+  var request;
+  request = this.createRequest('v8flags');
+  request.arguments = {};
+  request.arguments.flags = args;
+  return request.toJSONProtocol();
+};
+
+
+DebugRequest.prototype.gcToJSONRequest_ = function(args) {
+  var request;
+  if (!args) {
+    args = 'all';
+  }
+  args = args.split(/\s+/g);
+  var cmd = args[0];
+
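+  // 'young' and 'old' are accepted as aliases for 'quick' and 'full'
+  // respectively; the switch below maps them before building the request.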
+  switch (cmd) {
+    case 'all':
+    case 'quick':
+    case 'full':
+    case 'young':
+    case 'old':
+    case 'compact':
+    case 'sweep':
+    case 'scavenge': {
+      if (cmd == 'young') {
+        cmd = 'quick';
+      } else if (cmd == 'old') {
+        cmd = 'full';
+      }
+
+      request = this.createRequest('gc');
+      request.arguments = {};
+      request.arguments.type = cmd;
+      break;
+    }
+    // Unrecognized gc types are reported via the default case below.
+    default:
+      throw new Error('Invalid gc argument "' + cmd + '".');
+  }
+  return request.toJSONProtocol();
+};
+
+
 // Create a JSON request for the threads command.
 DebugRequest.prototype.threadsCommandToJSONRequest_ = function(args) {
   // Build a threads request from the text command.
@@ -816,6 +1168,10 @@
     if (args == 'compile') {
       trace_compile = !trace_compile;
       print('Tracing of compiled scripts ' + (trace_compile ? 'on' : 'off'));
+    } else if (args === 'debug json' || args === 'json' || args === 'packets') {
+      trace_debug_json = !trace_debug_json;
+      print('Tracing of debug json packets ' +
+            (trace_debug_json ? 'on' : 'off'));
     } else {
       throw new Error('Invalid trace arguments.');
     }
@@ -831,24 +1187,63 @@
     print('warning: arguments to \'help\' are ignored');
   }
 
-  print('break');
-  print('break location [condition]');
-  print('  break on named function: location is a function name');
-  print('  break on function: location is #<id>#');
-  print('  break on script position: location is name:line[:column]');
-  print('clear <breakpoint #>');
-  print('backtrace [n] | [-n] | [from to]');
-  print('frame <frame #>');
+  print('Note: <> denotes symbolic values to be replaced with real values.');
+  print('Note: [] denotes optional parts of commands, or optional options / arguments.');
+  print('      e.g. d[elete] - you get the same command if you type d or delete.');
+  print('');
+  print('[break] - break as soon as possible');
+  print('b[reak] location [condition]');
+  print('        - break on named function: location is a function name');
+  print('        - break on function: location is #<id>#');
+  print('        - break on script position: location is name:line[:column]');
+  print('');
+  print('clear <breakpoint #>       - deletes the specified user defined breakpoint');
+  print('d[elete]  <breakpoint #>   - deletes the specified user defined breakpoint');
+  print('dis[able] <breakpoint #>   - disables the specified user defined breakpoint');
+  print('dis[able] exc[eptions] [[all] | unc[aught]]');
+  print('                           - disables breaking on exceptions');
+  print('en[able]  <breakpoint #>   - enables the specified user defined breakpoint');
+  print('en[able]  exc[eptions] [[all] | unc[aught]]');
+  print('                           - enables breaking on exceptions');
+  print('');
+  print('b[ack]t[race] [n] | [-n] | [from to]');
+  print('                           - prints the stack back trace');
+  print('f[rame]                    - prints info about the current frame context');
+  print('f[rame] <frame #>          - set context to specified frame #');
   print('scopes');
   print('scope <scope #>');
+  print('');
+  print('up                         - set context to caller of current frame');
+  print('do[wn]                     - set context to callee of current frame');
+  print('inf[o] br[eak]             - prints info about breakpoints in use');
+  print('inf[o] ar[gs]              - prints info about arguments of the current function');
+  print('inf[o] lo[cals]            - prints info about locals in the current function');
+  print('inf[o] liveobjectlist|lol  - same as \'lol info\'');
+  print('');
   print('step [in | next | out| min [step count]]');
-  print('print <expression>');
-  print('dir <expression>');
+  print('c[ontinue]                 - continue executing after a breakpoint');
+  print('s[tep]   [<N>]             - step into the next N callees (default N is 1)');
+  print('s[tep]i  [<N>]             - step into the next N callees (default N is 1)');
+  print('n[ext]   [<N>]             - step over the next N callees (default N is 1)');
+  print('fin[ish] [<N>]             - step out of N frames (default N is 1)');
+  print('');
+  print('p[rint] <expression>       - prints the result of the specified expression');
+  print('dir <expression>           - prints the object structure of the result');
+  print('set <var> = <expression>   - executes the specified statement');
+  print('');
+  print('l[ist]                     - list the source code around the current pc');
+  print('l[ist] [- | <start>,<end>] - list the specified range of source code');
   print('source [from line [num lines]]');
-  print('scripts');
-  print('continue');
+  print('scr[ipts] [native|extensions|all]');
+  print('scr[ipts] [<filter text>]  - list scripts with the specified text in their name');
+  print('');
+  print('gc                         - runs the garbage collector');
+  print('');
   print('trace compile');
-  print('help');
+  // hidden command: trace debug json - toggles tracing of debug json packets
+  print('');
+  print('disconnect|exit|quit       - disconnects and quits the debugger');
+  print('help                       - prints this help information');
 }
 
 
@@ -930,6 +1325,27 @@
 }
 
 
+function refObjectToString_(protocolPackage, handle) {
+  var value = protocolPackage.lookup(handle);
+  var result = '';
+  if (value.isString()) {
+    result = '"' + value.value() + '"';
+  } else if (value.isPrimitive()) {
+    result = value.valueString();
+  } else if (value.isObject()) {
+    result += formatObject_(value, true);
+  }
+  return result;
+}
+
+
+// Rounds number 'num' to 'length' decimal places.
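+// For example, roundNumber(3.14159, 2) returns 3.14 (illustrative).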
+function roundNumber(num, length) {
+  var factor = Math.pow(10, length);
+  return Math.round(num * factor) / factor;
+}
+
+
 // Convert a JSON response to text for display in a text based debugger.
 function DebugResponseDetails(response) {
   details = {text:'', running:false}
@@ -962,6 +1378,11 @@
         details.text = result;
         break;
 
+      case 'changebreakpoint':
+        result = 'successfully changed breakpoint';
+        details.text = result;
+        break;
+
       case 'listbreakpoints':
         result = 'breakpoints: (' + body.breakpoints.length + ')';
         for (var i = 0; i < body.breakpoints.length; i++) {
@@ -974,9 +1395,9 @@
           if (breakpoint.script_name) {
               result += ' script_name=' + breakpoint.script_name;
           }
-          result += ' line=' + breakpoint.line;
+          result += ' line=' + (breakpoint.line + 1);
           if (breakpoint.column != null) {
-            result += ' column=' + breakpoint.column;
+            result += ' column=' + (breakpoint.column + 1);
           }
           if (breakpoint.groupId) {
             result += ' groupId=' + breakpoint.groupId;
@@ -992,6 +1413,24 @@
           }
           result += ' hit_count=' + breakpoint.hit_count;
         }
+        if (body.breakpoints.length === 0) {
+          result = "No user defined breakpoints\n";
+        } else {
+          result += '\n';
+        }
+        if (body.breakOnExceptions) {
+          result += '* breaking on ALL exceptions is enabled\n';
+        } else if (body.breakOnUncaughtExceptions) {
+          result += '* breaking on UNCAUGHT exceptions is enabled\n';
+        } else {
+          result += '* all exception breakpoints are disabled\n';
+        }
+        details.text = result;
+        break;
+
+      case 'setexceptionbreak':
+        result = 'Break on ' + body.type + ' exceptions: ';
+        result += body.enabled ? 'enabled' : 'disabled';
         details.text = result;
         break;
 
@@ -1010,10 +1449,39 @@
         break;
 
       case 'frame':
-        details.text = SourceUnderline(body.sourceLineText,
-                                       body.column);
-        Debug.State.currentSourceLine = body.line;
-        Debug.State.currentFrame = body.index;
+        if (last_cmd === 'info locals') {
+          var locals = body.locals;
+          if (locals.length === 0) {
+            result = 'No locals';
+          } else {
+            for (var i = 0; i < locals.length; i++) {
+              var local = locals[i];
+              result += local.name + ' = ';
+              result += refObjectToString_(response, local.value.ref);
+              result += '\n';
+            }
+          }
+        } else if (last_cmd === 'info args') {
+          var args = body.arguments;
+          if (args.length === 0) {
+            result = 'No arguments';
+          } else {
+            for (var i = 0; i < args.length; i++) {
+              var arg = args[i];
+              result += arg.name + ' = ';
+              result += refObjectToString_(response, arg.value.ref);
+              result += '\n';
+            }
+          }
+        } else {
+          result = SourceUnderline(body.sourceLineText,
+                                   body.column);
+          Debug.State.currentSourceLine = body.line;
+          Debug.State.currentFrame = body.index;
+          Debug.State.displaySourceStartLine = -1;
+          Debug.State.displaySourceEndLine = -1;
+        }
+        details.text = result;
         break;
 
       case 'scopes':
@@ -1132,7 +1600,9 @@
           if (body[i].name) {
             result += body[i].name;
           } else {
-            if (body[i].compilationType == Debug.ScriptCompilationType.Eval) {
+            if (body[i].compilationType == Debug.ScriptCompilationType.Eval &&
+                body[i].evalFromScript) {
               result += 'eval from ';
               var script_value = response.lookup(body[i].evalFromScript.ref);
               result += ' ' + script_value.field('name');
@@ -1162,6 +1632,9 @@
           result += sourceStart;
           result += ']';
         }
+        if (body.length == 0) {
+          result = "no matching scripts found";
+        }
         details.text = result;
         break;
 
@@ -1181,6 +1654,23 @@
         details.text = "(running)";
         break;
 
+      case 'v8flags':
+        details.text = "flags set";
+        break;
+
+      case 'gc':
+        details.text = "GC " + body.before + " => " + body.after;
+        if (body.after > (1024*1024)) {
+          details.text +=
+              " (" + roundNumber(body.before/(1024*1024), 1) + "M => " +
+                     roundNumber(body.after/(1024*1024), 1) + "M)";
+        } else if (body.after > 1024) {
+          details.text +=
+              " (" + roundNumber(body.before/1024, 1) + "K => " +
+                     roundNumber(body.after/1024, 1) + "K)";
+        }
+        break;
+
       default:
         details.text =
             'Response for unknown command \'' + response.command() + '\'' +
@@ -1467,6 +1957,11 @@
 }
 
 
+ProtocolValue.prototype.valueString = function() {
+  return this.value_.text;
+}
+
+
 function ProtocolReference(handle) {
   this.handle_ = handle;
 }
@@ -1613,7 +2108,9 @@
       var property_value_json;
       switch (typeof property_value) {
         case 'object':
-          if (typeof property_value.toJSONProtocol == 'function') {
+          if (property_value === null) {
+            property_value_json = 'null';
+          } else if (typeof property_value.toJSONProtocol == 'function') {
             property_value_json = property_value.toJSONProtocol(true)
           } else if (property_value.constructor.name == 'Array'){
             property_value_json = SimpleArrayToJSON_(property_value);
diff --git a/src/debug-agent.cc b/src/debug-agent.cc
index e2d9304..6901079 100644
--- a/src/debug-agent.cc
+++ b/src/debug-agent.cc
@@ -27,9 +27,11 @@
 
 
 #include "v8.h"
+#include "debug.h"
 #include "debug-agent.h"
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
+
 namespace v8 {
 namespace internal {
 
@@ -167,22 +169,33 @@
   while (true) {
     // Read data from the debugger front end.
     SmartPointer<char> message = DebuggerAgentUtil::ReceiveMessage(client_);
-    if (*message == NULL) {
-      // Session is closed.
-      agent_->OnSessionClosed(this);
-      return;
+
+    const char* msg = *message;
+    bool is_closing_session = (msg == NULL);
+
+    if (msg == NULL) {
+      // If we lost the connection, then simulate a disconnect msg:
+      msg = "{\"seq\":1,\"type\":\"request\",\"command\":\"disconnect\"}";
+
+    } else {
+      // Check if we're getting a disconnect request:
+      const char* disconnectRequestStr =
+          "\"type\":\"request\",\"command\":\"disconnect\"}";
+      const char* result = strstr(msg, disconnectRequestStr);
+      if (result != NULL) {
+        is_closing_session = true;
+      }
     }
 
     // Convert UTF-8 to UTF-16.
-    unibrow::Utf8InputBuffer<> buf(*message,
-                                   StrLength(*message));
+    unibrow::Utf8InputBuffer<> buf(msg, StrLength(msg));
     int len = 0;
     while (buf.has_more()) {
       buf.GetNext();
       len++;
     }
     ScopedVector<int16_t> temp(len + 1);
-    buf.Reset(*message, StrLength(*message));
+    buf.Reset(msg, StrLength(msg));
     for (int i = 0; i < len; i++) {
       temp[i] = buf.GetNext();
     }
@@ -190,6 +203,12 @@
     // Send the request received to the debugger.
     v8::Debug::SendCommand(reinterpret_cast<const uint16_t *>(temp.start()),
                            len);
+
+    if (is_closing_session) {
+      // Session is closed.
+      agent_->OnSessionClosed(this);
+      return;
+    }
   }
 }
 
diff --git a/src/debug-debugger.js b/src/debug-debugger.js
index 090c661..dcff07c 100644
--- a/src/debug-debugger.js
+++ b/src/debug-debugger.js
@@ -654,13 +654,19 @@
 
 Debug.enableBreakPoint = function(break_point_number) {
   var break_point = this.findBreakPoint(break_point_number, false);
-  break_point.enable();
+  // Only enable if the breakpoint hasn't been deleted:
+  if (break_point) {
+    break_point.enable();
+  }
 };
 
 
 Debug.disableBreakPoint = function(break_point_number) {
   var break_point = this.findBreakPoint(break_point_number, false);
-  break_point.disable();
+  // Only disable if the breakpoint hasn't been deleted:
+  if (break_point) {
+    break_point.disable();
+  }
 };
 
 
@@ -701,6 +707,17 @@
 };
 
 
+Debug.disableAllBreakPoints = function() {
+  // Disable all user defined breakpoints:
+  for (var i = 1; i < next_break_point_number; i++) {
+    Debug.disableBreakPoint(i);
+  }
+  // Disable all exception breakpoints:
+  %ChangeBreakOnException(Debug.ExceptionBreak.Caught, false);
+  %ChangeBreakOnException(Debug.ExceptionBreak.Uncaught, false);
+};
+
+
 Debug.findScriptBreakPoint = function(break_point_number, remove) {
   var script_break_point;
   for (var i = 0; i < script_break_points.length; i++) {
@@ -1341,6 +1358,10 @@
         this.clearBreakPointRequest_(request, response);
       } else if (request.command == 'clearbreakpointgroup') {
         this.clearBreakPointGroupRequest_(request, response);
+      } else if (request.command == 'disconnect') {
+        this.disconnectRequest_(request, response);
+      } else if (request.command == 'setexceptionbreak') {
+        this.setExceptionBreakRequest_(request, response);
       } else if (request.command == 'listbreakpoints') {
         this.listBreakpointsRequest_(request, response);
       } else if (request.command == 'backtrace') {
@@ -1373,6 +1394,13 @@
         this.changeLiveRequest_(request, response);
       } else if (request.command == 'flags') {
         this.debuggerFlagsRequest_(request, response);
+      } else if (request.command == 'v8flags') {
+        this.v8FlagsRequest_(request, response);
+
+      // GC tools:
+      } else if (request.command == 'gc') {
+        this.gcRequest_(request, response);
+
       } else {
         throw new Error('Unknown command "' + request.command + '" in request');
       }
@@ -1690,7 +1718,63 @@
     array.push(description);
   }
 
-  response.body = { breakpoints: array }
+  response.body = {
+    breakpoints: array,
+    breakOnExceptions: Debug.isBreakOnException(),
+    breakOnUncaughtExceptions: Debug.isBreakOnUncaughtException()
+  }
+}
+
+
+DebugCommandProcessor.prototype.disconnectRequest_ =
+    function(request, response) {
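+  // Drop all breakpoints and resume execution so the debuggee keeps running
+  // after the front end goes away.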
+  Debug.disableAllBreakPoints();
+  this.continueRequest_(request, response);
+}
+
+
+DebugCommandProcessor.prototype.setExceptionBreakRequest_ =
+    function(request, response) {
+  // Check for legal request.
+  if (!request.arguments) {
+    response.failed('Missing arguments');
+    return;
+  }
+
+  // Pull out and check the 'type' argument:
+  var type = request.arguments.type;
+  if (!type) {
+    response.failed('Missing argument "type"');
+    return;
+  }
+
+  // Initialize the default value of enabled by toggling the current state:
+  var enabled;
+  if (type == 'all') {
+    enabled = !Debug.isBreakOnException();
+  } else if (type == 'uncaught') {
+    enabled = !Debug.isBreakOnUncaughtException();
+  }
+
+  // Pull out and check the 'enabled' argument if present:
+  if (!IS_UNDEFINED(request.arguments.enabled)) {
+    enabled = request.arguments.enabled;
+    if ((enabled != true) && (enabled != false)) {
+      response.failed('Illegal value for "enabled":"' + enabled + '"');
+      return;
+    }
+  }
+
+  // Now set the exception break state:
+  if (type == 'all') {
+    %ChangeBreakOnException(Debug.ExceptionBreak.Caught, enabled);
+  } else if (type == 'uncaught') {
+    %ChangeBreakOnException(Debug.ExceptionBreak.Uncaught, enabled);
+  } else {
+    response.failed('Unknown "type":"' + type + '"');
+  }
+
+  // Report the exception break type and its new state in the response.
+  response.body = { 'type': type, 'enabled': enabled };
 }
 
 
@@ -2047,6 +2131,16 @@
         idsToInclude[ids[i]] = true;
       }
     }
+
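+    // A filter can be a numeric script id or a substring of the script name;
+    // both interpretations are tried when matching below.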
+    var filterStr = null;
+    var filterNum = null;
+    if (!IS_UNDEFINED(request.arguments.filter)) {
+      var num = %ToNumber(request.arguments.filter);
+      if (!isNaN(num)) {
+        filterNum = num;
+      }
+      filterStr = request.arguments.filter;
+    }
   }
 
   // Collect all scripts in the heap.
@@ -2058,6 +2152,21 @@
     if (idsToInclude && !idsToInclude[scripts[i].id]) {
       continue;
     }
+    if (filterStr || filterNum) {
+      var script = scripts[i];
+      var found = false;
+      if (filterNum && !found) {
+        if (script.id && script.id === filterNum) {
+          found = true;
+        }
+      }
+      if (filterStr && !found) {
+        if (script.name && script.name.indexOf(filterStr) >= 0) {
+          found = true;
+        }
+      }
+      if (!found) continue;
+    }
     if (types & ScriptTypeFlag(scripts[i].type)) {
       response.body.push(MakeMirror(scripts[i]));
     }
@@ -2196,6 +2305,27 @@
 }
 
 
+DebugCommandProcessor.prototype.v8FlagsRequest_ = function(request, response) {
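+  // The flags string is handed unchanged to V8's flag parser
+  // (e.g. '--trace_gc'; example flag for illustration).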
+  var flags = request.arguments.flags;
+  if (!flags) flags = '';
+  %SetFlags(flags);
+};
+
+
+DebugCommandProcessor.prototype.gcRequest_ = function(request, response) {
+  var type = request.arguments.type;
+  if (!type) type = 'all';
+
+  var before = %GetHeapUsage();
+  %CollectGarbage(type);
+  var after = %GetHeapUsage();
+
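+  // Report the heap usage (in bytes) before and after the collection; the
+  // d8 front end formats large values as K/M when printing.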
+  response.body = { "before": before, "after": after };
+};
+
+
 // Check whether the previously processed command caused the VM to become
 // running.
 DebugCommandProcessor.prototype.isRunning = function() {
diff --git a/src/debug.cc b/src/debug.cc
index c41e545..8ec77e7 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -622,7 +622,7 @@
 
 // Default call debugger on uncaught exception.
 bool Debug::break_on_exception_ = false;
-bool Debug::break_on_uncaught_exception_ = true;
+bool Debug::break_on_uncaught_exception_ = false;
 
 Handle<Context> Debug::debug_context_ = Handle<Context>();
 Code* Debug::debug_break_return_ = NULL;
@@ -2740,8 +2740,10 @@
   }
 
   if (Socket::Setup()) {
-    agent_ = new DebuggerAgent(name, port);
-    agent_->Start();
+    if (agent_ == NULL) {
+      agent_ = new DebuggerAgent(name, port);
+      agent_->Start();
+    }
     return true;
   }
 
diff --git a/src/debug.h b/src/debug.h
index 0d63085..85c4d53 100644
--- a/src/debug.h
+++ b/src/debug.h
@@ -32,6 +32,7 @@
 #include "debug-agent.h"
 #include "execution.h"
 #include "factory.h"
+#include "flags.h"
 #include "hashmap.h"
 #include "platform.h"
 #include "string-stream.h"
@@ -772,6 +773,15 @@
       }
     }
 
+    if (((event == v8::BeforeCompile) || (event == v8::AfterCompile)) &&
+        !FLAG_debug_compile_events) {
+      return false;
+
+    } else if ((event == v8::ScriptCollected) &&
+               !FLAG_debug_script_collected_events) {
+      return false;
+    }
+
     // Currently argument event is not used.
     return !compiling_natives_ && Debugger::IsDebuggerActive();
   }
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index 185ff92..a3d2002 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -618,17 +618,17 @@
     }
 
     case Translation::ARGUMENTS_OBJECT: {
-      // Use the hole value as a sentinel and fill in the arguments object
-      // after the deoptimized frame is built.
+      // Use the arguments marker value as a sentinel and fill in the arguments
+      // object after the deoptimized frame is built.
       ASSERT(frame_index == 0);  // Only supported for first frame.
       if (FLAG_trace_deopt) {
         PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- ",
                output_[frame_index]->GetTop() + output_offset,
                output_offset);
-        Heap::the_hole_value()->ShortPrint();
+        Heap::arguments_marker()->ShortPrint();
         PrintF(" ; arguments object\n");
       }
-      intptr_t value = reinterpret_cast<intptr_t>(Heap::the_hole_value());
+      intptr_t value = reinterpret_cast<intptr_t>(Heap::arguments_marker());
       output_[frame_index]->SetFrameSlot(output_offset, value);
       return;
     }
diff --git a/src/disassembler.cc b/src/disassembler.cc
index bb0a072..194a299 100644
--- a/src/disassembler.cc
+++ b/src/disassembler.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -268,10 +268,13 @@
                              Code::Kind2String(kind),
                              CodeStub::MajorName(major_key, false));
             switch (major_key) {
-              case CodeStub::CallFunction:
-                out.AddFormatted("argc = %d", minor_key);
+              case CodeStub::CallFunction: {
+                int argc =
+                    CallFunctionStub::ExtractArgcFromMinorKey(minor_key);
+                out.AddFormatted("argc = %d", argc);
                 break;
-            default:
+              }
+              default:
                 out.AddFormatted("minor: %d", minor_key);
             }
           }
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 2b24d13..6e73258 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -355,6 +355,16 @@
 DEFINE_args(js_arguments, JSArguments(),
             "Pass all remaining arguments to the script. Alias for \"--\".")
 
+#if defined(WEBOS__)
+DEFINE_bool(debug_compile_events, false, "Enable debugger compile events")
+DEFINE_bool(debug_script_collected_events, false,
+            "Enable debugger script collected events")
+#else
+DEFINE_bool(debug_compile_events, true, "Enable debugger compile events")
+DEFINE_bool(debug_script_collected_events, true,
+            "Enable debugger script collected events")
+#endif
+
 //
 // Debug only flags
 //
diff --git a/src/handles.cc b/src/handles.cc
index 68c61b5..461c3f5 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -280,13 +280,13 @@
 }
 
 
-Handle<Object> IgnoreAttributesAndSetLocalProperty(
+Handle<Object> SetLocalPropertyIgnoreAttributes(
     Handle<JSObject> object,
     Handle<String> key,
     Handle<Object> value,
     PropertyAttributes attributes) {
   CALL_HEAP_FUNCTION(object->
-      IgnoreAttributesAndSetLocalProperty(*key, *value, attributes), Object);
+      SetLocalPropertyIgnoreAttributes(*key, *value, attributes), Object);
 }
 
 
@@ -422,6 +422,15 @@
 }
 
 
+Handle<Object> SetOwnElement(Handle<JSObject> object,
+                             uint32_t index,
+                             Handle<Object> value) {
+  ASSERT(!object->HasPixelElements());
+  ASSERT(!object->HasExternalArrayElements());
+  CALL_HEAP_FUNCTION(object->SetElement(index, *value, false), Object);
+}
+
+
 Handle<JSObject> Copy(Handle<JSObject> obj) {
   CALL_HEAP_FUNCTION(Heap::CopyJSObject(*obj), JSObject);
 }
diff --git a/src/handles.h b/src/handles.h
index 8fd25dc..aa9d8b9 100644
--- a/src/handles.h
+++ b/src/handles.h
@@ -217,9 +217,10 @@
 Handle<Object> ForceDeleteProperty(Handle<JSObject> object,
                                    Handle<Object> key);
 
-Handle<Object> IgnoreAttributesAndSetLocalProperty(Handle<JSObject> object,
-                                                   Handle<String> key,
-                                                   Handle<Object> value,
+Handle<Object> SetLocalPropertyIgnoreAttributes(
+    Handle<JSObject> object,
+    Handle<String> key,
+    Handle<Object> value,
     PropertyAttributes attributes);
 
 Handle<Object> SetPropertyWithInterceptor(Handle<JSObject> object,
@@ -231,6 +232,10 @@
                           uint32_t index,
                           Handle<Object> value);
 
+Handle<Object> SetOwnElement(Handle<JSObject> object,
+                             uint32_t index,
+                             Handle<Object> value);
+
 Handle<Object> GetProperty(Handle<JSObject> obj,
                            const char* name);
 
diff --git a/src/heap-inl.h b/src/heap-inl.h
index e7700e9..7b91e87 100644
--- a/src/heap-inl.h
+++ b/src/heap-inl.h
@@ -521,10 +521,6 @@
   CALL_AND_RETRY(FUNCTION_CALL, return, return)
 
 
-#define CALL_HEAP_FUNCTION_INLINE(FUNCTION_CALL) \
-  CALL_AND_RETRY(FUNCTION_CALL, break, break)
-
-
 #ifdef DEBUG
 
 inline bool Heap::allow_allocation(bool new_state) {
diff --git a/src/heap.cc b/src/heap.cc
index 44229f0..5832ccb 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -2011,6 +2011,12 @@
   }
   set_the_hole_value(obj);
 
+  { MaybeObject* maybe_obj = CreateOddball("arguments_marker",
+                                           Smi::FromInt(-4));
+    if (!maybe_obj->ToObject(&obj)) return false;
+  }
+  set_arguments_marker(obj);
+
   { MaybeObject* maybe_obj =
         CreateOddball("no_interceptor_result_sentinel", Smi::FromInt(-2));
     if (!maybe_obj->ToObject(&obj)) return false;
diff --git a/src/heap.h b/src/heap.h
index 3ceefd8..25384d2 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -53,6 +53,7 @@
   V(Object, null_value, NullValue)                                             \
   V(Object, true_value, TrueValue)                                             \
   V(Object, false_value, FalseValue)                                           \
+  V(Object, arguments_marker, ArgumentsMarker)                                 \
   V(Map, heap_number_map, HeapNumberMap)                                       \
   V(Map, global_context_map, GlobalContextMap)                                 \
   V(Map, fixed_array_map, FixedArrayMap)                                       \
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index a3c23c6..f7eb173 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -73,6 +73,7 @@
 //       HCompare
 //       HCompareJSObjectEq
 //       HInstanceOf
+//       HInstanceOfKnownGlobal
 //       HLoadKeyed
 //         HLoadKeyedFastElement
 //         HLoadKeyedGeneric
@@ -210,6 +211,7 @@
   V(GlobalReceiver)                            \
   V(Goto)                                      \
   V(InstanceOf)                                \
+  V(InstanceOfKnownGlobal)                     \
   V(IsNull)                                    \
   V(IsObject)                                  \
   V(IsSmi)                                     \
@@ -2262,6 +2264,28 @@
 };
 
 
+class HInstanceOfKnownGlobal: public HUnaryOperation {
+ public:
+  HInstanceOfKnownGlobal(HValue* left, Handle<JSFunction> right)
+      : HUnaryOperation(left), function_(right) {
+    set_representation(Representation::Tagged());
+    SetFlagMask(AllSideEffects());
+  }
+
+  Handle<JSFunction> function() { return function_; }
+
+  virtual Representation RequiredInputRepresentation(int index) const {
+    return Representation::Tagged();
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
+                               "instance_of_known_global")
+
+ private:
+  Handle<JSFunction> function_;
+};
+
+
 class HPower: public HBinaryOperation {
  public:
   HPower(HValue* left, HValue* right)
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index fbe4cd7..0d92b2e 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -4879,7 +4879,40 @@
   TypeInfo info = oracle()->CompareType(expr, TypeFeedbackOracle::RESULT);
   HInstruction* instr = NULL;
   if (op == Token::INSTANCEOF) {
-    instr = new HInstanceOf(left, right);
+    // Check to see if the rhs of the instanceof is a global function not
+    // residing in new space. If it is we assume that the function will stay the
+    // same.
+    Handle<JSFunction> target = Handle<JSFunction>::null();
+    Variable* var = expr->right()->AsVariableProxy()->AsVariable();
+    bool global_function = (var != NULL) && var->is_global() && !var->is_this();
+    CompilationInfo* info = graph()->info();
+    if (global_function &&
+        info->has_global_object() &&
+        !info->global_object()->IsAccessCheckNeeded()) {
+      Handle<String> name = var->name();
+      Handle<GlobalObject> global(info->global_object());
+      LookupResult lookup;
+      global->Lookup(*name, &lookup);
+      if (lookup.IsProperty() &&
+          lookup.type() == NORMAL &&
+          lookup.GetValue()->IsJSFunction()) {
+        Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
+        // If the function is in new space we assume it's more likely to
+        // change and thus prefer the general IC code.
+        if (!Heap::InNewSpace(*candidate)) {
+          target = candidate;
+        }
+      }
+    }
+
+    // If the target is not null we have found a known global function that is
+    // assumed to stay the same for this instanceof.
+    if (target.is_null()) {
+      instr = new HInstanceOf(left, right);
+    } else {
+      AddInstruction(new HCheckFunction(right, target));
+      instr = new HInstanceOfKnownGlobal(left, target);
+    }
   } else if (op == Token::IN) {
     BAILOUT("Unsupported comparison: in");
   } else if (info.IsNonPrimitive()) {
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index bd95c8d..91fb050 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -1772,7 +1772,6 @@
 }
 
 
-
 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
   Label call_runtime;
   ASSERT(operands_type_ == TRBinaryOpIC::STRING);
@@ -4973,7 +4972,26 @@
 }
 
 
+// Generate stub code for instanceof.
+// This code can patch a call site inlined cache of the instance of check,
+// which looks like this.
+//
+//   81 ff XX XX XX XX   cmp    edi, <the hole, patched to a map>
+//   75 0a               jne    <some near label>
+//   b8 XX XX XX XX      mov    eax, <the hole, patched to either true or false>
+//
+// If call site patching is requested, the stack will hold the delta from the
+// return address to the cmp instruction just below the return address. This
+// also means that call site patching can only take place with arguments in
+// registers. The top of stack looks like this when call site patching is
+// requested:
+//
+//   esp[0] : return address
+//   esp[4] : delta from return address to cmp instruction
+//
 void InstanceofStub::Generate(MacroAssembler* masm) {
+  // Call site inlining and patching implies arguments in registers.
+  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
+
   // Fixed register usage throughout the stub.
   Register object = eax;  // Object (lhs).
   Register map = ebx;  // Map of the object.
@@ -4981,9 +4999,22 @@
   Register prototype = edi;  // Prototype of the function.
   Register scratch = ecx;
 
+  // Constants describing the call site code to patch.
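+  // The deltas are byte offsets from the start of the cmp instruction above:
+  // 2 bytes to the cmp's immediate, 8 bytes past the 6-byte cmp and 2-byte
+  // jne to the mov, and 9 bytes to the mov's immediate.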
+  static const int kDeltaToCmpImmediate = 2;
+  static const int kDeltaToMov = 8;
+  static const int kDeltaToMovImmediate = 9;
+  static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
+  static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
+  static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
+
+  ExternalReference roots_address = ExternalReference::roots_address();
+
+  ASSERT_EQ(object.code(), InstanceofStub::left().code());
+  ASSERT_EQ(function.code(), InstanceofStub::right().code());
+
   // Get the object and function - they are always both needed.
   Label slow, not_js_object;
-  if (!args_in_registers()) {
+  if (!HasArgsInRegisters()) {
     __ mov(object, Operand(esp, 2 * kPointerSize));
     __ mov(function, Operand(esp, 1 * kPointerSize));
   }
@@ -4993,22 +5024,26 @@
   __ j(zero, &not_js_object, not_taken);
   __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
 
-  // Look up the function and the map in the instanceof cache.
-  NearLabel miss;
-  ExternalReference roots_address = ExternalReference::roots_address();
-  __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
-  __ cmp(function,
-         Operand::StaticArray(scratch, times_pointer_size, roots_address));
-  __ j(not_equal, &miss);
-  __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
-  __ cmp(map, Operand::StaticArray(scratch, times_pointer_size, roots_address));
-  __ j(not_equal, &miss);
-  __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
-  __ mov(eax, Operand::StaticArray(scratch, times_pointer_size, roots_address));
-  __ IncrementCounter(&Counters::instance_of_cache, 1);
-  __ ret((args_in_registers() ? 0 : 2) * kPointerSize);
+  // If there is a call site cache don't look in the global cache, but do the
+  // real lookup and update the call site cache.
+  if (!HasCallSiteInlineCheck()) {
+    // Look up the function and the map in the instanceof cache.
+    NearLabel miss;
+    __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
+    __ cmp(function,
+           Operand::StaticArray(scratch, times_pointer_size, roots_address));
+    __ j(not_equal, &miss);
+    __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
+    __ cmp(map, Operand::StaticArray(
+        scratch, times_pointer_size, roots_address));
+    __ j(not_equal, &miss);
+    __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
+    __ mov(eax, Operand::StaticArray(
+        scratch, times_pointer_size, roots_address));
+    __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
+    __ bind(&miss);
+  }
 
-  __ bind(&miss);
   // Get the prototype of the function.
   __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
 
@@ -5017,13 +5052,29 @@
   __ j(zero, &slow, not_taken);
   __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
 
-  // Update the golbal instanceof cache with the current map and function. The
-  // cached answer will be set when it is known.
+  // Update the global instanceof or call site inlined cache with the current
+  // map and function. The cached answer will be set when it is known below.
+  if (!HasCallSiteInlineCheck()) {
   __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
   __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
   __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
   __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
          function);
+  } else {
+    // The constants for the code patching are based on no push instructions
+    // at the call site.
+    ASSERT(HasArgsInRegisters());
+    // Get return address and delta to inlined map check.
+    __ mov(scratch, Operand(esp, 0 * kPointerSize));
+    __ sub(scratch, Operand(esp, 1 * kPointerSize));
+    if (FLAG_debug_code) {
+      __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
+      __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
+      __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
+      __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
+    }
+    __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
+  }
 
   // Loop through the prototype chain of the object looking for the function
   // prototype.
@@ -5039,18 +5090,48 @@
   __ jmp(&loop);
 
   __ bind(&is_instance);
-  __ IncrementCounter(&Counters::instance_of_stub_true, 1);
-  __ Set(eax, Immediate(0));
-  __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
-  __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax);
-  __ ret((args_in_registers() ? 0 : 2) * kPointerSize);
+  if (!HasCallSiteInlineCheck()) {
+    __ Set(eax, Immediate(0));
+    __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
+    __ mov(Operand::StaticArray(scratch,
+                                times_pointer_size, roots_address), eax);
+  } else {
+    // Get return address and delta to inlined map check.
+    __ mov(eax, Factory::true_value());
+    __ mov(scratch, Operand(esp, 0 * kPointerSize));
+    __ sub(scratch, Operand(esp, 1 * kPointerSize));
+    if (FLAG_debug_code) {
+      __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
+      __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
+    }
+    __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
+    if (!ReturnTrueFalseObject()) {
+      __ Set(eax, Immediate(0));
+    }
+  }
+  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
 
   __ bind(&is_not_instance);
-  __ IncrementCounter(&Counters::instance_of_stub_false, 1);
-  __ Set(eax, Immediate(Smi::FromInt(1)));
-  __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
-  __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax);
-  __ ret((args_in_registers() ? 0 : 2) * kPointerSize);
+  if (!HasCallSiteInlineCheck()) {
+    __ Set(eax, Immediate(Smi::FromInt(1)));
+    __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
+    __ mov(Operand::StaticArray(
+        scratch, times_pointer_size, roots_address), eax);
+  } else {
+    // Get return address and delta to inlined map check.
+    __ mov(eax, Factory::false_value());
+    __ mov(scratch, Operand(esp, 0 * kPointerSize));
+    __ sub(scratch, Operand(esp, 1 * kPointerSize));
+    if (FLAG_debug_code) {
+      __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
+      __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
+    }
+    __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
+    if (!ReturnTrueFalseObject()) {
+      __ Set(eax, Immediate(Smi::FromInt(1)));
+    }
+  }
+  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
 
   Label object_not_null, object_not_null_or_smi;
   __ bind(&not_js_object);
@@ -5064,39 +5145,61 @@
   // Null is not instance of anything.
   __ cmp(object, Factory::null_value());
   __ j(not_equal, &object_not_null);
-  __ IncrementCounter(&Counters::instance_of_stub_false_null, 1);
   __ Set(eax, Immediate(Smi::FromInt(1)));
-  __ ret((args_in_registers() ? 0 : 2) * kPointerSize);
+  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
 
   __ bind(&object_not_null);
   // Smi values is not instance of anything.
   __ test(object, Immediate(kSmiTagMask));
   __ j(not_zero, &object_not_null_or_smi, not_taken);
   __ Set(eax, Immediate(Smi::FromInt(1)));
-  __ ret((args_in_registers() ? 0 : 2) * kPointerSize);
+  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
 
   __ bind(&object_not_null_or_smi);
   // String values is not instance of anything.
   Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
   __ j(NegateCondition(is_string), &slow);
-  __ IncrementCounter(&Counters::instance_of_stub_false_string, 1);
   __ Set(eax, Immediate(Smi::FromInt(1)));
-  __ ret((args_in_registers() ? 0 : 2) * kPointerSize);
+  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
 
   // Slow-case: Go through the JavaScript implementation.
   __ bind(&slow);
-  if (args_in_registers()) {
-    // Push arguments below return address.
-    __ pop(scratch);
+  if (!ReturnTrueFalseObject()) {
+    // Tail call the builtin which returns 0 or 1.
+    if (HasArgsInRegisters()) {
+      // Push arguments below return address.
+      __ pop(scratch);
+      __ push(object);
+      __ push(function);
+      __ push(scratch);
+    }
+    __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
+  } else {
+    // Call the builtin and convert 0/1 to true/false.
+    __ EnterInternalFrame();
     __ push(object);
     __ push(function);
-    __ push(scratch);
+    __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
+    __ LeaveInternalFrame();
+    NearLabel true_value, done;
+    __ test(eax, Operand(eax));
+    __ j(zero, &true_value);
+    __ mov(eax, Factory::false_value());
+    __ jmp(&done);
+    __ bind(&true_value);
+    __ mov(eax, Factory::true_value());
+    __ bind(&done);
+    __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
   }
-  __ IncrementCounter(&Counters::instance_of_slow, 1);
-  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
 }
 
 
+Register InstanceofStub::left() { return eax; }
+
+
+Register InstanceofStub::right() { return edx; }
+
+
 int CompareStub::MinorKey() {
   // Encode the three parameters in a unique 16 bit value. To avoid duplicate
   // stubs the never NaN NaN condition is only taken into account if the
diff --git a/src/ia32/code-stubs-ia32.h b/src/ia32/code-stubs-ia32.h
index f66a8c7..4a56d0d 100644
--- a/src/ia32/code-stubs-ia32.h
+++ b/src/ia32/code-stubs-ia32.h
@@ -250,13 +250,6 @@
         result_type_(result_type),
         name_(NULL) { }
 
-  // Generate code to call the stub with the supplied arguments. This will add
-  // code at the call site to prepare arguments either in registers or on the
-  // stack together with the actual call.
-  void GenerateCall(MacroAssembler* masm, Register left, Register right);
-  void GenerateCall(MacroAssembler* masm, Register left, Smi* right);
-  void GenerateCall(MacroAssembler* masm, Smi* left, Register right);
-
  private:
   enum SmiCodeGenerateHeapNumberResults {
     ALLOW_HEAPNUMBER_RESULTS,
@@ -321,10 +314,6 @@
   void GenerateTypeTransition(MacroAssembler* masm);
   void GenerateTypeTransitionWithSavedArgs(MacroAssembler* masm);
 
-  bool IsOperationCommutative() {
-    return (op_ == Token::ADD) || (op_ == Token::MUL);
-  }
-
   virtual int GetCodeKind() { return Code::TYPE_RECORDING_BINARY_OP_IC; }
 
   virtual InlineCacheState GetICState() {
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index 2f14e82..e3b0dfc 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -745,10 +745,10 @@
 
   Comment cmnt(masm_, "[ store arguments object");
   if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
-    // When using lazy arguments allocation, we store the hole value
+    // When using lazy arguments allocation, we store the arguments marker value
     // as a sentinel indicating that the arguments object hasn't been
     // allocated yet.
-    frame_->Push(Factory::the_hole_value());
+    frame_->Push(Factory::arguments_marker());
   } else {
     ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
     frame_->PushFunction();
@@ -773,9 +773,9 @@
     if (probe.is_constant()) {
       // We have to skip updating the arguments object if it has
       // been assigned a proper value.
-      skip_arguments = !probe.handle()->IsTheHole();
+      skip_arguments = !probe.handle()->IsArgumentsMarker();
     } else {
-      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
+      __ cmp(Operand(probe.reg()), Immediate(Factory::arguments_marker()));
       probe.Unuse();
       done.Branch(not_equal);
     }
@@ -3294,9 +3294,9 @@
     Label slow, done;
     bool try_lazy = true;
     if (probe.is_constant()) {
-      try_lazy = probe.handle()->IsTheHole();
+      try_lazy = probe.handle()->IsArgumentsMarker();
     } else {
-      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
+      __ cmp(Operand(probe.reg()), Immediate(Factory::arguments_marker()));
       probe.Unuse();
       __ j(not_equal, &slow);
     }
@@ -5068,7 +5068,7 @@
   // object has been lazily loaded yet.
   Result result = frame()->Pop();
   if (result.is_constant()) {
-    if (result.handle()->IsTheHole()) {
+    if (result.handle()->IsArgumentsMarker()) {
       result = StoreArgumentsObject(false);
     }
     frame()->Push(&result);
@@ -5079,7 +5079,7 @@
   // indicates that we haven't loaded the arguments object yet, we
   // need to do it now.
   JumpTarget exit;
-  __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
+  __ cmp(Operand(result.reg()), Immediate(Factory::arguments_marker()));
   frame()->Push(&result);
   exit.Branch(not_equal);
 
diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc
index d95df3e..ceba249 100644
--- a/src/ia32/deoptimizer-ia32.cc
+++ b/src/ia32/deoptimizer-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -105,23 +105,25 @@
 
 void Deoptimizer::PatchStackCheckCode(RelocInfo* rinfo,
                                       Code* replacement_code) {
-  // The stack check code matches the pattern (on ia32, for example):
+  // The stack check code matches the pattern:
   //
   //     cmp esp, <limit>
   //     jae ok
   //     call <stack guard>
+  //     test eax, <loop nesting depth>
   // ok: ...
   //
-  // We will patch the code to:
+  // We will patch away the branch so the code is:
   //
   //     cmp esp, <limit>  ;; Not changed
   //     nop
   //     nop
   //     call <on-stack replacment>
+  //     test eax, <loop nesting depth>
   // ok:
   Address call_target_address = rinfo->pc();
   ASSERT(*(call_target_address - 3) == 0x73 &&  // jae
-         *(call_target_address - 2) == 0x05 &&  // offset
+         *(call_target_address - 2) == 0x07 &&  // offset
          *(call_target_address - 1) == 0xe8);   // call
   *(call_target_address - 3) = 0x90;  // nop
   *(call_target_address - 2) = 0x90;  // nop
@@ -130,12 +132,14 @@
 
 
 void Deoptimizer::RevertStackCheckCode(RelocInfo* rinfo, Code* check_code) {
+  // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
+  // restore the conditional branch.
   Address call_target_address = rinfo->pc();
   ASSERT(*(call_target_address - 3) == 0x90 &&  // nop
          *(call_target_address - 2) == 0x90 &&  // nop
          *(call_target_address - 1) == 0xe8);   // call
   *(call_target_address - 3) = 0x73;  // jae
-  *(call_target_address - 2) = 0x05;  // offset
+  *(call_target_address - 2) = 0x07;  // offset
   rinfo->set_target_address(check_code->entry());
 }
 
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 5beec0d..5f30858 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -264,16 +264,24 @@
   __ j(above_equal, &ok, taken);
   StackCheckStub stub;
   __ CallStub(&stub);
-  __ bind(&ok);
-  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
-  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
+  // Record a mapping of this PC offset to the OSR id.  This is used to find
+  // the AST id from the unoptimized code in order to use it as a key into
+  // the deoptimization input data found in the optimized code.
   RecordStackCheck(stmt->OsrEntryId());
-  // Loop stack checks can be patched to perform on-stack
-  // replacement. In order to decide whether or not to perform OSR we
-  // embed the loop depth in a test instruction after the call so we
-  // can extract it from the OSR builtin.
+
+  // Loop stack checks can be patched to perform on-stack replacement. In
+  // order to decide whether or not to perform OSR we embed the loop depth
+  // in a test instruction after the call so we can extract it from the OSR
+  // builtin.
   ASSERT(loop_depth() > 0);
   __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
+
+  __ bind(&ok);
+  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
+  // Record a mapping of the OSR id to this PC.  This is used if the OSR
+  // entry becomes the target of a bailout.  We don't expect it to be, but
+  // we want it to work if it is.
+  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
 }
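As an aside on how the embedded loop depth gets back out: the immediate of the test instruction sits right after the call's return address, so the OSR machinery can read it from the instruction stream. A hedged sketch, assuming the assembler picks the two-byte "test al, imm8" encoding (0xA8) for these small immediates, which is consistent with the jae offset growing by two bytes in the deoptimizer change above:

#include <cassert>
#include <cstdint>

int LoopDepthAtReturnAddress(const uint8_t* return_address) {
  assert(return_address[0] == 0xA8);  // test al, imm8 (assumed encoding)
  return return_address[1];           // the embedded loop nesting depth
}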
 
 
@@ -1497,7 +1505,9 @@
       if (expr->is_compound()) {
         if (property->is_arguments_access()) {
           VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
-          __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
+          MemOperand slot_operand =
+              EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
+          __ push(slot_operand);
           __ mov(eax, Immediate(property->key()->AsLiteral()->handle()));
         } else {
           VisitForStackValue(property->obj());
@@ -1508,7 +1518,9 @@
       } else {
         if (property->is_arguments_access()) {
           VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
-          __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
+          MemOperand slot_operand =
+              EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
+          __ push(slot_operand);
           __ push(Immediate(property->key()->AsLiteral()->handle()));
         } else {
           VisitForStackValue(property->obj());
@@ -3739,7 +3751,9 @@
     } else {
       if (prop->is_arguments_access()) {
         VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
-        __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
+        MemOperand slot_operand =
+            EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
+        __ push(slot_operand);
         __ mov(eax, Immediate(prop->key()->AsLiteral()->handle()));
       } else {
         VisitForStackValue(prop->obj());
@@ -4042,7 +4056,6 @@
 
     case Token::INSTANCEOF: {
       VisitForStackValue(expr->right());
-      __ IncrementCounter(&Counters::instance_of_full, 1);
       InstanceofStub stub(InstanceofStub::kNoFlags);
       __ CallStub(&stub);
       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 0f56825..d32f95d 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -1694,7 +1694,7 @@
 
 
 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
-  // Object and function are in fixed registers eax and edx.
+  // Object and function are in fixed registers defined by the stub.
   InstanceofStub stub(InstanceofStub::kArgsInRegisters);
   CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
 
@@ -1720,6 +1720,107 @@
 }
 
 
+void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
+  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
+   public:
+    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
+                                  LInstanceOfKnownGlobal* instr)
+        : LDeferredCode(codegen), instr_(instr) { }
+    virtual void Generate() {
+      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
+    }
+
+    Label* map_check() { return &map_check_; }
+
+   private:
+    LInstanceOfKnownGlobal* instr_;
+    Label map_check_;
+  };
+
+  DeferredInstanceOfKnownGlobal* deferred;
+  deferred = new DeferredInstanceOfKnownGlobal(this, instr);
+
+  Label done, false_result;
+  Register object = ToRegister(instr->input());
+  Register temp = ToRegister(instr->temp());
+
+  // A Smi is not instance of anything.
+  __ test(object, Immediate(kSmiTagMask));
+  __ j(zero, &false_result, not_taken);
+
+  // This is the inlined call site instanceof cache. The two occurrences of the
+  // hole value will be patched to the last map/result pair generated by the
+  // instanceof stub.
+  NearLabel cache_miss;
+  Register map = ToRegister(instr->temp());
+  __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
+  __ bind(deferred->map_check());  // Label for calculating code patching.
+  __ cmp(map, Factory::the_hole_value());  // Patched to cached map.
+  __ j(not_equal, &cache_miss, not_taken);
+  __ mov(eax, Factory::the_hole_value());  // Patched to either true or false.
+  __ jmp(&done);
+
+  // The inlined call site cache did not match. Check null and string before
+  // calling the deferred code.
+  __ bind(&cache_miss);
+  // Null is not instance of anything.
+  __ cmp(object, Factory::null_value());
+  __ j(equal, &false_result);
+
+  // String values are not instances of anything.
+  Condition is_string = masm_->IsObjectStringType(object, temp, temp);
+  __ j(is_string, &false_result);
+
+  // Go to the deferred code.
+  __ jmp(deferred->entry());
+
+  __ bind(&false_result);
+  __ mov(ToRegister(instr->result()), Factory::false_value());
+
+  // At this point the result is either the true or the false object. The
+  // deferred code also produces a true or false object.
+  __ bind(deferred->exit());
+  __ bind(&done);
+}
+
+
+void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+                                                Label* map_check) {
+  __ PushSafepointRegisters();
+
+  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
+  flags = static_cast<InstanceofStub::Flags>(
+      flags | InstanceofStub::kArgsInRegisters);
+  flags = static_cast<InstanceofStub::Flags>(
+      flags | InstanceofStub::kCallSiteInlineCheck);
+  flags = static_cast<InstanceofStub::Flags>(
+      flags | InstanceofStub::kReturnTrueFalseObject);
+  InstanceofStub stub(flags);
+
+  // Get the temp register reserved by the instruction. This needs to be edi as
+  // its slot among the pushed safepoint registers is used to communicate the
+  // offset to the location of the map check.
+  Register temp = ToRegister(instr->temp());
+  ASSERT(temp.is(edi));
+  __ mov(InstanceofStub::right(), Immediate(instr->function()));
+  static const int kAdditionalDelta = 13;
+  int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
+  Label before_push_delta;
+  __ bind(&before_push_delta);
+  __ mov(temp, Immediate(delta));
+  __ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp);
+  __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
+  ASSERT_EQ(kAdditionalDelta,
+            masm_->SizeOfCodeGeneratedSince(&before_push_delta));
+  RecordSafepointWithRegisters(
+      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
+  // Put the result value into the eax slot and restore all registers.
+  __ mov(Operand(esp, EspIndexForPushAll(eax) * kPointerSize), eax);
+
+  __ PopSafepointRegisters();
+}
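Purely as an aside (not V8 code): the two patchable hole values give each call site a one-entry map/result cache, and the deferred code plays the role of the generic check that refreshes it. The behaviour is roughly this sketch:

#include <functional>

struct Map {};  // stand-in for an object's hidden class

struct CallSiteCache {
  const Map* cached_map = nullptr;  // plays the role of one patched hole value
  bool cached_result = false;       // plays the role of the other
};

// 'generic_check' stands in for the InstanceofStub call in the deferred code.
bool InstanceOfWithCache(const Map* object_map, CallSiteCache* site,
                         const std::function<bool()>& generic_check) {
  if (object_map == site->cached_map) return site->cached_result;  // fast hit
  bool result = generic_check();   // slow path
  site->cached_map = object_map;   // refresh the per-call-site cache
  site->cached_result = result;
  return result;
}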
+
+
 static Condition ComputeCompareCondition(Token::Value op) {
   switch (op) {
     case Token::EQ_STRICT:
@@ -1899,6 +2000,8 @@
   __ sub(length, index);
   DeoptimizeIf(below_equal, instr->environment());
 
+  // There are two words between the frame pointer and the last argument.
+  // Subtracting from length accounts for one of them; the kPointerSize
+  // displacement in the operand below accounts for the other.
   __ mov(result, Operand(arguments, length, times_4, kPointerSize));
 }
 
@@ -1948,7 +2051,7 @@
   Register result = ToRegister(instr->result());
 
   // Check for arguments adapter frame.
-  Label done, adapted;
+  NearLabel done, adapted;
   __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
   __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
   __ cmp(Operand(result),
@@ -1963,7 +2066,8 @@
   __ bind(&adapted);
   __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
 
-  // Done. Pointer to topmost argument is in result.
+  // Result is the frame pointer for the frame if not adapted and for the real
+  // frame below the adaptor frame if adapted.
   __ bind(&done);
 }
 
@@ -1972,9 +2076,9 @@
   Operand elem = ToOperand(instr->input());
   Register result = ToRegister(instr->result());
 
-  Label done;
+  NearLabel done;
 
-  // No arguments adaptor frame. Number of arguments is fixed.
+  // If there is no arguments adaptor frame, the number of arguments is fixed.
   __ cmp(ebp, elem);
   __ mov(result, Immediate(scope()->num_parameters()));
   __ j(equal, &done);
@@ -1985,7 +2089,7 @@
                          ArgumentsAdaptorFrameConstants::kLengthOffset));
   __ SmiUntag(result);
 
-  // Done. Argument length is in result register.
+  // Argument length is in the result register.
   __ bind(&done);
 }
 
@@ -2534,7 +2638,6 @@
            value);
   }
 
-  // Update the write barrier unless we're certain that we're storing a smi.
   if (instr->hydrogen()->NeedsWriteBarrier()) {
     // Compute address of modified element and store it into key register.
     __ lea(key, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize));
diff --git a/src/ia32/lithium-codegen-ia32.h b/src/ia32/lithium-codegen-ia32.h
index 6d8173a..41ac39a 100644
--- a/src/ia32/lithium-codegen-ia32.h
+++ b/src/ia32/lithium-codegen-ia32.h
@@ -77,6 +77,8 @@
   void DoDeferredTaggedToI(LTaggedToI* instr);
   void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
   void DoDeferredStackCheck(LGoto* instr);
+  void DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+                                        Label* map_check);
 
   // Parallel move support.
   void DoParallelMove(LParallelMove* move);
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index 4fde3d4..6355f16 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -322,15 +322,6 @@
 }
 
 
-LChunk::LChunk(HGraph* graph)
-    : spill_slot_count_(0),
-      graph_(graph),
-      instructions_(32),
-      pointer_maps_(8),
-      inlined_closures_(1) {
-}
-
-
 void LChunk::Verify() const {
   // TODO(twuerthinger): Implement verification for chunk.
 }
@@ -472,151 +463,6 @@
 }
 
 
-class LGapNode: public ZoneObject {
- public:
-  explicit LGapNode(LOperand* operand)
-      : operand_(operand), resolved_(false), visited_id_(-1) { }
-
-  LOperand* operand() const { return operand_; }
-  bool IsResolved() const { return !IsAssigned() || resolved_; }
-  void MarkResolved() {
-    ASSERT(!IsResolved());
-    resolved_ = true;
-  }
-  int visited_id() const { return visited_id_; }
-  void set_visited_id(int id) {
-    ASSERT(id > visited_id_);
-    visited_id_ = id;
-  }
-
-  bool IsAssigned() const { return assigned_from_.is_set(); }
-  LGapNode* assigned_from() const { return assigned_from_.get(); }
-  void set_assigned_from(LGapNode* n) { assigned_from_.set(n); }
-
- private:
-  LOperand* operand_;
-  SetOncePointer<LGapNode> assigned_from_;
-  bool resolved_;
-  int visited_id_;
-};
-
-
-LGapResolver::LGapResolver(const ZoneList<LMoveOperands>* moves,
-                           LOperand* marker_operand)
-    : nodes_(4),
-      identified_cycles_(4),
-      result_(4),
-      marker_operand_(marker_operand),
-      next_visited_id_(0) {
-  for (int i = 0; i < moves->length(); ++i) {
-    LMoveOperands move = moves->at(i);
-    if (!move.IsRedundant()) RegisterMove(move);
-  }
-}
-
-
-const ZoneList<LMoveOperands>* LGapResolver::ResolveInReverseOrder() {
-  for (int i = 0; i < identified_cycles_.length(); ++i) {
-    ResolveCycle(identified_cycles_[i]);
-  }
-
-  int unresolved_nodes;
-  do {
-    unresolved_nodes = 0;
-    for (int j = 0; j < nodes_.length(); j++) {
-      LGapNode* node = nodes_[j];
-      if (!node->IsResolved() && node->assigned_from()->IsResolved()) {
-        AddResultMove(node->assigned_from(), node);
-        node->MarkResolved();
-      }
-      if (!node->IsResolved()) ++unresolved_nodes;
-    }
-  } while (unresolved_nodes > 0);
-  return &result_;
-}
-
-
-void LGapResolver::AddResultMove(LGapNode* from, LGapNode* to) {
-  AddResultMove(from->operand(), to->operand());
-}
-
-
-void LGapResolver::AddResultMove(LOperand* from, LOperand* to) {
-  result_.Add(LMoveOperands(from, to));
-}
-
-
-void LGapResolver::ResolveCycle(LGapNode* start) {
-  ZoneList<LOperand*> circle_operands(8);
-  circle_operands.Add(marker_operand_);
-  LGapNode* cur = start;
-  do {
-    cur->MarkResolved();
-    circle_operands.Add(cur->operand());
-    cur = cur->assigned_from();
-  } while (cur != start);
-  circle_operands.Add(marker_operand_);
-
-  for (int i = circle_operands.length() - 1; i > 0; --i) {
-    LOperand* from = circle_operands[i];
-    LOperand* to = circle_operands[i - 1];
-    AddResultMove(from, to);
-  }
-}
-
-
-bool LGapResolver::CanReach(LGapNode* a, LGapNode* b, int visited_id) {
-  ASSERT(a != b);
-  LGapNode* cur = a;
-  while (cur != b && cur->visited_id() != visited_id && cur->IsAssigned()) {
-    cur->set_visited_id(visited_id);
-    cur = cur->assigned_from();
-  }
-
-  return cur == b;
-}
-
-
-bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) {
-  ASSERT(a != b);
-  return CanReach(a, b, next_visited_id_++);
-}
-
-
-void LGapResolver::RegisterMove(LMoveOperands move) {
-  if (move.from()->IsConstantOperand()) {
-    // Constant moves should be last in the machine code. Therefore add them
-    // first to the result set.
-    AddResultMove(move.from(), move.to());
-  } else {
-    LGapNode* from = LookupNode(move.from());
-    LGapNode* to = LookupNode(move.to());
-    if (to->IsAssigned() && to->assigned_from() == from) {
-      move.Eliminate();
-      return;
-    }
-    ASSERT(!to->IsAssigned());
-    if (CanReach(from, to)) {
-      // This introduces a circle. Save.
-      identified_cycles_.Add(from);
-    }
-    to->set_assigned_from(from);
-  }
-}
-
-
-LGapNode* LGapResolver::LookupNode(LOperand* operand) {
-  for (int i = 0; i < nodes_.length(); ++i) {
-    if (nodes_[i]->operand()->Equals(operand)) return nodes_[i];
-  }
-
-  // No node found => create a new one.
-  LGapNode* result = new LGapNode(operand);
-  nodes_.Add(result);
-  return result;
-}
-
-
 Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
   return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
 }
@@ -833,6 +679,12 @@
 }
 
 
+LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
+  allocator_->MarkAsSaveDoubles();
+  return instr;
+}
+
+
 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
   ASSERT(!instr->HasPointerMap());
   instr->set_pointer_map(new LPointerMap(position_));
@@ -1257,10 +1109,11 @@
     } else if (v->IsInstanceOf()) {
       HInstanceOf* instance_of = HInstanceOf::cast(v);
       LInstruction* result =
-          new LInstanceOfAndBranch(UseFixed(instance_of->left(), eax),
-                                   UseFixed(instance_of->right(), edx),
-                                   first_id,
-                                   second_id);
+          new LInstanceOfAndBranch(
+              UseFixed(instance_of->left(), InstanceofStub::left()),
+              UseFixed(instance_of->right(), InstanceofStub::right()),
+              first_id,
+              second_id);
       return MarkAsCall(result, instr);
     } else if (v->IsTypeofIs()) {
       HTypeofIs* typeof_is = HTypeofIs::cast(v);
@@ -1287,12 +1140,7 @@
     HCompareMapAndBranch* instr) {
   ASSERT(instr->value()->representation().IsTagged());
   LOperand* value = UseRegisterAtStart(instr->value());
-  HBasicBlock* first = instr->FirstSuccessor();
-  HBasicBlock* second = instr->SecondSuccessor();
-  return new LCmpMapAndBranch(value,
-                              instr->map(),
-                              first->block_id(),
-                              second->block_id());
+  return new LCmpMapAndBranch(value);
 }
 
 
@@ -1308,12 +1156,23 @@
 
 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
   LInstruction* result =
-      new LInstanceOf(UseFixed(instr->left(), eax),
-                      UseFixed(instr->right(), edx));
+      new LInstanceOf(UseFixed(instr->left(), InstanceofStub::left()),
+                      UseFixed(instr->right(), InstanceofStub::right()));
   return MarkAsCall(DefineFixed(result, eax), instr);
 }
 
 
+LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
+    HInstanceOfKnownGlobal* instr) {
+  LInstruction* result =
+      new LInstanceOfKnownGlobal(
+          UseFixed(instr->value(), InstanceofStub::left()),
+          FixedTemp(edi));
+  MarkAsSaveDoubles(result);
+  return AssignEnvironment(AssignPointerMap(DefineFixed(result, eax)));
+}
+
+
 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
   LOperand* function = UseFixed(instr->function(), edi);
   LOperand* receiver = UseFixed(instr->receiver(), eax);
diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h
index 00dc394..4b0db40 100644
--- a/src/ia32/lithium-ia32.h
+++ b/src/ia32/lithium-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -30,6 +30,7 @@
 
 #include "hydrogen.h"
 #include "lithium-allocator.h"
+#include "lithium.h"
 #include "safepoint-table.h"
 
 namespace v8 {
@@ -39,7 +40,6 @@
 class LCodeGen;
 class LEnvironment;
 class Translation;
-class LGapNode;
 
 
 // Type hierarchy:
@@ -63,6 +63,7 @@
 //     LDivI
 //     LInstanceOf
 //     LInstanceOfAndBranch
+//     LInstanceOfKnownGlobal
 //     LLoadKeyedFastElement
 //     LLoadKeyedGeneric
 //     LModI
@@ -207,6 +208,7 @@
   V(FixedArrayLength)                           \
   V(InstanceOf)                                 \
   V(InstanceOfAndBranch)                        \
+  V(InstanceOfKnownGlobal)                      \
   V(Integer32ToDouble)                          \
   V(IsNull)                                     \
   V(IsNullAndBranch)                            \
@@ -333,29 +335,6 @@
 };
 
 
-class LGapResolver BASE_EMBEDDED {
- public:
-  LGapResolver(const ZoneList<LMoveOperands>* moves, LOperand* marker_operand);
-  const ZoneList<LMoveOperands>* ResolveInReverseOrder();
-
- private:
-  LGapNode* LookupNode(LOperand* operand);
-  bool CanReach(LGapNode* a, LGapNode* b, int visited_id);
-  bool CanReach(LGapNode* a, LGapNode* b);
-  void RegisterMove(LMoveOperands move);
-  void AddResultMove(LOperand* from, LOperand* to);
-  void AddResultMove(LGapNode* from, LGapNode* to);
-  void ResolveCycle(LGapNode* start);
-
-  ZoneList<LGapNode*> nodes_;
-  ZoneList<LGapNode*> identified_cycles_;
-  ZoneList<LMoveOperands> result_;
-  LOperand* marker_operand_;
-  int next_visited_id_;
-  int bailout_after_ast_id_;
-};
-
-
 class LParallelMove : public ZoneObject {
  public:
   LParallelMove() : move_operands_(4) { }
@@ -1008,6 +987,23 @@
 };
 
 
+class LInstanceOfKnownGlobal: public LUnaryOperation {
+ public:
+  LInstanceOfKnownGlobal(LOperand* left, LOperand* temp)
+      : LUnaryOperation(left), temp_(temp) { }
+
+  DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
+                               "instance-of-known-global")
+  DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
+
+  Handle<JSFunction> function() const { return hydrogen()->function(); }
+  LOperand* temp() const { return temp_; }
+
+ private:
+  LOperand* temp_;
+};
+
+
 class LBoundsCheck: public LBinaryOperation {
  public:
   LBoundsCheck(LOperand* index, LOperand* length)
@@ -1126,27 +1122,20 @@
 
 class LCmpMapAndBranch: public LUnaryOperation {
  public:
-  LCmpMapAndBranch(LOperand* value,
-                   Handle<Map> map,
-                   int true_block_id,
-                   int false_block_id)
-      : LUnaryOperation(value),
-        map_(map),
-        true_block_id_(true_block_id),
-        false_block_id_(false_block_id) { }
+  explicit LCmpMapAndBranch(LOperand* value) : LUnaryOperation(value) { }
 
   DECLARE_CONCRETE_INSTRUCTION(CmpMapAndBranch, "cmp-map-and-branch")
+  DECLARE_HYDROGEN_ACCESSOR(CompareMapAndBranch)
 
   virtual bool IsControl() const { return true; }
 
-  Handle<Map> map() const { return map_; }
-  int true_block_id() const { return true_block_id_; }
-  int false_block_id() const { return false_block_id_; }
-
- private:
-  Handle<Map> map_;
-  int true_block_id_;
-  int false_block_id_;
+  Handle<Map> map() const { return hydrogen()->map(); }
+  int true_block_id() const {
+    return hydrogen()->true_destination()->block_id();
+  }
+  int false_block_id() const {
+    return hydrogen()->false_destination()->block_id();
+  }
 };
 
 
@@ -1952,7 +1941,12 @@
 class LChunkBuilder;
 class LChunk: public ZoneObject {
  public:
-  explicit LChunk(HGraph* graph);
+  explicit LChunk(HGraph* graph)
+    : spill_slot_count_(0),
+      graph_(graph),
+      instructions_(32),
+      pointer_maps_(8),
+      inlined_closures_(1) { }
 
   int AddInstruction(LInstruction* instruction, HBasicBlock* block);
   LConstantOperand* DefineConstantOperand(HConstant* constant);
@@ -2102,6 +2096,7 @@
       LInstruction* instr,
       HInstruction* hinstr,
       CanDeoptimize can_deoptimize = CANNOT_DEOPTIMIZE_EAGERLY);
+  LInstruction* MarkAsSaveDoubles(LInstruction* instr);
 
   LInstruction* SetInstructionPendingDeoptimizationEnvironment(
       LInstruction* instr, int ast_id);
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 7c33906..a6f4679 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -1715,7 +1715,7 @@
   }
 #endif
   // Disable stub call restrictions to always allow calls to abort.
-  set_allow_stub_calls(true);
+  AllowStubCallsScope allow_scope(this, true);
 
   push(eax);
   push(Immediate(p0));
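The switch from a sticky set_allow_stub_calls(true) to a scope object is the usual RAII pattern; a minimal sketch of that pattern (illustrative names, not necessarily the real AllowStubCallsScope declaration) restores the previous value when the scope ends instead of leaving the flag set for the rest of the assembler's lifetime:

class Assembler {
 public:
  bool allow_stub_calls() const { return allow_stub_calls_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
 private:
  bool allow_stub_calls_ = false;
};

class AllowStubCallsScope {
 public:
  AllowStubCallsScope(Assembler* assembler, bool allow)
      : assembler_(assembler), old_value_(assembler->allow_stub_calls()) {
    assembler_->set_allow_stub_calls(allow);
  }
  ~AllowStubCallsScope() { assembler_->set_allow_stub_calls(old_value_); }
 private:
  Assembler* assembler_;
  bool old_value_;
};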
diff --git a/src/lithium-allocator.cc b/src/lithium-allocator.cc
index eecc441..abdef09 100644
--- a/src/lithium-allocator.cc
+++ b/src/lithium-allocator.cc
@@ -940,6 +940,9 @@
                                     curr_position.InstructionEnd());
             }
           }
+        }
+
+        if (summary->IsCall() || summary->IsSaveDoubles()) {
           for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
             if (output == NULL || !output->IsDoubleRegister() ||
                 output->index() != i) {
@@ -1607,6 +1610,11 @@
 }
 
 
+void LAllocator::MarkAsSaveDoubles() {
+  current_summary()->MarkAsSaveDoubles();
+}
+
+
 void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
   operand->set_virtual_register(instr->id());
   current_summary()->SetOutput(operand);
diff --git a/src/lithium-allocator.h b/src/lithium-allocator.h
index fe837e2..454e302 100644
--- a/src/lithium-allocator.h
+++ b/src/lithium-allocator.h
@@ -482,7 +482,11 @@
 class InstructionSummary: public ZoneObject {
  public:
   InstructionSummary()
-      : output_operand_(NULL), input_count_(0), operands_(4), is_call_(false) {}
+      : output_operand_(NULL),
+        input_count_(0),
+        operands_(4),
+        is_call_(false),
+        is_save_doubles_(false) {}
 
   // Output operands.
   LOperand* Output() const { return output_operand_; }
@@ -510,11 +514,15 @@
   void MarkAsCall() { is_call_ = true; }
   bool IsCall() const { return is_call_; }
 
+  void MarkAsSaveDoubles() { is_save_doubles_ = true; }
+  bool IsSaveDoubles() const { return is_save_doubles_; }
+
  private:
   LOperand* output_operand_;
   int input_count_;
   ZoneList<LOperand*> operands_;
   bool is_call_;
+  bool is_save_doubles_;
 };
 
 // Representation of the non-empty interval [start,end[.
@@ -824,6 +832,9 @@
   // Marks the current instruction as a call.
   void MarkAsCall();
 
+  // Marks the current instruction as requiring saving double registers.
+  void MarkAsSaveDoubles();
+
   // Checks whether the value of a given virtual register is tagged.
   bool HasTaggedValue(int virtual_register) const;
 
diff --git a/src/lithium.cc b/src/lithium.cc
new file mode 100644
index 0000000..92e81d3
--- /dev/null
+++ b/src/lithium.cc
@@ -0,0 +1,179 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "lithium.h"
+
+namespace v8 {
+namespace internal {
+
+
+class LGapNode: public ZoneObject {
+ public:
+  explicit LGapNode(LOperand* operand)
+      : operand_(operand), resolved_(false), visited_id_(-1) { }
+
+  LOperand* operand() const { return operand_; }
+  bool IsResolved() const { return !IsAssigned() || resolved_; }
+  void MarkResolved() {
+    ASSERT(!IsResolved());
+    resolved_ = true;
+  }
+  int visited_id() const { return visited_id_; }
+  void set_visited_id(int id) {
+    ASSERT(id > visited_id_);
+    visited_id_ = id;
+  }
+
+  bool IsAssigned() const { return assigned_from_.is_set(); }
+  LGapNode* assigned_from() const { return assigned_from_.get(); }
+  void set_assigned_from(LGapNode* n) { assigned_from_.set(n); }
+
+ private:
+  LOperand* operand_;
+  SetOncePointer<LGapNode> assigned_from_;
+  bool resolved_;
+  int visited_id_;
+};
+
+
+LGapResolver::LGapResolver(const ZoneList<LMoveOperands>* moves,
+                           LOperand* marker_operand)
+    : nodes_(4),
+      identified_cycles_(4),
+      result_(4),
+      marker_operand_(marker_operand),
+      next_visited_id_(0) {
+  for (int i = 0; i < moves->length(); ++i) {
+    LMoveOperands move = moves->at(i);
+    if (!move.IsRedundant()) RegisterMove(move);
+  }
+}
+
+
+const ZoneList<LMoveOperands>* LGapResolver::ResolveInReverseOrder() {
+  for (int i = 0; i < identified_cycles_.length(); ++i) {
+    ResolveCycle(identified_cycles_[i]);
+  }
+
+  int unresolved_nodes;
+  do {
+    unresolved_nodes = 0;
+    for (int j = 0; j < nodes_.length(); j++) {
+      LGapNode* node = nodes_[j];
+      if (!node->IsResolved() && node->assigned_from()->IsResolved()) {
+        AddResultMove(node->assigned_from(), node);
+        node->MarkResolved();
+      }
+      if (!node->IsResolved()) ++unresolved_nodes;
+    }
+  } while (unresolved_nodes > 0);
+  return &result_;
+}
+
+
+void LGapResolver::AddResultMove(LGapNode* from, LGapNode* to) {
+  AddResultMove(from->operand(), to->operand());
+}
+
+
+void LGapResolver::AddResultMove(LOperand* from, LOperand* to) {
+  result_.Add(LMoveOperands(from, to));
+}
+
+
+void LGapResolver::ResolveCycle(LGapNode* start) {
+  ZoneList<LOperand*> circle_operands(8);
+  circle_operands.Add(marker_operand_);
+  LGapNode* cur = start;
+  do {
+    cur->MarkResolved();
+    circle_operands.Add(cur->operand());
+    cur = cur->assigned_from();
+  } while (cur != start);
+  circle_operands.Add(marker_operand_);
+
+  for (int i = circle_operands.length() - 1; i > 0; --i) {
+    LOperand* from = circle_operands[i];
+    LOperand* to = circle_operands[i - 1];
+    AddResultMove(from, to);
+  }
+}
+
+
+bool LGapResolver::CanReach(LGapNode* a, LGapNode* b, int visited_id) {
+  ASSERT(a != b);
+  LGapNode* cur = a;
+  while (cur != b && cur->visited_id() != visited_id && cur->IsAssigned()) {
+    cur->set_visited_id(visited_id);
+    cur = cur->assigned_from();
+  }
+
+  return cur == b;
+}
+
+
+bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) {
+  ASSERT(a != b);
+  return CanReach(a, b, next_visited_id_++);
+}
+
+
+void LGapResolver::RegisterMove(LMoveOperands move) {
+  if (move.from()->IsConstantOperand()) {
+    // Constant moves should be last in the machine code. Therefore add them
+    // first to the result set.
+    AddResultMove(move.from(), move.to());
+  } else {
+    LGapNode* from = LookupNode(move.from());
+    LGapNode* to = LookupNode(move.to());
+    if (to->IsAssigned() && to->assigned_from() == from) {
+      move.Eliminate();
+      return;
+    }
+    ASSERT(!to->IsAssigned());
+    if (CanReach(from, to)) {
+      // This introduces a cycle; record it so it can be resolved later.
+      identified_cycles_.Add(from);
+    }
+    to->set_assigned_from(from);
+  }
+}
+
+
+LGapNode* LGapResolver::LookupNode(LOperand* operand) {
+  for (int i = 0; i < nodes_.length(); ++i) {
+    if (nodes_[i]->operand()->Equals(operand)) return nodes_[i];
+  }
+
+  // No node found => create a new one.
+  LGapNode* result = new LGapNode(operand);
+  nodes_.Add(result);
+  return result;
+}
+
+
+} }  // namespace v8::internal
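One note on the moved resolver: ResolveCycle places the marker operand at both ends of a cycle so that one value of the cycle is routed through that scratch location, which is what makes an otherwise unorderable set of moves emittable. A tiny self-contained illustration of the idea:

#include <cassert>

int main() {
  // The move cycle {r1 -> r2, r2 -> r1} cannot be emitted as plain moves;
  // routing one value through a scratch slot (the marker operand) breaks it.
  int r1 = 1, r2 = 2, scratch = 0;
  scratch = r2;  // marker <- one end of the cycle
  r2 = r1;
  r1 = scratch;  // other end of the cycle <- marker
  assert(r1 == 2 && r2 == 1);
  return 0;
}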
diff --git a/src/lithium.h b/src/lithium.h
new file mode 100644
index 0000000..0ea3769
--- /dev/null
+++ b/src/lithium.h
@@ -0,0 +1,63 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_LITHIUM_H_
+#define V8_LITHIUM_H_
+
+#include "lithium-allocator.h"
+
+namespace v8 {
+namespace internal {
+
+class LGapNode;
+
+class LGapResolver BASE_EMBEDDED {
+ public:
+  LGapResolver(const ZoneList<LMoveOperands>* moves, LOperand* marker_operand);
+  const ZoneList<LMoveOperands>* ResolveInReverseOrder();
+
+ private:
+  LGapNode* LookupNode(LOperand* operand);
+  bool CanReach(LGapNode* a, LGapNode* b, int visited_id);
+  bool CanReach(LGapNode* a, LGapNode* b);
+  void RegisterMove(LMoveOperands move);
+  void AddResultMove(LOperand* from, LOperand* to);
+  void AddResultMove(LGapNode* from, LGapNode* to);
+  void ResolveCycle(LGapNode* start);
+
+  ZoneList<LGapNode*> nodes_;
+  ZoneList<LGapNode*> identified_cycles_;
+  ZoneList<LMoveOperands> result_;
+  LOperand* marker_operand_;
+  int next_visited_id_;
+  int bailout_after_ast_id_;
+};
+
+
+} }  // namespace v8::internal
+
+#endif  // V8_LITHIUM_H_
diff --git a/src/macros.py b/src/macros.py
index 01512e4..69f36c0 100644
--- a/src/macros.py
+++ b/src/macros.py
@@ -126,6 +126,7 @@
 macro TO_INT32(arg) = (%_IsSmi(%IS_VAR(arg)) ? arg : (arg >> 0));
 macro TO_UINT32(arg) = (arg >>> 0);
 macro TO_STRING_INLINE(arg) = (IS_STRING(%IS_VAR(arg)) ? arg : NonStringToString(arg));
+macro TO_NUMBER_INLINE(arg) = (IS_NUMBER(%IS_VAR(arg)) ? arg : NonNumberToNumber(arg));
 
 
 # Macros implemented in Python.
diff --git a/src/math.js b/src/math.js
index 90667d7..02b19ab 100644
--- a/src/math.js
+++ b/src/math.js
@@ -44,26 +44,26 @@
 // ECMA 262 - 15.8.2.1
 function MathAbs(x) {
   if (%_IsSmi(x)) return x >= 0 ? x : -x;
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   if (x === 0) return 0;  // To handle -0.
   return x > 0 ? x : -x;
 }
 
 // ECMA 262 - 15.8.2.2
 function MathAcos(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %Math_acos(x);
 }
 
 // ECMA 262 - 15.8.2.3
 function MathAsin(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %Math_asin(x);
 }
 
 // ECMA 262 - 15.8.2.4
 function MathAtan(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %Math_atan(x);
 }
 
@@ -71,32 +71,32 @@
 // The naming of y and x matches the spec, as does the order in which
 // ToNumber (valueOf) is called.
 function MathAtan2(y, x) {
-  if (!IS_NUMBER(y)) y = ToNumber(y);
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(y)) y = NonNumberToNumber(y);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %Math_atan2(y, x);
 }
 
 // ECMA 262 - 15.8.2.6
 function MathCeil(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %Math_ceil(x);
 }
 
 // ECMA 262 - 15.8.2.7
 function MathCos(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %_MathCos(x);
 }
 
 // ECMA 262 - 15.8.2.8
 function MathExp(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %Math_exp(x);
 }
 
 // ECMA 262 - 15.8.2.9
 function MathFloor(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   // It's more common to call this with a positive number that's out
   // of range than negative numbers; check the upper bound first.
   if (x < 0x80000000 && x > 0) {
@@ -112,7 +112,7 @@
 
 // ECMA 262 - 15.8.2.10
 function MathLog(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %_MathLog(x);
 }
 
@@ -123,11 +123,11 @@
     return -1/0;  // Compiler constant-folds this to -Infinity.
   }
   var r = arg1;
-  if (!IS_NUMBER(r)) r = ToNumber(r);
+  if (!IS_NUMBER(r)) r = NonNumberToNumber(r);
   if (NUMBER_IS_NAN(r)) return r;
   for (var i = 1; i < length; i++) {
     var n = %_Arguments(i);
-    if (!IS_NUMBER(n)) n = ToNumber(n);
+    if (!IS_NUMBER(n)) n = NonNumberToNumber(n);
     if (NUMBER_IS_NAN(n)) return n;
     // Make sure +0 is considered greater than -0.  -0 is never a Smi, +0 can be
     // a Smi or heap number.
@@ -143,11 +143,11 @@
     return 1/0;  // Compiler constant-folds this to Infinity.
   }
   var r = arg1;
-  if (!IS_NUMBER(r)) r = ToNumber(r);
+  if (!IS_NUMBER(r)) r = NonNumberToNumber(r);
   if (NUMBER_IS_NAN(r)) return r;
   for (var i = 1; i < length; i++) {
     var n = %_Arguments(i);
-    if (!IS_NUMBER(n)) n = ToNumber(n);
+    if (!IS_NUMBER(n)) n = NonNumberToNumber(n);
     if (NUMBER_IS_NAN(n)) return n;
     // Make sure -0 is considered less than +0.  -0 is never a Smi, +0 can be a
     // Smi or a heap number.
@@ -158,8 +158,8 @@
 
 // ECMA 262 - 15.8.2.13
 function MathPow(x, y) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
-  if (!IS_NUMBER(y)) y = ToNumber(y);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
+  if (!IS_NUMBER(y)) y = NonNumberToNumber(y);
   return %_MathPow(x, y);
 }
 
@@ -170,25 +170,25 @@
 
 // ECMA 262 - 15.8.2.15
 function MathRound(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %RoundNumber(x);
 }
 
 // ECMA 262 - 15.8.2.16
 function MathSin(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %_MathSin(x);
 }
 
 // ECMA 262 - 15.8.2.17
 function MathSqrt(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %_MathSqrt(x);
 }
 
 // ECMA 262 - 15.8.2.18
 function MathTan(x) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+  if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
   return %Math_tan(x);
 }
 
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 0b83182..a3552c7 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -368,8 +368,10 @@
   } else {
     ASSERT(number->IsSmi());
     int value = Smi::cast(number)->value();
-    ASSERT(value == 0 || value == 1 || value == -1 ||
-           value == -2 || value == -3);
+    // Hidden oddballs have negative smis.
+    const int kLeastHiddenOddballNumber = -4;
+    ASSERT(value <= 1);
+    ASSERT(value >= kLeastHiddenOddballNumber);
   }
 }
 
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 7935912..3c9dc82 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -730,6 +730,11 @@
 }
 
 
+bool Object::IsArgumentsMarker() {
+  return this == Heap::arguments_marker();
+}
+
+
 double Object::Number() {
   ASSERT(IsNumber());
   return IsSmi()
diff --git a/src/objects.cc b/src/objects.cc
index 927194f..f3f8003 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -1823,8 +1823,9 @@
 // We only need to deal with CALLBACKS and INTERCEPTORS
 MaybeObject* JSObject::SetPropertyWithFailedAccessCheck(LookupResult* result,
                                                         String* name,
-                                                        Object* value) {
-  if (!result->IsProperty()) {
+                                                        Object* value,
+                                                        bool check_prototype) {
+  if (check_prototype && !result->IsProperty()) {
     LookupCallbackSetterInPrototypes(name, result);
   }
 
@@ -1850,7 +1851,8 @@
           LookupResult r;
           LookupRealNamedProperty(name, &r);
           if (r.IsProperty()) {
-            return SetPropertyWithFailedAccessCheck(&r, name, value);
+            return SetPropertyWithFailedAccessCheck(&r, name, value,
+                                                    check_prototype);
           }
           break;
         }
@@ -1891,7 +1893,7 @@
   // Check access rights if needed.
   if (IsAccessCheckNeeded()
       && !Top::MayNamedAccess(this, name, v8::ACCESS_SET)) {
-    return SetPropertyWithFailedAccessCheck(result, name, value);
+    return SetPropertyWithFailedAccessCheck(result, name, value, true);
   }
 
   if (IsJSGlobalProxy()) {
@@ -1981,7 +1983,7 @@
 // callback setter removed.  The two lines looking up the LookupResult
 // result are also added.  If one of the functions is changed, the other
 // should be.
-MaybeObject* JSObject::IgnoreAttributesAndSetLocalProperty(
+MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
     String* name,
     Object* value,
     PropertyAttributes attributes) {
@@ -1993,14 +1995,14 @@
   // Check access rights if needed.
   if (IsAccessCheckNeeded()
       && !Top::MayNamedAccess(this, name, v8::ACCESS_SET)) {
-    return SetPropertyWithFailedAccessCheck(&result, name, value);
+    return SetPropertyWithFailedAccessCheck(&result, name, value, false);
   }
 
   if (IsJSGlobalProxy()) {
     Object* proto = GetPrototype();
     if (proto->IsNull()) return value;
     ASSERT(proto->IsJSGlobalObject());
-    return JSObject::cast(proto)->IgnoreAttributesAndSetLocalProperty(
+    return JSObject::cast(proto)->SetLocalPropertyIgnoreAttributes(
         name,
         value,
         attributes);
diff --git a/src/objects.h b/src/objects.h
index eac7f92..063555e 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -709,6 +709,7 @@
   INLINE(bool IsNull());
   INLINE(bool IsTrue());
   INLINE(bool IsFalse());
+  inline bool IsArgumentsMarker();
 
   // Extract the number.
   inline double Number();
@@ -1341,7 +1342,8 @@
   MUST_USE_RESULT MaybeObject* SetPropertyWithFailedAccessCheck(
       LookupResult* result,
       String* name,
-      Object* value);
+      Object* value,
+      bool check_prototype);
   MUST_USE_RESULT MaybeObject* SetPropertyWithCallback(Object* structure,
                                                        String* name,
                                                        Object* value,
@@ -1356,7 +1358,7 @@
       String* name,
       Object* value,
       PropertyAttributes attributes);
-  MUST_USE_RESULT MaybeObject* IgnoreAttributesAndSetLocalProperty(
+  MUST_USE_RESULT MaybeObject* SetLocalPropertyIgnoreAttributes(
       String* key,
       Object* value,
       PropertyAttributes attributes);
diff --git a/src/parser.cc b/src/parser.cc
index 5526933..5ea1c5e 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -2323,26 +2323,6 @@
         }
       }
 
-      // Convert constant divisions to multiplications for speed.
-      if (op == Token::DIV &&
-          y && y->AsLiteral() && y->AsLiteral()->handle()->IsNumber()) {
-        double y_val = y->AsLiteral()->handle()->Number();
-        int64_t y_int = static_cast<int64_t>(y_val);
-        // There are rounding issues with this optimization, but they don't
-        // apply if the number to be divided with has a reciprocal that can be
-        // precisely represented as a floating point number.  This is the case
-        // if the number is an integer power of 2.  Negative integer powers of
-        // 2 work too, but for -2, -1, 1 and 2 we don't do the strength
-        // reduction because the inlined optimistic idiv has a reasonable
-        // chance of succeeding by producing a Smi answer with no remainder.
-        if (static_cast<double>(y_int) == y_val &&
-            (IsPowerOf2(y_int) || IsPowerOf2(-y_int)) &&
-            (y_int > 2 || y_int < -2)) {
-          y = NewNumberLiteral(1 / y_val);
-          op = Token::MUL;
-        }
-      }
-
       // For now we distinguish between comparisons and other binary
       // operations.  (We could combine the two and get rid of this
       // code and AST node eventually.)
@@ -3680,11 +3660,9 @@
       if (value.is_null()) return Handle<Object>::null();
       uint32_t index;
       if (key->AsArrayIndex(&index)) {
-        CALL_HEAP_FUNCTION_INLINE(
-            (*json_object)->SetElement(index, *value, true));
+        SetOwnElement(json_object, index, value);
       } else {
-        CALL_HEAP_FUNCTION_INLINE(
-            (*json_object)->SetPropertyPostInterceptor(*key, *value, NONE));
+        SetLocalPropertyIgnoreAttributes(json_object, key, value, NONE);
       }
     } while (scanner_.Next() == Token::COMMA);
     if (scanner_.current_token() != Token::RBRACE) {
@@ -4044,9 +4022,21 @@
         builder->AddCharacter('\v');
         break;
       case 'c': {
-        Advance(2);
-        uc32 control = ParseControlLetterEscape();
-        builder->AddCharacter(control);
+        Advance();
+        uc32 controlLetter = Next();
+        // Special case if it is an ASCII letter.
+        // Convert lower case letters to uppercase.
+        uc32 letter = controlLetter & ~('a' ^ 'A');
+        if (letter < 'A' || 'Z' < letter) {
+          // controlLetter is not in range 'A'-'Z' or 'a'-'z'.
+          // This is outside the specification. We match JSC in
+          // reading the backslash as a literal character instead
+          // of as starting an escape.
+          builder->AddCharacter('\\');
+        } else {
+          Advance(2);
+          builder->AddCharacter(controlLetter & 0x1f);
+        }
         break;
       }
       case 'x': {
@@ -4321,23 +4311,6 @@
 }
 
 
-// Upper and lower case letters differ by one bit.
-STATIC_CHECK(('a' ^ 'A') == 0x20);
-
-uc32 RegExpParser::ParseControlLetterEscape() {
-  if (!has_more())
-    return 'c';
-  uc32 letter = current() & ~(0x20);  // Collapse upper and lower case letters.
-  if (letter < 'A' || 'Z' < letter) {
-    // Non-spec error-correction: "\c" followed by non-control letter is
-    // interpreted as an IdentityEscape of 'c'.
-    return 'c';
-  }
-  Advance();
-  return letter & 0x1f;  // Remainder modulo 32, per specification.
-}
-
-
 uc32 RegExpParser::ParseOctalLiteral() {
   ASSERT('0' <= current() && current() <= '7');
   // For compatibility with some other browsers (not all), we parse
@@ -4403,9 +4376,23 @@
     case 'v':
       Advance();
       return '\v';
-    case 'c':
-      Advance();
-      return ParseControlLetterEscape();
+    case 'c': {
+      uc32 controlLetter = Next();
+      uc32 letter = controlLetter & ~('A' ^ 'a');
+      // For compatibility with JSC, inside a character class
+      // we also accept digits and underscore as control characters.
+      if ((controlLetter >= '0' && controlLetter <= '9') ||
+          controlLetter == '_' ||
+          (letter >= 'A' && letter <= 'Z')) {
+        Advance(2);
+        // Control letters mapped to ASCII control characters in the range
+        // 0x00-0x1f.
+        return controlLetter & 0x1f;
+      }
+      // We match JSC in reading the backslash as a literal
+      // character instead of as starting an escape.
+      return '\\';
+    }
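A standalone sketch (not the parser's API) of the "\cX" mapping used in this hunk and the one above: an ASCII letter is case folded by clearing the 0x20 bit and mapped into the control range with a 0x1f mask, so "\cA" and "\ca" both denote the control character 0x01.

#include <cassert>

int ControlEscape(int control_letter) {
  int letter = control_letter & ~('a' ^ 'A');   // fold to upper case
  if (letter < 'A' || letter > 'Z') return -1;  // not a letter: keep the '\'
  return control_letter & 0x1f;                 // map into 0x00-0x1f
}

int main() {
  assert(ControlEscape('A') == 1 && ControlEscape('a') == 1);
  assert(ControlEscape('j') == 10);  // "\cj" is a line feed
  assert(ControlEscape('1') == -1);  // digits are only accepted in a class
  return 0;
}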
     case '0': case '1': case '2': case '3': case '4': case '5':
     case '6': case '7':
       // For compatibility, we interpret a decimal escape that isn't
diff --git a/src/parser.h b/src/parser.h
index 8623f38..1dfc153 100644
--- a/src/parser.h
+++ b/src/parser.h
@@ -321,7 +321,6 @@
   // and sets the value if it is.
   bool ParseHexEscape(int length, uc32* value);
 
-  uc32 ParseControlLetterEscape();
   uc32 ParseOctalLiteral();
 
   // Tries to parse the input as a back reference.  If successful it
diff --git a/src/runtime.cc b/src/runtime.cc
index 724a436..2aa4431 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -330,13 +330,18 @@
       Handle<Object> result;
       uint32_t element_index = 0;
       if (key->IsSymbol()) {
-        // If key is a symbol it is not an array element.
-        Handle<String> name(String::cast(*key));
-        ASSERT(!name->AsArrayIndex(&element_index));
-        result = SetProperty(boilerplate, name, value, NONE);
+        if (Handle<String>::cast(key)->AsArrayIndex(&element_index)) {
+          // Array index as string (uint32).
+          result = SetOwnElement(boilerplate, element_index, value);
+        } else {
+          Handle<String> name(String::cast(*key));
+          ASSERT(!name->AsArrayIndex(&element_index));
+          result = SetLocalPropertyIgnoreAttributes(boilerplate, name,
+                                                    value, NONE);
+        }
       } else if (key->ToArrayIndex(&element_index)) {
         // Array index (uint32).
-        result = SetElement(boilerplate, element_index, value);
+        result = SetOwnElement(boilerplate, element_index, value);
       } else {
         // Non-uint32 number.
         ASSERT(key->IsNumber());
@@ -345,7 +350,8 @@
         Vector<char> buffer(arr, ARRAY_SIZE(arr));
         const char* str = DoubleToCString(num, buffer);
         Handle<String> name = Factory::NewStringFromAscii(CStrVector(str));
-        result = SetProperty(boilerplate, name, value, NONE);
+        result = SetLocalPropertyIgnoreAttributes(boilerplate, name,
+                                                  value, NONE);
       }
       // If setting the property on the boilerplate throws an
       // exception, the exception is converted to an empty handle in
@@ -984,7 +990,7 @@
       // of callbacks in the prototype chain (this rules out using
       // SetProperty).  Also, we must use the handle-based version to
       // avoid GC issues.
-      IgnoreAttributesAndSetLocalProperty(global, name, value, attributes);
+      SetLocalPropertyIgnoreAttributes(global, name, value, attributes);
     }
   }
 
@@ -1099,7 +1105,7 @@
   // to assign to the property. When adding the property we take
   // special precautions to always add it as a local property even in
   // case of callbacks in the prototype chain (this rules out using
-  // SetProperty).  We have IgnoreAttributesAndSetLocalProperty for
+  // SetProperty).  We have SetLocalPropertyIgnoreAttributes for
   // this.
   // Note that objects can have hidden prototypes, so we need to traverse
   // the whole chain of hidden prototypes to do a 'local' lookup.
@@ -1162,9 +1168,9 @@
 
   global = Top::context()->global();
   if (assign) {
-    return global->IgnoreAttributesAndSetLocalProperty(*name,
-                                                       args[1],
-                                                       attributes);
+    return global->SetLocalPropertyIgnoreAttributes(*name,
+                                                    args[1],
+                                                    attributes);
   }
   return Heap::undefined_value();
 }
@@ -1190,13 +1196,13 @@
   // there, we add the property and take special precautions to always
   // add it as a local property even in case of callbacks in the
   // prototype chain (this rules out using SetProperty).
-  // We use IgnoreAttributesAndSetLocalProperty instead
+  // We use SetLocalPropertyIgnoreAttributes instead
   LookupResult lookup;
   global->LocalLookup(*name, &lookup);
   if (!lookup.IsProperty()) {
-    return global->IgnoreAttributesAndSetLocalProperty(*name,
-                                                       *value,
-                                                       attributes);
+    return global->SetLocalPropertyIgnoreAttributes(*name,
+                                                    *value,
+                                                    attributes);
   }
 
   // Determine if this is a redeclaration of something not
@@ -1467,27 +1473,27 @@
   PropertyAttributes writable =
       static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
   MaybeObject* result;
-  result = regexp->IgnoreAttributesAndSetLocalProperty(Heap::source_symbol(),
-                                                       source,
-                                                       final);
+  result = regexp->SetLocalPropertyIgnoreAttributes(Heap::source_symbol(),
+                                                    source,
+                                                    final);
   ASSERT(!result->IsFailure());
-  result = regexp->IgnoreAttributesAndSetLocalProperty(Heap::global_symbol(),
-                                                       global,
-                                                       final);
+  result = regexp->SetLocalPropertyIgnoreAttributes(Heap::global_symbol(),
+                                                    global,
+                                                    final);
   ASSERT(!result->IsFailure());
   result =
-      regexp->IgnoreAttributesAndSetLocalProperty(Heap::ignore_case_symbol(),
-                                                  ignoreCase,
-                                                  final);
+      regexp->SetLocalPropertyIgnoreAttributes(Heap::ignore_case_symbol(),
+                                               ignoreCase,
+                                               final);
   ASSERT(!result->IsFailure());
-  result = regexp->IgnoreAttributesAndSetLocalProperty(Heap::multiline_symbol(),
-                                                       multiline,
-                                                       final);
+  result = regexp->SetLocalPropertyIgnoreAttributes(Heap::multiline_symbol(),
+                                                    multiline,
+                                                    final);
   ASSERT(!result->IsFailure());
   result =
-      regexp->IgnoreAttributesAndSetLocalProperty(Heap::last_index_symbol(),
-                                                  Smi::FromInt(0),
-                                                  writable);
+      regexp->SetLocalPropertyIgnoreAttributes(Heap::last_index_symbol(),
+                                               Smi::FromInt(0),
+                                               writable);
   ASSERT(!result->IsFailure());
   USE(result);
   return regexp;
@@ -3571,9 +3577,9 @@
     NormalizeProperties(js_object, CLEAR_INOBJECT_PROPERTIES, 0);
     // Use IgnoreAttributes version since a readonly property may be
     // overridden and SetProperty does not allow this.
-    return js_object->IgnoreAttributesAndSetLocalProperty(*name,
-                                                          *obj_value,
-                                                          attr);
+    return js_object->SetLocalPropertyIgnoreAttributes(*name,
+                                                       *obj_value,
+                                                       attr);
   }
 
   return Runtime::SetObjectProperty(js_object, name, obj_value, attr);
@@ -3674,9 +3680,9 @@
     } else {
       Handle<String> key_string = Handle<String>::cast(key);
       key_string->TryFlatten();
-      return js_object->IgnoreAttributesAndSetLocalProperty(*key_string,
-                                                            *value,
-                                                            attr);
+      return js_object->SetLocalPropertyIgnoreAttributes(*key_string,
+                                                         *value,
+                                                         attr);
     }
   }
 
@@ -3689,7 +3695,7 @@
   if (name->AsArrayIndex(&index)) {
     return js_object->SetElement(index, *value);
   } else {
-    return js_object->IgnoreAttributesAndSetLocalProperty(*name, *value, attr);
+    return js_object->SetLocalPropertyIgnoreAttributes(*name, *value, attr);
   }
 }
 
@@ -3771,7 +3777,7 @@
   }
 
   return object->
-      IgnoreAttributesAndSetLocalProperty(name, args[2], attributes);
+      SetLocalPropertyIgnoreAttributes(name, args[2], attributes);
 }
 
 
@@ -6742,7 +6748,7 @@
   Handle<JSFunction> function(JSFunction::cast(frame->function()));
   Handle<Object> arguments;
   for (int i = frame->ComputeExpressionsCount() - 1; i >= 0; --i) {
-    if (frame->GetExpression(i) == Heap::the_hole_value()) {
+    if (frame->GetExpression(i) == Heap::arguments_marker()) {
       if (arguments.is_null()) {
         // FunctionGetArguments can't throw an exception, so cast away the
         // doubt with an assert.
@@ -10406,10 +10412,36 @@
 }
 
 
+// Sets a v8 flag.
+static MaybeObject* Runtime_SetFlags(Arguments args) {
+  CONVERT_CHECKED(String, arg, args[0]);
+  SmartPointer<char> flags =
+      arg->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+  FlagList::SetFlagsFromString(*flags, StrLength(*flags));
+  return Heap::undefined_value();
+}
+
+
+// Performs a GC.
+// Presently, it only does a full GC.
+static MaybeObject* Runtime_CollectGarbage(Arguments args) {
+  Heap::CollectAllGarbage(true);
+  return Heap::undefined_value();
+}
+
+
+// Gets the current heap usage.
+static MaybeObject* Runtime_GetHeapUsage(Arguments args) {
+  int usage = static_cast<int>(Heap::SizeOfObjects());
+  if (!Smi::IsValid(usage)) {
+    return *Factory::NewNumberFromInt(usage);
+  }
+  return Smi::FromInt(usage);
+}
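Runtime_GetHeapUsage only boxes the usage when it does not fit in a small integer. A rough sketch of that fallback, assuming the usual 31-bit Smi payload on 32-bit targets (the range constants here are an assumption, not taken from this patch):

#include <cstdint>
#include <variant>

constexpr int32_t kSmiMaxValue = (1 << 30) - 1;
constexpr int32_t kSmiMinValue = -(1 << 30);

// Either a value that fits in a tagged small integer or one that must be
// boxed as a heap number (represented here as a plain double).
std::variant<int32_t, double> HeapUsageValue(int32_t usage) {
  if (usage >= kSmiMinValue && usage <= kSmiMaxValue) return usage;
  return static_cast<double>(usage);
}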
 #endif  // ENABLE_DEBUGGER_SUPPORT
 
-#ifdef ENABLE_LOGGING_AND_PROFILING
 
+#ifdef ENABLE_LOGGING_AND_PROFILING
 static MaybeObject* Runtime_ProfilerResume(Arguments args) {
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
diff --git a/src/runtime.h b/src/runtime.h
index 5ecae7e..2fa7438 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -363,7 +363,12 @@
   F(LiveEditCheckAndDropActivations, 2, 1) \
   F(LiveEditCompareStringsLinewise, 2, 1) \
   F(GetFunctionCodePositionFromSource, 2, 1) \
-  F(ExecuteInDebugContext, 2, 1)
+  F(ExecuteInDebugContext, 2, 1) \
+  \
+  F(SetFlags, 1, 1) \
+  F(CollectGarbage, 1, 1) \
+  F(GetHeapUsage, 0, 1)
+
 #else
 #define RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F)
 #endif
diff --git a/src/runtime.js b/src/runtime.js
index 28a38ca..2cdbbde 100644
--- a/src/runtime.js
+++ b/src/runtime.js
@@ -165,7 +165,7 @@
   if (IS_STRING(a)) {
     return %_StringAdd(a, %ToString(b));
   } else if (IS_STRING(b)) {
-    return %_StringAdd(%ToString(a), b);
+    return %_StringAdd(%NonStringToString(a), b);
   } else {
     return %NumberAdd(%ToNumber(a), %ToNumber(b));
   }
@@ -205,32 +205,32 @@
 
 // ECMA-262, section 11.6.2, page 50.
 function SUB(y) {
-  var x = IS_NUMBER(this) ? this : %ToNumber(this);
-  if (!IS_NUMBER(y)) y = %ToNumber(y);
+  var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
+  if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
   return %NumberSub(x, y);
 }
 
 
 // ECMA-262, section 11.5.1, page 48.
 function MUL(y) {
-  var x = IS_NUMBER(this) ? this : %ToNumber(this);
-  if (!IS_NUMBER(y)) y = %ToNumber(y);
+  var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
+  if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
   return %NumberMul(x, y);
 }
 
 
 // ECMA-262, section 11.5.2, page 49.
 function DIV(y) {
-  var x = IS_NUMBER(this) ? this : %ToNumber(this);
-  if (!IS_NUMBER(y)) y = %ToNumber(y);
+  var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
+  if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
   return %NumberDiv(x, y);
 }
 
 
 // ECMA-262, section 11.5.3, page 49.
 function MOD(y) {
-  var x = IS_NUMBER(this) ? this : %ToNumber(this);
-  if (!IS_NUMBER(y)) y = %ToNumber(y);
+  var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
+  if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
   return %NumberMod(x, y);
 }
 
@@ -243,8 +243,8 @@
 
 // ECMA-262, section 11.10, page 57.
 function BIT_OR(y) {
-  var x = IS_NUMBER(this) ? this : %ToNumber(this);
-  if (!IS_NUMBER(y)) y = %ToNumber(y);
+  var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
+  if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
   return %NumberOr(x, y);
 }
 
@@ -254,14 +254,14 @@
   var x;
   if (IS_NUMBER(this)) {
     x = this;
-    if (!IS_NUMBER(y)) y = %ToNumber(y);
+    if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
   } else {
-    x = %ToNumber(this);
+    x = %NonNumberToNumber(this);
     // Make sure to convert the right operand to a number before
     // bailing out in the fast case, but after converting the
     // left operand. This ensures that valueOf methods on the right
     // operand are always executed.
-    if (!IS_NUMBER(y)) y = %ToNumber(y);
+    if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
     // Optimize for the case where we end up AND'ing a value
     // that doesn't convert to a number. This is common in
     // certain benchmarks.
@@ -273,30 +273,30 @@
 
 // ECMA-262, section 11.10, page 57.
 function BIT_XOR(y) {
-  var x = IS_NUMBER(this) ? this : %ToNumber(this);
-  if (!IS_NUMBER(y)) y = %ToNumber(y);
+  var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
+  if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
   return %NumberXor(x, y);
 }
 
 
 // ECMA-262, section 11.4.7, page 47.
 function UNARY_MINUS() {
-  var x = IS_NUMBER(this) ? this : %ToNumber(this);
+  var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
   return %NumberUnaryMinus(x);
 }
 
 
 // ECMA-262, section 11.4.8, page 48.
 function BIT_NOT() {
-  var x = IS_NUMBER(this) ? this : %ToNumber(this);
+  var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
   return %NumberNot(x);
 }
 
 
 // ECMA-262, section 11.7.1, page 51.
 function SHL(y) {
-  var x = IS_NUMBER(this) ? this : %ToNumber(this);
-  if (!IS_NUMBER(y)) y = %ToNumber(y);
+  var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
+  if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
   return %NumberShl(x, y);
 }
 
@@ -306,14 +306,14 @@
   var x;
   if (IS_NUMBER(this)) {
     x = this;
-    if (!IS_NUMBER(y)) y = %ToNumber(y);
+    if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
   } else {
-    x = %ToNumber(this);
+    x = %NonNumberToNumber(this);
     // Make sure to convert the right operand to a number before
     // bailing out in the fast case, but after converting the
     // left operand. This ensures that valueOf methods on the right
     // operand are always executed.
-    if (!IS_NUMBER(y)) y = %ToNumber(y);
+    if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
     // Optimize for the case where we end up shifting a value
     // that doesn't convert to a number. This is common in
     // certain benchmarks.
@@ -325,8 +325,8 @@
 
 // ECMA-262, section 11.7.3, page 52.
 function SHR(y) {
-  var x = IS_NUMBER(this) ? this : %ToNumber(this);
-  if (!IS_NUMBER(y)) y = %ToNumber(y);
+  var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
+  if (!IS_NUMBER(y)) y = %NonNumberToNumber(y);
   return %NumberShr(x, y);
 }
 
@@ -511,6 +511,16 @@
   return (IS_NULL(x)) ? 0 : ToNumber(%DefaultNumber(x));
 }
 
+function NonNumberToNumber(x) {
+  if (IS_STRING(x)) {
+    return %_HasCachedArrayIndex(x) ? %_GetCachedArrayIndex(x)
+                                    : %StringToNumber(x);
+  }
+  if (IS_BOOLEAN(x)) return x ? 1 : 0;
+  if (IS_UNDEFINED(x)) return $NaN;
+  return (IS_NULL(x)) ? 0 : ToNumber(%DefaultNumber(x));
+}
+
 
 // ECMA-262, section 9.8, page 35.
 function ToString(x) {
@@ -568,12 +578,9 @@
   if (IS_NUMBER(x)) {
     if (NUMBER_IS_NAN(x) && NUMBER_IS_NAN(y)) return true;
     // x is +0 and y is -0 or vice versa.
-    if (x === 0 && y === 0 && (1 / x) != (1 / y)) {
-      return false;
-    }
-    return x === y;
+    if (x === 0 && y === 0 && (1 / x) != (1 / y)) return false;
   }
-  return x === y
+  return x === y;
 }
 
 
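The new NonNumberToNumber and NonStringToString helpers skip the type check their callers have already performed, but the observable conversion order is unchanged: as the comments kept in BIT_AND and SAR stress, the left operand is converted before the right one even when the right side does not produce a useful number. A plain-JavaScript sketch of the behaviour being preserved, plus the SameValue corner cases touched at the end of the hunk (print as provided by the shell):

  var order = [];
  var a = { valueOf: function() { order.push("a"); return 6; } };
  var b = { valueOf: function() { order.push("b"); return 3; } };
  var x = a & b;                 // 6 & 3 === 2
  print(order.join(","));        // "a,b": the left operand converts first
  // SameValue (internal to runtime.js) keeps treating NaN as the same value
  // as NaN while still distinguishing +0 from -0:
  print(1 / +0 === 1 / -0);      // false, so SameValue(+0, -0) must be false
  print(NaN === NaN);            // false for ===, but true under SameValue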
diff --git a/src/scanner-base.cc b/src/scanner-base.cc
index 1babaeb..997fb31 100644
--- a/src/scanner-base.cc
+++ b/src/scanner-base.cc
@@ -731,11 +731,18 @@
 
   while (c0_ != '/' || in_character_class) {
     if (ScannerConstants::kIsLineTerminator.get(c0_) || c0_ < 0) return false;
-    if (c0_ == '\\') {  // escaped character
+    if (c0_ == '\\') {  // Escape sequence.
       AddLiteralCharAdvance();
       if (ScannerConstants::kIsLineTerminator.get(c0_) || c0_ < 0) return false;
       AddLiteralCharAdvance();
-    } else {  // unescaped character
+      // If the escape allows more characters, i.e., \x??, \u????, or \c?,
+      // only "safe" characters are allowed (letters, digits, underscore),
+      // otherwise the escape isn't valid and the invalid character has
+      // its normal meaning. This means we can simply continue scanning
+      // without worrying about whether the following characters are part
+      // of the escape, since any '/', '\\' or '[' is guaranteed not to be
+      // part of the escape sequence.
+    } else {  // Unescaped character.
       if (c0_ == '[') in_character_class = true;
       if (c0_ == ']') in_character_class = false;
       AddLiteralCharAdvance();
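The regexp-scanning loop patched above only has to find where the literal ends; validating the escape sequences is left to the regexp parser, which is what the new comment spells out: an escaped character can never be the terminating '/', '\\' or '['. A small sketch of the inputs this covers (print as provided by the shell):

  var re1 = /a\/b/;         // escaped slash: scanning continues to the final '/'
  var re2 = /[/]/;          // '/' inside a character class does not end the literal
  print(re1.test("a/b"));   // true
  print(re2.test("x/y"));   // true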
diff --git a/src/serialize.cc b/src/serialize.cc
index 00a601f..19e6518 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -498,6 +498,10 @@
       UNCLASSIFIED,
       39,
       "power_double_int_function");
+  Add(ExternalReference::arguments_marker_location().address(),
+      UNCLASSIFIED,
+      40,
+      "Factory::arguments_marker().location()");
 }
 
 
diff --git a/src/v8-counters.h b/src/v8-counters.h
index fa5d581..aa30e4e 100644
--- a/src/v8-counters.h
+++ b/src/v8-counters.h
@@ -249,15 +249,7 @@
   SC(smi_checks_removed, V8.SmiChecksRemoved)                         \
   SC(map_checks_removed, V8.MapChecksRemoved)                         \
   SC(quote_json_char_count, V8.QuoteJsonCharacterCount)               \
-  SC(quote_json_char_recount, V8.QuoteJsonCharacterReCount)           \
-  SC(instance_of, V8.InstanceOf)                                      \
-  SC(instance_of_cache, V8.InstanceOfCache)                           \
-  SC(instance_of_stub_true, V8.InstanceOfStubTrue)                    \
-  SC(instance_of_stub_false, V8.InstanceOfStubFalse)                  \
-  SC(instance_of_stub_false_null, V8.InstanceOfStubFalseNull)         \
-  SC(instance_of_stub_false_string, V8.InstanceOfStubFalseString)     \
-  SC(instance_of_full, V8.InstanceOfFull)                             \
-  SC(instance_of_slow, V8.InstanceOfSlow)
+  SC(quote_json_char_recount, V8.QuoteJsonCharacterReCount)
 
 
 // This file contains all the v8 counters that are in use.
diff --git a/src/v8natives.js b/src/v8natives.js
index 9fd2162..233f8b4 100644
--- a/src/v8natives.js
+++ b/src/v8natives.js
@@ -83,7 +83,7 @@
 
 // ECMA 262 - 15.1.5
 function GlobalIsFinite(number) {
-  if (!IS_NUMBER(number)) number = ToNumber(number);
+  if (!IS_NUMBER(number)) number = NonNumberToNumber(number);
 
   // NaN - NaN == NaN, Infinity - Infinity == NaN, -Infinity - -Infinity == NaN.
   return %_IsSmi(number) || number - number == 0;
@@ -896,9 +896,14 @@
 function BooleanToString() {
   // NOTE: Both Boolean objects and values can enter here as
   // 'this'. This is not as dictated by ECMA-262.
-  if (!IS_BOOLEAN(this) && !IS_BOOLEAN_WRAPPER(this))
-    throw new $TypeError('Boolean.prototype.toString is not generic');
-  return ToString(%_ValueOf(this));
+  var b = this;
+  if (!IS_BOOLEAN(b)) {
+    if (!IS_BOOLEAN_WRAPPER(b)) {
+      throw new $TypeError('Boolean.prototype.toString is not generic');
+    }
+    b = %_ValueOf(b);
+  }
+  return b ? 'true' : 'false';
 }
 
 
@@ -951,7 +956,7 @@
   }
   // Fast case: Convert number in radix 10.
   if (IS_UNDEFINED(radix) || radix === 10) {
-    return ToString(number);
+    return %_NumberToString(number);
   }
 
   // Convert the radix to an integer and check the range.
@@ -1150,11 +1155,8 @@
   var p = '';
   if (n > 1) {
     p = new $Array(n - 1);
-    // Explicitly convert all parameters to strings.
-    // Array.prototype.join replaces null with empty strings which is
-    // not appropriate.
-    for (var i = 0; i < n - 1; i++) p[i] = ToString(%_Arguments(i));
-    p = p.join(',');
+    for (var i = 0; i < n - 1; i++) p[i] = %_Arguments(i);
+    p = Join(p, n - 1, ',', NonStringToString);
     // If the formal parameters string include ) - an illegal
     // character - it may make the combined function expression
     // compile. We avoid this problem by checking for this early on.
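Two of the v8natives.js changes above can be sanity-checked from plain script: BooleanToString now produces 'true' or 'false' directly (still unwrapping Boolean objects and still throwing for other receivers), NumberToString takes the %_NumberToString fast path for radix 10, and NewFunction still rejects parameter strings containing ')' even though the parameters are now joined with NonStringToString. An illustrative check (print as provided by the shell):

  print(true.toString());                                    // "true"
  print(Boolean.prototype.toString.call(new Boolean(0)));    // "false"
  var threwType = false;
  try { Boolean.prototype.toString.call("nope"); } catch (e) { threwType = true; }
  print(threwType);              // true: non-boolean receiver still rejected

  print((255).toString());       // "255", radix-10 fast path
  print((255).toString(16));     // "ff", explicit radix path

  var threwSyntax = false;
  try { new Function("a) { return 1; } //", "return a;"); } catch (e) { threwSyntax = true; }
  print(threwSyntax);            // true: the ')' check in NewFunction fires early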
diff --git a/src/v8utils.h b/src/v8utils.h
index e9623be..095a8b1 100644
--- a/src/v8utils.h
+++ b/src/v8utils.h
@@ -29,7 +29,7 @@
 #define V8_V8UTILS_H_
 
 #include "utils.h"
-#include "platform.h"  // For va_list on Solaris.
+#include "platform.h" // For va_list on Solaris.
 
 namespace v8 {
 namespace internal {
diff --git a/src/version.cc b/src/version.cc
index 008f779..c1cc2fc 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,8 +34,8 @@
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     0
-#define BUILD_NUMBER      6
-#define PATCH_LEVEL       1
+#define BUILD_NUMBER      7
+#define PATCH_LEVEL       0
 #define CANDIDATE_VERSION false
 
 // Define SONAME to have the SCons build put a specific SONAME into the
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 60ec35d..59522d2 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -988,8 +988,195 @@
 Handle<Code> GetTypeRecordingBinaryOpStub(int key,
     TRBinaryOpIC::TypeInfo type_info,
     TRBinaryOpIC::TypeInfo result_type_info) {
+  TypeRecordingBinaryOpStub stub(key, type_info, result_type_info);
+  return stub.GetCode();
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
+  __ pop(rcx);  // Save return address.
+  __ push(rdx);
+  __ push(rax);
+  // Left and right arguments are now on top.
+  // Push this stub's key. Although the operation and the type info are
+  // encoded into the key, the encoding is opaque, so push them too.
+  __ Push(Smi::FromInt(MinorKey()));
+  __ Push(Smi::FromInt(op_));
+  __ Push(Smi::FromInt(operands_type_));
+
+  __ push(rcx);  // Push return address.
+
+  // Patch the caller to an appropriate specialized stub and return the
+  // operation result to the caller of the stub.
+  __ TailCallExternalReference(
+      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
+      5,
+      1);
+}
+
+
+// Prepare for a type transition runtime call when the args are already on
+// the stack, under the return address.
+void TypeRecordingBinaryOpStub::GenerateTypeTransitionWithSavedArgs(
+    MacroAssembler* masm) {
+  __ pop(rcx);  // Save return address.
+  // Left and right arguments are already on top of the stack.
+  // Push this stub's key. Although the operation and the type info are
+  // encoded into the key, the encoding is opaque, so push them too.
+  __ Push(Smi::FromInt(MinorKey()));
+  __ Push(Smi::FromInt(op_));
+  __ Push(Smi::FromInt(operands_type_));
+
+  __ push(rcx);  // Push return address.
+
+  // Patch the caller to an appropriate specialized stub and return the
+  // operation result to the caller of the stub.
+  __ TailCallExternalReference(
+      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
+      5,
+      1);
+}
+
+
+void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
+  switch (operands_type_) {
+    case TRBinaryOpIC::UNINITIALIZED:
+      GenerateTypeTransition(masm);
+      break;
+    case TRBinaryOpIC::SMI:
+      GenerateSmiStub(masm);
+      break;
+    case TRBinaryOpIC::INT32:
+      GenerateInt32Stub(masm);
+      break;
+    case TRBinaryOpIC::HEAP_NUMBER:
+      GenerateHeapNumberStub(masm);
+      break;
+    case TRBinaryOpIC::STRING:
+      GenerateStringStub(masm);
+      break;
+    case TRBinaryOpIC::GENERIC:
+      GenerateGeneric(masm);
+      break;
+    default:
+      UNREACHABLE();
+  }
+}
+
+
+const char* TypeRecordingBinaryOpStub::GetName() {
+  if (name_ != NULL) return name_;
+  const int kMaxNameLength = 100;
+  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
+  if (name_ == NULL) return "OOM";
+  const char* op_name = Token::Name(op_);
+  const char* overwrite_name;
+  switch (mode_) {
+    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
+    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
+    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
+    default: overwrite_name = "UnknownOverwrite"; break;
+  }
+
+  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
+               "TypeRecordingBinaryOpStub_%s_%s_%s",
+               op_name,
+               overwrite_name,
+               TRBinaryOpIC::GetName(operands_type_));
+  return name_;
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
+    Label* slow,
+    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
   UNIMPLEMENTED();
-  return Handle<Code>::null();
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
+  Label call_runtime;
+
+  switch (op_) {
+    case Token::ADD:
+    case Token::SUB:
+    case Token::MUL:
+    case Token::DIV:
+      break;
+    case Token::MOD:
+    case Token::BIT_OR:
+    case Token::BIT_AND:
+    case Token::BIT_XOR:
+    case Token::SAR:
+    case Token::SHL:
+    case Token::SHR:
+      GenerateRegisterArgsPush(masm);
+      break;
+    default:
+      UNREACHABLE();
+  }
+
+  if (result_type_ == TRBinaryOpIC::UNINITIALIZED ||
+      result_type_ == TRBinaryOpIC::SMI) {
+    GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS);
+  } else {
+    GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
+  }
+  __ bind(&call_runtime);
+  switch (op_) {
+    case Token::ADD:
+    case Token::SUB:
+    case Token::MUL:
+    case Token::DIV:
+      GenerateTypeTransition(masm);
+      break;
+    case Token::MOD:
+    case Token::BIT_OR:
+    case Token::BIT_AND:
+    case Token::BIT_XOR:
+    case Token::SAR:
+    case Token::SHL:
+    case Token::SHR:
+      GenerateTypeTransitionWithSavedArgs(masm);
+      break;
+    default:
+      UNREACHABLE();
+  }
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
+  UNIMPLEMENTED();
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
+  UNIMPLEMENTED();
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
+  UNIMPLEMENTED();
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
+  UNIMPLEMENTED();
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
+    MacroAssembler* masm,
+    Label* alloc_failure) {
+  UNIMPLEMENTED();
+}
+
+
+void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
+  __ pop(rcx);
+  __ push(rdx);
+  __ push(rax);
+  __ push(rcx);
 }
 
 
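The new TypeRecordingBinaryOpStub starts out UNINITIALIZED, patches the call site to a specialization for the operand types it has actually seen (SMI, INT32, HEAP_NUMBER, STRING) and falls back to GENERIC; on x64 most of the specializations are still UNIMPLEMENTED here, with GenerateSmiStub wiring the smi path up to the type-transition runtime call. A sketch of the kind of call site whose recorded type would widen over time (the transitions happen inside the IC machinery, not in user code):

  function add(a, b) { return a + b; }
  for (var i = 0; i < 1000; i++) add(i, i + 1);   // smi operands: SMI specialization
  add(0.5, 1.5);                                  // doubles: widen towards HEAP_NUMBER
  add("x", 1);                                    // mixed types: eventually GENERIC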
diff --git a/src/x64/code-stubs-x64.h b/src/x64/code-stubs-x64.h
index 0fe4f8a..5056f34 100644
--- a/src/x64/code-stubs-x64.h
+++ b/src/x64/code-stubs-x64.h
@@ -131,7 +131,7 @@
 #ifdef DEBUG
   void Print() {
     PrintF("GenericBinaryOpStub %d (op %s), "
-           "(mode %d, flags %d, registers %d, reversed %d, only_numbers %s)\n",
+           "(mode %d, flags %d, registers %d, reversed %d, type_info %s)\n",
            MinorKey(),
            Token::String(op_),
            static_cast<int>(mode_),
@@ -200,6 +200,104 @@
   friend class CodeGenerator;
 };
 
+
+class TypeRecordingBinaryOpStub: public CodeStub {
+ public:
+  TypeRecordingBinaryOpStub(Token::Value op, OverwriteMode mode)
+      : op_(op),
+        mode_(mode),
+        operands_type_(TRBinaryOpIC::UNINITIALIZED),
+        result_type_(TRBinaryOpIC::UNINITIALIZED),
+        name_(NULL) {
+    ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
+  }
+
+  TypeRecordingBinaryOpStub(
+      int key,
+      TRBinaryOpIC::TypeInfo operands_type,
+      TRBinaryOpIC::TypeInfo result_type = TRBinaryOpIC::UNINITIALIZED)
+      : op_(OpBits::decode(key)),
+        mode_(ModeBits::decode(key)),
+        operands_type_(operands_type),
+        result_type_(result_type),
+        name_(NULL) { }
+
+ private:
+  enum SmiCodeGenerateHeapNumberResults {
+    ALLOW_HEAPNUMBER_RESULTS,
+    NO_HEAPNUMBER_RESULTS
+  };
+
+  Token::Value op_;
+  OverwriteMode mode_;
+
+  // Operand type information determined at runtime.
+  TRBinaryOpIC::TypeInfo operands_type_;
+  TRBinaryOpIC::TypeInfo result_type_;
+
+  char* name_;
+
+  const char* GetName();
+
+#ifdef DEBUG
+  void Print() {
+    PrintF("TypeRecordingBinaryOpStub %d (op %s), "
+           "(mode %d, runtime_type_info %s)\n",
+           MinorKey(),
+           Token::String(op_),
+           static_cast<int>(mode_),
+           TRBinaryOpIC::GetName(operands_type_));
+  }
+#endif
+
+  // Minor key encoding in 15 bits RRRTTTOOOOOOOMM.
+  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
+  class OpBits: public BitField<Token::Value, 2, 7> {};
+  class OperandTypeInfoBits: public BitField<TRBinaryOpIC::TypeInfo, 9, 3> {};
+  class ResultTypeInfoBits: public BitField<TRBinaryOpIC::TypeInfo, 12, 3> {};
+
+  Major MajorKey() { return TypeRecordingBinaryOp; }
+  int MinorKey() {
+    return OpBits::encode(op_)
+           | ModeBits::encode(mode_)
+           | OperandTypeInfoBits::encode(operands_type_)
+           | ResultTypeInfoBits::encode(result_type_);
+  }
+
+  void Generate(MacroAssembler* masm);
+  void GenerateGeneric(MacroAssembler* masm);
+  void GenerateSmiCode(MacroAssembler* masm,
+                       Label* slow,
+                       SmiCodeGenerateHeapNumberResults heapnumber_results);
+  void GenerateLoadArguments(MacroAssembler* masm);
+  void GenerateReturn(MacroAssembler* masm);
+  void GenerateUninitializedStub(MacroAssembler* masm);
+  void GenerateSmiStub(MacroAssembler* masm);
+  void GenerateInt32Stub(MacroAssembler* masm);
+  void GenerateHeapNumberStub(MacroAssembler* masm);
+  void GenerateStringStub(MacroAssembler* masm);
+  void GenerateGenericStub(MacroAssembler* masm);
+
+  void GenerateHeapResultAllocation(MacroAssembler* masm, Label* alloc_failure);
+  void GenerateRegisterArgsPush(MacroAssembler* masm);
+  void GenerateTypeTransition(MacroAssembler* masm);
+  void GenerateTypeTransitionWithSavedArgs(MacroAssembler* masm);
+
+  virtual int GetCodeKind() { return Code::TYPE_RECORDING_BINARY_OP_IC; }
+
+  virtual InlineCacheState GetICState() {
+    return TRBinaryOpIC::ToState(operands_type_);
+  }
+
+  virtual void FinishCode(Code* code) {
+    code->set_type_recording_binary_op_type(operands_type_);
+    code->set_type_recording_binary_op_result_type(result_type_);
+  }
+
+  friend class CodeGenerator;
+};
+
+
 class StringHelper : public AllStatic {
  public:
   // Generate code for copying characters using a simple loop. This should only
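MinorKey packs the stub parameters into the 15-bit layout named in the "RRRTTTOOOOOOOMM" comment: mode in bits 0-1, op in bits 2-8, operand type info in bits 9-11 and result type info in bits 12-14. A quick arithmetic sketch of that packing, with made-up field values and JavaScript bitwise operators, purely for illustration:

  function minorKey(mode, op, operandType, resultType) {
    return mode | (op << 2) | (operandType << 9) | (resultType << 12);
  }
  var key = minorKey(1, 42, 3, 2);   // hypothetical field values
  print(key.toString(2));            // the packed 15-bit pattern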
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index aa5d335..a543a50 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -627,10 +627,10 @@
 
   Comment cmnt(masm_, "[ store arguments object");
   if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
-    // When using lazy arguments allocation, we store the hole value
+    // When using lazy arguments allocation, we store the arguments marker value
     // as a sentinel indicating that the arguments object hasn't been
     // allocated yet.
-    frame_->Push(Factory::the_hole_value());
+    frame_->Push(Factory::arguments_marker());
   } else {
     ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
     frame_->PushFunction();
@@ -655,9 +655,9 @@
     if (probe.is_constant()) {
       // We have to skip updating the arguments object if it has
       // been assigned a proper value.
-      skip_arguments = !probe.handle()->IsTheHole();
+      skip_arguments = !probe.handle()->IsArgumentsMarker();
     } else {
-      __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex);
+      __ CompareRoot(probe.reg(), Heap::kArgumentsMarkerRootIndex);
       probe.Unuse();
       done.Branch(not_equal);
     }
@@ -2516,9 +2516,9 @@
     Label slow, done;
     bool try_lazy = true;
     if (probe.is_constant()) {
-      try_lazy = probe.handle()->IsTheHole();
+      try_lazy = probe.handle()->IsArgumentsMarker();
     } else {
-      __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex);
+      __ CompareRoot(probe.reg(), Heap::kArgumentsMarkerRootIndex);
       probe.Unuse();
       __ j(not_equal, &slow);
     }
@@ -4417,7 +4417,7 @@
   // If the loaded value is a constant, we know if the arguments
   // object has been lazily loaded yet.
   if (value.is_constant()) {
-    if (value.handle()->IsTheHole()) {
+    if (value.handle()->IsArgumentsMarker()) {
       Result arguments = StoreArgumentsObject(false);
       frame_->Push(&arguments);
     } else {
@@ -4430,7 +4430,7 @@
   // indicates that we haven't loaded the arguments object yet, we
   // need to do it now.
   JumpTarget exit;
-  __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
+  __ CompareRoot(value.reg(), Heap::kArgumentsMarkerRootIndex);
   frame_->Push(&value);
   exit.Branch(not_equal);
   Result arguments = StoreArgumentsObject(false);
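The codegen changes above swap the sentinel used for lazily allocated arguments objects from the hole to a dedicated arguments marker (Heap::kArgumentsMarkerRootIndex), so the hole keeps its usual meaning elsewhere; the optimization itself is unchanged, with the 'arguments' slot holding the marker until the object is actually needed. A sketch of the shape of function this lazy allocation targets:

  var debugEnabled = false;
  var messages = [];
  function trace(msg) {
    // 'arguments' is only touched on the rarely taken branch, so the object
    // can stay unallocated (just the marker in the slot) on the fast path.
    if (debugEnabled) messages.push(Array.prototype.slice.call(arguments).join(" "));
    return msg;
  }
  trace("hot path");        // no arguments object materialized
  debugEnabled = true;
  trace("slow", "path");    // now it is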
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 66bc4ed..724a7c5 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -210,10 +210,17 @@
   __ j(above_equal, &ok);
   StackCheckStub stub;
   __ CallStub(&stub);
+  // Record a mapping of this PC offset to the OSR id.  This is used to find
+  // the AST id from the unoptimized code in order to use it as a key into
+  // the deoptimization input data found in the optimized code.
+  RecordStackCheck(stmt->OsrEntryId());
+
   __ bind(&ok);
   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
+  // Record a mapping of the OSR id to this PC.  This is used if the OSR
+  // entry becomes the target of a bailout.  We don't expect it to be, but
+  // we want it to work if it is.
   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
-  RecordStackCheck(stmt->OsrEntryId());
 }
 
 
@@ -459,7 +466,10 @@
 
 
 void FullCodeGenerator::TestContext::Plug(bool flag) const {
-  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
+  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
+                                          true,
+                                          true_label_,
+                                          false_label_);
   if (flag) {
     if (true_label_ != fall_through_) __ jmp(true_label_);
   } else {
@@ -555,6 +565,25 @@
                                                      bool should_normalize,
                                                      Label* if_true,
                                                      Label* if_false) {
+  // Only prepare for bailouts before splits if we're in a test
+  // context. Otherwise, we let the Visit function deal with the
+  // preparation to avoid preparing with the same AST id twice.
+  if (!context()->IsTest() || !info_->IsOptimizable()) return;
+
+  NearLabel skip;
+  if (should_normalize) __ jmp(&skip);
+
+  ForwardBailoutStack* current = forward_bailout_stack_;
+  while (current != NULL) {
+    PrepareForBailout(current->expr(), state);
+    current = current->parent();
+  }
+
+  if (should_normalize) {
+    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
+    Split(equal, if_true, if_false, NULL);
+    __ bind(&skip);
+  }
 }
 
 
@@ -669,8 +698,10 @@
   Comment cmnt(masm_, "[ SwitchStatement");
   Breakable nested_statement(this, stmt);
   SetStatementPosition(stmt);
+
   // Keep the switch value on the stack until a case matches.
   VisitForStackValue(stmt->tag());
+  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
 
   ZoneList<CaseClause*>* clauses = stmt->cases();
   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
@@ -735,6 +766,7 @@
   }
 
   __ bind(nested_statement.break_target());
+  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
 }
 
 
@@ -1224,6 +1256,7 @@
           if (property->emit_store()) {
             Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
             EmitCallIC(ic, RelocInfo::CODE_TARGET);
+            PrepareForBailoutForId(key->id(), NO_REGISTERS);
           }
           break;
         }
@@ -1311,6 +1344,8 @@
 
     // Update the write barrier for the array store.
     __ RecordWrite(rbx, offset, result_register(), rcx);
+
+    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }
 
   if (result_saved) {
@@ -1355,17 +1390,34 @@
         VisitForStackValue(property->obj());
       }
       break;
-    case KEYED_PROPERTY:
+    case KEYED_PROPERTY: {
       if (expr->is_compound()) {
-        VisitForStackValue(property->obj());
-        VisitForAccumulatorValue(property->key());
+        if (property->is_arguments_access()) {
+          VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+          MemOperand slot_operand =
+              EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
+          __ push(slot_operand);
+          __ Move(rax, property->key()->AsLiteral()->handle());
+        } else {
+          VisitForStackValue(property->obj());
+          VisitForAccumulatorValue(property->key());
+        }
         __ movq(rdx, Operand(rsp, 0));
         __ push(rax);
       } else {
-        VisitForStackValue(property->obj());
-        VisitForStackValue(property->key());
+        if (property->is_arguments_access()) {
+          VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+          MemOperand slot_operand =
+              EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
+          __ push(slot_operand);
+          __ Push(property->key()->AsLiteral()->handle());
+        } else {
+          VisitForStackValue(property->obj());
+          VisitForStackValue(property->key());
+        }
       }
       break;
+    }
   }
 
   if (expr->is_compound()) {
@@ -1383,6 +1435,12 @@
       }
     }
 
+    // For property compound assignments we need another deoptimization
+    // point after the property load.
+    if (property != NULL) {
+      PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+    }
+
     Token::Value op = expr->binary_op();
     ConstantOperand constant = ShouldInlineSmiCase(op)
         ? GetConstantOperand(op, expr->target(), expr->value())
@@ -1408,6 +1466,8 @@
     } else {
       EmitBinaryOp(op, mode);
     }
+    // Deoptimization point in case the binary operation may have side effects.
+    PrepareForBailout(expr->binary_operation(), TOS_REG);
   } else {
     VisitForAccumulatorValue(expr->value());
   }
@@ -1420,6 +1480,7 @@
     case VARIABLE:
       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                              expr->op());
+      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       context()->Plug(rax);
       break;
     case NAMED_PROPERTY:
@@ -1529,7 +1590,7 @@
 }
 
 
-void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_id) {
+void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
   // Invalid left-hand sides are rewritten to have a 'throw
   // ReferenceError' on the left-hand side.
   if (!expr->IsValidLeftHandSide()) {
@@ -1577,6 +1638,7 @@
       break;
     }
   }
+  PrepareForBailoutForId(bailout_ast_id, TOS_REG);
   context()->Plug(rax);
 }
 
@@ -1688,6 +1750,7 @@
     __ pop(rax);
     __ Drop(1);
   }
+  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   context()->Plug(rax);
 }
 
@@ -1726,6 +1789,7 @@
     __ pop(rax);
   }
 
+  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   context()->Plug(rax);
 }
 
@@ -1766,6 +1830,7 @@
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
   Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
   EmitCallIC(ic, mode);
+  RecordJSReturnSite(expr);
   // Restore context register.
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   context()->Plug(rax);
@@ -1799,6 +1864,7 @@
   Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
   __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize));  // Key.
   EmitCallIC(ic, mode);
+  RecordJSReturnSite(expr);
   // Restore context register.
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   context()->DropAndPlug(1, rax);  // Drop the key still on the stack.
@@ -1819,6 +1885,7 @@
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
   CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
   __ CallStub(&stub);
+  RecordJSReturnSite(expr);
   // Restore context register.
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   // Discard the function left on TOS.
@@ -1827,6 +1894,12 @@
 
 
 void FullCodeGenerator::VisitCall(Call* expr) {
+#ifdef DEBUG
+  // We want to verify that RecordJSReturnSite gets called on all paths
+  // through this function.  Avoid early returns.
+  expr->return_is_recorded_ = false;
+#endif
+
   Comment cmnt(masm_, "[ Call");
   Expression* fun = expr->expression();
   Variable* var = fun->AsVariableProxy()->AsVariable();
@@ -1834,7 +1907,7 @@
   if (var != NULL && var->is_possibly_eval()) {
     // In a call to eval, we first call %ResolvePossiblyDirectEval to
     // resolve the function we need to call and the receiver of the
-    // call.  The we call the resolved function using the given
+    // call.  Then we call the resolved function using the given
     // arguments.
     ZoneList<Expression*>* args = expr->arguments();
     int arg_count = args->length();
@@ -1871,6 +1944,7 @@
     InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
     CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
     __ CallStub(&stub);
+    RecordJSReturnSite(expr);
     // Restore context register.
     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
     context()->DropAndPlug(1, rax);
@@ -1893,32 +1967,31 @@
                                       &done);
 
       __ bind(&slow);
-      // Call the runtime to find the function to call (returned in rax)
-      // and the object holding it (returned in rdx).
-      __ push(context_register());
-      __ Push(var->name());
-      __ CallRuntime(Runtime::kLoadContextSlot, 2);
-      __ push(rax);  // Function.
-      __ push(rdx);  // Receiver.
+    }
+    // Call the runtime to find the function to call (returned in rax)
+    // and the object holding it (returned in rdx).
+    __ push(context_register());
+    __ Push(var->name());
+    __ CallRuntime(Runtime::kLoadContextSlot, 2);
+    __ push(rax);  // Function.
+    __ push(rdx);  // Receiver.
 
-      // If fast case code has been generated, emit code to push the
-      // function and receiver and have the slow path jump around this
-      // code.
-      if (done.is_linked()) {
-        NearLabel call;
-        __ jmp(&call);
-        __ bind(&done);
-        // Push function.
-        __ push(rax);
-        // Push global receiver.
+    // If fast case code has been generated, emit code to push the
+    // function and receiver and have the slow path jump around this
+    // code.
+    if (done.is_linked()) {
+      NearLabel call;
+      __ jmp(&call);
+      __ bind(&done);
+      // Push function.
+      __ push(rax);
+      // Push global receiver.
         __ movq(rbx, GlobalObjectOperand());
         __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
         __ bind(&call);
-      }
     }
 
     EmitCallWithStub(expr);
-
   } else if (fun->AsProperty() != NULL) {
     // Call to an object property.
     Property* prop = fun->AsProperty();
@@ -1932,24 +2005,23 @@
     } else {
       // Call to a keyed property.
       // For a synthetic property use keyed load IC followed by function call,
-      // for a regular property use KeyedCallIC.
+      // for a regular property use keyed EmitCallIC.
       { PreservePositionScope scope(masm()->positions_recorder());
         VisitForStackValue(prop->obj());
       }
       if (prop->is_synthetic()) {
         { PreservePositionScope scope(masm()->positions_recorder());
           VisitForAccumulatorValue(prop->key());
-          __ movq(rdx, Operand(rsp, 0));
         }
         // Record source code position for IC call.
         SetSourcePosition(prop->position());
+        __ pop(rdx);  // We do not need to keep the receiver.
+
         Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
         EmitCallIC(ic, RelocInfo::CODE_TARGET);
-        // Pop receiver.
-        __ pop(rbx);
         // Push result (function).
         __ push(rax);
-        // Push receiver object on stack.
+        // Push global receiver.
         __ movq(rcx, GlobalObjectOperand());
         __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
         EmitCallWithStub(expr);
@@ -1960,7 +2032,7 @@
   } else {
     // Call to some other expression.  If the expression is an anonymous
     // function literal not called in a loop, mark it as one that should
-    // also use the fast code generator.
+    // also use the full code generator.
     FunctionLiteral* lit = fun->AsFunctionLiteral();
     if (lit != NULL &&
         lit->name()->Equals(Heap::empty_string()) &&
@@ -1976,6 +2048,11 @@
     // Emit function call.
     EmitCallWithStub(expr);
   }
+
+#ifdef DEBUG
+  // RecordJSReturnSite should have been called.
+  ASSERT(expr->return_is_recorded_);
+#endif
 }
 
 
@@ -2023,6 +2100,7 @@
   context()->PrepareTest(&materialize_true, &materialize_false,
                          &if_true, &if_false, &fall_through);
 
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   __ JumpIfSmi(rax, if_true);
   __ jmp(if_false);
 
@@ -2042,6 +2120,7 @@
   context()->PrepareTest(&materialize_true, &materialize_false,
                          &if_true, &if_false, &fall_through);
 
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
   Split(non_negative_smi, if_true, if_false, fall_through);
 
@@ -2073,6 +2152,7 @@
   __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE));
   __ j(below, if_false);
   __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE));
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Split(below_equal, if_true, if_false, fall_through);
 
   context()->Plug(if_true, if_false);
@@ -2093,6 +2173,7 @@
 
   __ JumpIfSmi(rax, if_false);
   __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Split(above_equal, if_true, if_false, fall_through);
 
   context()->Plug(if_true, if_false);
@@ -2115,6 +2196,7 @@
   __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
   __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsUndetectable));
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Split(not_zero, if_true, if_false, fall_through);
 
   context()->Plug(if_true, if_false);
@@ -2137,6 +2219,7 @@
   // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
   // used in a few functions in runtime.js which should not normally be hit by
   // this compiler.
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   __ jmp(if_false);
   context()->Plug(if_true, if_false);
 }
@@ -2156,6 +2239,7 @@
 
   __ JumpIfSmi(rax, if_false);
   __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Split(equal, if_true, if_false, fall_through);
 
   context()->Plug(if_true, if_false);
@@ -2176,6 +2260,7 @@
 
   __ JumpIfSmi(rax, if_false);
   __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Split(equal, if_true, if_false, fall_through);
 
   context()->Plug(if_true, if_false);
@@ -2196,6 +2281,7 @@
 
   __ JumpIfSmi(rax, if_false);
   __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Split(equal, if_true, if_false, fall_through);
 
   context()->Plug(if_true, if_false);
@@ -2227,6 +2313,7 @@
   __ bind(&check_frame_marker);
   __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset),
                 Smi::FromInt(StackFrame::CONSTRUCT));
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Split(equal, if_true, if_false, fall_through);
 
   context()->Plug(if_true, if_false);
@@ -2249,6 +2336,7 @@
 
   __ pop(rbx);
   __ cmpq(rax, rbx);
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Split(equal, if_true, if_false, fall_through);
 
   context()->Plug(if_true, if_false);
@@ -2822,6 +2910,7 @@
 
   __ testl(FieldOperand(rax, String::kHashFieldOffset),
            Immediate(String::kContainsCachedArrayIndexMask));
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   __ j(zero, if_true);
   __ jmp(if_false);
 
@@ -2943,6 +3032,7 @@
       // Notice that the labels are swapped.
       context()->PrepareTest(&materialize_true, &materialize_false,
                              &if_false, &if_true, &fall_through);
+      if (context()->IsTest()) ForwardBailoutToChild(expr);
       VisitForControl(expr->expression(), if_true, if_false, fall_through);
       context()->Plug(if_false, if_true);  // Labels swapped.
       break;
@@ -3056,14 +3146,26 @@
       __ push(rax);  // Copy of receiver, needed for later store.
       EmitNamedPropertyLoad(prop);
     } else {
-      VisitForStackValue(prop->obj());
-      VisitForAccumulatorValue(prop->key());
+      if (prop->is_arguments_access()) {
+        VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
+        MemOperand slot_operand =
+            EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
+        __ push(slot_operand);
+        __ Move(rax, prop->key()->AsLiteral()->handle());
+      } else {
+        VisitForStackValue(prop->obj());
+        VisitForAccumulatorValue(prop->key());
+      }
       __ movq(rdx, Operand(rsp, 0));  // Leave receiver on stack
       __ push(rax);  // Copy of key, needed for later store.
       EmitKeyedPropertyLoad(prop);
     }
   }
 
+  // We need a second deoptimization point after loading the value
+  // in case evaluating the property load may have a side effect.
+  PrepareForBailout(expr->increment(), TOS_REG);
+
   // Call ToNumber only if operand is not a smi.
   NearLabel no_conversion;
   Condition is_smi;
@@ -3133,6 +3235,7 @@
         { EffectContext context(this);
           EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                  Token::ASSIGN);
+          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
           context.Plug(rax);
         }
         // For all contexts except kEffect: We have the result on
@@ -3144,6 +3247,7 @@
         // Perform the assignment as if via '='.
         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                Token::ASSIGN);
+        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
         context()->Plug(rax);
       }
       break;
@@ -3152,6 +3256,7 @@
       __ pop(rdx);
       Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
           context()->PlugTOS();
@@ -3166,6 +3271,7 @@
       __ pop(rdx);
       Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
       EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
           context()->PlugTOS();
@@ -3192,6 +3298,7 @@
     // Use a regular load, not a contextual load, to avoid a reference
     // error.
     EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    PrepareForBailout(expr, TOS_REG);
     context()->Plug(rax);
   } else if (proxy != NULL &&
              proxy->var()->AsSlot() != NULL &&
@@ -3207,12 +3314,13 @@
     __ push(rsi);
     __ Push(proxy->name());
     __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
+    PrepareForBailout(expr, TOS_REG);
     __ bind(&done);
 
     context()->Plug(rax);
   } else {
     // This expression cannot throw a reference error at the top level.
-    Visit(expr);
+    context()->HandleExpression(expr);
   }
 }
 
@@ -3237,6 +3345,7 @@
   { AccumulatorValueContext context(this);
     VisitForTypeofValue(left_unary->expression());
   }
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
 
   if (check->Equals(Heap::number_symbol())) {
     Condition is_smi = masm_->CheckSmi(rax);
@@ -3330,6 +3439,7 @@
     case Token::IN:
       VisitForStackValue(expr->right());
       __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
+      PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
       __ CompareRoot(rax, Heap::kTrueValueRootIndex);
       Split(equal, if_true, if_false, fall_through);
       break;
@@ -3338,6 +3448,7 @@
       VisitForStackValue(expr->right());
       InstanceofStub stub(InstanceofStub::kNoFlags);
       __ CallStub(&stub);
+      PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
       __ testq(rax, rax);
        // The stub returns 0 for true.
       Split(zero, if_true, if_false, fall_through);
@@ -3396,6 +3507,8 @@
           : NO_COMPARE_FLAGS;
       CompareStub stub(cc, strict, flags);
       __ CallStub(&stub);
+
+      PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
       __ testq(rax, rax);
       Split(cc, if_true, if_false, fall_through);
     }
@@ -3417,6 +3530,7 @@
                          &if_true, &if_false, &fall_through);
 
   VisitForAccumulatorValue(expr->expression());
+  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   __ CompareRoot(rax, Heap::kNullValueRootIndex);
   if (expr->is_strict()) {
     Split(equal, if_true, if_false, fall_through);
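Most of the full-codegen changes above insert PrepareForBailout* calls so the optimizing compiler has a deoptimization point after every load, store and call that might have side effects, and the new is_arguments_access() branches special-case keyed accesses on the arguments object in compound assignments and count operations. A small example of code that exercises that path (non-strict mode, where arguments[0] aliases the first parameter):

  function addInPlace(a, b) {
    arguments[0] += b;        // compound keyed assignment on 'arguments'
    return a;                 // reflects the write made through arguments[0]
  }
  print(addInPlace(2, 3));    // 5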
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
new file mode 100644
index 0000000..8afa9d4
--- /dev/null
+++ b/src/x64/lithium-x64.cc
@@ -0,0 +1,71 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "x64/lithium-x64.h"
+#include "x64/lithium-codegen-x64.h"
+
+namespace v8 {
+namespace internal {
+
+LChunk* LChunkBuilder::Build() {
+  ASSERT(is_unused());
+  chunk_ = new LChunk(graph());
+  HPhase phase("Building chunk", chunk_);
+  status_ = BUILDING;
+  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
+  for (int i = 0; i < blocks->length(); i++) {
+    HBasicBlock* next = NULL;
+    if (i < blocks->length() - 1) next = blocks->at(i + 1);
+    DoBasicBlock(blocks->at(i), next);
+    if (is_aborted()) return NULL;
+  }
+  status_ = DONE;
+  return chunk_;
+}
+
+
+void LChunkBuilder::Abort(const char* format, ...) {
+  if (FLAG_trace_bailout) {
+    SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
+    PrintF("Aborting LChunk building in @\"%s\": ", *debug_name);
+    va_list arguments;
+    va_start(arguments, format);
+    OS::VPrint(format, arguments);
+    va_end(arguments);
+    PrintF("\n");
+  }
+  status_ = ABORTED;
+}
+
+
+void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
+  ASSERT(is_building());
+  Abort("Lithium not implemented on x64.");
+}
+
+
+} }  // namespace v8::internal
diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h
index f66ec16..fcab235 100644
--- a/src/x64/lithium-x64.h
+++ b/src/x64/lithium-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -30,6 +30,7 @@
 
 #include "hydrogen.h"
 #include "lithium-allocator.h"
+#include "lithium.h"
 #include "safepoint-table.h"
 
 namespace v8 {
@@ -45,6 +46,9 @@
   LInstruction() { }
   virtual ~LInstruction() { }
 
+  virtual void PrintTo(StringStream* stream) const { UNIMPLEMENTED(); }
+  virtual void PrintDataTo(StringStream* stream) const { }
+
   // Predicates should be generated by macro as in lithium-ia32.h.
   virtual bool IsLabel() const {
     UNIMPLEMENTED();
@@ -55,23 +59,43 @@
     return false;
   }
 
-  LPointerMap* pointer_map() const {
-    UNIMPLEMENTED();
-    return NULL;
+  void set_environment(LEnvironment* env) { environment_.set(env); }
+  LEnvironment* environment() const { return environment_.get(); }
+  bool HasEnvironment() const { return environment_.is_set(); }
+
+  void set_pointer_map(LPointerMap* p) { pointer_map_.set(p); }
+  LPointerMap* pointer_map() const { return pointer_map_.get(); }
+  bool HasPointerMap() const { return pointer_map_.is_set(); }
+
+  void set_result(LOperand* operand) { result_.set(operand); }
+  LOperand* result() const { return result_.get(); }
+  bool HasResult() const { return result_.is_set(); }
+
+  void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
+  HValue* hydrogen_value() const { return hydrogen_value_; }
+
+  void set_deoptimization_environment(LEnvironment* env) {
+    deoptimization_environment_.set(env);
+  }
+  LEnvironment* deoptimization_environment() const {
+    return deoptimization_environment_.get();
+  }
+  bool HasDeoptimizationEnvironment() const {
+    return deoptimization_environment_.is_set();
   }
 
-  bool HasPointerMap() const {
-    UNIMPLEMENTED();
-    return false;
-  }
-
-  virtual void PrintTo(StringStream* stream) const { UNIMPLEMENTED(); }
+ private:
+  SetOncePointer<LEnvironment> environment_;
+  SetOncePointer<LPointerMap> pointer_map_;
+  SetOncePointer<LOperand> result_;
+  HValue* hydrogen_value_;
+  SetOncePointer<LEnvironment> deoptimization_environment_;
 };
 
 
 class LParallelMove : public ZoneObject {
  public:
-  LParallelMove() { }
+  LParallelMove() : move_operands_(4) { }
 
   void AddMove(LOperand* from, LOperand* to) {
     UNIMPLEMENTED();
@@ -81,6 +105,9 @@
     UNIMPLEMENTED();
     return NULL;
   }
+
+ private:
+  ZoneList<LMoveOperands> move_operands_;
 };
 
 
@@ -111,12 +138,20 @@
     UNIMPLEMENTED();
     return NULL;
   }
+
+ private:
+  LParallelMove* parallel_moves_[LAST_INNER_POSITION + 1];
+  HBasicBlock* block_;
 };
 
 
 class LLabel: public LGap {
  public:
   explicit LLabel(HBasicBlock* block) : LGap(block) { }
+
+ private:
+  Label label_;
+  LLabel* replacement_;
 };
 
 
@@ -144,12 +179,21 @@
                                  LOperand* spill_operand) {
     UNIMPLEMENTED();
   }
+
+ private:
+  // Arrays of spill slot operands for registers with an assigned spill
+  // slot, i.e., that must also be restored to the spill slot on OSR entry.
+  // NULL if the register has no assigned spill slot.  Indexed by allocation
+  // index.
+  LOperand* register_spills_[Register::kNumAllocatableRegisters];
+  LOperand* double_register_spills_[DoubleRegister::kNumAllocatableRegisters];
 };
 
 
 class LPointerMap: public ZoneObject {
  public:
-  explicit LPointerMap(int position) { }
+  explicit LPointerMap(int position)
+      : pointer_operands_(8), position_(position), lithium_position_(-1) { }
 
   int lithium_position() const {
     UNIMPLEMENTED();
@@ -157,21 +201,80 @@
   }
 
   void RecordPointer(LOperand* op) { UNIMPLEMENTED(); }
+
+ private:
+  ZoneList<LOperand*> pointer_operands_;
+  int position_;
+  int lithium_position_;
 };
 
 
-class LChunk: public ZoneObject {
+class LEnvironment: public ZoneObject {
  public:
-  explicit LChunk(HGraph* graph) { }
-
-  HGraph* graph() const {
-    UNIMPLEMENTED();
-    return NULL;
+  LEnvironment(Handle<JSFunction> closure,
+               int ast_id,
+               int parameter_count,
+               int argument_count,
+               int value_count,
+               LEnvironment* outer)
+      : closure_(closure),
+        arguments_stack_height_(argument_count),
+        deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
+        translation_index_(-1),
+        ast_id_(ast_id),
+        parameter_count_(parameter_count),
+        values_(value_count),
+        representations_(value_count),
+        spilled_registers_(NULL),
+        spilled_double_registers_(NULL),
+        outer_(outer) {
   }
 
-  const ZoneList<LPointerMap*>* pointer_maps() const {
-    UNIMPLEMENTED();
-    return NULL;
+  Handle<JSFunction> closure() const { return closure_; }
+  int arguments_stack_height() const { return arguments_stack_height_; }
+  int deoptimization_index() const { return deoptimization_index_; }
+  int translation_index() const { return translation_index_; }
+  int ast_id() const { return ast_id_; }
+  int parameter_count() const { return parameter_count_; }
+  const ZoneList<LOperand*>* values() const { return &values_; }
+  LEnvironment* outer() const { return outer_; }
+
+ private:
+  Handle<JSFunction> closure_;
+  int arguments_stack_height_;
+  int deoptimization_index_;
+  int translation_index_;
+  int ast_id_;
+  int parameter_count_;
+  ZoneList<LOperand*> values_;
+  ZoneList<Representation> representations_;
+
+  // Arrays, indexed by allocation index, of spill slot operands for
+  // registers that are also in spill slots at an OSR entry.  NULL for
+  // environments that do not correspond to an OSR entry.
+  LOperand** spilled_registers_;
+  LOperand** spilled_double_registers_;
+
+  LEnvironment* outer_;
+};
+
+
+class LChunkBuilder;
+class LChunk: public ZoneObject {
+ public:
+  explicit LChunk(HGraph* graph)
+    : spill_slot_count_(0),
+      graph_(graph),
+      instructions_(32),
+      pointer_maps_(8),
+      inlined_closures_(1) { }
+
+  int spill_slot_count() const { return spill_slot_count_; }
+  HGraph* graph() const { return graph_; }
+  const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
+  const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
+  const ZoneList<Handle<JSFunction> >* inlined_closures() const {
+    return &inlined_closures_;
   }
 
   LOperand* GetNextSpillSlot(bool double_slot) {
@@ -189,11 +292,6 @@
     return NULL;
   }
 
-  const ZoneList<LInstruction*>* instructions() const {
-    UNIMPLEMENTED();
-    return NULL;
-  }
-
   int GetParameterStackSlot(int index) const {
     UNIMPLEMENTED();
     return 0;
@@ -219,20 +317,35 @@
   void MarkEmptyBlocks() { UNIMPLEMENTED(); }
 
 #ifdef DEBUG
-  void Verify() { UNIMPLEMENTED(); }
+  void Verify() { }
 #endif
+
+ private:
+  int spill_slot_count_;
+  HGraph* const graph_;
+  ZoneList<LInstruction*> instructions_;
+  ZoneList<LPointerMap*> pointer_maps_;
+  ZoneList<Handle<JSFunction> > inlined_closures_;
 };
 
 
 class LChunkBuilder BASE_EMBEDDED {
  public:
-  LChunkBuilder(HGraph* graph, LAllocator* allocator) { }
+  LChunkBuilder(HGraph* graph, LAllocator* allocator)
+      : chunk_(NULL),
+        graph_(graph),
+        status_(UNUSED),
+        current_instruction_(NULL),
+        current_block_(NULL),
+        next_block_(NULL),
+        argument_count_(0),
+        allocator_(allocator),
+        position_(RelocInfo::kNoPosition),
+        instructions_pending_deoptimization_environment_(NULL),
+        pending_deoptimization_ast_id_(AstNode::kNoNumber) { }
 
   // Build the sequence for the graph.
-  LChunk* Build() {
-    UNIMPLEMENTED();
-    return NULL;
-  };
+  LChunk* Build();
 
   // Declare methods that deal with the individual node types.
 #define DECLARE_DO(type) LInstruction* Do##type(H##type* node) { \
@@ -242,6 +355,38 @@
   HYDROGEN_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
 #undef DECLARE_DO
 
+ private:
+  enum Status {
+    UNUSED,
+    BUILDING,
+    DONE,
+    ABORTED
+  };
+
+  LChunk* chunk() const { return chunk_; }
+  HGraph* graph() const { return graph_; }
+
+  bool is_unused() const { return status_ == UNUSED; }
+  bool is_building() const { return status_ == BUILDING; }
+  bool is_done() const { return status_ == DONE; }
+  bool is_aborted() const { return status_ == ABORTED; }
+
+  void Abort(const char* format, ...);
+
+  void DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block);
+
+  LChunk* chunk_;
+  HGraph* const graph_;
+  Status status_;
+  HInstruction* current_instruction_;
+  HBasicBlock* current_block_;
+  HBasicBlock* next_block_;
+  int argument_count_;
+  LAllocator* allocator_;
+  int position_;
+  LInstruction* instructions_pending_deoptimization_environment_;
+  int pending_deoptimization_ast_id_;
+
   DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
 };
 
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 70a3dab..2846fe2 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -288,7 +288,7 @@
   }
 #endif
   // Disable stub call restrictions to always allow calls to abort.
-  set_allow_stub_calls(true);
+  AllowStubCallsScope allow_scope(this, true);
 
   push(rax);
   movq(kScratchRegister, p0, RelocInfo::NONE);