Version 3.23.0

Fixed loading message from an Error object.  (Chromium issue 306220)

Made Object.freeze/seal/preventExtensions observable. (issues 2975, 2941)

Made snapshots reproducible. (issue 2885)

Added missing negative dictionary lookup to NonexistentHandlerFrontend. (issue 2980)

Performance and stability improvements on all platforms.

git-svn-id: http://v8.googlecode.com/svn/trunk@17517 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/accessors.cc b/src/accessors.cc
index 5023266..eb8be5f 100644
--- a/src/accessors.cc
+++ b/src/accessors.cc
@@ -148,45 +148,49 @@
 
 
 // The helper function will 'flatten' Number objects.
-Object* Accessors::FlattenNumber(Isolate* isolate, Object* value) {
+Handle<Object> Accessors::FlattenNumber(Isolate* isolate,
+                                        Handle<Object> value) {
   if (value->IsNumber() || !value->IsJSValue()) return value;
-  JSValue* wrapper = JSValue::cast(value);
+  Handle<JSValue> wrapper = Handle<JSValue>::cast(value);
   ASSERT(wrapper->GetIsolate()->context()->native_context()->number_function()->
       has_initial_map());
-  Map* number_map = isolate->context()->native_context()->
-      number_function()->initial_map();
-  if (wrapper->map() == number_map) return wrapper->value();
+  if (wrapper->map() ==
+      isolate->context()->native_context()->number_function()->initial_map()) {
+    return handle(wrapper->value(), isolate);
+  }
+
   return value;
 }
 
 
 MaybeObject* Accessors::ArraySetLength(Isolate* isolate,
-                                       JSObject* object,
-                                       Object* value,
+                                       JSObject* object_raw,
+                                       Object* value_raw,
                                        void*) {
+  HandleScope scope(isolate);
+  Handle<JSObject> object(object_raw, isolate);
+  Handle<Object> value(value_raw, isolate);
+
   // This means one of the object's prototypes is a JSArray and the
   // object does not have a 'length' property.  Calling SetProperty
   // causes an infinite loop.
   if (!object->IsJSArray()) {
-    return object->SetLocalPropertyIgnoreAttributesTrampoline(
-        isolate->heap()->length_string(), value, NONE);
+    Handle<Object> result = JSObject::SetLocalPropertyIgnoreAttributes(object,
+        isolate->factory()->length_string(), value, NONE);
+    RETURN_IF_EMPTY_HANDLE(isolate, result);
+    return *result;
   }
 
   value = FlattenNumber(isolate, value);
 
-  // Need to call methods that may trigger GC.
-  HandleScope scope(isolate);
-
-  // Protect raw pointers.
-  Handle<JSArray> array_handle(JSArray::cast(object), isolate);
-  Handle<Object> value_handle(value, isolate);
+  Handle<JSArray> array_handle = Handle<JSArray>::cast(object);
 
   bool has_exception;
   Handle<Object> uint32_v =
-      Execution::ToUint32(isolate, value_handle, &has_exception);
+      Execution::ToUint32(isolate, value, &has_exception);
   if (has_exception) return Failure::Exception();
   Handle<Object> number_v =
-      Execution::ToNumber(isolate, value_handle, &has_exception);
+      Execution::ToNumber(isolate, value, &has_exception);
   if (has_exception) return Failure::Exception();
 
   if (uint32_v->Number() == number_v->Number()) {
@@ -578,26 +582,28 @@
 
 
 MaybeObject* Accessors::FunctionSetPrototype(Isolate* isolate,
-                                             JSObject* object,
+                                             JSObject* object_raw,
                                              Object* value_raw,
                                              void*) {
-  Heap* heap = isolate->heap();
-  JSFunction* function_raw = FindInstanceOf<JSFunction>(isolate, object);
-  if (function_raw == NULL) return heap->undefined_value();
-  if (!function_raw->should_have_prototype()) {
-    // Since we hit this accessor, object will have no prototype property.
-    return object->SetLocalPropertyIgnoreAttributesTrampoline(
-        heap->prototype_string(), value_raw, NONE);
-  }
+  JSFunction* function_raw = FindInstanceOf<JSFunction>(isolate, object_raw);
+  if (function_raw == NULL) return isolate->heap()->undefined_value();
 
   HandleScope scope(isolate);
   Handle<JSFunction> function(function_raw, isolate);
+  Handle<JSObject> object(object_raw, isolate);
   Handle<Object> value(value_raw, isolate);
+  if (!function->should_have_prototype()) {
+    // Since we hit this accessor, object will have no prototype property.
+    Handle<Object> result = JSObject::SetLocalPropertyIgnoreAttributes(object,
+        isolate->factory()->prototype_string(), value, NONE);
+    RETURN_IF_EMPTY_HANDLE(isolate, result);
+    return *result;
+  }
 
   Handle<Object> old_value;
   bool is_observed =
       FLAG_harmony_observation &&
-      *function == object &&
+      *function == *object &&
       function->map()->is_observed();
   if (is_observed) {
     if (function->has_prototype())
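
The pattern above recurs throughout this patch: raw-pointer accessor arguments are converted to handles immediately, before anything that can allocate or throw. A minimal sketch of the idiom, reusing only the names that appear in the hunks above (the accessor name itself is hypothetical, not code from this patch):

  MaybeObject* HypotheticalSetter(Isolate* isolate,
                                  JSObject* object_raw,
                                  Object* value_raw,
                                  void*) {
    HandleScope scope(isolate);                    // handles die with the scope
    Handle<JSObject> object(object_raw, isolate);  // protect raw pointers first
    Handle<Object> value(value_raw, isolate);
    Handle<Object> result = JSObject::SetLocalPropertyIgnoreAttributes(
        object, isolate->factory()->length_string(), value, NONE);
    RETURN_IF_EMPTY_HANDLE(isolate, result);       // propagate a pending exception
    return *result;                                // unwrap for the raw-pointer caller
  }
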
diff --git a/src/accessors.h b/src/accessors.h
index b2dee27..723abd2 100644
--- a/src/accessors.h
+++ b/src/accessors.h
@@ -149,7 +149,7 @@
                                                     void*);
 
   // Helper functions.
-  static Object* FlattenNumber(Isolate* isolate, Object* value);
+  static Handle<Object> FlattenNumber(Isolate* isolate, Handle<Object> value);
   static MaybeObject* IllegalSetter(Isolate* isolate,
                                     JSObject*,
                                     Object*,
diff --git a/src/allocation-tracker.cc b/src/allocation-tracker.cc
deleted file mode 100644
index 586ce3c..0000000
--- a/src/allocation-tracker.cc
+++ /dev/null
@@ -1,279 +0,0 @@
-// Copyright 2013 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "v8.h"
-
-#include "allocation-tracker.h"
-
-#include "heap-snapshot-generator.h"
-#include "frames-inl.h"
-
-namespace v8 {
-namespace internal {
-
-AllocationTraceNode::AllocationTraceNode(
-    AllocationTraceTree* tree, SnapshotObjectId shared_function_info_id)
-    : tree_(tree),
-      function_id_(shared_function_info_id),
-      total_size_(0),
-      allocation_count_(0),
-      id_(tree->next_node_id()) {
-}
-
-
-AllocationTraceNode::~AllocationTraceNode() {
-}
-
-
-AllocationTraceNode* AllocationTraceNode::FindChild(SnapshotObjectId id) {
-  for (int i = 0; i < children_.length(); i++) {
-    AllocationTraceNode* node = children_[i];
-    if (node->function_id() == id) return node;
-  }
-  return NULL;
-}
-
-
-AllocationTraceNode* AllocationTraceNode::FindOrAddChild(SnapshotObjectId id) {
-  AllocationTraceNode* child = FindChild(id);
-  if (child == NULL) {
-    child = new AllocationTraceNode(tree_, id);
-    children_.Add(child);
-  }
-  return child;
-}
-
-
-void AllocationTraceNode::AddAllocation(unsigned size) {
-  total_size_ += size;
-  ++allocation_count_;
-}
-
-
-void AllocationTraceNode::Print(int indent, AllocationTracker* tracker) {
-  OS::Print("%10u %10u %*c", total_size_, allocation_count_, indent, ' ');
-  if (tracker != NULL) {
-    const char* name = "<unknown function>";
-    if (function_id_ != 0) {
-      AllocationTracker::FunctionInfo* info =
-          tracker->GetFunctionInfo(function_id_);
-      if (info != NULL) {
-        name = info->name;
-      }
-    }
-    OS::Print("%s #%u", name, id_);
-  } else {
-    OS::Print("%u #%u", function_id_, id_);
-  }
-  OS::Print("\n");
-  indent += 2;
-  for (int i = 0; i < children_.length(); i++) {
-    children_[i]->Print(indent, tracker);
-  }
-}
-
-
-AllocationTraceTree::AllocationTraceTree()
-    : next_node_id_(1),
-      root_(this, 0) {
-}
-
-
-AllocationTraceTree::~AllocationTraceTree() {
-}
-
-
-AllocationTraceNode* AllocationTraceTree::AddPathFromEnd(
-    const Vector<SnapshotObjectId>& path) {
-  AllocationTraceNode* node = root();
-  for (SnapshotObjectId* entry = path.start() + path.length() - 1;
-       entry != path.start() - 1;
-       --entry) {
-    node = node->FindOrAddChild(*entry);
-  }
-  return node;
-}
-
-
-void AllocationTraceTree::Print(AllocationTracker* tracker) {
-  OS::Print("[AllocationTraceTree:]\n");
-  OS::Print("Total size | Allocation count | Function id | id\n");
-  root()->Print(0, tracker);
-}
-
-void AllocationTracker::DeleteUnresolvedLocation(
-    UnresolvedLocation** location) {
-  delete *location;
-}
-
-
-AllocationTracker::FunctionInfo::FunctionInfo()
-    : name(""),
-      script_name(""),
-      script_id(0),
-      line(-1),
-      column(-1) {
-}
-
-
-static bool AddressesMatch(void* key1, void* key2) {
-  return key1 == key2;
-}
-
-
-AllocationTracker::AllocationTracker(
-    HeapObjectsMap* ids, StringsStorage* names)
-    : ids_(ids),
-      names_(names),
-      id_to_function_info_(AddressesMatch) {
-}
-
-
-AllocationTracker::~AllocationTracker() {
-  unresolved_locations_.Iterate(DeleteUnresolvedLocation);
-}
-
-
-void AllocationTracker::PrepareForSerialization() {
-  List<UnresolvedLocation*> copy(unresolved_locations_.length());
-  copy.AddAll(unresolved_locations_);
-  unresolved_locations_.Clear();
-  for (int i = 0; i < copy.length(); i++) {
-    copy[i]->Resolve();
-    delete copy[i];
-  }
-}
-
-
-void AllocationTracker::NewObjectEvent(Address addr, int size) {
-  DisallowHeapAllocation no_allocation;
-  Heap* heap = ids_->heap();
-
-  // Mark the new block as FreeSpace to make sure the heap is iterable
-  // while we are capturing stack trace.
-  FreeListNode::FromAddress(addr)->set_size(heap, size);
-  ASSERT_EQ(HeapObject::FromAddress(addr)->Size(), size);
-  ASSERT(FreeListNode::IsFreeListNode(HeapObject::FromAddress(addr)));
-
-  Isolate* isolate = heap->isolate();
-  int length = 0;
-  StackTraceFrameIterator it(isolate);
-  while (!it.done() && length < kMaxAllocationTraceLength) {
-    JavaScriptFrame* frame = it.frame();
-    SharedFunctionInfo* shared = frame->function()->shared();
-    SnapshotObjectId id = ids_->FindEntry(shared->address());
-    allocation_trace_buffer_[length++] = id;
-    AddFunctionInfo(shared, id);
-    it.Advance();
-  }
-  AllocationTraceNode* top_node = trace_tree_.AddPathFromEnd(
-      Vector<SnapshotObjectId>(allocation_trace_buffer_, length));
-  top_node->AddAllocation(size);
-}
-
-
-static uint32_t SnapshotObjectIdHash(SnapshotObjectId id) {
-  return ComputeIntegerHash(static_cast<uint32_t>(id),
-                            v8::internal::kZeroHashSeed);
-}
-
-
-AllocationTracker::FunctionInfo* AllocationTracker::GetFunctionInfo(
-    SnapshotObjectId id) {
-  HashMap::Entry* entry = id_to_function_info_.Lookup(
-      reinterpret_cast<void*>(id), SnapshotObjectIdHash(id), false);
-  if (entry == NULL) {
-    return NULL;
-  }
-  return reinterpret_cast<FunctionInfo*>(entry->value);
-}
-
-
-void AllocationTracker::AddFunctionInfo(SharedFunctionInfo* shared,
-                                        SnapshotObjectId id) {
-  HashMap::Entry* entry = id_to_function_info_.Lookup(
-      reinterpret_cast<void*>(id), SnapshotObjectIdHash(id), true);
-  if (entry->value == NULL) {
-    FunctionInfo* info = new FunctionInfo();
-    info->name = names_->GetFunctionName(shared->DebugName());
-    if (shared->script()->IsScript()) {
-      Script* script = Script::cast(shared->script());
-      if (script->name()->IsName()) {
-        Name* name = Name::cast(script->name());
-        info->script_name = names_->GetName(name);
-      }
-      info->script_id = script->id()->value();
-      // Converting start offset into line and column may cause heap
-      // allocations so we postpone them until snapshot serialization.
-      unresolved_locations_.Add(new UnresolvedLocation(
-          script,
-          shared->start_position(),
-          info));
-    }
-    entry->value = info;
-  }
-}
-
-
-AllocationTracker::UnresolvedLocation::UnresolvedLocation(
-    Script* script, int start, FunctionInfo* info)
-    : start_position_(start),
-      info_(info) {
-  script_ = Handle<Script>::cast(
-      script->GetIsolate()->global_handles()->Create(script));
-  GlobalHandles::MakeWeak(
-      reinterpret_cast<Object**>(script_.location()),
-      this, &HandleWeakScript);
-}
-
-
-AllocationTracker::UnresolvedLocation::~UnresolvedLocation() {
-  if (!script_.is_null()) {
-    script_->GetIsolate()->global_handles()->Destroy(
-        reinterpret_cast<Object**>(script_.location()));
-  }
-}
-
-
-void AllocationTracker::UnresolvedLocation::Resolve() {
-  if (script_.is_null()) return;
-  info_->line = GetScriptLineNumber(script_, start_position_);
-  info_->column = GetScriptColumnNumber(script_, start_position_);
-}
-
-
-void AllocationTracker::UnresolvedLocation::HandleWeakScript(
-    v8::Isolate* isolate,
-    v8::Persistent<v8::Value>* obj,
-    void* data) {
-  UnresolvedLocation* location = reinterpret_cast<UnresolvedLocation*>(data);
-  location->script_ = Handle<Script>::null();
-  obj->Dispose();
-}
-
-
-} }  // namespace v8::internal
diff --git a/src/allocation-tracker.h b/src/allocation-tracker.h
deleted file mode 100644
index 617cf90..0000000
--- a/src/allocation-tracker.h
+++ /dev/null
@@ -1,138 +0,0 @@
-// Copyright 2013 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_ALLOCATION_TRACKER_H_
-#define V8_ALLOCATION_TRACKER_H_
-
-namespace v8 {
-namespace internal {
-
-class HeapObjectsMap;
-
-class AllocationTraceTree;
-
-class AllocationTraceNode {
- public:
-  AllocationTraceNode(AllocationTraceTree* tree,
-                      SnapshotObjectId shared_function_info_id);
-  ~AllocationTraceNode();
-  AllocationTraceNode* FindChild(SnapshotObjectId shared_function_info_id);
-  AllocationTraceNode* FindOrAddChild(SnapshotObjectId shared_function_info_id);
-  void AddAllocation(unsigned size);
-
-  SnapshotObjectId function_id() const { return function_id_; }
-  unsigned allocation_size() const { return total_size_; }
-  unsigned allocation_count() const { return allocation_count_; }
-  unsigned id() const { return id_; }
-  Vector<AllocationTraceNode*> children() const { return children_.ToVector(); }
-
-  void Print(int indent, AllocationTracker* tracker);
-
- private:
-  AllocationTraceTree* tree_;
-  SnapshotObjectId function_id_;
-  unsigned total_size_;
-  unsigned allocation_count_;
-  unsigned id_;
-  List<AllocationTraceNode*> children_;
-
-  DISALLOW_COPY_AND_ASSIGN(AllocationTraceNode);
-};
-
-
-class AllocationTraceTree {
- public:
-  AllocationTraceTree();
-  ~AllocationTraceTree();
-  AllocationTraceNode* AddPathFromEnd(const Vector<SnapshotObjectId>& path);
-  AllocationTraceNode* root() { return &root_; }
-  unsigned next_node_id() { return next_node_id_++; }
-  void Print(AllocationTracker* tracker);
-
- private:
-  unsigned next_node_id_;
-  AllocationTraceNode root_;
-
-  DISALLOW_COPY_AND_ASSIGN(AllocationTraceTree);
-};
-
-
-class AllocationTracker {
- public:
-  struct FunctionInfo {
-    FunctionInfo();
-    const char* name;
-    const char* script_name;
-    int script_id;
-    int line;
-    int column;
-  };
-
-  AllocationTracker(HeapObjectsMap* ids, StringsStorage* names);
-  ~AllocationTracker();
-
-  void PrepareForSerialization();
-  void NewObjectEvent(Address addr, int size);
-
-  AllocationTraceTree* trace_tree() { return &trace_tree_; }
-  HashMap* id_to_function_info() { return &id_to_function_info_; }
-  FunctionInfo* GetFunctionInfo(SnapshotObjectId id);
-
- private:
-  void AddFunctionInfo(SharedFunctionInfo* info, SnapshotObjectId id);
-
-  class UnresolvedLocation {
-   public:
-    UnresolvedLocation(Script* script, int start, FunctionInfo* info);
-    ~UnresolvedLocation();
-    void Resolve();
-
-   private:
-    static void HandleWeakScript(v8::Isolate* isolate,
-                                 v8::Persistent<v8::Value>* obj,
-                                 void* data);
-    Handle<Script> script_;
-    int start_position_;
-    FunctionInfo* info_;
-  };
-  static void DeleteUnresolvedLocation(UnresolvedLocation** location);
-
-  static const int kMaxAllocationTraceLength = 64;
-  HeapObjectsMap* ids_;
-  StringsStorage* names_;
-  AllocationTraceTree trace_tree_;
-  SnapshotObjectId allocation_trace_buffer_[kMaxAllocationTraceLength];
-  HashMap id_to_function_info_;
-  List<UnresolvedLocation*> unresolved_locations_;
-
-  DISALLOW_COPY_AND_ASSIGN(AllocationTracker);
-};
-
-} }  // namespace v8::internal
-
-#endif  // V8_ALLOCATION_TRACKER_H_
-
diff --git a/src/api.cc b/src/api.cc
index 8a73877..b90e693 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -568,14 +568,20 @@
 
 bool SetResourceConstraints(ResourceConstraints* constraints) {
   i::Isolate* isolate = EnterIsolateIfNeeded();
+  return SetResourceConstraints(reinterpret_cast<Isolate*>(isolate),
+                                constraints);
+}
 
+
+bool SetResourceConstraints(Isolate* v8_isolate,
+                            ResourceConstraints* constraints) {
+  i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
   int young_space_size = constraints->max_young_space_size();
   int old_gen_size = constraints->max_old_space_size();
   int max_executable_size = constraints->max_executable_size();
   if (young_space_size != 0 || old_gen_size != 0 || max_executable_size != 0) {
     // After initialization it's too late to change Heap constraints.
-    // TODO(rmcilroy): fix this assert.
-    // ASSERT(!isolate->IsInitialized());
+    ASSERT(!isolate->IsInitialized());
     bool result = isolate->heap()->ConfigureHeap(young_space_size / 2,
                                                  old_gen_size,
                                                  max_executable_size);
@@ -3689,7 +3695,8 @@
   ENTER_V8(isolate);
   i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
-  return i::JSObject::GetIdentityHash(self);
+  return i::Handle<i::Smi>::cast(
+      i::JSReceiver::GetOrCreateIdentityHash(self))->value();
 }
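
The api.cc hunk above splits SetResourceConstraints into an isolate-aware overload, with the old global entry point forwarding to it, and restores the assertion that constraints must be set before the heap is initialized. A hedged embedder-side sketch of the new overload (the constraint values are arbitrary placeholders, not taken from this patch):

  #include <v8.h>

  void ConfigureIsolateHeap(v8::Isolate* isolate) {
    v8::ResourceConstraints constraints;
    constraints.set_max_young_space_size(2 * 1024);
    constraints.set_max_old_space_size(256);
    constraints.set_max_executable_size(256);
    // Must happen before the isolate's heap is set up; afterwards the
    // re-enabled ASSERT(!isolate->IsInitialized()) fires in debug builds.
    v8::SetResourceConstraints(isolate, &constraints);
  }
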
 
 
diff --git a/src/arm/assembler-arm-inl.h b/src/arm/assembler-arm-inl.h
index e3b39f4..dff8162 100644
--- a/src/arm/assembler-arm-inl.h
+++ b/src/arm/assembler-arm-inl.h
@@ -104,7 +104,7 @@
   ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                               || rmode_ == EMBEDDED_OBJECT
                               || rmode_ == EXTERNAL_REFERENCE);
-  return reinterpret_cast<Address>(Assembler::target_pointer_address_at(pc_));
+  return Assembler::target_pointer_address_at(pc_);
 }
 
 
@@ -137,16 +137,6 @@
 }
 
 
-Object** RelocInfo::target_object_address() {
-  // Provide a "natural pointer" to the embedded object,
-  // which can be de-referenced during heap iteration.
-  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
-  reconstructed_obj_ptr_ =
-      reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
-  return &reconstructed_obj_ptr_;
-}
-
-
 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   ASSERT(!target->IsConsString());
@@ -160,10 +150,9 @@
 }
 
 
-Address* RelocInfo::target_reference_address() {
+Address RelocInfo::target_reference() {
   ASSERT(rmode_ == EXTERNAL_REFERENCE);
-  reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
-  return &reconstructed_adr_ptr_;
+  return Assembler::target_address_at(pc_);
 }
 
 
@@ -269,6 +258,15 @@
 }
 
 
+void RelocInfo::WipeOut() {
+  ASSERT(IsEmbeddedObject(rmode_) ||
+         IsCodeTarget(rmode_) ||
+         IsRuntimeEntry(rmode_) ||
+         IsExternalReference(rmode_));
+  Assembler::set_target_pointer_at(pc_, NULL);
+}
+
+
 bool RelocInfo::IsPatchedReturnSequence() {
   Instr current_instr = Assembler::instr_at(pc_);
   Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
@@ -394,29 +392,8 @@
 
 
 Address Assembler::target_pointer_address_at(Address pc) {
-  Address target_pc = pc;
-  Instr instr = Memory::int32_at(target_pc);
-  // If we have a bx instruction, the instruction before the bx is
-  // what we need to patch.
-  static const int32_t kBxInstMask = 0x0ffffff0;
-  static const int32_t kBxInstPattern = 0x012fff10;
-  if ((instr & kBxInstMask) == kBxInstPattern) {
-    target_pc -= kInstrSize;
-    instr = Memory::int32_at(target_pc);
-  }
-
-  // With a blx instruction, the instruction before is what needs to be patched.
-  if ((instr & kBlxRegMask) == kBlxRegPattern) {
-    target_pc -= kInstrSize;
-    instr = Memory::int32_at(target_pc);
-  }
-
-  ASSERT(IsLdrPcImmediateOffset(instr));
-  int offset = instr & 0xfff;  // offset_12 is unsigned
-  if ((instr & (1 << 23)) == 0) offset = -offset;  // U bit defines offset sign
-  // Verify that the constant pool comes after the instruction referencing it.
-  ASSERT(offset >= -4);
-  return target_pc + offset + 8;
+  Instr instr = Memory::int32_at(pc);
+  return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
 }
 
 
@@ -429,6 +406,7 @@
         (next_instr->ImmedMovwMovtValue() << 16) |
         instr->ImmedMovwMovtValue());
   }
+  ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
   return Memory::Address_at(target_pointer_address_at(pc));
 }
 
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index 05b25ae..b8b1f49 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -3184,6 +3184,14 @@
 }
 
 
+void Assembler::emit_code_stub_address(Code* stub) {
+  CheckBuffer();
+  *reinterpret_cast<uint32_t*>(pc_) =
+      reinterpret_cast<uint32_t>(stub->instruction_start());
+  pc_ += sizeof(uint32_t);
+}
+
+
 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data,
                                 UseConstantPoolMode mode) {
   // We do not try to reuse pool constants.
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 8caa64d..0437b3f 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -1393,6 +1393,9 @@
   void db(uint8_t data);
   void dd(uint32_t data);
 
+  // Emits the address of the code stub's first instruction.
+  void emit_code_stub_address(Code* stub);
+
   PositionsRecorder* positions_recorder() { return &positions_recorder_; }
 
   // Read/patch instructions
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 60f5290..ae50d79 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -294,10 +294,8 @@
   FrameScope scope(masm, StackFrame::INTERNAL);
   // Push a copy of the function onto the stack.
   __ push(r1);
-  // Push call kind information.
-  __ push(r5);
-  // Function is also the parameter to the runtime call.
-  __ push(r1);
+  // Push call kind information and function as parameter to the runtime call.
+  __ Push(r5, r1);
 
   __ CallRuntime(function_id, 1);
   // Restore call kind information.
@@ -406,9 +404,9 @@
         __ strb(r4, constructor_count);
         __ b(ne, &allocate);
 
-        __ Push(r1, r2);
+        __ push(r1);
 
-        __ push(r1);  // constructor
+        __ Push(r2, r1);  // r1 = constructor
         // The call will replace the stub, so the countdown is only done once.
         __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
 
@@ -1208,8 +1206,7 @@
 
     // Out of stack space.
     __ ldr(r1, MemOperand(fp, kFunctionOffset));
-    __ push(r1);
-    __ push(r0);
+    __ Push(r1, r0);
     __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
     // End of stack check.
 
@@ -1291,8 +1288,7 @@
     // r0: current argument index
     __ bind(&loop);
     __ ldr(r1, MemOperand(fp, kArgsOffset));
-    __ push(r1);
-    __ push(r0);
+    __ Push(r1, r0);
 
     // Call the runtime to access the property in the arguments array.
     __ CallRuntime(Runtime::kGetProperty, 2);
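
Several hunks in this file (and in the other ARM files below) fold adjacent single-register pushes and pops into the multi-register MacroAssembler helpers. The ordering convention, restated from the replacements above as an illustrative fragment rather than new code:

  __ push(r1);
  __ push(r0);
  // ...is what the combined form emits:
  __ Push(r1, r0);   // first argument pushed first, last argument ends up on top

  __ pop(r1);        // r1 receives the value on top of the stack
  __ pop(r2);
  // ...is equivalent to:
  __ Pop(r2, r1);    // the last argument is popped first
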
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 9330eb1..95a04f7 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -77,7 +77,7 @@
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
-      Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
+      Runtime::FunctionForId(Runtime::kCreateArrayLiteral)->entry;
 }
 
 
@@ -3344,16 +3344,12 @@
 
     // Arguments register must be smi-tagged to call out.
     __ SmiTag(r0);
-    __ push(r0);
-    __ push(r1);
-    __ push(r2);
+    __ Push(r2, r1, r0);
 
     CreateAllocationSiteStub create_stub;
     __ CallStub(&create_stub);
 
-    __ pop(r2);
-    __ pop(r1);
-    __ pop(r0);
+    __ Pop(r2, r1, r0);
     __ SmiUntag(r0);
   }
   __ b(&done);
@@ -5021,8 +5017,7 @@
 
     FrameScope scope(masm, StackFrame::INTERNAL);
     __ Push(r1, r0);
-    __ push(lr);
-    __ Push(r1, r0);
+    __ Push(lr, r1, r0);
     __ mov(ip, Operand(Smi::FromInt(op_)));
     __ push(ip);
     __ CallExternalReference(miss, 3);
@@ -5030,8 +5025,7 @@
     __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
     // Restore registers.
     __ pop(lr);
-    __ pop(r0);
-    __ pop(r1);
+    __ Pop(r1, r0);
   }
 
   __ Jump(r2);
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 44c331b..85cb610 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -896,7 +896,7 @@
     CodePatcher patcher(sequence, young_length / Assembler::kInstrSize);
     patcher.masm()->add(r0, pc, Operand(-8));
     patcher.masm()->ldr(pc, MemOperand(pc, -4));
-    patcher.masm()->dd(reinterpret_cast<uint32_t>(stub->instruction_start()));
+    patcher.masm()->emit_code_stub_address(stub);
   }
 }
 
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index c57c785..1ee612b 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -168,9 +168,20 @@
     // Generators allocate locals, if any, in context slots.
     ASSERT(!info->function()->is_generator() || locals_count == 0);
     if (locals_count > 0) {
-      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-      for (int i = 0; i < locals_count; i++) {
-        __ push(ip);
+      // Emit a loop to initialize stack cells for locals when optimizing for
+      // size. Otherwise, unroll the loop for maximum performance.
+      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
+      if (FLAG_optimize_for_size && locals_count > 4) {
+        Label loop;
+        __ mov(r2, Operand(locals_count));
+        __ bind(&loop);
+        __ sub(r2, r2, Operand(1), SetCC);
+        __ push(r9);
+        __ b(&loop, ne);
+      } else {
+        for (int i = 0; i < locals_count; i++) {
+          __ push(r9);
+        }
       }
     }
   }
@@ -613,12 +624,11 @@
   Label done;
   __ bind(materialize_true);
   __ LoadRoot(ip, Heap::kTrueValueRootIndex);
-  __ push(ip);
   __ jmp(&done);
   __ bind(materialize_false);
   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
-  __ push(ip);
   __ bind(&done);
+  __ push(ip);
 }
 
 
@@ -1597,9 +1607,8 @@
   __ jmp(&allocated);
 
   __ bind(&runtime_allocate);
-  __ push(r5);
   __ mov(r0, Operand(Smi::FromInt(size)));
-  __ push(r0);
+  __ Push(r5, r0);
   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
   __ pop(r5);
 
@@ -1781,13 +1790,11 @@
     __ CallStub(&stub);
     __ IncrementCounter(
         isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
-  } else if (expr->depth() > 1) {
+  } else if (expr->depth() > 1 ||
+             Serializer::enabled() ||
+             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ Push(r3, r2, r1);
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
-  } else if (Serializer::enabled() ||
-      length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    __ Push(r3, r2, r1);
-    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
     ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
            FLAG_smi_only_arrays);
@@ -2036,8 +2043,7 @@
       handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
       __ LoadRoot(r2, Heap::kthrow_stringRootIndex);     // "throw"
       __ ldr(r3, MemOperand(sp, 1 * kPointerSize));      // iter
-      __ push(r3);                                       // iter
-      __ push(r0);                                       // exception
+      __ Push(r3, r0);                                   // iter, exception
       __ jmp(&l_call);
 
       // try { received = %yield result }
@@ -2073,8 +2079,7 @@
       __ bind(&l_next);
       __ LoadRoot(r2, Heap::knext_stringRootIndex);      // "next"
       __ ldr(r3, MemOperand(sp, 1 * kPointerSize));      // iter
-      __ push(r3);                                       // iter
-      __ push(r0);                                       // received
+      __ Push(r3, r0);                                   // iter, received
 
       // result = receiver[f](arg);
       __ bind(&l_call);
@@ -2150,11 +2155,13 @@
   __ bl(&resume_frame);
   __ jmp(&done);
   __ bind(&resume_frame);
-  __ push(lr);  // Return address.
-  __ push(fp);  // Caller's frame pointer.
-  __ mov(fp, sp);
-  __ push(cp);  // Callee's context.
-  __ push(r4);  // Callee's JS Function.
+  // lr = return address.
+  // fp = caller's frame pointer.
+  // cp = callee's context,
+  // r4 = callee's JS function.
+  __ Push(lr, fp, cp, r4);
+  // Adjust FP to point to saved FP.
+  __ add(fp, sp, Operand(2 * kPointerSize));
 
   // Load the operand stack size.
   __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
@@ -2186,8 +2193,8 @@
   __ push(r2);
   __ b(&push_operand_holes);
   __ bind(&call_resume);
-  __ push(r1);
-  __ push(result_register());
+  ASSERT(!result_register().is(r1));
+  __ Push(r1, result_register());
   __ Push(Smi::FromInt(resume_mode));
   __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
   // Not reached: the runtime call returns elsewhere.
@@ -2409,8 +2416,7 @@
       VisitForStackValue(prop->obj());
       VisitForAccumulatorValue(prop->key());
       __ mov(r1, r0);
-      __ pop(r2);
-      __ pop(r0);  // Restore value.
+      __ Pop(r0, r2);  // r0 = restored value.
       Handle<Code> ic = is_classic_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize()
           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
@@ -2544,8 +2550,7 @@
 
   // Record source code position before IC call.
   SetSourcePosition(expr->position());
-  __ pop(r1);  // Key.
-  __ pop(r2);
+  __ Pop(r2, r1);  // r1 = key.
 
   Handle<Code> ic = is_classic_mode()
       ? isolate()->builtins()->KeyedStoreIC_Initialize()
@@ -2674,27 +2679,25 @@
 
 
 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
-  // Push copy of the first argument or undefined if it doesn't exist.
+  // r4: copy of the first argument or undefined if it doesn't exist.
   if (arg_count > 0) {
-    __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
+    __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
   } else {
-    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
   }
-  __ push(r1);
 
-  // Push the receiver of the enclosing function.
+  // r3: the receiver of the enclosing function.
   int receiver_offset = 2 + info_->scope()->num_parameters();
-  __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
-  __ push(r1);
-  // Push the language mode.
-  __ mov(r1, Operand(Smi::FromInt(language_mode())));
-  __ push(r1);
+  __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
 
-  // Push the start position of the scope the calls resides in.
+  // r2: the language mode.
+  __ mov(r2, Operand(Smi::FromInt(language_mode())));
+
+  // r1: the start position of the scope the calls resides in.
   __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
-  __ push(r1);
 
   // Do the runtime call.
+  __ Push(r4, r3, r2, r1);
   __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
 }
 
@@ -2768,9 +2771,9 @@
     __ bind(&slow);
     // Call the runtime to find the function to call (returned in r0)
     // and the object holding it (returned in edx).
-    __ push(context_register());
+    ASSERT(!context_register().is(r2));
     __ mov(r2, Operand(proxy->name()));
-    __ push(r2);
+    __ Push(context_register(), r2);
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
     __ Push(r0, r1);  // Function, receiver.
 
@@ -3487,8 +3490,7 @@
 
   VisitForStackValue(args->at(1));  // index
   VisitForStackValue(args->at(2));  // value
-  __ pop(value);
-  __ pop(index);
+  __ Pop(index, value);
   VisitForAccumulatorValue(args->at(0));  // string
 
   if (FLAG_debug_code) {
@@ -3515,8 +3517,7 @@
 
   VisitForStackValue(args->at(1));  // index
   VisitForStackValue(args->at(2));  // value
-  __ pop(value);
-  __ pop(index);
+  __ Pop(index, value);
   VisitForAccumulatorValue(args->at(0));  // string
 
   if (FLAG_debug_code) {
@@ -4260,9 +4261,9 @@
         } else {
           // Non-global variable.  Call the runtime to try to delete from the
           // context where the variable was introduced.
-          __ push(context_register());
+          ASSERT(!context_register().is(r2));
           __ mov(r2, Operand(var->name()));
-          __ push(r2);
+          __ Push(context_register(), r2);
           __ CallRuntime(Runtime::kDeleteContextSlot, 2);
           context()->Plug(r0);
         }
@@ -4492,8 +4493,7 @@
       break;
     }
     case KEYED_PROPERTY: {
-      __ pop(r1);  // Key.
-      __ pop(r2);  // Receiver.
+      __ Pop(r2, r1);  // r1 = key. r2 = receiver.
       Handle<Code> ic = is_classic_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize()
           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index aded4c1..4019461 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -577,8 +577,8 @@
   __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
-    __ push(r2);  // save the key
-    __ Push(r1, r2);  // pass the receiver and the key
+    __ Push(r2, r1);  // save the key and the receiver
+    __ push(r2);  // pass the receiver and the key
     __ CallRuntime(Runtime::kKeyedGetProperty, 2);
     __ pop(r2);  // restore the key
   }
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index d8771cb..9c21b81 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -932,7 +932,7 @@
       Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
       add(r0, pc, Operand(-8));
       ldr(pc, MemOperand(pc, -4));
-      dd(reinterpret_cast<uint32_t>(stub->instruction_start()));
+      emit_code_stub_address(stub);
     } else {
       stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
       nop(ip.code());
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 004e067..923011f 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -121,18 +121,14 @@
 }
 
 
-// Helper function used to check that the dictionary doesn't contain
-// the property. This function may return false negatives, so miss_label
-// must always call a backup property check that is complete.
-// This function is safe to call if the receiver has fast properties.
-// Name must be unique and receiver must be a heap object.
-static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
-                                             Label* miss_label,
-                                             Register receiver,
-                                             Handle<Name> name,
-                                             Register scratch0,
-                                             Register scratch1) {
+void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
+                                                    Label* miss_label,
+                                                    Register receiver,
+                                                    Handle<Name> name,
+                                                    Register scratch0,
+                                                    Register scratch1) {
   ASSERT(name->IsUniqueName());
+  ASSERT(!receiver.is(scratch0));
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
   __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
@@ -418,12 +414,12 @@
 // Generate code to check that a global property cell is empty. Create
 // the property cell at compilation time if no cell exists for the
 // property.
-static void GenerateCheckPropertyCell(MacroAssembler* masm,
-                                      Handle<GlobalObject> global,
-                                      Handle<Name> name,
-                                      Register scratch,
-                                      Label* miss) {
-  Handle<Cell> cell = GlobalObject::EnsurePropertyCell(global, name);
+void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
+                                             Handle<JSGlobalObject> global,
+                                             Handle<Name> name,
+                                             Register scratch,
+                                             Label* miss) {
+  Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
   ASSERT(cell->value()->IsTheHole());
   __ mov(scratch, Operand(cell));
   __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
@@ -441,7 +437,7 @@
     Label* miss) {
   if (holder->IsJSGlobalObject()) {
     GenerateCheckPropertyCell(
-        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
+        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
   } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
     GenerateDictionaryNegativeLookup(
         masm, miss, holder_reg, name, scratch1(), scratch2());
@@ -1156,19 +1152,17 @@
 };
 
 
-// Calls GenerateCheckPropertyCell for each global object in the prototype chain
-// from object to (but not including) holder.
-static void GenerateCheckPropertyCells(MacroAssembler* masm,
-                                       Handle<JSObject> object,
-                                       Handle<JSObject> holder,
-                                       Handle<Name> name,
-                                       Register scratch,
-                                       Label* miss) {
+void StubCompiler::GenerateCheckPropertyCells(MacroAssembler* masm,
+                                              Handle<JSObject> object,
+                                              Handle<JSObject> holder,
+                                              Handle<Name> name,
+                                              Register scratch,
+                                              Label* miss) {
   Handle<JSObject> current = object;
   while (!current.is_identical_to(holder)) {
-    if (current->IsGlobalObject()) {
+    if (current->IsJSGlobalObject()) {
       GenerateCheckPropertyCell(masm,
-                                Handle<GlobalObject>::cast(current),
+                                Handle<JSGlobalObject>::cast(current),
                                 name,
                                 scratch,
                                 miss);
@@ -1373,26 +1367,6 @@
 }
 
 
-void LoadStubCompiler::NonexistentHandlerFrontend(
-    Handle<JSObject> object,
-    Handle<JSObject> last,
-    Handle<Name> name,
-    Label* success,
-    Handle<GlobalObject> global) {
-  Label miss;
-
-  HandlerFrontendHeader(object, receiver(), last, name, &miss);
-
-  // If the last object in the prototype chain is a global object,
-  // check that the global property cell is empty.
-  if (!global.is_null()) {
-    GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
-  }
-
-  HandlerFrontendFooter(name, success, &miss);
-}
-
-
 void LoadStubCompiler::GenerateLoadField(Register reg,
                                          Handle<JSObject> holder,
                                          PropertyIndex field,
@@ -2939,7 +2913,7 @@
     Handle<JSObject> object,
     Handle<JSObject> last,
     Handle<Name> name,
-    Handle<GlobalObject> global) {
+    Handle<JSGlobalObject> global) {
   Label success;
 
   NonexistentHandlerFrontend(object, last, name, &success, global);
diff --git a/src/assembler.cc b/src/assembler.cc
index 9ed4360..0e05c2c 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -819,8 +819,8 @@
   } else if (rmode_ == EXTERNAL_REFERENCE) {
     ExternalReferenceEncoder ref_encoder(isolate);
     PrintF(out, " (%s)  (%p)",
-           ref_encoder.NameOfAddress(*target_reference_address()),
-           *target_reference_address());
+           ref_encoder.NameOfAddress(target_reference()),
+           target_reference());
   } else if (IsCodeTarget(rmode_)) {
     Code* code = Code::GetCodeFromTargetAddress(target_address());
     PrintF(out, " (%s)  (%p)", Code::Kind2String(code->kind()),
diff --git a/src/assembler.h b/src/assembler.h
index f0b7fed..ee51f23 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -390,7 +390,6 @@
                                  WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
   INLINE(Object* target_object());
   INLINE(Handle<Object> target_object_handle(Assembler* origin));
-  INLINE(Object** target_object_address());
   INLINE(void set_target_object(Object* target,
                                 WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
   INLINE(Address target_runtime_entry(Assembler* origin));
@@ -425,7 +424,7 @@
 
   // Read/modify the reference in the instruction this relocation
   // applies to; can only be called if rmode_ is external_reference
-  INLINE(Address* target_reference_address());
+  INLINE(Address target_reference());
 
   // Read/modify the address of a call instruction. This is used to relocate
   // the break points where straight-line code is patched with a call
@@ -436,6 +435,10 @@
   INLINE(void set_call_object(Object* target));
   INLINE(Object** call_object_address());
 
+  // Wipe out a relocation to a fixed value, used for making snapshots
+  // reproducible.
+  INLINE(void WipeOut());
+
   template<typename StaticVisitor> inline void Visit(Heap* heap);
   inline void Visit(Isolate* isolate, ObjectVisitor* v);
 
@@ -486,12 +489,6 @@
     double data64_;
   };
   Code* host_;
-  // Code and Embedded Object pointers on some platforms are stored split
-  // across two consecutive 32-bit instructions. Heap management
-  // routines expect to access these pointers indirectly. The following
-  // location provides a place for these pointers to exist naturally
-  // when accessed via the Iterator.
-  Object* reconstructed_obj_ptr_;
   // External-reference pointers are also split across instruction-pairs
   // on some platforms, but are accessed via indirect pointers. This location
   // provides a place for that pointer to exist naturally. Its address
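
RelocInfo::WipeOut, declared above and defined per architecture (see the ARM definition earlier in this patch), overwrites a relocation's target with a fixed value so that otherwise-identical code objects serialize identically. A hypothetical sketch of a caller, using the existing RelocIterator API; the mask and the loop are illustrative, not code from this patch:

  static void WipeOutRelocations(Code* code) {
    int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
               RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
               RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
               RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE);
    for (RelocIterator it(code, mask); !it.done(); it.next()) {
      // Replace the absolute target (NULL on ARM, see WipeOut above) so two
      // snapshots built from the same sources compare byte-for-byte equal.
      it.rinfo()->WipeOut();
    }
  }
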
diff --git a/src/builtins.h b/src/builtins.h
index 9b589d8..4f85c38 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -50,7 +50,9 @@
 #define CODE_AGE_LIST(V) \
   CODE_AGE_LIST_WITH_ARG(CODE_AGE_LIST_IGNORE_ARG, V)
 
-#define CODE_AGE_LIST_WITH_NO_AGE(V)               \
+#define CODE_AGE_LIST_COMPLETE(V)                  \
+  V(NotExecuted)                                   \
+  V(ExecutedOnce)                                  \
   V(NoAge)                                         \
   CODE_AGE_LIST_WITH_ARG(CODE_AGE_LIST_IGNORE_ARG, V)
 
@@ -364,6 +366,11 @@
   }
 
   static const char* GetName(JavaScript id) { return javascript_names_[id]; }
+  const char* name(int index) {
+    ASSERT(index >= 0);
+    ASSERT(index < builtin_count);
+    return names_[index];
+  }
   static int GetArgumentsCount(JavaScript id) { return javascript_argc_[id]; }
   Handle<Code> GetCode(JavaScript id, bool* resolved);
   static int NumberOfJavaScriptBuiltins() { return id_count; }
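
The new Builtins::name(int index) accessor returns a builtin's name by table index, bounds-checked by the ASSERTs above. A hedged sketch of the kind of diagnostic loop it enables (assumes internal code with an Isolate* in hand; builtin_count is the existing terminator of the builtin enum):

  void PrintBuiltinNames(Isolate* isolate) {
    Builtins* builtins = isolate->builtins();
    for (int i = 0; i < Builtins::builtin_count; i++) {
      PrintF("builtin %3d: %s\n", i, builtins->name(i));
    }
  }
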
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index 84e80b9..96871d6 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -1620,10 +1620,10 @@
         Handle<Map>::cast(MaterializeNextValue()), Representation::Tagged());
     switch (map->instance_type()) {
       case HEAP_NUMBER_TYPE: {
-        Handle<HeapNumber> object = isolate_->factory()->NewHeapNumber(0.0);
+        // Reuse the HeapNumber value directly as it is already properly
+        // tagged and skip materializing the HeapNumber explicitly.
+        Handle<Object> object = MaterializeNextValue();
         materialized_objects_->Add(object);
-        Handle<Object> number = MaterializeNextValue();
-        object->set_value(number->Number());
         materialization_value_index_ += kDoubleSize / kPointerSize - 1;
         break;
       }
diff --git a/src/disassembler.cc b/src/disassembler.cc
index d7898dd..69737ed 100644
--- a/src/disassembler.cc
+++ b/src/disassembler.cc
@@ -227,7 +227,7 @@
         out.AddFormatted("    ;; object: %s", *obj_name);
       } else if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
         const char* reference_name =
-            ref_encoder.NameOfAddress(*relocinfo.target_reference_address());
+            ref_encoder.NameOfAddress(relocinfo.target_reference());
         out.AddFormatted("    ;; external reference (%s)", reference_name);
       } else if (RelocInfo::IsCodeTarget(rmode)) {
         out.AddFormatted("    ;; code:");
diff --git a/src/factory.cc b/src/factory.cc
index 1dd246f..cccc3e7 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -132,11 +132,14 @@
 }
 
 
-Handle<ObjectHashTable> Factory::NewObjectHashTable(int at_least_space_for) {
+Handle<ObjectHashTable> Factory::NewObjectHashTable(
+    int at_least_space_for,
+    MinimumCapacity capacity_option) {
   ASSERT(0 <= at_least_space_for);
   CALL_HEAP_FUNCTION(isolate(),
                      ObjectHashTable::Allocate(isolate()->heap(),
-                                               at_least_space_for),
+                                               at_least_space_for,
+                                               capacity_option),
                      ObjectHashTable);
 }
 
@@ -147,7 +150,7 @@
       isolate(),
       WeakHashTable::Allocate(isolate()->heap(),
                               at_least_space_for,
-                              WeakHashTable::USE_DEFAULT_MINIMUM_CAPACITY,
+                              USE_DEFAULT_MINIMUM_CAPACITY,
                               TENURED),
       WeakHashTable);
 }
@@ -573,10 +576,32 @@
 
 
 Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
-  CALL_HEAP_FUNCTION(
-      isolate(),
-      isolate()->heap()->AllocateFunctionPrototype(*function),
-      JSObject);
+  // Make sure to use globals from the function's context, since the function
+  // can be from a different context.
+  Handle<Context> native_context(function->context()->native_context());
+  Handle<Map> new_map;
+  if (function->shared()->is_generator()) {
+    // Generator prototypes can share maps since they don't have "constructor"
+    // properties.
+    new_map = handle(native_context->generator_object_prototype_map());
+  } else {
+    // Each function prototype gets a fresh map to avoid unwanted sharing of
+    // maps between prototypes of different constructors.
+    Handle<JSFunction> object_function(native_context->object_function());
+    ASSERT(object_function->has_initial_map());
+    new_map = Map::Copy(handle(object_function->initial_map()));
+  }
+
+  Handle<JSObject> prototype = NewJSObjectFromMap(new_map);
+
+  if (!function->shared()->is_generator()) {
+    JSObject::SetLocalPropertyIgnoreAttributes(prototype,
+                                               constructor_string(),
+                                               function,
+                                               DONT_ENUM);
+  }
+
+  return prototype;
 }
 
 
@@ -1047,6 +1072,7 @@
 
 Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
                                       PretenureFlag pretenure) {
+  JSFunction::EnsureHasInitialMap(constructor);
   CALL_HEAP_FUNCTION(
       isolate(),
       isolate()->heap()->AllocateJSObject(*constructor, pretenure), JSObject);
@@ -1193,6 +1219,19 @@
 }
 
 
+Handle<JSGeneratorObject> Factory::NewJSGeneratorObject(
+    Handle<JSFunction> function) {
+  ASSERT(function->shared()->is_generator());
+  JSFunction::EnsureHasInitialMap(function);
+  Handle<Map> map(function->initial_map());
+  ASSERT(map->instance_type() == JS_GENERATOR_OBJECT_TYPE);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateJSObjectFromMap(*map),
+      JSGeneratorObject);
+}
+
+
 Handle<JSArrayBuffer> Factory::NewJSArrayBuffer() {
   Handle<JSFunction> array_buffer_fun(
       isolate()->context()->native_context()->array_buffer_fun());
diff --git a/src/factory.h b/src/factory.h
index ee25bf2..2b4ae7b 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -74,7 +74,9 @@
 
   Handle<ObjectHashSet> NewObjectHashSet(int at_least_space_for);
 
-  Handle<ObjectHashTable> NewObjectHashTable(int at_least_space_for);
+  Handle<ObjectHashTable> NewObjectHashTable(
+      int at_least_space_for,
+      MinimumCapacity capacity_option = USE_DEFAULT_MINIMUM_CAPACITY);
 
   Handle<WeakHashTable> NewWeakHashTable(int at_least_space_for);
 
@@ -341,6 +343,8 @@
 
   void SetContent(Handle<JSArray> array, Handle<FixedArrayBase> elements);
 
+  Handle<JSGeneratorObject> NewJSGeneratorObject(Handle<JSFunction> function);
+
   Handle<JSArrayBuffer> NewJSArrayBuffer();
 
   Handle<JSTypedArray> NewJSTypedArray(ExternalArrayType type);
@@ -584,101 +588,6 @@
 }
 
 
-// Used to "safely" transition from pointer-based runtime code to Handle-based
-// runtime code. When a GC happens during the called Handle-based code, a
-// failure object is returned to the pointer-based code to cause it abort and
-// re-trigger a gc of it's own. Since this double-gc will cause the Handle-based
-// code to be called twice, it must be idempotent.
-class IdempotentPointerToHandleCodeTrampoline {
- public:
-  explicit IdempotentPointerToHandleCodeTrampoline(Isolate* isolate)
-      : isolate_(isolate) {}
-
-  template<typename R>
-  MUST_USE_RESULT MaybeObject* Call(R (*function)()) {
-    int collections = isolate_->heap()->gc_count();
-    (*function)();
-    return (collections == isolate_->heap()->gc_count())
-        ? isolate_->heap()->true_value()
-        : reinterpret_cast<MaybeObject*>(Failure::RetryAfterGC());
-  }
-
-  template<typename R>
-  MUST_USE_RESULT MaybeObject* CallWithReturnValue(R (*function)()) {
-    int collections = isolate_->heap()->gc_count();
-    Object* result = (*function)();
-    return (collections == isolate_->heap()->gc_count())
-        ? result
-        : reinterpret_cast<MaybeObject*>(Failure::RetryAfterGC());
-  }
-
-  template<typename R, typename P1>
-  MUST_USE_RESULT MaybeObject* Call(R (*function)(P1), P1 p1) {
-    int collections = isolate_->heap()->gc_count();
-    (*function)(p1);
-    return (collections == isolate_->heap()->gc_count())
-        ? isolate_->heap()->true_value()
-        : reinterpret_cast<MaybeObject*>(Failure::RetryAfterGC());
-  }
-
-  template<typename R, typename P1>
-  MUST_USE_RESULT MaybeObject* CallWithReturnValue(
-      R (*function)(P1),
-      P1 p1) {
-    int collections = isolate_->heap()->gc_count();
-    Object* result = (*function)(p1);
-    return (collections == isolate_->heap()->gc_count())
-        ? result
-        : reinterpret_cast<MaybeObject*>(Failure::RetryAfterGC());
-  }
-
-  template<typename R, typename P1, typename P2>
-  MUST_USE_RESULT MaybeObject* Call(
-      R (*function)(P1, P2),
-      P1 p1,
-      P2 p2) {
-    int collections = isolate_->heap()->gc_count();
-    (*function)(p1, p2);
-    return (collections == isolate_->heap()->gc_count())
-        ? isolate_->heap()->true_value()
-        : reinterpret_cast<MaybeObject*>(Failure::RetryAfterGC());
-  }
-
-  template<typename R, typename P1, typename P2>
-  MUST_USE_RESULT MaybeObject* CallWithReturnValue(
-      R (*function)(P1, P2),
-      P1 p1,
-      P2 p2) {
-    int collections = isolate_->heap()->gc_count();
-    Object* result = (*function)(p1, p2);
-    return (collections == isolate_->heap()->gc_count())
-        ? result
-        : reinterpret_cast<MaybeObject*>(Failure::RetryAfterGC());
-  }
-
-  template<typename R, typename P1, typename P2, typename P3, typename P4,
-           typename P5, typename P6, typename P7>
-  MUST_USE_RESULT MaybeObject* CallWithReturnValue(
-      R (*function)(P1, P2, P3, P4, P5, P6, P7),
-      P1 p1,
-      P2 p2,
-      P3 p3,
-      P4 p4,
-      P5 p5,
-      P6 p6,
-      P7 p7) {
-    int collections = isolate_->heap()->gc_count();
-    Handle<Object> result = (*function)(p1, p2, p3, p4, p5, p6, p7);
-    return (collections == isolate_->heap()->gc_count())
-        ? *result
-        : reinterpret_cast<MaybeObject*>(Failure::RetryAfterGC());
-  }
-
- private:
-  Isolate* isolate_;
-};
-
-
 } }  // namespace v8::internal
 
 #endif  // V8_FACTORY_H_
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 865413e..9228944 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -311,7 +311,7 @@
 DEFINE_bool(inline_arguments, true, "inline functions with arguments object")
 DEFINE_bool(inline_accessors, true, "inline JavaScript accessors")
 DEFINE_int(loop_weight, 1, "loop weight for representation inference")
-DEFINE_int(escape_analysis_iterations, 1,
+DEFINE_int(escape_analysis_iterations, 2,
            "maximum number of escape analysis fix-point iterations")
 
 DEFINE_bool(optimize_for_in, true,
@@ -502,6 +502,9 @@
             "do not print trace line after scavenger collection")
 DEFINE_bool(print_cumulative_gc_stat, false,
             "print cumulative GC statistics in name=value format on exit")
+DEFINE_bool(print_max_heap_committed, false,
+            "print statistics of the maximum memory committed for the heap "
+            "in name=value format on exit")
 DEFINE_bool(trace_gc_verbose, false,
             "print more details following each garbage collection")
 DEFINE_bool(trace_fragmentation, false,
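
DEFINE_bool registers the flag as FLAG_print_max_heap_committed, so heap code can gate its output on it; the heap teardown hunk later in this patch does exactly that. A reduced sketch of such a consumer (output format assumed from this patch):

    if (FLAG_print_max_heap_committed) {
      PrintF("maximum_committed_by_heap=%" V8_PTR_PREFIX "d\n",
             isolate->heap()->MaximumCommittedMemory());
    }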
diff --git a/src/handles.cc b/src/handles.cc
index 4cb1827..f3928eb 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -767,31 +767,6 @@
 }
 
 
-Handle<ObjectHashSet> ObjectHashSetAdd(Handle<ObjectHashSet> table,
-                                       Handle<Object> key) {
-  CALL_HEAP_FUNCTION(table->GetIsolate(),
-                     table->Add(*key),
-                     ObjectHashSet);
-}
-
-
-Handle<ObjectHashSet> ObjectHashSetRemove(Handle<ObjectHashSet> table,
-                                          Handle<Object> key) {
-  CALL_HEAP_FUNCTION(table->GetIsolate(),
-                     table->Remove(*key),
-                     ObjectHashSet);
-}
-
-
-Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table,
-                                               Handle<Object> key,
-                                               Handle<Object> value) {
-  CALL_HEAP_FUNCTION(table->GetIsolate(),
-                     table->Put(*key, *value),
-                     ObjectHashTable);
-}
-
-
 DeferredHandleScope::DeferredHandleScope(Isolate* isolate)
     : impl_(isolate->handle_scope_implementer()) {
   impl_->BeginDeferredScope();
diff --git a/src/handles.h b/src/handles.h
index cfdecac..890f4f5 100644
--- a/src/handles.h
+++ b/src/handles.h
@@ -303,16 +303,6 @@
     Handle<JSFunction> constructor,
     Handle<JSGlobalProxy> global);
 
-Handle<ObjectHashSet> ObjectHashSetAdd(Handle<ObjectHashSet> table,
-                                       Handle<Object> key);
-
-Handle<ObjectHashSet> ObjectHashSetRemove(Handle<ObjectHashSet> table,
-                                          Handle<Object> key);
-
-Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table,
-                                               Handle<Object> key,
-                                               Handle<Object> value);
-
 void AddWeakObjectToCodeDependency(Heap* heap,
                                    Handle<Object> object,
                                    Handle<Code> code);
diff --git a/src/heap-snapshot-generator.cc b/src/heap-snapshot-generator.cc
index 10d113c..b3d905a 100644
--- a/src/heap-snapshot-generator.cc
+++ b/src/heap-snapshot-generator.cc
@@ -29,7 +29,7 @@
 
 #include "heap-snapshot-generator-inl.h"
 
-#include "allocation-tracker.h"
+#include "code-stubs.h"
 #include "heap-profiler.h"
 #include "debug.h"
 #include "types.h"
@@ -748,8 +748,7 @@
 HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap)
     : is_tracking_objects_(false),
       names_(heap),
-      ids_(heap),
-      allocation_tracker_(NULL) {
+      ids_(heap) {
 }
 
 
@@ -759,29 +758,10 @@
 
 
 HeapSnapshotsCollection::~HeapSnapshotsCollection() {
-  delete allocation_tracker_;
   snapshots_.Iterate(DeleteHeapSnapshot);
 }
 
 
-void HeapSnapshotsCollection::StartHeapObjectsTracking() {
-  ids_.UpdateHeapObjectsMap();
-  if (allocation_tracker_ == NULL) {
-    allocation_tracker_ = new AllocationTracker(&ids_, names());
-  }
-  is_tracking_objects_ = true;
-}
-
-
-void HeapSnapshotsCollection::StopHeapObjectsTracking() {
-  ids_.StopHeapObjectsTracking();
-  if (allocation_tracker_ != NULL) {
-    delete allocation_tracker_;
-    allocation_tracker_ = NULL;
-  }
-}
-
-
 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(const char* name,
                                                    unsigned uid) {
   is_tracking_objects_ = true;  // Start watching for heap objects moves.
@@ -825,15 +805,6 @@
 }
 
 
-void HeapSnapshotsCollection::NewObjectEvent(Address addr, int size) {
-  DisallowHeapAllocation no_allocation;
-  ids_.NewObject(addr, size);
-  if (allocation_tracker_ != NULL) {
-    allocation_tracker_->NewObjectEvent(addr, size);
-  }
-}
-
-
 size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
   size_t size = sizeof(*this);
   size += names_.GetUsedMemorySize();
@@ -1110,7 +1081,7 @@
   void VisitCodeEntry(Address entry_address) {
      Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
      generator_->SetInternalReference(parent_obj_, parent_, "code", code);
-     generator_->TagObject(code, "(code)");
+     generator_->TagCodeObject(code);
   }
   void VisitPointers(Object** start, Object** end) {
     for (Object** p = start; p < end; p++) {
@@ -1370,10 +1341,20 @@
 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
     int entry, SharedFunctionInfo* shared) {
   HeapObject* obj = shared;
+  StringsStorage* names = collection_->names();
+  String* shared_name = shared->DebugName();
+  const char* name = NULL;
+  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
+    name = names->GetName(shared_name);
+    TagObject(shared->code(), names->GetFormatted("(code for %s)", name));
+  } else {
+    TagObject(shared->code(), names->GetFormatted("(%s code)",
+        Code::Kind2String(shared->code()->kind())));
+  }
+
   SetInternalReference(obj, entry,
                        "name", shared->name(),
                        SharedFunctionInfo::kNameOffset);
-  TagObject(shared->code(), "(code)");
   SetInternalReference(obj, entry,
                        "code", shared->code(),
                        SharedFunctionInfo::kCodeOffset);
@@ -1387,7 +1368,10 @@
   SetInternalReference(obj, entry,
                        "script", shared->script(),
                        SharedFunctionInfo::kScriptOffset);
-  TagObject(shared->construct_stub(), "(code)");
+  const char* construct_stub_name = name ?
+      names->GetFormatted("(construct stub code for %s)", name) :
+      "(construct stub code)";
+  TagObject(shared->construct_stub(), construct_stub_name);
   SetInternalReference(obj, entry,
                        "construct_stub", shared->construct_stub(),
                        SharedFunctionInfo::kConstructStubOffset);
@@ -1400,6 +1384,9 @@
   SetInternalReference(obj, entry,
                        "inferred_name", shared->inferred_name(),
                        SharedFunctionInfo::kInferredNameOffset);
+  SetInternalReference(obj, entry,
+                       "optimized_code_map", shared->optimized_code_map(),
+                       SharedFunctionInfo::kOptimizedCodeMapOffset);
   SetWeakReference(obj, entry,
                    1, shared->initial_map(),
                    SharedFunctionInfo::kInitialMapOffset);
@@ -1449,7 +1436,23 @@
 }
 
 
+void V8HeapExplorer::TagCodeObject(Code* code, const char* external_name) {
+  TagObject(code, collection_->names()->GetFormatted("(%s code)",
+      external_name));
+}
+
+
+void V8HeapExplorer::TagCodeObject(Code* code) {
+  if (code->kind() == Code::STUB) {
+    TagObject(code, collection_->names()->GetFormatted(
+        "(%s code)", CodeStub::MajorName(
+            static_cast<CodeStub::Major>(code->major_key()), true)));
+  }
+}
+
+
 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
+  TagCodeObject(code);
   TagObject(code->relocation_info(), "(code relocation info)");
   SetInternalReference(code, entry,
                        "relocation_info", code->relocation_info(),
@@ -1695,9 +1698,10 @@
   };
 
  public:
-  RootsReferencesExtractor()
+  explicit RootsReferencesExtractor(Heap* heap)
       : collecting_all_references_(false),
-        previous_reference_count_(0) {
+        previous_reference_count_(0),
+        heap_(heap) {
   }
 
   void VisitPointers(Object** start, Object** end) {
@@ -1712,22 +1716,30 @@
 
   void FillReferences(V8HeapExplorer* explorer) {
     ASSERT(strong_references_.length() <= all_references_.length());
+    Builtins* builtins = heap_->isolate()->builtins();
     for (int i = 0; i < reference_tags_.length(); ++i) {
       explorer->SetGcRootsReference(reference_tags_[i].tag);
     }
-    int strong_index = 0, all_index = 0, tags_index = 0;
+    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
     while (all_index < all_references_.length()) {
       if (strong_index < strong_references_.length() &&
           strong_references_[strong_index] == all_references_[all_index]) {
         explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                         false,
-                                        all_references_[all_index++]);
+                                        all_references_[all_index]);
         ++strong_index;
       } else {
         explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                         true,
-                                        all_references_[all_index++]);
+                                        all_references_[all_index]);
       }
+      if (reference_tags_[tags_index].tag ==
+          VisitorSynchronization::kBuiltins) {
+        ASSERT(all_references_[all_index]->IsCode());
+        explorer->TagCodeObject(Code::cast(all_references_[all_index]),
+            builtins->name(builtin_index++));
+      }
+      ++all_index;
       if (reference_tags_[tags_index].index == all_index) ++tags_index;
     }
   }
@@ -1746,6 +1758,7 @@
   List<Object*> all_references_;
   int previous_reference_count_;
   List<IndexTag> reference_tags_;
+  Heap* heap_;
 };
 
 
@@ -1771,7 +1784,7 @@
   }
 
   SetRootGcRootsReference();
-  RootsReferencesExtractor extractor;
+  RootsReferencesExtractor extractor(heap_);
   heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
   extractor.SetCollectingAllReferences();
   heap_->IterateRoots(&extractor, VISIT_ALL);
@@ -2632,10 +2645,6 @@
 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
 
 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
-  if (AllocationTracker* allocation_tracker =
-      snapshot_->collection()->allocation_tracker()) {
-    allocation_tracker->PrepareForSerialization();
-  }
   ASSERT(writer_ == NULL);
   writer_ = new OutputStreamWriter(stream);
   SerializeImpl();
@@ -2659,16 +2668,6 @@
   SerializeEdges();
   if (writer_->aborted()) return;
   writer_->AddString("],\n");
-
-  writer_->AddString("\"trace_function_infos\":[");
-  SerializeTraceNodeInfos();
-  if (writer_->aborted()) return;
-  writer_->AddString("],\n");
-  writer_->AddString("\"trace_tree\":[");
-  SerializeTraceTree();
-  if (writer_->aborted()) return;
-  writer_->AddString("],\n");
-
   writer_->AddString("\"strings\":[");
   SerializeStrings();
   if (writer_->aborted()) return;
@@ -2829,20 +2828,7 @@
             JSON_S("shortcut") ","
             JSON_S("weak")) ","
         JSON_S("string_or_number") ","
-        JSON_S("node")) ","
-    JSON_S("trace_function_info_fields") ":" JSON_A(
-        JSON_S("function_id") ","
-        JSON_S("name") ","
-        JSON_S("script_name") ","
-        JSON_S("script_id") ","
-        JSON_S("line") ","
-        JSON_S("column")) ","
-    JSON_S("trace_node_fields") ":" JSON_A(
-        JSON_S("id") ","
-        JSON_S("function_id") ","
-        JSON_S("count") ","
-        JSON_S("size") ","
-        JSON_S("children"))));
+        JSON_S("node"))));
 #undef JSON_S
 #undef JSON_O
 #undef JSON_A
@@ -2850,13 +2836,6 @@
   writer_->AddNumber(snapshot_->entries().length());
   writer_->AddString(",\"edge_count\":");
   writer_->AddNumber(snapshot_->edges().length());
-  writer_->AddString(",\"trace_function_count\":");
-  uint32_t count = 0;
-  AllocationTracker* tracker = snapshot_->collection()->allocation_tracker();
-  if (tracker) {
-    count = tracker->id_to_function_info()->occupancy();
-  }
-  writer_->AddNumber(count);
 }
 
 
@@ -2870,100 +2849,6 @@
 }
 
 
-void HeapSnapshotJSONSerializer::SerializeTraceTree() {
-  AllocationTracker* tracker = snapshot_->collection()->allocation_tracker();
-  if (!tracker) return;
-  AllocationTraceTree* traces = tracker->trace_tree();
-  SerializeTraceNode(traces->root());
-}
-
-
-void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
-  // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
-  const int kBufferSize =
-      4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
-      + 4 + 1 + 1;
-  EmbeddedVector<char, kBufferSize> buffer;
-  int buffer_pos = 0;
-  buffer_pos = utoa(node->id(), buffer, buffer_pos);
-  buffer[buffer_pos++] = ',';
-  buffer_pos = utoa(node->function_id(), buffer, buffer_pos);
-  buffer[buffer_pos++] = ',';
-  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
-  buffer[buffer_pos++] = ',';
-  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
-  buffer[buffer_pos++] = ',';
-  buffer[buffer_pos++] = '[';
-  buffer[buffer_pos++] = '\0';
-  writer_->AddString(buffer.start());
-
-  Vector<AllocationTraceNode*> children = node->children();
-  for (int i = 0; i < children.length(); i++) {
-    if (i > 0) {
-      writer_->AddCharacter(',');
-    }
-    SerializeTraceNode(children[i]);
-  }
-  writer_->AddCharacter(']');
-}
-
-
-// 0-based position is converted to 1-based during the serialization.
-static int SerializePosition(int position, const Vector<char>& buffer,
-                             int buffer_pos) {
-  if (position == -1) {
-    buffer[buffer_pos++] = '0';
-  } else {
-    ASSERT(position >= 0);
-    buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
-  }
-  return buffer_pos;
-}
-
-
-void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
-  AllocationTracker* tracker = snapshot_->collection()->allocation_tracker();
-  if (!tracker) return;
-  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
-  const int kBufferSize =
-      6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
-      + 6 + 1 + 1;
-  EmbeddedVector<char, kBufferSize> buffer;
-  HashMap* id_to_function_info = tracker->id_to_function_info();
-  bool first_entry = true;
-  for (HashMap::Entry* p = id_to_function_info->Start();
-       p != NULL;
-       p = id_to_function_info->Next(p)) {
-    SnapshotObjectId id =
-        static_cast<SnapshotObjectId>(reinterpret_cast<intptr_t>(p->key));
-    AllocationTracker::FunctionInfo* info =
-        reinterpret_cast<AllocationTracker::FunctionInfo* >(p->value);
-    int buffer_pos = 0;
-    if (first_entry) {
-      first_entry = false;
-    } else {
-      buffer[buffer_pos++] = ',';
-    }
-    buffer_pos = utoa(id, buffer, buffer_pos);
-    buffer[buffer_pos++] = ',';
-    buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
-    buffer[buffer_pos++] = ',';
-    buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
-    buffer[buffer_pos++] = ',';
-    // The cast is safe because script id is a non-negative Smi.
-    buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
-        buffer_pos);
-    buffer[buffer_pos++] = ',';
-    buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
-    buffer[buffer_pos++] = ',';
-    buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
-    buffer[buffer_pos++] = '\n';
-    buffer[buffer_pos++] = '\0';
-    writer_->AddString(buffer.start());
-  }
-}
-
-
 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
   writer_->AddCharacter('\n');
   writer_->AddCharacter('\"');
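
The net effect of the TagCodeObject/TagObject changes above is that code entries in a heap snapshot carry descriptive names instead of a generic "(code)". Illustrative labels only (assuming a script defining a function named Point):

    (code for Point)                  // SharedFunctionInfo with a debug name
    (construct stub code for Point)   // its construct stub
    (BUILTIN code)                    // name-less shared info, via Code::Kind2String
    (FastNewClosure code)             // a stub, via CodeStub::MajorName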
diff --git a/src/heap-snapshot-generator.h b/src/heap-snapshot-generator.h
index e4038b1..f78db55 100644
--- a/src/heap-snapshot-generator.h
+++ b/src/heap-snapshot-generator.h
@@ -33,8 +33,6 @@
 namespace v8 {
 namespace internal {
 
-class AllocationTracker;
-class AllocationTraceNode;
 class HeapEntry;
 class HeapSnapshot;
 
@@ -298,8 +296,8 @@
   SnapshotObjectId PushHeapObjectsStats(OutputStream* stream) {
     return ids_.PushHeapObjectsStats(stream);
   }
-  void StartHeapObjectsTracking();
-  void StopHeapObjectsTracking();
+  void StartHeapObjectsTracking() { is_tracking_objects_ = true; }
+  void StopHeapObjectsTracking() { ids_.StopHeapObjectsTracking(); }
 
   HeapSnapshot* NewSnapshot(const char* name, unsigned uid);
   void SnapshotGenerationFinished(HeapSnapshot* snapshot);
@@ -307,7 +305,6 @@
   void RemoveSnapshot(HeapSnapshot* snapshot);
 
   StringsStorage* names() { return &names_; }
-  AllocationTracker* allocation_tracker() { return allocation_tracker_; }
 
   SnapshotObjectId FindObjectId(Address object_addr) {
     return ids_.FindEntry(object_addr);
@@ -319,7 +316,7 @@
   void ObjectMoveEvent(Address from, Address to, int size) {
     ids_.MoveObject(from, to, size);
   }
-  void NewObjectEvent(Address addr, int size);
+  void NewObjectEvent(Address addr, int size) { ids_.NewObject(addr, size); }
   void UpdateObjectSizeEvent(Address addr, int size) {
     ids_.UpdateObjectSize(addr, size);
   }
@@ -338,7 +335,6 @@
   StringsStorage names_;
   // Mapping from HeapObject addresses to objects' uids.
   HeapObjectsMap ids_;
-  AllocationTracker* allocation_tracker_;
 
   DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsCollection);
 };
@@ -447,6 +443,8 @@
   int EstimateObjectsCount(HeapIterator* iterator);
   bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
   void TagGlobalObjects();
+  void TagCodeObject(Code* code);
+  void TagCodeObject(Code* code, const char* external_name);
 
   static String* GetConstructorName(JSObject* object);
 
@@ -677,9 +675,6 @@
   void SerializeNode(HeapEntry* entry);
   void SerializeNodes();
   void SerializeSnapshot();
-  void SerializeTraceTree();
-  void SerializeTraceNode(AllocationTraceNode* node);
-  void SerializeTraceNodeInfos();
   void SerializeString(const unsigned char* s);
   void SerializeStrings();
 
diff --git a/src/heap.cc b/src/heap.cc
index fa358c5..9e877ab 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -79,6 +79,7 @@
 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
 // Will be 4 * reserved_semispace_size_ to ensure that young
 // generation can be aligned to its size.
+      maximum_committed_(0),
       survived_since_last_expansion_(0),
       sweep_generation_(0),
       always_allocate_scope_depth_(0),
@@ -232,6 +233,16 @@
 }
 
 
+void Heap::UpdateMaximumCommitted() {
+  if (!HasBeenSetUp()) return;
+
+  intptr_t current_committed_memory = CommittedMemory();
+  if (current_committed_memory > maximum_committed_) {
+    maximum_committed_ = current_committed_memory;
+  }
+}
+
+
 intptr_t Heap::Available() {
   if (!HasBeenSetUp()) return 0;
 
@@ -441,6 +452,8 @@
 #endif
   }
 
+  UpdateMaximumCommitted();
+
 #ifdef DEBUG
   ASSERT(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);
 
@@ -506,6 +519,8 @@
     }
   }
 
+  UpdateMaximumCommitted();
+
   isolate_->counters()->alive_after_last_gc()->Set(
       static_cast<int>(SizeOfObjects()));
 
@@ -567,6 +582,9 @@
                 property_cell_space()->CommittedMemory() / KB));
     isolate_->counters()->heap_sample_code_space_committed()->AddSample(
         static_cast<int>(code_space()->CommittedMemory() / KB));
+
+    isolate_->counters()->heap_sample_maximum_committed()->AddSample(
+        static_cast<int>(MaximumCommittedMemory() / KB));
   }
 
 #define UPDATE_COUNTERS_FOR_SPACE(space)                                       \
@@ -4194,7 +4212,7 @@
   }
   code->set_is_crankshafted(crankshafted);
   code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER);
-  code->InitializeTypeFeedbackInfoNoWriteBarrier(undefined_value());
+  code->set_raw_type_feedback_info(undefined_value());
   code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER);
   code->set_gc_metadata(Smi::FromInt(0));
   code->set_ic_age(global_ic_age_);
@@ -4370,39 +4388,6 @@
 }
 
 
-MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
-  // Make sure to use globals from the function's context, since the function
-  // can be from a different context.
-  Context* native_context = function->context()->native_context();
-  Map* new_map;
-  if (function->shared()->is_generator()) {
-    // Generator prototypes can share maps since they don't have "constructor"
-    // properties.
-    new_map = native_context->generator_object_prototype_map();
-  } else {
-    // Each function prototype gets a fresh map to avoid unwanted sharing of
-    // maps between prototypes of different constructors.
-    JSFunction* object_function = native_context->object_function();
-    ASSERT(object_function->has_initial_map());
-    MaybeObject* maybe_map = object_function->initial_map()->Copy();
-    if (!maybe_map->To(&new_map)) return maybe_map;
-  }
-
-  Object* prototype;
-  MaybeObject* maybe_prototype = AllocateJSObjectFromMap(new_map);
-  if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
-
-  if (!function->shared()->is_generator()) {
-    MaybeObject* maybe_failure =
-        JSObject::cast(prototype)->SetLocalPropertyIgnoreAttributesTrampoline(
-            constructor_string(), function, DONT_ENUM);
-    if (maybe_failure->IsFailure()) return maybe_failure;
-  }
-
-  return prototype;
-}
-
-
 MaybeObject* Heap::AllocateFunction(Map* function_map,
                                     SharedFunctionInfo* shared,
                                     Object* prototype,
@@ -4474,48 +4459,6 @@
 }
 
 
-MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
-  ASSERT(!fun->has_initial_map());
-
-  // First create a new map with the size and number of in-object properties
-  // suggested by the function.
-  InstanceType instance_type;
-  int instance_size;
-  int in_object_properties;
-  if (fun->shared()->is_generator()) {
-    instance_type = JS_GENERATOR_OBJECT_TYPE;
-    instance_size = JSGeneratorObject::kSize;
-    in_object_properties = 0;
-  } else {
-    instance_type = JS_OBJECT_TYPE;
-    instance_size = fun->shared()->CalculateInstanceSize();
-    in_object_properties = fun->shared()->CalculateInObjectProperties();
-  }
-  Map* map;
-  MaybeObject* maybe_map = AllocateMap(instance_type, instance_size);
-  if (!maybe_map->To(&map)) return maybe_map;
-
-  // Fetch or allocate prototype.
-  Object* prototype;
-  if (fun->has_instance_prototype()) {
-    prototype = fun->instance_prototype();
-  } else {
-    MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
-    if (!maybe_prototype->To(&prototype)) return maybe_prototype;
-  }
-  map->set_inobject_properties(in_object_properties);
-  map->set_unused_property_fields(in_object_properties);
-  map->set_prototype(prototype);
-  ASSERT(map->has_fast_object_elements());
-
-  if (!fun->shared()->is_generator()) {
-    fun->shared()->StartInobjectSlackTracking(map);
-  }
-
-  return map;
-}
-
-
 void Heap::InitializeJSObjectFromMap(JSObject* obj,
                                      FixedArray* properties,
                                      Map* map) {
@@ -4622,15 +4565,7 @@
 
 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
                                     PretenureFlag pretenure) {
-  // Allocate the initial map if absent.
-  if (!constructor->has_initial_map()) {
-    Object* initial_map;
-    { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor);
-      if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
-    }
-    constructor->set_initial_map(Map::cast(initial_map));
-    Map::cast(initial_map)->set_constructor(constructor);
-  }
+  ASSERT(constructor->has_initial_map());
   // Allocate the object based on the constructor's initial map.
   MaybeObject* result = AllocateJSObjectFromMap(
       constructor->initial_map(), pretenure);
@@ -4645,15 +4580,7 @@
 
 MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor,
     Handle<AllocationSite> allocation_site) {
-  // Allocate the initial map if absent.
-  if (!constructor->has_initial_map()) {
-    Object* initial_map;
-    { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor);
-      if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
-    }
-    constructor->set_initial_map(Map::cast(initial_map));
-    Map::cast(initial_map)->set_constructor(constructor);
-  }
+  ASSERT(constructor->has_initial_map());
   // Allocate the object based on the constructor's initial map, or the payload
   // advice
   Map* initial_map = constructor->initial_map();
@@ -4685,23 +4612,6 @@
 }
 
 
-MaybeObject* Heap::AllocateJSGeneratorObject(JSFunction *function) {
-  ASSERT(function->shared()->is_generator());
-  Map *map;
-  if (function->has_initial_map()) {
-    map = function->initial_map();
-  } else {
-    // Allocate the initial map if absent.
-    MaybeObject* maybe_map = AllocateInitialMap(function);
-    if (!maybe_map->To(&map)) return maybe_map;
-    function->set_initial_map(map);
-    map->set_constructor(function);
-  }
-  ASSERT(map->instance_type() == JS_GENERATOR_OBJECT_TYPE);
-  return AllocateJSObjectFromMap(map);
-}
-
-
 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) {
   // Allocate a fresh map. Modules do not have a prototype.
   Map* map;
@@ -6812,6 +6722,8 @@
   }
 #endif
 
+  UpdateMaximumCommitted();
+
   if (FLAG_print_cumulative_gc_stat) {
     PrintF("\n");
     PrintF("gc_count=%d ", gc_count_);
@@ -6826,6 +6738,29 @@
     PrintF("\n\n");
   }
 
+  if (FLAG_print_max_heap_committed) {
+    PrintF("\n");
+    PrintF("maximum_committed_by_heap=%" V8_PTR_PREFIX "d ",
+      MaximumCommittedMemory());
+    PrintF("maximum_committed_by_new_space=%" V8_PTR_PREFIX "d ",
+      new_space_.MaximumCommittedMemory());
+    PrintF("maximum_committed_by_old_pointer_space=%" V8_PTR_PREFIX "d ",
+      old_data_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
+      old_pointer_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
+      old_pointer_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_code_space=%" V8_PTR_PREFIX "d ",
+      code_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_map_space=%" V8_PTR_PREFIX "d ",
+      map_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_cell_space=%" V8_PTR_PREFIX "d ",
+      cell_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_property_space=%" V8_PTR_PREFIX "d ",
+      property_cell_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_lo_space=%" V8_PTR_PREFIX "d ",
+      lo_space_->MaximumCommittedMemory());
+    PrintF("\n\n");
+  }
+
   TearDownArrayBuffers();
 
   isolate_->global_handles()->TearDown();
@@ -7944,17 +7881,18 @@
       static_cast<int>(object_sizes_last_time_[index]));
   FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
 #undef ADJUST_LAST_TIME_OBJECT_COUNT
-#define ADJUST_LAST_TIME_OBJECT_COUNT(name)                 \
-  index = FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge; \
-  counters->count_of_CODE_AGE_##name()->Increment(          \
-      static_cast<int>(object_counts_[index]));             \
-  counters->count_of_CODE_AGE_##name()->Decrement(          \
-      static_cast<int>(object_counts_last_time_[index]));   \
-  counters->size_of_CODE_AGE_##name()->Increment(           \
-      static_cast<int>(object_sizes_[index]));              \
-  counters->size_of_CODE_AGE_##name()->Decrement(          \
+#define ADJUST_LAST_TIME_OBJECT_COUNT(name)                                   \
+  index =                                                                     \
+      FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge - Code::kFirstCodeAge; \
+  counters->count_of_CODE_AGE_##name()->Increment(                            \
+      static_cast<int>(object_counts_[index]));                               \
+  counters->count_of_CODE_AGE_##name()->Decrement(                            \
+      static_cast<int>(object_counts_last_time_[index]));                     \
+  counters->size_of_CODE_AGE_##name()->Increment(                             \
+      static_cast<int>(object_sizes_[index]));                                \
+  counters->size_of_CODE_AGE_##name()->Decrement(                             \
       static_cast<int>(object_sizes_last_time_[index]));
-  CODE_AGE_LIST_WITH_NO_AGE(ADJUST_LAST_TIME_OBJECT_COUNT)
+  CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
 #undef ADJUST_LAST_TIME_OBJECT_COUNT
 
   OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
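
UpdateMaximumCommitted keeps a simple high-water mark, and each space tracks its own peak independently, so the per-space values printed at teardown need not add up to the heap-wide figure. A hedged reading sketch (accessor names as used in this patch):

    heap->UpdateMaximumCommitted();                    // sample after a space grows
    intptr_t total_peak = heap->MaximumCommittedMemory();
    intptr_t code_peak  = heap->code_space()->MaximumCommittedMemory();
    intptr_t lo_peak    = heap->lo_space()->MaximumCommittedMemory();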
diff --git a/src/heap.h b/src/heap.h
index 96cda58..cb421d4 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -533,6 +533,13 @@
   // Returns the amount of physical memory currently committed for the heap.
   size_t CommittedPhysicalMemory();
 
+  // Returns the maximum amount of memory ever committed for the heap.
+  intptr_t MaximumCommittedMemory() { return maximum_committed_; }
+
+  // Updates the maximum committed memory for the heap. Should be called
+  // whenever a space grows.
+  void UpdateMaximumCommitted();
+
   // Returns the available bytes in space w/o growing.
   // Heap doesn't guarantee that it can allocate an object that requires
   // all available bytes. Check MaxHeapObjectSize() instead.
@@ -624,9 +631,6 @@
       JSFunction* constructor,
       Handle<AllocationSite> allocation_site);
 
-  MUST_USE_RESULT MaybeObject* AllocateJSGeneratorObject(
-      JSFunction* function);
-
   MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context,
                                                 ScopeInfo* scope_info);
 
@@ -668,12 +672,6 @@
   MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source,
                                             AllocationSite* site = NULL);
 
-  // Allocates the function prototype.
-  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
-  // failed.
-  // Please note this does not perform a garbage collection.
-  MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);
-
   // Allocates a JS ArrayBuffer object.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
@@ -740,9 +738,6 @@
   MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
                                                   int instance_size);
 
-  // Allocate a map for the specified function
-  MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);
-
   // Allocates an empty code cache.
   MUST_USE_RESULT MaybeObject* AllocateCodeCache();
 
@@ -1812,7 +1807,7 @@
         FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS,
     FIRST_CODE_AGE_SUB_TYPE =
         FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1,
-    OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kLastCodeAge + 1
+    OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1
   };
 
   void RecordObjectStats(InstanceType type, size_t size) {
@@ -1822,12 +1817,17 @@
   }
 
   void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
-    ASSERT(code_sub_type < Code::NUMBER_OF_KINDS);
-    ASSERT(code_age < Code::kLastCodeAge);
-    object_counts_[FIRST_CODE_KIND_SUB_TYPE + code_sub_type]++;
-    object_sizes_[FIRST_CODE_KIND_SUB_TYPE + code_sub_type] += size;
-    object_counts_[FIRST_CODE_AGE_SUB_TYPE + code_age]++;
-    object_sizes_[FIRST_CODE_AGE_SUB_TYPE + code_age] += size;
+    int code_sub_type_index = FIRST_CODE_KIND_SUB_TYPE + code_sub_type;
+    int code_age_index =
+        FIRST_CODE_AGE_SUB_TYPE + code_age - Code::kFirstCodeAge;
+    ASSERT(code_sub_type_index >= FIRST_CODE_KIND_SUB_TYPE &&
+           code_sub_type_index < FIRST_CODE_AGE_SUB_TYPE);
+    ASSERT(code_age_index >= FIRST_CODE_AGE_SUB_TYPE &&
+           code_age_index < OBJECT_STATS_COUNT);
+    object_counts_[code_sub_type_index]++;
+    object_sizes_[code_sub_type_index] += size;
+    object_counts_[code_age_index]++;
+    object_sizes_[code_age_index] += size;
   }
 
   void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
@@ -1888,6 +1888,7 @@
   int initial_semispace_size_;
   intptr_t max_old_generation_size_;
   intptr_t max_executable_size_;
+  intptr_t maximum_committed_;
 
   // For keeping track of how much data has survived
   // scavenge since last new space expansion.
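
The rebasing by Code::kFirstCodeAge in RecordCodeSubTypeStats is easiest to see with made-up numbers. Assuming, purely for illustration, FIRST_CODE_AGE_SUB_TYPE == 200 and Code::kFirstCodeAge == 1, an object whose raw age is 3 is counted at:

    int code_age_index = 200 + 3 - 1;  // == 202, the third code-age bucket
    // The ASSERTs only require FIRST_CODE_AGE_SUB_TYPE <= code_age_index
    // and code_age_index < OBJECT_STATS_COUNT.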
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 3b232e6..2e05654 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -844,12 +844,12 @@
 void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
   ASSERT(!finished_);
   ASSERT(!captured_);
+  ASSERT(did_then_);
+  if (!did_else_) Else();
   HBasicBlock* true_block = last_true_block_ == NULL
       ? first_true_block_
       : last_true_block_;
-  HBasicBlock* false_block = did_else_ && (first_false_block_ != NULL)
-      ? builder_->current_block()
-      : first_false_block_;
+  HBasicBlock* false_block = builder_->current_block();
   if (true_block != NULL && !true_block->IsFinished()) {
     ASSERT(continuation->IsTrueReachable());
     builder_->GotoNoSimulate(true_block, continuation->true_branch());
@@ -4498,8 +4498,7 @@
     // TODO(mvstanton): Consider a flag to turn off creation of any
     // AllocationMementos for this call: we are in crankshaft and should have
     // learned enough about transition behavior to stop emitting mementos.
-    Runtime::FunctionId function_id = (expr->depth() > 1)
-        ? Runtime::kCreateArrayLiteral : Runtime::kCreateArrayLiteralShallow;
+    Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
     literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                 Runtime::FunctionForId(function_id),
                                 3);
diff --git a/src/ia32/assembler-ia32-inl.h b/src/ia32/assembler-ia32-inl.h
index 05cc23a..ee5d991 100644
--- a/src/ia32/assembler-ia32-inl.h
+++ b/src/ia32/assembler-ia32-inl.h
@@ -125,12 +125,6 @@
 }
 
 
-Object** RelocInfo::target_object_address() {
-  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
-  return &Memory::Object_at(pc_);
-}
-
-
 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   ASSERT(!target->IsConsString());
@@ -145,9 +139,9 @@
 }
 
 
-Address* RelocInfo::target_reference_address() {
+Address RelocInfo::target_reference() {
   ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
-  return reinterpret_cast<Address*>(pc_);
+  return Memory::Address_at(pc_);
 }
 
 
@@ -249,6 +243,18 @@
 }
 
 
+void RelocInfo::WipeOut() {
+  if (IsEmbeddedObject(rmode_) || IsExternalReference(rmode_)) {
+    Memory::Address_at(pc_) = NULL;
+  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
+    // Effectively write zero into the relocation.
+    Assembler::set_target_address_at(pc_, pc_ + sizeof(int32_t));
+  } else {
+    UNREACHABLE();
+  }
+}
+
+
 bool RelocInfo::IsPatchedReturnSequence() {
   return *pc_ == kCallOpcode;
 }
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index 0557ed8..40345d8 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -2059,6 +2059,22 @@
 }
 
 
+void Assembler::andps(XMMRegister dst, XMMRegister src) {
+  EnsureSpace ensure_space(this);
+  EMIT(0x0F);
+  EMIT(0x54);
+  emit_sse_operand(dst, src);
+}
+
+
+void Assembler::orps(XMMRegister dst, XMMRegister src) {
+  EnsureSpace ensure_space(this);
+  EMIT(0x0F);
+  EMIT(0x56);
+  emit_sse_operand(dst, src);
+}
+
+
 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
   EnsureSpace ensure_space(this);
   EMIT(0x0F);
@@ -2344,14 +2360,6 @@
 }
 
 
-void Assembler::andps(XMMRegister dst, XMMRegister src) {
-  EnsureSpace ensure_space(this);
-  EMIT(0x0F);
-  EMIT(0x54);
-  emit_sse_operand(dst, src);
-}
-
-
 void Assembler::pand(XMMRegister dst, XMMRegister src) {
   ASSERT(IsEnabled(SSE2));
   EnsureSpace ensure_space(this);
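
orps joins andps/xorps as a plain SSE (not SSE2) packed-single bitwise operation, which is why it carries no IsEnabled(SSE2) assert, unlike pand below it. For reference, the opcode bytes emitted line up as:

    __ andps(xmm0, xmm1);  // 0F 54 /r
    __ orps(xmm0, xmm1);   // 0F 56 /r
    __ xorps(xmm0, xmm0);  // 0F 57 /r (common zeroing idiom)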
diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h
index f46c647..57f496f 100644
--- a/src/ia32/assembler-ia32.h
+++ b/src/ia32/assembler-ia32.h
@@ -1020,6 +1020,7 @@
   // SSE instructions
   void andps(XMMRegister dst, XMMRegister src);
   void xorps(XMMRegister dst, XMMRegister src);
+  void orps(XMMRegister dst, XMMRegister src);
 
   // SSE2 instructions
   void cvttss2si(Register dst, const Operand& src);
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index b6bbe04..941bb32 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -82,7 +82,7 @@
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
-      Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
+      Runtime::FunctionForId(Runtime::kCreateArrayLiteral)->entry;
 }
 
 
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index d09a85f..ab4029d 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -1110,7 +1110,7 @@
   __ pshufd(input, input, static_cast<uint8_t>(0xe1));  // Order: 11 10 00 01
   __ movsd(double_scratch, Operand::StaticArray(
       temp2, times_8, ExternalReference::math_exp_log_table()));
-  __ por(input, double_scratch);
+  __ orps(input, double_scratch);
   __ mulsd(result, input);
   __ bind(&done);
 }
diff --git a/src/ia32/disasm-ia32.cc b/src/ia32/disasm-ia32.cc
index 13cf6bc..4e4d552 100644
--- a/src/ia32/disasm-ia32.cc
+++ b/src/ia32/disasm-ia32.cc
@@ -1050,6 +1050,14 @@
                            NameOfXMMRegister(regop),
                            NameOfXMMRegister(rm));
             data++;
+          } else if (f0byte == 0x56) {
+            data += 2;
+            int mod, regop, rm;
+            get_modrm(*data, &mod, &regop, &rm);
+            AppendToBuffer("orps %s,%s",
+                           NameOfXMMRegister(regop),
+                           NameOfXMMRegister(rm));
+            data++;
           } else if (f0byte == 0x57) {
             data += 2;
             int mod, regop, rm;
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 704fb4e..09f5de1 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1728,19 +1728,14 @@
         DONT_TRACK_ALLOCATION_SITE,
         length);
     __ CallStub(&stub);
-  } else if (expr->depth() > 1) {
+  } else if (expr->depth() > 1 ||
+             Serializer::enabled() ||
+             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
     __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
     __ push(Immediate(Smi::FromInt(expr->literal_index())));
     __ push(Immediate(constant_elements));
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
-  } else if (Serializer::enabled() ||
-      length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
-    __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
-    __ push(Immediate(Smi::FromInt(expr->literal_index())));
-    __ push(Immediate(constant_elements));
-    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
     ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
            FLAG_smi_only_arrays);
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 46c87e1..a69ef12 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -1958,7 +1958,7 @@
           XMMRegister xmm_scratch = double_scratch0();
           __ Set(temp, Immediate(lower));
           __ movd(xmm_scratch, Operand(temp));
-          __ por(res, xmm_scratch);
+          __ orps(res, xmm_scratch);
         }
       }
     }
@@ -2184,7 +2184,7 @@
     __ ucomisd(left_reg, left_reg);  // NaN check.
     __ j(parity_even, &return_left, Label::kNear);  // left == NaN.
     __ bind(&return_right);
-    __ movsd(left_reg, right_reg);
+    __ movaps(left_reg, right_reg);
 
     __ bind(&return_left);
   }
@@ -3995,7 +3995,7 @@
 
   // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then
   // compare and compensate.
-  __ movsd(input_temp, input_reg);  // Do not alter input_reg.
+  __ movaps(input_temp, input_reg);  // Do not alter input_reg.
   __ subsd(input_temp, xmm_scratch);
   __ cvttsd2si(output_reg, Operand(input_temp));
   // Catch minint due to overflow, and to prevent overflow when compensating.
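
The movsd-to-movaps changes above rest on a general x86 property rather than anything lithium-specific: between XMM registers, movsd writes only the low 64 bits and therefore merges with (and depends on) the old destination, while movaps copies the whole register. In MacroAssembler terms:

    __ movsd(xmm0, xmm1);   // xmm0[63:0] = xmm1[63:0]; xmm0[127:64] unchanged
    __ movaps(xmm0, xmm1);  // xmm0 = xmm1, all 128 bits, no partial update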
diff --git a/src/ia32/lithium-gap-resolver-ia32.cc b/src/ia32/lithium-gap-resolver-ia32.cc
index 2b2126a..d621bd2 100644
--- a/src/ia32/lithium-gap-resolver-ia32.cc
+++ b/src/ia32/lithium-gap-resolver-ia32.cc
@@ -488,7 +488,7 @@
         cgen_->ToOperand(source->IsDoubleRegister() ? destination : source);
     __ movsd(xmm0, other);
     __ movsd(other, reg);
-    __ movsd(reg, Operand(xmm0));
+    __ movaps(reg, xmm0);
   } else if (source->IsDoubleStackSlot() && destination->IsDoubleStackSlot()) {
     CpuFeatureScope scope(cgen_->masm(), SSE2);
     // Double-width memory-to-memory.  Spill on demand to use a general
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 9786cff..0648833 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -137,38 +137,34 @@
 }
 
 
-// Helper function used to check that the dictionary doesn't contain
-// the property. This function may return false negatives, so miss_label
-// must always call a backup property check that is complete.
-// This function is safe to call if the receiver has fast properties.
-// Name must be unique and receiver must be a heap object.
-static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
-                                             Label* miss_label,
-                                             Register receiver,
-                                             Handle<Name> name,
-                                             Register r0,
-                                             Register r1) {
+void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
+                                                    Label* miss_label,
+                                                    Register receiver,
+                                                    Handle<Name> name,
+                                                    Register scratch0,
+                                                    Register scratch1) {
   ASSERT(name->IsUniqueName());
+  ASSERT(!receiver.is(scratch0));
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->negative_lookups(), 1);
   __ IncrementCounter(counters->negative_lookups_miss(), 1);
 
-  __ mov(r0, FieldOperand(receiver, HeapObject::kMapOffset));
+  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
 
   const int kInterceptorOrAccessCheckNeededMask =
       (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
 
   // Bail out if the receiver has a named interceptor or requires access checks.
-  __ test_b(FieldOperand(r0, Map::kBitFieldOffset),
+  __ test_b(FieldOperand(scratch0, Map::kBitFieldOffset),
             kInterceptorOrAccessCheckNeededMask);
   __ j(not_zero, miss_label);
 
   // Check that receiver is a JSObject.
-  __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE);
+  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
   __ j(below, miss_label);
 
   // Load properties array.
-  Register properties = r0;
+  Register properties = scratch0;
   __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
 
   // Check that the properties array is a dictionary.
@@ -182,7 +178,7 @@
                                                    &done,
                                                    properties,
                                                    name,
-                                                   r1);
+                                                   scratch1);
   __ bind(&done);
   __ DecrementCounter(counters->negative_lookups_miss(), 1);
 }
@@ -792,13 +788,13 @@
 // Generate code to check that a global property cell is empty. Create
 // the property cell at compilation time if no cell exists for the
 // property.
-static void GenerateCheckPropertyCell(MacroAssembler* masm,
-                                      Handle<GlobalObject> global,
-                                      Handle<Name> name,
-                                      Register scratch,
-                                      Label* miss) {
+void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
+                                             Handle<JSGlobalObject> global,
+                                             Handle<Name> name,
+                                             Register scratch,
+                                             Label* miss) {
   Handle<PropertyCell> cell =
-      GlobalObject::EnsurePropertyCell(global, name);
+      JSGlobalObject::EnsurePropertyCell(global, name);
   ASSERT(cell->value()->IsTheHole());
   Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
   if (Serializer::enabled()) {
@@ -820,7 +816,7 @@
     Label* miss) {
   if (holder->IsJSGlobalObject()) {
     GenerateCheckPropertyCell(
-        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
+        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
   } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
     GenerateDictionaryNegativeLookup(
         masm, miss, holder_reg, name, scratch1(), scratch2());
@@ -1122,19 +1118,17 @@
 }
 
 
-// Calls GenerateCheckPropertyCell for each global object in the prototype chain
-// from object to (but not including) holder.
-static void GenerateCheckPropertyCells(MacroAssembler* masm,
-                                       Handle<JSObject> object,
-                                       Handle<JSObject> holder,
-                                       Handle<Name> name,
-                                       Register scratch,
-                                       Label* miss) {
+void StubCompiler::GenerateCheckPropertyCells(MacroAssembler* masm,
+                                              Handle<JSObject> object,
+                                              Handle<JSObject> holder,
+                                              Handle<Name> name,
+                                              Register scratch,
+                                              Label* miss) {
   Handle<JSObject> current = object;
   while (!current.is_identical_to(holder)) {
-    if (current->IsGlobalObject()) {
+    if (current->IsJSGlobalObject()) {
       GenerateCheckPropertyCell(masm,
-                                Handle<GlobalObject>::cast(current),
+                                Handle<JSGlobalObject>::cast(current),
                                 name,
                                 scratch,
                                 miss);
@@ -1355,26 +1349,6 @@
 }
 
 
-void LoadStubCompiler::NonexistentHandlerFrontend(
-    Handle<JSObject> object,
-    Handle<JSObject> last,
-    Handle<Name> name,
-    Label* success,
-    Handle<GlobalObject> global) {
-  Label miss;
-
-  HandlerFrontendHeader(object, receiver(), last, name, &miss);
-
-  // If the last object in the prototype chain is a global object,
-  // check that the global property cell is empty.
-  if (!global.is_null()) {
-    GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
-  }
-
-  HandlerFrontendFooter(name, success, &miss);
-}
-
-
 void LoadStubCompiler::GenerateLoadField(Register reg,
                                          Handle<JSObject> holder,
                                          PropertyIndex field,
@@ -3049,7 +3023,7 @@
     Handle<JSObject> object,
     Handle<JSObject> last,
     Handle<Name> name,
-    Handle<GlobalObject> global) {
+    Handle<JSGlobalObject> global) {
   Label success;
 
   NonexistentHandlerFrontend(object, last, name, &success, global);
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index b75ddb3..8ef7c3c 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -93,7 +93,8 @@
     ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
     if (!Code::IsWeakEmbeddedObject(rinfo->host()->kind(),
                                     rinfo->target_object())) {
-      VisitPointer(rinfo->target_object_address());
+      Object* p = rinfo->target_object();
+      VisitPointer(&p);
     }
   }
 
@@ -1649,7 +1650,7 @@
     int object_size = obj->Size();
     ASSERT(map->instance_type() == CODE_TYPE);
     Code* code_obj = Code::cast(obj);
-    heap->RecordCodeSubTypeStats(code_obj->kind(), code_obj->GetAge(),
+    heap->RecordCodeSubTypeStats(code_obj->kind(), code_obj->GetRawAge(),
                                  object_size);
     ObjectStatsVisitBase(kVisitCode, map, obj);
   }
diff --git a/src/messages.js b/src/messages.js
index 0a30122..bfd42de 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -1247,23 +1247,24 @@
 var cyclic_error_marker = new $Object();
 
 function GetPropertyWithoutInvokingMonkeyGetters(error, name) {
+  var current = error;
   // Climb the prototype chain until we find the holder.
-  while (error && !%HasLocalProperty(error, name)) {
-    error = %GetPrototype(error);
+  while (current && !%HasLocalProperty(current, name)) {
+    current = %GetPrototype(current);
   }
-  if (IS_NULL(error)) return UNDEFINED;
-  if (!IS_OBJECT(error)) return error[name];
+  if (IS_NULL(current)) return UNDEFINED;
+  if (!IS_OBJECT(current)) return error[name];
   // If the property is an accessor on one of the predefined errors that can be
   // generated statically by the compiler, don't touch it. This is to address
   // http://code.google.com/p/chromium/issues/detail?id=69187
-  var desc = %GetOwnProperty(error, name);
+  var desc = %GetOwnProperty(current, name);
   if (desc && desc[IS_ACCESSOR_INDEX]) {
     var isName = name === "name";
-    if (error === $ReferenceError.prototype)
+    if (current === $ReferenceError.prototype)
       return isName ? "ReferenceError" : UNDEFINED;
-    if (error === $SyntaxError.prototype)
+    if (current === $SyntaxError.prototype)
       return isName ? "SyntaxError" : UNDEFINED;
-    if (error === $TypeError.prototype)
+    if (current === $TypeError.prototype)
       return isName ? "TypeError" : UNDEFINED;
   }
   // Otherwise, read normally.
diff --git a/src/mips/assembler-mips-inl.h b/src/mips/assembler-mips-inl.h
index de91051..d5af5b2 100644
--- a/src/mips/assembler-mips-inl.h
+++ b/src/mips/assembler-mips-inl.h
@@ -190,16 +190,6 @@
 }
 
 
-Object** RelocInfo::target_object_address() {
-  // Provide a "natural pointer" to the embedded object,
-  // which can be de-referenced during heap iteration.
-  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
-  reconstructed_obj_ptr_ =
-      reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
-  return &reconstructed_obj_ptr_;
-}
-
-
 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   ASSERT(!target->IsConsString());
@@ -213,10 +203,9 @@
 }
 
 
-Address* RelocInfo::target_reference_address() {
+Address RelocInfo::target_reference() {
   ASSERT(rmode_ == EXTERNAL_REFERENCE);
-  reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
-  return &reconstructed_adr_ptr_;
+  return Assembler::target_address_at(pc_);
 }
 
 
@@ -326,6 +315,15 @@
 }
 
 
+void RelocInfo::WipeOut() {
+  ASSERT(IsEmbeddedObject(rmode_) ||
+         IsCodeTarget(rmode_) ||
+         IsRuntimeEntry(rmode_) ||
+         IsExternalReference(rmode_));
+  Assembler::set_target_address_at(pc_, NULL);
+}
+
+
 bool RelocInfo::IsPatchedReturnSequence() {
   Instr instr0 = Assembler::instr_at(pc_);
   Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
diff --git a/src/mips/assembler-mips.cc b/src/mips/assembler-mips.cc
index 0972a82..d84315a 100644
--- a/src/mips/assembler-mips.cc
+++ b/src/mips/assembler-mips.cc
@@ -2031,6 +2031,14 @@
 }
 
 
+void Assembler::emit_code_stub_address(Code* stub) {
+  CheckBuffer();
+  *reinterpret_cast<uint32_t*>(pc_) =
+      reinterpret_cast<uint32_t>(stub->instruction_start());
+  pc_ += sizeof(uint32_t);
+}
+
+
 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
   // We do not try to reuse pool constants.
   RelocInfo rinfo(pc_, rmode, data, NULL);
diff --git a/src/mips/assembler-mips.h b/src/mips/assembler-mips.h
index 2468c3c..49cce62 100644
--- a/src/mips/assembler-mips.h
+++ b/src/mips/assembler-mips.h
@@ -896,6 +896,9 @@
   void db(uint8_t data);
   void dd(uint32_t data);
 
+  // Emits the address of the code stub's first instruction.
+  void emit_code_stub_address(Code* stub);
+
   PositionsRecorder* positions_recorder() { return &positions_recorder_; }
 
   // Postpone the generation of the trampoline pool for the specified number of
diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc
index 0b49583..2fe081e 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -301,17 +301,12 @@
                                     Runtime::FunctionId function_id) {
   FrameScope scope(masm, StackFrame::INTERNAL);
   // Push a copy of the function onto the stack.
-  __ push(a1);
-  // Push call kind information.
-  __ push(t1);
-  // Function is also the parameter to the runtime call.
-  __ push(a1);
+  // Push call kind information and function as parameter to the runtime call.
+  __ Push(a1, t1, a1);
 
   __ CallRuntime(function_id, 1);
-  // Restore call kind information.
-  __ pop(t1);
-  // Restore receiver.
-  __ pop(a1);
+  // Restore call kind information and receiver.
+  __ Pop(a1, t1);
 }
 
 
@@ -421,14 +416,11 @@
         __ sb(t0, constructor_count);
         __ Branch(&allocate, ne, t0, Operand(zero_reg));
 
-        __ Push(a1, a2);
-
-        __ push(a1);  // Constructor.
+        __ Push(a1, a2, a1);  // a1 = Constructor.
         // The call will replace the stub, so the countdown is only done once.
         __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
 
-        __ pop(a2);
-        __ pop(a1);
+        __ Pop(a1, a2);
 
         __ bind(&allocate);
       }
@@ -1245,8 +1237,7 @@
 
     // Out of stack space.
     __ lw(a1, MemOperand(fp, kFunctionOffset));
-    __ push(a1);
-    __ push(v0);
+    __ Push(a1, v0);
     __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
     // End of stack check.
 
@@ -1327,8 +1318,7 @@
     // a0: current argument index
     __ bind(&loop);
     __ lw(a1, MemOperand(fp, kArgsOffset));
-    __ push(a1);
-    __ push(a0);
+    __ Push(a1, a0);
 
     // Call the runtime to access the property in the arguments array.
     __ CallRuntime(Runtime::kGetProperty, 2);
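
The multi-register Push/Pop overloads used above fold several stack operations into a single sp adjustment; their ordering matches the single-register sequences they replace in this patch:

    __ Push(a1, t1, a1);   // same ordering as: push(a1); push(t1); push(a1);
    __ Pop(a1, t1);        // same ordering as: pop(t1); pop(a1);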
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index e334b28..a0e990c 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -78,7 +78,7 @@
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
-      Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
+      Runtime::FunctionForId(Runtime::kCreateArrayLiteral)->entry;
 }
 
 
@@ -5156,8 +5156,7 @@
         ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
     FrameScope scope(masm, StackFrame::INTERNAL);
     __ Push(a1, a0);
-    __ push(ra);
-    __ Push(a1, a0);
+    __ Push(ra, a1, a0);
     __ li(t0, Operand(Smi::FromInt(op_)));
     __ addiu(sp, sp, -kPointerSize);
     __ CallExternalReference(miss, 3, USE_DELAY_SLOT);
diff --git a/src/mips/codegen-mips.cc b/src/mips/codegen-mips.cc
index ec66495..904a37d 100644
--- a/src/mips/codegen-mips.cc
+++ b/src/mips/codegen-mips.cc
@@ -671,7 +671,7 @@
         Operand(reinterpret_cast<uint32_t>(stub->instruction_start())));
     patcher.masm()->Call(t9);
     // Record the stub address in the empty space for GetCodeAgeAndParity()
-    patcher.masm()->dd(reinterpret_cast<uint32_t>(stub->instruction_start()));
+    patcher.masm()->emit_code_stub_address(stub);
   }
 }
 
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index cbd0788..b217e06 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -180,8 +180,20 @@
     ASSERT(!info->function()->is_generator() || locals_count == 0);
     if (locals_count > 0) {
       __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
-      for (int i = 0; i < locals_count; i++) {
-        __ push(at);
+      // Emit a loop to initialize stack cells for locals when optimizing for
+      // size. Otherwise, unroll the loop for maximum performance.
+      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
+      if (FLAG_optimize_for_size && locals_count > 4) {
+        Label loop;
+        __ li(a2, Operand(locals_count));
+        __ bind(&loop);
+        __ Subu(a2, a2, 1);
+        __ push(t5);
+        __ Branch(&loop, gt, a2, Operand(zero_reg));
+      } else {
+        for (int i = 0; i < locals_count; i++) {
+          __ push(t5);
+        }
       }
     }
   }
@@ -619,12 +631,11 @@
   Label done;
   __ bind(materialize_true);
   __ LoadRoot(at, Heap::kTrueValueRootIndex);
-  __ push(at);
   __ Branch(&done);
   __ bind(materialize_false);
   __ LoadRoot(at, Heap::kFalseValueRootIndex);
-  __ push(at);
   __ bind(&done);
+  __ push(at);
 }
 
 
@@ -1162,7 +1173,7 @@
       Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                      isolate()));
   RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
-  __ LoadHeapObject(a1, cell);
+  __ li(a1, cell);
   __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
   __ sw(a2, FieldMemOperand(a1, Cell::kValueOffset));
 
@@ -1605,9 +1616,8 @@
   __ jmp(&allocated);
 
   __ bind(&runtime_allocate);
-  __ push(t1);
   __ li(a0, Operand(Smi::FromInt(size)));
-  __ push(a0);
+  __ Push(t1, a0);
   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
   __ pop(t1);
 
@@ -1793,13 +1803,11 @@
     __ CallStub(&stub);
     __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
         1, a1, a2);
-  } else if (expr->depth() > 1) {
+  } else if (expr->depth() > 1 ||
+             Serializer::enabled() ||
+             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ Push(a3, a2, a1);
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
-  } else if (Serializer::enabled() ||
-      length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    __ Push(a3, a2, a1);
-    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
     ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
            FLAG_smi_only_arrays);
@@ -2050,8 +2058,7 @@
       handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
       __ LoadRoot(a2, Heap::kthrow_stringRootIndex);     // "throw"
       __ lw(a3, MemOperand(sp, 1 * kPointerSize));       // iter
-      __ push(a3);                                       // iter
-      __ push(a0);                                       // exception
+      __ Push(a3, a0);                                   // iter, exception
       __ jmp(&l_call);
 
       // try { received = %yield result }
@@ -2089,8 +2096,7 @@
       __ bind(&l_next);
       __ LoadRoot(a2, Heap::knext_stringRootIndex);      // "next"
       __ lw(a3, MemOperand(sp, 1 * kPointerSize));       // iter
-      __ push(a3);                                       // iter
-      __ push(a0);                                       // received
+      __ Push(a3, a0);                                   // iter, received
 
       // result = receiver[f](arg);
       __ bind(&l_call);
@@ -2166,11 +2172,13 @@
   __ Call(&resume_frame);
   __ jmp(&done);
   __ bind(&resume_frame);
-  __ push(ra);  // Return address.
-  __ push(fp);  // Caller's frame pointer.
-  __ mov(fp, sp);
-  __ push(cp);  // Callee's context.
-  __ push(t0);  // Callee's JS Function.
+  // ra = return address.
+  // fp = caller's frame pointer.
+  // cp = callee's context.
+  // t0 = callee's JS function.
+  __ Push(ra, fp, cp, t0);
+  // Adjust FP to point to saved FP.
+  __ Addu(fp, sp, 2 * kPointerSize);
 
   // Load the operand stack size.
   __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
@@ -2201,8 +2209,8 @@
   __ push(a2);
   __ Branch(&push_operand_holes);
   __ bind(&call_resume);
-  __ push(a1);
-  __ push(result_register());
+  ASSERT(!result_register().is(a1));
+  __ Push(a1, result_register());
   __ Push(Smi::FromInt(resume_mode));
   __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
   // Not reached: the runtime call returns elsewhere.
@@ -2432,8 +2440,7 @@
       VisitForStackValue(prop->obj());
       VisitForAccumulatorValue(prop->key());
       __ mov(a1, result_register());
-      __ pop(a2);
-      __ pop(a0);  // Restore value.
+      __ Pop(a0, a2);  // a0 = restored value.
       Handle<Code> ic = is_classic_mode()
         ? isolate()->builtins()->KeyedStoreIC_Initialize()
         : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
@@ -2575,8 +2582,7 @@
   // - a1 is the key,
   // - a2 is the receiver.
   __ mov(a0, result_register());
-  __ pop(a1);  // Key.
-  __ pop(a2);
+  __ Pop(a2, a1);  // a1 = key.
 
   Handle<Code> ic = is_classic_mode()
       ? isolate()->builtins()->KeyedStoreIC_Initialize()
@@ -2704,27 +2710,25 @@
 
 
 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
-  // Push copy of the first argument or undefined if it doesn't exist.
+  // t2: copy of the first argument or undefined if it doesn't exist.
   if (arg_count > 0) {
-    __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
+    __ lw(t2, MemOperand(sp, arg_count * kPointerSize));
   } else {
-    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
   }
-  __ push(a1);
 
-  // Push the receiver of the enclosing function.
+  // t1: the receiver of the enclosing function.
   int receiver_offset = 2 + info_->scope()->num_parameters();
-  __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
-  __ push(a1);
-  // Push the language mode.
-  __ li(a1, Operand(Smi::FromInt(language_mode())));
-  __ push(a1);
+  __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));
 
-  // Push the start position of the scope the calls resides in.
+  // t0: the language mode.
+  __ li(t0, Operand(Smi::FromInt(language_mode())));
+
+  // a1: the start position of the scope the call resides in.
   __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
-  __ push(a1);
 
   // Do the runtime call.
+  __ Push(t2, t1, t0, a1);
   __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
 }
 
@@ -2797,9 +2801,9 @@
     __ bind(&slow);
     // Call the runtime to find the function to call (returned in v0)
     // and the object holding it (returned in v1).
-    __ push(context_register());
+    ASSERT(!context_register().is(a2));
     __ li(a2, Operand(proxy->name()));
-    __ push(a2);
+    __ Push(context_register(), a2);
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
     __ Push(v0, v1);  // Function, receiver.
 
@@ -3511,8 +3515,7 @@
 
   VisitForStackValue(args->at(1));  // index
   VisitForStackValue(args->at(2));  // value
-  __ pop(value);
-  __ pop(index);
+  __ Pop(index, value);
   VisitForAccumulatorValue(args->at(0));  // string
 
   if (FLAG_debug_code) {
@@ -3541,8 +3544,7 @@
 
   VisitForStackValue(args->at(1));  // index
   VisitForStackValue(args->at(2));  // value
-  __ pop(value);
-  __ pop(index);
+  __ Pop(index, value);
   VisitForAccumulatorValue(args->at(0));  // string
 
   if (FLAG_debug_code) {
@@ -4303,9 +4305,9 @@
         } else {
           // Non-global variable.  Call the runtime to try to delete from the
           // context where the variable was introduced.
-          __ push(context_register());
+          ASSERT(!context_register().is(a2));
           __ li(a2, Operand(var->name()));
-          __ push(a2);
+          __ Push(context_register(), a2);
           __ CallRuntime(Runtime::kDeleteContextSlot, 2);
           context()->Plug(v0);
         }
@@ -4537,8 +4539,7 @@
     }
     case KEYED_PROPERTY: {
       __ mov(a0, result_register());  // Value.
-      __ pop(a1);  // Key.
-      __ pop(a2);  // Receiver.
+      __ Pop(a2, a1);  // a1 = key, a2 = receiver.
       Handle<Code> ic = is_classic_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize()
           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
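
Among the full-codegen changes above, the function prologue now chooses between a counted loop and an unrolled sequence when filling local slots with undefined. A small model of that size-versus-speed trade-off, with placeholder constants rather than V8 types:

#include <vector>

static void InitializeLocals(std::vector<int>* stack, int locals_count,
                             bool optimize_for_size) {
  const int kUndefinedSentinel = -1;  // stand-in for the undefined root value
  const int kLoopThreshold = 4;       // mirrors the "> 4" cut-off above
  if (optimize_for_size && locals_count > kLoopThreshold) {
    // Counted loop: constant-size code, one branch per slot at run time.
    for (int remaining = locals_count; remaining > 0; --remaining) {
      stack->push_back(kUndefinedSentinel);
    }
  } else {
    // Unrolled: one push per local, larger code but no loop overhead.
    for (int i = 0; i < locals_count; ++i) {
      stack->push_back(kUndefinedSentinel);
    }
  }
}

int main() {
  std::vector<int> with_loop, unrolled;
  InitializeLocals(&with_loop, 8, true);
  InitializeLocals(&unrolled, 8, false);
  return (with_loop == unrolled) ? 0 : 1;  // both strategies fill 8 slots
}
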
diff --git a/src/mips/ic-mips.cc b/src/mips/ic-mips.cc
index aa27734..9813da4 100644
--- a/src/mips/ic-mips.cc
+++ b/src/mips/ic-mips.cc
@@ -578,8 +578,7 @@
   __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, a0, a3);
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
-    __ push(a2);  // Save the key.
-    __ Push(a1, a2);  // Pass the receiver and the key.
+    __ Push(a2, a1, a2);  // Save the key and pass the receiver and the key.
     __ CallRuntime(Runtime::kKeyedGetProperty, 2);
     __ pop(a2);  // Restore the key.
   }
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index f54d4a5..8fa397b 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -366,7 +366,7 @@
       Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
     } else {
       ASSERT(r.IsSmiOrTagged());
-      __ LoadObject(scratch, literal);
+      __ li(scratch, literal);
     }
     return scratch;
   } else if (op->IsStackSlot() || op->IsArgument()) {
@@ -668,7 +668,7 @@
   } else if (context->IsConstantOperand()) {
     HConstant* constant =
         chunk_->LookupConstant(LConstantOperand::cast(context));
-    __ LoadObject(cp, Handle<Object>::cast(constant->handle(isolate())));
+    __ li(cp, Handle<Object>::cast(constant->handle(isolate())));
   } else {
     UNREACHABLE();
   }
@@ -1647,7 +1647,7 @@
 void LCodeGen::DoConstantT(LConstantT* instr) {
   Handle<Object> value = instr->value(isolate());
   AllowDeferredHandleDereference smi_check;
-  __ LoadObject(ToRegister(instr->result()), value);
+  __ li(ToRegister(instr->result()), value);
 }
 
 
@@ -2651,7 +2651,7 @@
   // offset to the location of the map check.
   Register temp = ToRegister(instr->temp());
   ASSERT(temp.is(t0));
-  __ LoadHeapObject(InstanceofStub::right(), instr->function());
+  __ li(InstanceofStub::right(), instr->function());
   static const int kAdditionalDelta = 7;
   int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
   Label before_push_delta;
@@ -3403,7 +3403,7 @@
 
 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
   ASSERT(ToRegister(instr->context()).is(cp));
-  __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs());
+  __ li(scratch0(), instr->hydrogen()->pairs());
   __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
   // The context is the first argument.
   __ Push(cp, scratch0(), scratch1());
@@ -3440,7 +3440,7 @@
 
   if (can_invoke_directly) {
     if (a1_state == A1_UNINITIALIZED) {
-      __ LoadHeapObject(a1, function);
+      __ li(a1, function);
     }
 
     // Change context.
@@ -4584,9 +4584,7 @@
 
 void LCodeGen::DoInteger32ToSmi(LInteger32ToSmi* instr) {
   LOperand* input = instr->value();
-  ASSERT(input->IsRegister());
   LOperand* output = instr->result();
-  ASSERT(output->IsRegister());
   Register scratch = scratch0();
 
   __ SmiTagCheckOverflow(ToRegister(output), ToRegister(input), scratch);
@@ -4607,6 +4605,19 @@
 }
 
 
+void LCodeGen::DoUint32ToSmi(LUint32ToSmi* instr) {
+  LOperand* input = instr->value();
+  LOperand* output = instr->result();
+  if (!instr->hydrogen()->value()->HasRange() ||
+      !instr->hydrogen()->value()->range()->IsInSmiRange()) {
+    Register scratch = scratch0();
+    __ And(scratch, ToRegister(input), Operand(0xc0000000));
+    DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
+  }
+  __ SmiTag(ToRegister(output), ToRegister(input));
+}
+
+
 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
   class DeferredNumberTagI V8_FINAL : public LDeferredCode {
    public:
@@ -5363,7 +5374,7 @@
   // a2 and t0-t2 are used as temporaries.
   int literal_offset =
       FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
-  __ LoadHeapObject(t3, instr->hydrogen()->literals());
+  __ li(t3, instr->hydrogen()->literals());
   __ lw(a1, FieldMemOperand(t3, literal_offset));
   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
   __ Branch(&materialized, ne, a1, Operand(at));
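
The new DoUint32ToSmi above deoptimizes when the unsigned value cannot be represented as a smi: on a 32-bit target the smi payload is a 31-bit signed integer, so an unsigned value is only taggable when its top two bits are clear. A standalone restatement of that range check; constants and helpers below are illustrative, not V8 code:

#include <cassert>
#include <cstdint>

static bool Uint32FitsInSmi(uint32_t value) {
  return (value & 0xC0000000u) == 0;  // same mask as the And() above
}

static int32_t SmiTag(uint32_t value) {
  assert(Uint32FitsInSmi(value));
  return static_cast<int32_t>(value << 1);  // tag bit 0 stays zero
}

int main() {
  assert(Uint32FitsInSmi(0x3FFFFFFFu));   // largest value that still fits
  assert(!Uint32FitsInSmi(0x40000000u));  // would need the deopt path
  assert(SmiTag(5) == 10);
  return 0;
}
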
diff --git a/src/mips/lithium-codegen-mips.h b/src/mips/lithium-codegen-mips.h
index f643d02..d0909ae 100644
--- a/src/mips/lithium-codegen-mips.h
+++ b/src/mips/lithium-codegen-mips.h
@@ -242,8 +242,6 @@
                          CallKind call_kind,
                          A1State a1_state);
 
-  void LoadHeapObject(Register result, Handle<HeapObject> object);
-
   void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                     SafepointMode safepoint_mode);
 
diff --git a/src/mips/lithium-gap-resolver-mips.cc b/src/mips/lithium-gap-resolver-mips.cc
index 460e13b..3ee7486 100644
--- a/src/mips/lithium-gap-resolver-mips.cc
+++ b/src/mips/lithium-gap-resolver-mips.cc
@@ -256,7 +256,7 @@
       if (cgen_->IsInteger32(constant_source)) {
         __ li(dst, Operand(cgen_->ToRepresentation(constant_source, r)));
       } else {
-        __ LoadObject(dst, cgen_->ToHandle(constant_source));
+        __ li(dst, cgen_->ToHandle(constant_source));
       }
     } else if (destination->IsDoubleRegister()) {
       DoubleRegister result = cgen_->ToDoubleRegister(destination);
@@ -271,8 +271,7 @@
         __ li(kLithiumScratchReg,
               Operand(cgen_->ToRepresentation(constant_source, r)));
       } else {
-        __ LoadObject(kLithiumScratchReg,
-                      cgen_->ToHandle(constant_source));
+        __ li(kLithiumScratchReg, cgen_->ToHandle(constant_source));
       }
       __ sw(kLithiumScratchReg, cgen_->ToMemOperand(destination));
     }
diff --git a/src/mips/lithium-mips.cc b/src/mips/lithium-mips.cc
index fb94bc3..ba7346f 100644
--- a/src/mips/lithium-mips.cc
+++ b/src/mips/lithium-mips.cc
@@ -1934,8 +1934,9 @@
     } else if (to.IsSmi()) {
       HValue* val = instr->value();
       LOperand* value = UseRegister(val);
-      LInstruction* result =
-          DefineSameAsFirst(new(zone()) LInteger32ToSmi(value));
+      LInstruction* result = val->CheckFlag(HInstruction::kUint32)
+          ? DefineSameAsFirst(new(zone()) LUint32ToSmi(value))
+          : DefineSameAsFirst(new(zone()) LInteger32ToSmi(value));
       if (val->HasRange() && val->range()->IsInSmiRange()) {
         return result;
       }
diff --git a/src/mips/lithium-mips.h b/src/mips/lithium-mips.h
index 301be8f..7254d83 100644
--- a/src/mips/lithium-mips.h
+++ b/src/mips/lithium-mips.h
@@ -182,6 +182,7 @@
   V(Typeof)                                     \
   V(TypeofIsAndBranch)                          \
   V(Uint32ToDouble)                             \
+  V(Uint32ToSmi)                                \
   V(UnknownOSRValue)                            \
   V(ValueOf)                                    \
   V(WrapReceiver)
@@ -2052,6 +2053,19 @@
 };
 
 
+class LUint32ToSmi V8_FINAL : public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LUint32ToSmi(LOperand* value) {
+    inputs_[0] = value;
+  }
+
+  LOperand* value() { return inputs_[0]; }
+
+  DECLARE_CONCRETE_INSTRUCTION(Uint32ToSmi, "uint32-to-smi")
+  DECLARE_HYDROGEN_ACCESSOR(Change)
+};
+
+
 class LNumberTagI V8_FINAL : public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LNumberTagI(LOperand* value) {
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index e0cb1ba..c434c31 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -83,19 +83,6 @@
 }
 
 
-void MacroAssembler::LoadHeapObject(Register result,
-                                    Handle<HeapObject> object) {
-  AllowDeferredHandleDereference using_raw_address;
-  if (isolate()->heap()->InNewSpace(*object)) {
-    Handle<Cell> cell = isolate()->factory()->NewCell(object);
-    li(result, Operand(cell));
-    lw(result, FieldMemOperand(result, Cell::kValueOffset));
-  } else {
-    li(result, Operand(object));
-  }
-}
-
-
 // Push and pop all registers that can hold pointers.
 void MacroAssembler::PushSafepointRegisters() {
   // Safepoints expect a block of kNumSafepointRegisters values on the
@@ -768,6 +755,23 @@
 
 //------------Pseudo-instructions-------------
 
+void MacroAssembler::li(Register dst, Handle<Object> value, LiFlags mode) {
+  AllowDeferredHandleDereference smi_check;
+  if (value->IsSmi()) {
+    li(dst, Operand(value), mode);
+  } else {
+    ASSERT(value->IsHeapObject());
+    if (isolate()->heap()->InNewSpace(*value)) {
+      Handle<Cell> cell = isolate()->factory()->NewCell(value);
+      li(dst, Operand(cell));
+      lw(dst, FieldMemOperand(dst, Cell::kValueOffset));
+    } else {
+      li(dst, Operand(value));
+    }
+  }
+}
+
+
 void MacroAssembler::li(Register rd, Operand j, LiFlags mode) {
   ASSERT(!j.is_reg());
   BlockTrampolinePoolScope block_trampoline_pool(this);
@@ -3697,7 +3701,7 @@
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
   // Get the function and setup the context.
-  LoadHeapObject(a1, function);
+  li(a1, function);
   lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
 
   // We call indirectly through the code field in the function to
@@ -4602,14 +4606,14 @@
       Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
       nop(Assembler::CODE_AGE_MARKER_NOP);
       // Save the function's original return address
-      // (it will be clobbered by Call(t9))
+      // (it will be clobbered by Call(t9)).
       mov(at, ra);
-      // Load the stub address to t9 and call it
+      // Load the stub address to t9 and call it.
       li(t9,
          Operand(reinterpret_cast<uint32_t>(stub->instruction_start())));
       Call(t9);
-      // Record the stub address in the empty space for GetCodeAgeAndParity()
-      dd(reinterpret_cast<uint32_t>(stub->instruction_start()));
+      // Record the stub address in the empty space for GetCodeAgeAndParity().
+      emit_code_stub_address(stub);
     } else {
       Push(ra, fp, cp, a1);
       nop(Assembler::CODE_AGE_SEQUENCE_NOP);
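
The li(Register, Handle<Object>) overload added above folds the removed LoadObject/LoadHeapObject helpers into one entry point: smis load as immediates, old-space objects as embedded constants, and new-space objects indirectly through a cell so the GC can still move them. A toy model of just that dispatch, with stand-in types rather than the real V8 classes:

#include <cassert>

enum class LoadKind { kImmediateSmi, kDirectConstant, kThroughCell };

struct FakeValue {
  bool is_smi;
  bool in_new_space;
};

// Decision logic only: which materialization strategy a li-style helper picks.
static LoadKind ClassifyLoad(const FakeValue& value) {
  if (value.is_smi) return LoadKind::kImmediateSmi;       // load as immediate
  return value.in_new_space ? LoadKind::kThroughCell      // movable: via cell
                            : LoadKind::kDirectConstant;  // embed the address
}

int main() {
  assert(ClassifyLoad({true, false}) == LoadKind::kImmediateSmi);
  assert(ClassifyLoad({false, false}) == LoadKind::kDirectConstant);
  assert(ClassifyLoad({false, true}) == LoadKind::kThroughCell);
  return 0;
}
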
diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h
index 0805bb9..0d232e4 100644
--- a/src/mips/macro-assembler-mips.h
+++ b/src/mips/macro-assembler-mips.h
@@ -293,17 +293,6 @@
                  Heap::RootListIndex index,
                  Condition cond, Register src1, const Operand& src2);
 
-  void LoadHeapObject(Register dst, Handle<HeapObject> object);
-
-  void LoadObject(Register result, Handle<Object> object) {
-    AllowDeferredHandleDereference heap_object_check;
-    if (object->IsHeapObject()) {
-      LoadHeapObject(result, Handle<HeapObject>::cast(object));
-    } else {
-      li(result, object);
-    }
-  }
-
   // ---------------------------------------------------------------------------
   // GC Support
 
@@ -620,10 +609,7 @@
   inline void li(Register rd, int32_t j, LiFlags mode = OPTIMIZE_SIZE) {
     li(rd, Operand(j), mode);
   }
-  inline void li(Register dst, Handle<Object> value,
-                 LiFlags mode = OPTIMIZE_SIZE) {
-    li(dst, Operand(value), mode);
-  }
+  void li(Register dst, Handle<Object> value, LiFlags mode = OPTIMIZE_SIZE);
 
   // Push multiple registers on the stack.
   // Registers are saved in numerical order, with higher numbered registers
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index 471c25e..68e2074 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -117,18 +117,14 @@
 }
 
 
-// Helper function used to check that the dictionary doesn't contain
-// the property. This function may return false negatives, so miss_label
-// must always call a backup property check that is complete.
-// This function is safe to call if the receiver has fast properties.
-// Name must be unique and receiver must be a heap object.
-static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
-                                             Label* miss_label,
-                                             Register receiver,
-                                             Handle<Name> name,
-                                             Register scratch0,
-                                             Register scratch1) {
+void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
+                                                    Label* miss_label,
+                                                    Register receiver,
+                                                    Handle<Name> name,
+                                                    Register scratch0,
+                                                    Register scratch1) {
   ASSERT(name->IsUniqueName());
+  ASSERT(!receiver.is(scratch0));
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
   __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
@@ -408,15 +404,12 @@
 }
 
 
-// Generate code to check that a global property cell is empty. Create
-// the property cell at compilation time if no cell exists for the
-// property.
-static void GenerateCheckPropertyCell(MacroAssembler* masm,
-                                      Handle<GlobalObject> global,
-                                      Handle<Name> name,
-                                      Register scratch,
-                                      Label* miss) {
-  Handle<Cell> cell = GlobalObject::EnsurePropertyCell(global, name);
+void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
+                                             Handle<JSGlobalObject> global,
+                                             Handle<Name> name,
+                                             Register scratch,
+                                             Label* miss) {
+  Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
   ASSERT(cell->value()->IsTheHole());
   __ li(scratch, Operand(cell));
   __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
@@ -433,7 +426,7 @@
     Label* miss) {
   if (holder->IsJSGlobalObject()) {
     GenerateCheckPropertyCell(
-        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
+        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
   } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
     GenerateDictionaryNegativeLookup(
         masm, miss, holder_reg, name, scratch1(), scratch2());
@@ -468,7 +461,7 @@
 
   if (details.type() == CONSTANT) {
     Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
-    __ LoadObject(scratch1, constant);
+    __ li(scratch1, constant);
     __ Branch(miss_label, ne, value_reg, Operand(scratch1));
   } else if (FLAG_track_fields && representation.IsSmi()) {
     __ JumpIfNotSmi(value_reg, miss_label);
@@ -844,7 +837,7 @@
   __ sw(cp, MemOperand(sp, FCA::kContextSaveIndex * kPointerSize));
   // Get the function and setup the context.
   Handle<JSFunction> function = optimization.constant_function();
-  __ LoadHeapObject(t1, function);
+  __ li(t1, function);
   __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
   __ sw(t1, MemOperand(sp, FCA::kCalleeIndex * kPointerSize));
 
@@ -1149,19 +1142,17 @@
 };
 
 
-// Calls GenerateCheckPropertyCell for each global object in the prototype chain
-// from object to (but not including) holder.
-static void GenerateCheckPropertyCells(MacroAssembler* masm,
-                                       Handle<JSObject> object,
-                                       Handle<JSObject> holder,
-                                       Handle<Name> name,
-                                       Register scratch,
-                                       Label* miss) {
+void StubCompiler::GenerateCheckPropertyCells(MacroAssembler* masm,
+                                              Handle<JSObject> object,
+                                              Handle<JSObject> holder,
+                                              Handle<Name> name,
+                                              Register scratch,
+                                              Label* miss) {
   Handle<JSObject> current = object;
   while (!current.is_identical_to(holder)) {
-    if (current->IsGlobalObject()) {
+    if (current->IsJSGlobalObject()) {
       GenerateCheckPropertyCell(masm,
-                                Handle<GlobalObject>::cast(current),
+                                Handle<JSGlobalObject>::cast(current),
                                 name,
                                 scratch,
                                 miss);
@@ -1364,26 +1355,6 @@
 }
 
 
-void LoadStubCompiler::NonexistentHandlerFrontend(
-    Handle<JSObject> object,
-    Handle<JSObject> last,
-    Handle<Name> name,
-    Label* success,
-    Handle<GlobalObject> global) {
-  Label miss;
-
-  HandlerFrontendHeader(object, receiver(), last, name, &miss);
-
-  // If the last object in the prototype chain is a global object,
-  // check that the global property cell is empty.
-  if (!global.is_null()) {
-    GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
-  }
-
-  HandlerFrontendFooter(name, success, &miss);
-}
-
-
 void LoadStubCompiler::GenerateLoadField(Register reg,
                                          Handle<JSObject> holder,
                                          PropertyIndex field,
@@ -1405,7 +1376,7 @@
 
 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
   // Return the constant value.
-  __ LoadObject(v0, value);
+  __ li(v0, value);
   __ Ret();
 }
 
@@ -2957,7 +2928,7 @@
     Handle<JSObject> object,
     Handle<JSObject> last,
     Handle<Name> name,
-    Handle<GlobalObject> global) {
+    Handle<JSGlobalObject> global) {
   Label success;
 
   NonexistentHandlerFrontend(object, last, name, &success, global);
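
The shared GenerateCheckPropertyCell helper above guards nonexistent-property handlers on globals: the handler stays valid only while the global's property cell for the name still holds the hole, otherwise the generated code must miss. A simplified model of that check, using a plain map in place of the property dictionary:

#include <cassert>
#include <map>
#include <string>

struct FakeGlobal {
  // One entry per property whose cell holds a real value; a missing entry
  // models a cell that still holds the hole.
  std::map<std::string, int> cells;

  bool CellStillHoldsTheHole(const std::string& name) const {
    return cells.find(name) == cells.end();
  }
};

int main() {
  FakeGlobal global;
  assert(global.CellStillHoldsTheHole("x"));   // nonexistent handler stays valid
  global.cells["x"] = 42;                      // property later defined
  assert(!global.CellStillHoldsTheHole("x"));  // must fall through to a miss
  return 0;
}
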
diff --git a/src/object-observe.js b/src/object-observe.js
index 9c7ac38..718920f 100644
--- a/src/object-observe.js
+++ b/src/object-observe.js
@@ -132,7 +132,8 @@
   'updated',
   'deleted',
   'prototype',
-  'reconfigured'
+  'reconfigured',
+  'preventExtensions'
 ]);
 
 // An Observer is a registration to observe an object by a callback with
@@ -386,8 +387,28 @@
   %SetObserverDeliveryPending();
 }
 
-function ObjectInfoEnqueueChangeRecord(objectInfo, changeRecord,
-                                       skipAccessCheck) {
+function ObjectInfoEnqueueExternalChangeRecord(objectInfo, changeRecord, type) {
+  if (!ObjectInfoHasActiveObservers(objectInfo))
+    return;
+
+  var hasType = !IS_UNDEFINED(type);
+  var newRecord = hasType ?
+      { object: ObjectInfoGetObject(objectInfo), type: type } :
+      { object: ObjectInfoGetObject(objectInfo) };
+
+  for (var prop in changeRecord) {
+    if (prop === 'object' || (hasType && prop === 'type')) continue;
+    %DefineOrRedefineDataProperty(newRecord, prop, changeRecord[prop],
+        READ_ONLY + DONT_DELETE);
+  }
+  ObjectFreeze(newRecord);
+
+  ObjectInfoEnqueueInternalChangeRecord(objectInfo, newRecord,
+                                        true /* skip access check */);
+}
+
+function ObjectInfoEnqueueInternalChangeRecord(objectInfo, changeRecord,
+                                               skipAccessCheck) {
   // TODO(rossberg): adjust once there is a story for symbols vs proxies.
   if (IS_SYMBOL(changeRecord.name)) return;
 
@@ -435,7 +456,7 @@
 
   ObjectFreeze(changeRecord);
   ObjectFreeze(changeRecord.removed);
-  ObjectInfoEnqueueChangeRecord(objectInfo, changeRecord);
+  ObjectInfoEnqueueInternalChangeRecord(objectInfo, changeRecord);
 }
 
 function NotifyChange(type, object, name, oldValue) {
@@ -443,11 +464,22 @@
   if (!ObjectInfoHasActiveObservers(objectInfo))
     return;
 
-  var changeRecord = (arguments.length < 4) ?
-      { type: type, object: object, name: name } :
-      { type: type, object: object, name: name, oldValue: oldValue };
+  var changeRecord;
+  if (arguments.length == 2) {
+    changeRecord = { type: type, object: object };
+  } else if (arguments.length == 3) {
+    changeRecord = { type: type, object: object, name: name };
+  } else {
+    changeRecord = {
+      type: type,
+      object: object,
+      name: name,
+      oldValue: oldValue
+    };
+  }
+
   ObjectFreeze(changeRecord);
-  ObjectInfoEnqueueChangeRecord(objectInfo, changeRecord);
+  ObjectInfoEnqueueInternalChangeRecord(objectInfo, changeRecord);
 }
 
 var notifierPrototype = {};
@@ -462,19 +494,7 @@
   if (!IS_STRING(changeRecord.type))
     throw MakeTypeError("observe_type_non_string");
 
-  if (!ObjectInfoHasActiveObservers(objectInfo))
-    return;
-
-  var newRecord = { object: ObjectInfoGetObject(objectInfo) };
-  for (var prop in changeRecord) {
-    if (prop === 'object') continue;
-    %DefineOrRedefineDataProperty(newRecord, prop, changeRecord[prop],
-        READ_ONLY + DONT_DELETE);
-  }
-  ObjectFreeze(newRecord);
-
-  ObjectInfoEnqueueChangeRecord(objectInfo, newRecord,
-                                true /* skip access check */);
+  ObjectInfoEnqueueExternalChangeRecord(objectInfo, changeRecord);
 }
 
 function ObjectNotifierPerformChange(changeType, changeFn) {
@@ -491,11 +511,16 @@
     throw MakeTypeError("observe_perform_non_function");
 
   ObjectInfoAddPerformingType(objectInfo, changeType);
+
+  var changeRecord;
   try {
-    %_CallFunction(UNDEFINED, changeFn);
+    changeRecord = %_CallFunction(UNDEFINED, changeFn);
   } finally {
     ObjectInfoRemovePerformingType(objectInfo, changeType);
   }
+
+  if (IS_SPEC_OBJECT(changeRecord))
+    ObjectInfoEnqueueExternalChangeRecord(objectInfo, changeRecord, changeType);
 }
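
The new ObjectInfoEnqueueExternalChangeRecord above normalizes records coming from notify() and performChange(): the observed object (and, for performChange, the type) are fixed by the notifier, every other property is copied from the caller's record before it is frozen and enqueued. A rough C++ model of that copy step, with plain maps standing in for JS objects:

#include <cassert>
#include <map>
#include <string>

using Record = std::map<std::string, std::string>;

static Record NormalizeChangeRecord(const Record& change_record,
                                    const std::string& object_id,
                                    const std::string& type /* "" if absent */) {
  Record result;
  result["object"] = object_id;
  const bool has_type = !type.empty();
  if (has_type) result["type"] = type;
  for (const auto& entry : change_record) {
    if (entry.first == "object" || (has_type && entry.first == "type")) {
      continue;  // the caller cannot override these fields
    }
    result[entry.first] = entry.second;
  }
  return result;
}

int main() {
  Record raw = {{"object", "attacker"}, {"type", "updated"}, {"name", "x"}};
  Record normalized = NormalizeChangeRecord(raw, "target", "performed");
  assert(normalized["object"] == "target");
  assert(normalized["type"] == "performed");
  assert(normalized["name"] == "x");
  return 0;
}
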
 
 function ObjectGetNotifier(object) {
diff --git a/src/objects-inl.h b/src/objects-inl.h
index deb3365..bef807e 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -5320,23 +5320,29 @@
 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
+ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
 
 
-// Type feedback slot: type_feedback_info for FUNCTIONs, stub_info for STUBs.
-void Code::InitializeTypeFeedbackInfoNoWriteBarrier(Object* value) {
-  WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
+void Code::WipeOutHeader() {
+  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
+  WRITE_FIELD(this, kHandlerTableOffset, NULL);
+  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
+  // Do not wipe out e.g. a minor key.
+  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
+    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
+  }
 }
 
 
 Object* Code::type_feedback_info() {
   ASSERT(kind() == FUNCTION);
-  return Object::cast(READ_FIELD(this, kTypeFeedbackInfoOffset));
+  return raw_type_feedback_info();
 }
 
 
 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
   ASSERT(kind() == FUNCTION);
-  WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
+  set_raw_type_feedback_info(value, mode);
   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                             value, mode);
 }
@@ -5344,13 +5350,13 @@
 
 Object* Code::next_code_link() {
   CHECK(kind() == OPTIMIZED_FUNCTION);
-  return Object::cast(READ_FIELD(this, kTypeFeedbackInfoOffset));
+  return raw_type_feedback_info();
 }
 
 
 void Code::set_next_code_link(Object* value, WriteBarrierMode mode) {
   CHECK(kind() == OPTIMIZED_FUNCTION);
-  WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
+  set_raw_type_feedback_info(value);
   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                             value, mode);
 }
@@ -5359,8 +5365,7 @@
 int Code::stub_info() {
   ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
          kind() == BINARY_OP_IC || kind() == LOAD_IC);
-  Object* value = READ_FIELD(this, kTypeFeedbackInfoOffset);
-  return Smi::cast(value)->value();
+  return Smi::cast(raw_type_feedback_info())->value();
 }
 
 
@@ -5373,7 +5378,7 @@
          kind() == KEYED_LOAD_IC ||
          kind() == STORE_IC ||
          kind() == KEYED_STORE_IC);
-  WRITE_FIELD(this, kTypeFeedbackInfoOffset, Smi::FromInt(value));
+  set_raw_type_feedback_info(Smi::FromInt(value));
 }
 
 
@@ -5846,10 +5851,17 @@
 }
 
 
-MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
+Handle<Object> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
+  return object->IsJSProxy()
+      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
+      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
+}
+
+
+Object* JSReceiver::GetIdentityHash() {
   return IsJSProxy()
-      ? JSProxy::cast(this)->GetIdentityHash(flag)
-      : JSObject::cast(this)->GetIdentityHash(flag);
+      ? JSProxy::cast(this)->GetIdentityHash()
+      : JSObject::cast(this)->GetIdentityHash();
 }
 
 
@@ -6049,16 +6061,14 @@
 
 template <int entrysize>
 uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
-  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
-  return Smi::cast(maybe_hash->ToObjectChecked())->value();
+  return Smi::cast(key->GetHash())->value();
 }
 
 
 template <int entrysize>
 uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                         Object* other) {
-  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
-  return Smi::cast(maybe_hash->ToObjectChecked())->value();
+  return Smi::cast(other->GetHash())->value();
 }
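
The identity-hash rework above splits a non-allocating read (GetHash/GetIdentityHash, which may report that no hash exists yet) from an allocating GetOrCreateIdentityHash path. A minimal model of that contract, using std::optional in place of undefined and a stand-in receiver type:

#include <cassert>
#include <cstdlib>
#include <optional>

struct FakeReceiver {
  std::optional<int> identity_hash;

  // Non-allocating read; mirrors GetIdentityHash() returning undefined
  // when no hash has been attached yet.
  std::optional<int> GetHash() const { return identity_hash; }

  // Lazily creates and stores a hash; mirrors GetOrCreateIdentityHash().
  int GetOrCreateHash() {
    if (!identity_hash) identity_hash = std::rand() & 0x3FFFFFFF;  // smi-sized
    return *identity_hash;
  }
};

int main() {
  FakeReceiver r;
  assert(!r.GetHash().has_value());  // reading never creates a hash
  int h = r.GetOrCreateHash();
  assert(r.GetHash() == h);          // later reads observe the same hash
  return 0;
}
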
 
 
diff --git a/src/objects.cc b/src/objects.cc
index f7c8917..d5f0443 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -1028,7 +1028,7 @@
 }
 
 
-MaybeObject* Object::GetHash(CreationFlag flag) {
+Object* Object::GetHash() {
   // The object is either a number, a name, an odd-ball,
   // a real JS object, or a Harmony proxy.
   if (IsNumber()) {
@@ -1043,12 +1043,20 @@
     uint32_t hash = Oddball::cast(this)->to_string()->Hash();
     return Smi::FromInt(hash);
   }
-  if (IsJSReceiver()) {
-    return JSReceiver::cast(this)->GetIdentityHash(flag);
-  }
 
-  UNREACHABLE();
-  return Smi::FromInt(0);
+  ASSERT(IsJSReceiver());
+  return JSReceiver::cast(this)->GetIdentityHash();
+}
+
+
+Handle<Object> Object::GetOrCreateHash(Handle<Object> object,
+                                       Isolate* isolate) {
+  Handle<Object> hash(object->GetHash(), isolate);
+  if (hash->IsSmi())
+    return hash;
+
+  ASSERT(object->IsJSReceiver());
+  return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
 }
 
 
@@ -2167,11 +2175,13 @@
     object = handle(JSGlobalObject::cast(*object)->global_receiver(), isolate);
   }
   Handle<Object> args[] = { type, object, name, old_value };
+  int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;
   bool threw;
+
   Execution::Call(isolate,
                   Handle<JSFunction>(isolate->observers_notify_change()),
                   isolate->factory()->undefined_value(),
-                  old_value->IsTheHole() ? 3 : 4, args,
+                  argc, args,
                   &threw);
   ASSERT(!threw);
 }
@@ -3755,7 +3765,7 @@
   Isolate* isolate = proxy->GetIsolate();
 
   // Save identity hash.
-  Handle<Object> hash = JSProxy::GetIdentityHash(proxy, OMIT_CREATION);
+  Handle<Object> hash(proxy->GetIdentityHash(), isolate);
 
   if (proxy->IsJSFunctionProxy()) {
     isolate->factory()->BecomeJSFunction(proxy);
@@ -3767,7 +3777,8 @@
 
   // Inherit identity, if it was present.
   if (hash->IsSmi()) {
-    JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy), Smi::cast(*hash));
+    JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
+                              Handle<Smi>::cast(hash));
   }
 }
 
@@ -4111,29 +4122,6 @@
 }
 
 
-MaybeObject* JSObject::SetLocalPropertyIgnoreAttributesTrampoline(
-    Name* key,
-    Object* value,
-    PropertyAttributes attributes,
-    ValueType value_type,
-    StoreMode mode,
-    ExtensibilityCheck extensibility_check) {
-  // TODO(mstarzinger): The trampoline is a giant hack, don't use it anywhere
-  // else or handlification people will start hating you for all eternity.
-  HandleScope scope(GetIsolate());
-  IdempotentPointerToHandleCodeTrampoline trampoline(GetIsolate());
-  return trampoline.CallWithReturnValue(
-      &JSObject::SetLocalPropertyIgnoreAttributes,
-      Handle<JSObject>(this),
-      Handle<Name>(key),
-      Handle<Object>(value, GetIsolate()),
-      attributes,
-      value_type,
-      mode,
-      extensibility_check);
-}
-
-
 // Set a real local property, even if it is READ_ONLY.  If the property is not
 // present, add it with attributes NONE.  This code is an exact clone of
 // SetProperty, with the check for IsReadOnly and the check for a
@@ -4788,52 +4776,52 @@
 }
 
 
-void JSObject::SetIdentityHash(Handle<JSObject> object, Smi* hash) {
-  CALL_HEAP_FUNCTION_VOID(object->GetIsolate(),
-                          object->SetHiddenProperty(
-                              object->GetHeap()->identity_hash_string(), hash));
+void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) {
+  Isolate* isolate = object->GetIsolate();
+  SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
 }
 
 
-int JSObject::GetIdentityHash(Handle<JSObject> object) {
-  CALL_AND_RETRY_OR_DIE(object->GetIsolate(),
-                        object->GetIdentityHash(ALLOW_CREATION),
-                        return Smi::cast(__object__)->value(),
-                        return 0);
-}
-
-
-MaybeObject* JSObject::GetIdentityHash(CreationFlag flag) {
+Object* JSObject::GetIdentityHash() {
   Object* stored_value = GetHiddenProperty(GetHeap()->identity_hash_string());
-  if (stored_value->IsSmi()) return stored_value;
+  return stored_value->IsSmi() ? stored_value : GetHeap()->undefined_value();
+}
 
-  // Do not generate permanent identity hash code if not requested.
-  if (flag == OMIT_CREATION) return GetHeap()->undefined_value();
 
-  Smi* hash = GenerateIdentityHash();
-  MaybeObject* result = SetHiddenProperty(GetHeap()->identity_hash_string(),
-                                          hash);
-  if (result->IsFailure()) return result;
-  if (result->ToObjectUnchecked()->IsUndefined()) {
+Handle<Object> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) {
+  Handle<Object> hash(object->GetIdentityHash(), object->GetIsolate());
+  if (hash->IsSmi())
+    return hash;
+
+  Isolate* isolate = object->GetIsolate();
+
+  hash = handle(object->GenerateIdentityHash(), isolate);
+  Handle<Object> result = SetHiddenProperty(object,
+      isolate->factory()->identity_hash_string(), hash);
+
+  if (result->IsUndefined()) {
     // Trying to get hash of detached proxy.
-    return Smi::FromInt(0);
+    return handle(Smi::FromInt(0), isolate);
   }
+
   return hash;
 }
 
 
-Handle<Object> JSProxy::GetIdentityHash(Handle<JSProxy> proxy,
-                                        CreationFlag flag) {
-  CALL_HEAP_FUNCTION(proxy->GetIsolate(), proxy->GetIdentityHash(flag), Object);
+Object* JSProxy::GetIdentityHash() {
+  return this->hash();
 }
 
 
-MaybeObject* JSProxy::GetIdentityHash(CreationFlag flag) {
-  Object* hash = this->hash();
-  if (!hash->IsSmi() && flag == ALLOW_CREATION) {
-    hash = GenerateIdentityHash();
-    set_hash(hash);
-  }
+Handle<Object> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) {
+  Isolate* isolate = proxy->GetIsolate();
+
+  Handle<Object> hash(proxy->GetIdentityHash(), isolate);
+  if (hash->IsSmi())
+    return hash;
+
+  hash = handle(proxy->GenerateIdentityHash(), isolate);
+  proxy->set_hash(*hash);
   return hash;
 }
 
@@ -4849,9 +4837,7 @@
     return JSObject::cast(proxy_parent)->GetHiddenProperty(key);
   }
   ASSERT(!IsJSGlobalProxy());
-  MaybeObject* hidden_lookup =
-      GetHiddenPropertiesHashTable(ONLY_RETURN_INLINE_VALUE);
-  Object* inline_value = hidden_lookup->ToObjectUnchecked();
+  Object* inline_value = GetHiddenPropertiesHashTable();
 
   if (inline_value->IsSmi()) {
     // Handle inline-stored identity hash.
@@ -4870,53 +4856,45 @@
 }
 
 
-Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> obj,
+Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object,
                                            Handle<Name> key,
                                            Handle<Object> value) {
-  CALL_HEAP_FUNCTION(obj->GetIsolate(),
-                     obj->SetHiddenProperty(*key, *value),
-                     Object);
-}
+  Isolate* isolate = object->GetIsolate();
 
-
-MaybeObject* JSObject::SetHiddenProperty(Name* key, Object* value) {
   ASSERT(key->IsUniqueName());
-  if (IsJSGlobalProxy()) {
+  if (object->IsJSGlobalProxy()) {
     // For a proxy, use the prototype as target object.
-    Object* proxy_parent = GetPrototype();
+    Handle<Object> proxy_parent(object->GetPrototype(), isolate);
     // If the proxy is detached, return undefined.
-    if (proxy_parent->IsNull()) return GetHeap()->undefined_value();
+    if (proxy_parent->IsNull()) return isolate->factory()->undefined_value();
     ASSERT(proxy_parent->IsJSGlobalObject());
-    return JSObject::cast(proxy_parent)->SetHiddenProperty(key, value);
+    return SetHiddenProperty(Handle<JSObject>::cast(proxy_parent), key, value);
   }
-  ASSERT(!IsJSGlobalProxy());
-  MaybeObject* hidden_lookup =
-      GetHiddenPropertiesHashTable(ONLY_RETURN_INLINE_VALUE);
-  Object* inline_value = hidden_lookup->ToObjectUnchecked();
+  ASSERT(!object->IsJSGlobalProxy());
+
+  Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
 
   // If there is no backing store yet, store the identity hash inline.
   if (value->IsSmi() &&
-      key == GetHeap()->identity_hash_string() &&
+      *key == *isolate->factory()->identity_hash_string() &&
       (inline_value->IsUndefined() || inline_value->IsSmi())) {
-    return SetHiddenPropertiesHashTable(value);
+    return JSObject::SetHiddenPropertiesHashTable(object, value);
   }
 
-  hidden_lookup = GetHiddenPropertiesHashTable(CREATE_NEW_IF_ABSENT);
-  ObjectHashTable* hashtable;
-  if (!hidden_lookup->To(&hashtable)) return hidden_lookup;
+  Handle<ObjectHashTable> hashtable =
+      GetOrCreateHiddenPropertiesHashtable(object);
 
   // If it was found, check if the key is already in the dictionary.
-  MaybeObject* insert_result = hashtable->Put(key, value);
-  ObjectHashTable* new_table;
-  if (!insert_result->To(&new_table)) return insert_result;
-  if (new_table != hashtable) {
+  Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key,
+                                                           value);
+  if (*new_table != *hashtable) {
     // If adding the key expanded the dictionary (i.e., Add returned a new
     // dictionary), store it back to the object.
-    MaybeObject* store_result = SetHiddenPropertiesHashTable(new_table);
-    if (store_result->IsFailure()) return store_result;
+    SetHiddenPropertiesHashTable(object, new_table);
   }
+
   // Return this to mark success.
-  return this;
+  return object;
 }
 
 
@@ -4931,16 +4909,14 @@
     return DeleteHiddenProperty(Handle<JSObject>::cast(proto), key);
   }
 
-  MaybeObject* hidden_lookup =
-      object->GetHiddenPropertiesHashTable(ONLY_RETURN_INLINE_VALUE);
-  Object* inline_value = hidden_lookup->ToObjectUnchecked();
+  Object* inline_value = object->GetHiddenPropertiesHashTable();
 
   // We never delete (inline-stored) identity hashes.
-  ASSERT(*key != isolate->heap()->identity_hash_string());
+  ASSERT(*key != *isolate->factory()->identity_hash_string());
   if (inline_value->IsUndefined() || inline_value->IsSmi()) return;
 
   Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value));
-  PutIntoObjectHashTable(hashtable, key, isolate->factory()->the_hole_value());
+  ObjectHashTable::Put(hashtable, key, isolate->factory()->the_hole_value());
 }
 
 
@@ -4951,10 +4927,8 @@
 }
 
 
-MaybeObject* JSObject::GetHiddenPropertiesHashTable(
-    InitializeHiddenProperties init_option) {
+Object* JSObject::GetHiddenPropertiesHashTable() {
   ASSERT(!IsJSGlobalProxy());
-  Object* inline_value;
   if (HasFastProperties()) {
     // If the object has fast properties, check whether the first slot
     // in the descriptor array matches the hidden string. Since the
@@ -4966,93 +4940,97 @@
       if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
           sorted_index < map()->NumberOfOwnDescriptors()) {
         ASSERT(descriptors->GetType(sorted_index) == FIELD);
-        MaybeObject* maybe_value = this->FastPropertyAt(
-            descriptors->GetDetails(sorted_index).representation(),
+        ASSERT(descriptors->GetDetails(sorted_index).representation().
+               IsCompatibleForLoad(Representation::Tagged()));
+        return this->RawFastPropertyAt(
             descriptors->GetFieldIndex(sorted_index));
-        if (!maybe_value->To(&inline_value)) return maybe_value;
       } else {
-        inline_value = GetHeap()->undefined_value();
+        return GetHeap()->undefined_value();
       }
     } else {
-      inline_value = GetHeap()->undefined_value();
+      return GetHeap()->undefined_value();
     }
   } else {
     PropertyAttributes attributes;
     // You can't install a getter on a property indexed by the hidden string,
     // so we can be sure that GetLocalPropertyPostInterceptor returns a real
     // object.
-    inline_value =
-        GetLocalPropertyPostInterceptor(this,
-                                        GetHeap()->hidden_string(),
-                                        &attributes)->ToObjectUnchecked();
+    return GetLocalPropertyPostInterceptor(this,
+                                           GetHeap()->hidden_string(),
+                                           &attributes)->ToObjectUnchecked();
   }
+}
 
-  if (init_option == ONLY_RETURN_INLINE_VALUE ||
-      inline_value->IsHashTable()) {
-    return inline_value;
-  }
+Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
+    Handle<JSObject> object) {
+  Isolate* isolate = object->GetIsolate();
 
-  ObjectHashTable* hashtable;
   static const int kInitialCapacity = 4;
-  MaybeObject* maybe_obj =
-      ObjectHashTable::Allocate(GetHeap(),
-                                kInitialCapacity,
-                                ObjectHashTable::USE_CUSTOM_MINIMUM_CAPACITY);
-  if (!maybe_obj->To<ObjectHashTable>(&hashtable)) return maybe_obj;
+  Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
+  if (inline_value->IsHashTable()) {
+    return Handle<ObjectHashTable>::cast(inline_value);
+  }
+
+  Handle<ObjectHashTable> hashtable = isolate->factory()->NewObjectHashTable(
+      kInitialCapacity,
+      USE_CUSTOM_MINIMUM_CAPACITY);
 
   if (inline_value->IsSmi()) {
     // We were storing the identity hash inline and now allocated an actual
     // dictionary.  Put the identity hash into the new dictionary.
-    MaybeObject* insert_result =
-        hashtable->Put(GetHeap()->identity_hash_string(), inline_value);
-    ObjectHashTable* new_table;
-    if (!insert_result->To(&new_table)) return insert_result;
-    // We expect no resizing for the first insert.
-    ASSERT_EQ(hashtable, new_table);
+    hashtable = ObjectHashTable::Put(hashtable,
+                                     isolate->factory()->identity_hash_string(),
+                                     inline_value);
   }
 
-  MaybeObject* store_result = SetLocalPropertyIgnoreAttributesTrampoline(
-      GetHeap()->hidden_string(),
+  JSObject::SetLocalPropertyIgnoreAttributes(
+      object,
+      isolate->factory()->hidden_string(),
       hashtable,
       DONT_ENUM,
       OPTIMAL_REPRESENTATION,
       ALLOW_AS_CONSTANT,
       OMIT_EXTENSIBILITY_CHECK);
-  if (store_result->IsFailure()) return store_result;
+
   return hashtable;
 }
 
 
-MaybeObject* JSObject::SetHiddenPropertiesHashTable(Object* value) {
-  ASSERT(!IsJSGlobalProxy());
+Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
+                                                      Handle<Object> value) {
+  ASSERT(!object->IsJSGlobalProxy());
+
+  Isolate* isolate = object->GetIsolate();
+
   // We can store the identity hash inline iff there is no backing store
   // for hidden properties yet.
-  ASSERT(HasHiddenProperties() != value->IsSmi());
-  if (HasFastProperties()) {
+  ASSERT(object->HasHiddenProperties() != value->IsSmi());
+  if (object->HasFastProperties()) {
     // If the object has fast properties, check whether the first slot
     // in the descriptor array matches the hidden string. Since the
     // hidden strings hash code is zero (and no other name has hash
     // code zero) it will always occupy the first entry if present.
-    DescriptorArray* descriptors = this->map()->instance_descriptors();
+    DescriptorArray* descriptors = object->map()->instance_descriptors();
     if (descriptors->number_of_descriptors() > 0) {
       int sorted_index = descriptors->GetSortedKeyIndex(0);
-      if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
-          sorted_index < map()->NumberOfOwnDescriptors()) {
+      if (descriptors->GetKey(sorted_index) == isolate->heap()->hidden_string()
+          && sorted_index < object->map()->NumberOfOwnDescriptors()) {
         ASSERT(descriptors->GetType(sorted_index) == FIELD);
-        FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index), value);
-        return this;
+        object->FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index),
+                                  *value);
+        return object;
       }
     }
   }
-  MaybeObject* store_result = SetLocalPropertyIgnoreAttributesTrampoline(
-      GetHeap()->hidden_string(),
-      value,
-      DONT_ENUM,
-      OPTIMAL_REPRESENTATION,
-      ALLOW_AS_CONSTANT,
-      OMIT_EXTENSIBILITY_CHECK);
-  if (store_result->IsFailure()) return store_result;
-  return this;
+
+  SetLocalPropertyIgnoreAttributes(object,
+                                   isolate->factory()->hidden_string(),
+                                   value,
+                                   DONT_ENUM,
+                                   OPTIMAL_REPRESENTATION,
+                                   ALLOW_AS_CONSTANT,
+                                   OMIT_EXTENSIBILITY_CHECK);
+  return object;
 }
 
 
@@ -5443,6 +5421,9 @@
 
 Handle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
   Isolate* isolate = object->GetIsolate();
+
+  if (!object->map()->is_extensible()) return object;
+
   if (object->IsAccessCheckNeeded() &&
       !isolate->MayNamedAccess(*object,
                                isolate->heap()->undefined_value(),
@@ -5485,6 +5466,11 @@
   new_map->set_is_extensible(false);
   object->set_map(*new_map);
   ASSERT(!object->map()->is_extensible());
+
+  if (FLAG_harmony_observation && object->map()->is_observed()) {
+    EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
+                        isolate->factory()->the_hole_value());
+  }
   return object;
 }
 
@@ -5513,6 +5499,7 @@
 Handle<Object> JSObject::Freeze(Handle<JSObject> object) {
   // Freezing non-strict arguments should be handled elsewhere.
   ASSERT(!object->HasNonStrictArgumentsElements());
+  ASSERT(!object->map()->is_observed());
 
   if (object->map()->is_frozen()) return object;
 
@@ -5837,7 +5824,9 @@
     case FAST_HOLEY_ELEMENTS: {
       Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
       if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
-        isolate->counters()->cow_arrays_created_runtime()->Increment();
+        if (copying) {
+          isolate->counters()->cow_arrays_created_runtime()->Increment();
+        }
 #ifdef DEBUG
         for (int i = 0; i < elements->length(); i++) {
           ASSERT(!elements->get(i)->IsJSObject());
@@ -9884,6 +9873,48 @@
 }
 
 
+void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
+  if (function->has_initial_map()) return;
+  Isolate* isolate = function->GetIsolate();
+
+  // First create a new map with the size and number of in-object properties
+  // suggested by the function.
+  InstanceType instance_type;
+  int instance_size;
+  int in_object_properties;
+  if (function->shared()->is_generator()) {
+    instance_type = JS_GENERATOR_OBJECT_TYPE;
+    instance_size = JSGeneratorObject::kSize;
+    in_object_properties = 0;
+  } else {
+    instance_type = JS_OBJECT_TYPE;
+    instance_size = function->shared()->CalculateInstanceSize();
+    in_object_properties = function->shared()->CalculateInObjectProperties();
+  }
+  Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size);
+
+  // Fetch or allocate prototype.
+  Handle<Object> prototype;
+  if (function->has_instance_prototype()) {
+    prototype = handle(function->instance_prototype(), isolate);
+  } else {
+    prototype = isolate->factory()->NewFunctionPrototype(function);
+  }
+  map->set_inobject_properties(in_object_properties);
+  map->set_unused_property_fields(in_object_properties);
+  map->set_prototype(*prototype);
+  ASSERT(map->has_fast_object_elements());
+
+  if (!function->shared()->is_generator()) {
+    function->shared()->StartInobjectSlackTracking(*map);
+  }
+
+  // Finally link initial map and constructor function.
+  function->set_initial_map(*map);
+  map->set_constructor(*function);
+}
+
+
 void JSFunction::SetInstanceClassName(String* name) {
   shared()->set_instance_class_name(name);
 }
@@ -10317,13 +10348,14 @@
 
 void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
   ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
-  VisitPointer(rinfo->target_object_address());
+  Object* p = rinfo->target_object();
+  VisitPointer(&p);
 }
 
 
 void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
-  Address* p = rinfo->target_reference_address();
-  VisitExternalReference(p);
+  Address p = rinfo->target_reference();
+  VisitExternalReference(&p);
 }
 
 
@@ -10631,12 +10663,25 @@
 }
 
 
+static Code::Age EffectiveAge(Code::Age age) {
+  if (age == Code::kNotExecutedCodeAge) {
+    // Treat code that's never been executed as old immediately.
+    age = Code::kIsOldCodeAge;
+  } else if (age == Code::kExecutedOnceCodeAge) {
+    // Pre-age code that has only been executed once.
+    age = Code::kPreAgedCodeAge;
+  }
+  return age;
+}
+
+
 void Code::MakeOlder(MarkingParity current_parity) {
   byte* sequence = FindCodeAgeSequence();
   if (sequence != NULL) {
     Age age;
     MarkingParity code_parity;
     GetCodeAgeAndParity(sequence, &age, &code_parity);
+    age = EffectiveAge(age);
     if (age != kLastCodeAge && code_parity != current_parity) {
       PatchPlatformCodeAge(GetIsolate(),
                            sequence,
@@ -10648,8 +10693,7 @@
 
 
 bool Code::IsOld() {
-  Age age = GetAge();
-  return age >= kIsOldCodeAge;
+  return GetAge() >= kIsOldCodeAge;
 }
 
 
@@ -10664,9 +10708,14 @@
 
 
 Code::Age Code::GetAge() {
+  return EffectiveAge(GetRawAge());
+}
+
+
+Code::Age Code::GetRawAge() {
   byte* sequence = FindCodeAgeSequence();
   if (sequence == NULL) {
-    return Code::kNoAgeCodeAge;
+    return kNoAgeCodeAge;
   }
   Age age;
   MarkingParity parity;
@@ -10697,15 +10746,13 @@
 #undef HANDLE_CODE_AGE
   stub = *builtins->MarkCodeAsExecutedOnce();
   if (code == stub) {
-    // Treat that's never been executed as old immediatly.
-    *age = kIsOldCodeAge;
+    *age = kNotExecutedCodeAge;
     *parity = NO_MARKING_PARITY;
     return;
   }
   stub = *builtins->MarkCodeAsExecutedTwice();
   if (code == stub) {
-    // Pre-age code that has only been executed once.
-    *age = kPreAgedCodeAge;
+    *age = kExecutedOnceCodeAge;
     *parity = NO_MARKING_PARITY;
     return;
   }
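
The code-aging hunks above introduce kNotExecutedCodeAge and kExecutedOnceCodeAge as raw ages and map them through EffectiveAge(): never-executed code is treated as already old, once-executed code as pre-aged, and everything else passes through unchanged. A standalone restatement of that mapping; the enum values are stand-ins for the real Code::Age constants:

#include <cassert>

enum Age {
  kNotExecutedCodeAge,
  kExecutedOnceCodeAge,
  kPreAgedCodeAge,
  kNoAgeCodeAge,
  kIsOldCodeAge
};

static Age EffectiveAge(Age age) {
  if (age == kNotExecutedCodeAge) return kIsOldCodeAge;      // never ran: old
  if (age == kExecutedOnceCodeAge) return kPreAgedCodeAge;   // ran once: pre-aged
  return age;                                                // otherwise unchanged
}

int main() {
  assert(EffectiveAge(kNotExecutedCodeAge) == kIsOldCodeAge);
  assert(EffectiveAge(kExecutedOnceCodeAge) == kPreAgedCodeAge);
  assert(EffectiveAge(kNoAgeCodeAge) == kNoAgeCodeAge);
  return 0;
}
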
@@ -14749,8 +14796,8 @@
 }
 
 
-Handle<PropertyCell> GlobalObject::EnsurePropertyCell(
-    Handle<GlobalObject> global,
+Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
+    Handle<JSGlobalObject> global,
     Handle<Name> name) {
   ASSERT(!global->HasFastProperties());
   int entry = global->property_dictionary()->FindEntry(*name);
@@ -15138,7 +15185,7 @@
       HashTable<Shape, Key>::Allocate(
           heap,
           at_least_space_for,
-          HashTable<Shape, Key>::USE_DEFAULT_MINIMUM_CAPACITY,
+          USE_DEFAULT_MINIMUM_CAPACITY,
           pretenure);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
@@ -15704,61 +15751,99 @@
 }
 
 
+Handle<ObjectHashSet> ObjectHashSet::EnsureCapacity(
+    Handle<ObjectHashSet> table,
+    int n,
+    Handle<Object> key,
+    PretenureFlag pretenure) {
+  Handle<HashTable<ObjectHashTableShape<1>, Object*> > table_base = table;
+  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
+                     table_base->EnsureCapacity(n, *key, pretenure),
+                     ObjectHashSet);
+}
+
+
+Handle<ObjectHashSet> ObjectHashSet::Shrink(Handle<ObjectHashSet> table,
+                                            Handle<Object> key) {
+  Handle<HashTable<ObjectHashTableShape<1>, Object*> > table_base = table;
+  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
+                     table_base->Shrink(*key),
+                     ObjectHashSet);
+}
+
+
 bool ObjectHashSet::Contains(Object* key) {
   ASSERT(IsKey(key));
 
   // If the object does not have an identity hash, it was never used as a key.
-  { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
-    if (maybe_hash->ToObjectUnchecked()->IsUndefined()) return false;
-  }
+  Object* hash = key->GetHash();
+  if (hash->IsUndefined()) return false;
+
   return (FindEntry(key) != kNotFound);
 }
 
 
-MaybeObject* ObjectHashSet::Add(Object* key) {
-  ASSERT(IsKey(key));
+Handle<ObjectHashSet> ObjectHashSet::Add(Handle<ObjectHashSet> table,
+                                         Handle<Object> key) {
+  ASSERT(table->IsKey(*key));
 
   // Make sure the key object has an identity hash code.
-  int hash;
-  { MaybeObject* maybe_hash = key->GetHash(ALLOW_CREATION);
-    if (maybe_hash->IsFailure()) return maybe_hash;
-    ASSERT(key->GetHash(OMIT_CREATION) == maybe_hash);
-    hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value();
-  }
-  int entry = FindEntry(key);
+  Handle<Object> object_hash = Object::GetOrCreateHash(key,
+                                                       table->GetIsolate());
+
+  int entry = table->FindEntry(*key);
 
   // Check whether key is already present.
-  if (entry != kNotFound) return this;
+  if (entry != kNotFound) return table;
 
   // Check whether the hash set should be extended and add entry.
-  Object* obj;
-  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
-    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
-  }
-  ObjectHashSet* table = ObjectHashSet::cast(obj);
-  entry = table->FindInsertionEntry(hash);
-  table->set(EntryToIndex(entry), key);
-  table->ElementAdded();
-  return table;
+  Handle<ObjectHashSet> new_table =
+      ObjectHashSet::EnsureCapacity(table, 1, key);
+  entry = new_table->FindInsertionEntry(Smi::cast(*object_hash)->value());
+  new_table->set(EntryToIndex(entry), *key);
+  new_table->ElementAdded();
+  return new_table;
 }
 
 
-MaybeObject* ObjectHashSet::Remove(Object* key) {
-  ASSERT(IsKey(key));
+Handle<ObjectHashSet> ObjectHashSet::Remove(Handle<ObjectHashSet> table,
+                                            Handle<Object> key) {
+  ASSERT(table->IsKey(*key));
 
   // If the object does not have an identity hash, it was never used as a key.
-  { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
-    if (maybe_hash->ToObjectUnchecked()->IsUndefined()) return this;
-  }
-  int entry = FindEntry(key);
+  if (key->GetHash()->IsUndefined()) return table;
+
+  int entry = table->FindEntry(*key);
 
   // Check whether key is actually present.
-  if (entry == kNotFound) return this;
+  if (entry == kNotFound) return table;
 
   // Remove entry and try to shrink this hash set.
-  set_the_hole(EntryToIndex(entry));
-  ElementRemoved();
-  return Shrink(key);
+  table->set_the_hole(EntryToIndex(entry));
+  table->ElementRemoved();
+
+  return ObjectHashSet::Shrink(table, key);
+}
+
+
+Handle<ObjectHashTable> ObjectHashTable::EnsureCapacity(
+    Handle<ObjectHashTable> table,
+    int n,
+    Handle<Object> key,
+    PretenureFlag pretenure) {
+  Handle<HashTable<ObjectHashTableShape<2>, Object*> > table_base = table;
+  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
+                     table_base->EnsureCapacity(n, *key, pretenure),
+                     ObjectHashTable);
+}
+
+
+Handle<ObjectHashTable> ObjectHashTable::Shrink(
+    Handle<ObjectHashTable> table, Handle<Object> key) {
+  Handle<HashTable<ObjectHashTableShape<2>, Object*> > table_base = table;
+  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
+                     table_base->Shrink(*key),
+                     ObjectHashTable);
 }
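
These wrappers keep ObjectHashSet and ObjectHashTable callers in handle land while the underlying HashTable methods still return raw MaybeObject* values; CALL_HEAP_FUNCTION is assumed here to retry the raw allocation (collecting garbage if needed) and re-wrap the result, handing back an empty handle on failure. A toy model of that retry-and-rewrap contract in plain C++, with no V8 types:

// Toy model (plain C++, not V8 code) of the retry-and-rewrap contract that
// the CALL_HEAP_FUNCTION wrappers above are assumed to provide: the raw call
// may fail transiently, the wrapper retries it, and the caller only ever
// receives the final, successfully allocated object.
#include <cstdio>

static int* RawAllocate(int* scratch, int* attempts) {
  // Fails twice, then "allocates" -- stands in for an allocation that only
  // succeeds after the real macro has run a garbage collection.
  return (++*attempts < 3) ? NULL : scratch;
}

static int* CallWithRetry(int* scratch, int max_attempts) {
  int attempts = 0;
  for (int i = 0; i < max_attempts; ++i) {
    int* result = RawAllocate(scratch, &attempts);
    if (result != NULL) return result;
  }
  return NULL;  // corresponds to returning an empty handle
}

int main() {
  int value = 42;
  int* p = CallWithRetry(&value, 5);
  std::printf("%d\n", p ? *p : -1);  // prints 42
  return 0;
}
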
 
 
@@ -15766,10 +15851,9 @@
   ASSERT(IsKey(key));
 
   // If the object does not have an identity hash, it was never used as a key.
-  { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
-    if (maybe_hash->ToObjectUnchecked()->IsUndefined()) {
-      return GetHeap()->the_hole_value();
-    }
+  Object* hash = key->GetHash();
+  if (hash->IsUndefined()) {
+    return GetHeap()->the_hole_value();
   }
   int entry = FindEntry(key);
   if (entry == kNotFound) return GetHeap()->the_hole_value();
@@ -15777,38 +15861,36 @@
 }
 
 
-MaybeObject* ObjectHashTable::Put(Object* key, Object* value) {
-  ASSERT(IsKey(key));
+Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
+                                             Handle<Object> key,
+                                             Handle<Object> value) {
+  ASSERT(table->IsKey(*key));
+
+  Isolate* isolate = table->GetIsolate();
 
   // Make sure the key object has an identity hash code.
-  int hash;
-  { MaybeObject* maybe_hash = key->GetHash(ALLOW_CREATION);
-    if (maybe_hash->IsFailure()) return maybe_hash;
-    ASSERT(key->GetHash(OMIT_CREATION) == maybe_hash);
-    hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value();
-  }
-  int entry = FindEntry(key);
+  Handle<Object> hash = Object::GetOrCreateHash(key, isolate);
+
+  int entry = table->FindEntry(*key);
 
   // Check whether to perform removal operation.
   if (value->IsTheHole()) {
-    if (entry == kNotFound) return this;
-    RemoveEntry(entry);
-    return Shrink(key);
+    if (entry == kNotFound) return table;
+    table->RemoveEntry(entry);
+    return Shrink(table, key);
   }
 
   // Key is already in table, just overwrite value.
   if (entry != kNotFound) {
-    set(EntryToIndex(entry) + 1, value);
-    return this;
+    table->set(EntryToIndex(entry) + 1, *value);
+    return table;
   }
 
   // Check whether the hash table should be extended.
-  Object* obj;
-  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
-    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
-  }
-  ObjectHashTable* table = ObjectHashTable::cast(obj);
-  table->AddEntry(table->FindInsertionEntry(hash), key, value);
+  table = EnsureCapacity(table, 1, key);
+  table->AddEntry(table->FindInsertionEntry(Handle<Smi>::cast(hash)->value()),
+                  *key,
+                  *value);
   return table;
 }
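
Because Put, Add and Remove may grow or shrink the backing store, they return the table to use from now on, and every caller has to write that result back; the runtime.cc hunks further down do exactly this. A usage sketch mirroring those call sites (holder, key and value stand for handles already in scope):

// Usage sketch mirroring the call sites in src/runtime.cc below; the returned
// table must be stored back because Put may have reallocated the backing store.
Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
Handle<ObjectHashTable> new_table = ObjectHashTable::Put(table, key, value);
holder->set_table(*new_table);  // never keep using the pre-Put table

Reusing the old handle after the call would silently operate on a table that is no longer the holder's backing store.
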
 
diff --git a/src/objects.h b/src/objects.h
index e8c9850..aa91ac1 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -255,13 +255,6 @@
 };
 
 
-// Indicates whether a get method should implicitly create the object looked up.
-enum CreationFlag {
-  ALLOW_CREATION,
-  OMIT_CREATION
-};
-
-
 // Indicates whether transitions can be added to a source map or not.
 enum TransitionFlag {
   INSERT_TRANSITION,
@@ -1509,10 +1502,17 @@
   // Return the object's prototype (might be Heap::null_value()).
   Object* GetPrototype(Isolate* isolate);
 
+  // Returns the permanent hash code associated with this object. May return
+  // undefined if not yet created.
+  Object* GetHash();
+
   // Returns the permanent hash code associated with this object depending on
-  // the actual object type.  Might return a failure in case no hash was
-  // created yet or GC was caused by creation.
-  MUST_USE_RESULT MaybeObject* GetHash(CreationFlag flag);
+  // the actual object type. May create and store a hash code if needed and none
+  // exists.
+  // TODO(rafaelw): Remove isolate parameter when objects.cc is fully
+  // handlified.
+  static Handle<Object> GetOrCreateHash(Handle<Object> object,
+                                        Isolate* isolate);
 
   // Checks whether this object has the same value as the given one.  This
   // function is implemented according to ES5, section 9.12 and can be used
@@ -2003,8 +2003,13 @@
   inline Object* GetConstructor();
 
   // Retrieves a permanent object identity hash code. The undefined value might
-  // be returned in case no hash was created yet and OMIT_CREATION was used.
-  inline MUST_USE_RESULT MaybeObject* GetIdentityHash(CreationFlag flag);
+  // be returned in case no hash was created yet.
+  inline Object* GetIdentityHash();
+
+  // Retrieves a permanent object identity hash code. May create and store a
+  // hash code if needed and none exists.
+  inline static Handle<Object> GetOrCreateIdentityHash(
+      Handle<JSReceiver> object);
 
   // Lookup a property.  If found, the result is valid and has
   // detailed information.
@@ -2036,6 +2041,9 @@
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSReceiver);
 };
 
+// Forward declaration for JSObject::GetOrCreateHiddenPropertiesHashTable.
+class ObjectHashTable;
+
 // The JSObject describes real heap allocated JavaScript objects with
 // properties.
 // Note that the map of JSObject changes during execution to enable inline
@@ -2185,15 +2193,6 @@
   // or returns an empty handle if such a map is not yet available.
   static Handle<Object> TryMigrateInstance(Handle<JSObject> instance);
 
-  // Can cause GC.
-  MUST_USE_RESULT MaybeObject* SetLocalPropertyIgnoreAttributesTrampoline(
-      Name* key,
-      Object* value,
-      PropertyAttributes attributes,
-      ValueType value_type = OPTIMAL_REPRESENTATION,
-      StoreMode mode = ALLOW_AS_CONSTANT,
-      ExtensibilityCheck extensibility_check = PERFORM_EXTENSIBILITY_CHECK);
-
   // Retrieve a value in a normalized object given a lookup result.
   // Handles the special representation of JS global objects.
   Object* GetNormalizedProperty(LookupResult* result);
@@ -2287,11 +2286,9 @@
 
   // Sets a hidden property on this object. Returns this object if successful,
   // undefined if called on a detached proxy.
-  static Handle<Object> SetHiddenProperty(Handle<JSObject> obj,
+  static Handle<Object> SetHiddenProperty(Handle<JSObject> object,
                                           Handle<Name> key,
                                           Handle<Object> value);
-  // Returns a failure if a GC is required.
-  MUST_USE_RESULT MaybeObject* SetHiddenProperty(Name* key, Object* value);
   // Gets the value of a hidden property with the given key. Returns the hole
   // if the property doesn't exist (or if called on a detached proxy),
   // otherwise returns the value set for the key.
@@ -2303,8 +2300,7 @@
   // Returns true if the object has a property with the hidden string as name.
   bool HasHiddenProperties();
 
-  static int GetIdentityHash(Handle<JSObject> object);
-  static void SetIdentityHash(Handle<JSObject> object, Smi* hash);
+  static void SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash);
 
   inline void ValidateElements();
 
@@ -2858,23 +2854,25 @@
                                  Handle<Object> accessor,
                                  PropertyAttributes attributes);
 
-  enum InitializeHiddenProperties {
-    CREATE_NEW_IF_ABSENT,
-    ONLY_RETURN_INLINE_VALUE
-  };
 
-  // If create_if_absent is true, return the hash table backing store
-  // for hidden properties.  If there is no backing store, allocate one.
-  // If create_if_absent is false, return the hash table backing store
-  // or the inline stored identity hash, whatever is found.
-  MUST_USE_RESULT MaybeObject* GetHiddenPropertiesHashTable(
-      InitializeHiddenProperties init_option);
+  // Return the hash table backing store or the inline stored identity hash,
+  // whichever is found.
+  MUST_USE_RESULT Object* GetHiddenPropertiesHashTable();
+
+  // Return the hash table backing store for hidden properties.  If there is no
+  // backing store, allocate one.
+  static Handle<ObjectHashTable> GetOrCreateHiddenPropertiesHashtable(
+      Handle<JSObject> object);
+
   // Set the hidden property backing store to either a hash table or
   // the inline-stored identity hash.
-  MUST_USE_RESULT MaybeObject* SetHiddenPropertiesHashTable(
-      Object* value);
+  static Handle<Object> SetHiddenPropertiesHashTable(
+      Handle<JSObject> object,
+      Handle<Object> value);
 
-  MUST_USE_RESULT MaybeObject* GetIdentityHash(CreationFlag flag);
+  MUST_USE_RESULT Object* GetIdentityHash();
+
+  static Handle<Object> GetOrCreateIdentityHash(Handle<JSObject> object);
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSObject);
 };
@@ -3484,11 +3482,6 @@
 template<typename Shape, typename Key>
 class HashTable: public FixedArray {
  public:
-  enum MinimumCapacity {
-    USE_DEFAULT_MINIMUM_CAPACITY,
-    USE_CUSTOM_MINIMUM_CAPACITY
-  };
-
   // Wrapper methods
   inline uint32_t Hash(Key key) {
     if (Shape::UsesSeed) {
@@ -3599,6 +3592,9 @@
   void Rehash(Key key);
 
  protected:
+  friend class ObjectHashSet;
+  friend class ObjectHashTable;
+
   // Find the entry at which to insert element with the given key that
   // has the given hash value.
   uint32_t FindInsertionEntry(uint32_t hash);
@@ -4062,11 +4058,23 @@
   // Looks up whether the given key is part of this hash set.
   bool Contains(Object* key);
 
+  static Handle<ObjectHashSet> EnsureCapacity(
+      Handle<ObjectHashSet> table,
+      int n,
+      Handle<Object> key,
+      PretenureFlag pretenure = NOT_TENURED);
+
+  // Attempt to shrink hash table after removal of key.
+  static Handle<ObjectHashSet> Shrink(Handle<ObjectHashSet> table,
+                                      Handle<Object> key);
+
   // Adds the given key to this hash set.
-  MUST_USE_RESULT MaybeObject* Add(Object* key);
+  static Handle<ObjectHashSet> Add(Handle<ObjectHashSet> table,
+                                   Handle<Object> key);
 
   // Removes the given key from this hash set.
-  MUST_USE_RESULT MaybeObject* Remove(Object* key);
+  static Handle<ObjectHashSet> Remove(Handle<ObjectHashSet> table,
+                                      Handle<Object> key);
 };
 
 
@@ -4079,13 +4087,25 @@
     return reinterpret_cast<ObjectHashTable*>(obj);
   }
 
+  static Handle<ObjectHashTable> EnsureCapacity(
+      Handle<ObjectHashTable> table,
+      int n,
+      Handle<Object> key,
+      PretenureFlag pretenure = NOT_TENURED);
+
+  // Attempt to shrink hash table after removal of key.
+  static Handle<ObjectHashTable> Shrink(Handle<ObjectHashTable> table,
+                                        Handle<Object> key);
+
   // Looks up the value associated with the given key. The hole value is
   // returned in case the key is not present.
   Object* Lookup(Object* key);
 
   // Adds (or overwrites) the value associated with the given key. Mapping a
   // key to the hole value causes removal of the whole entry.
-  MUST_USE_RESULT MaybeObject* Put(Object* key, Object* value);
+  static Handle<ObjectHashTable> Put(Handle<ObjectHashTable> table,
+                                     Handle<Object> key,
+                                     Handle<Object> value);
 
  private:
   friend class MarkCompactCollector;
@@ -5015,13 +5035,15 @@
   // [deoptimization_data]: Array containing data for deopt.
   DECL_ACCESSORS(deoptimization_data, FixedArray)
 
-  // [type_feedback_info]: This field stores various things, depending on the
-  // kind of the code object.
+  // [raw_type_feedback_info]: This field stores various things, depending on
+  // the kind of the code object.
   //   FUNCTION           => type feedback information.
   //   STUB               => various things, e.g. a SMI
   //   OPTIMIZED_FUNCTION => the next_code_link for optimized code list.
-  DECL_ACCESSORS(type_feedback_info, Object)
-  inline void InitializeTypeFeedbackInfoNoWriteBarrier(Object* value);
+  DECL_ACCESSORS(raw_type_feedback_info, Object)
+  inline Object* type_feedback_info();
+  inline void set_type_feedback_info(
+      Object* value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
   inline int stub_info();
   inline void set_stub_info(int info);
 
@@ -5196,6 +5218,15 @@
 
   void ReplaceNthCell(int n, Cell* replace_with);
 
+  // The entire code object including its header is copied verbatim to the
+  // snapshot so that it can be written in one, fast, memcpy during
+  // deserialization. The deserializer will overwrite some pointers, rather
+  // like a runtime linker, but the random allocation addresses used in the
+  // mksnapshot process would still be present in the unlinked snapshot data,
+  // which would make snapshot production non-reproducible. This method wipes
+  // out the to-be-overwritten header data for reproducible snapshots.
+  inline void WipeOutHeader();
+
   class ExtraICStateStrictMode: public BitField<StrictModeFlag, 0, 1> {};
   class ExtraICStateKeyedAccessStoreMode:
       public BitField<KeyedAccessStoreMode, 1, 4> {};  // NOLINT
@@ -5317,8 +5348,9 @@
     kNoAgeCodeAge = 0,
     CODE_AGE_LIST(DECLARE_CODE_AGE_ENUM)
     kAfterLastCodeAge,
+    kFirstCodeAge = kNotExecutedCodeAge,
     kLastCodeAge = kAfterLastCodeAge - 1,
-    kCodeAgeCount = kAfterLastCodeAge - 1,
+    kCodeAgeCount = kAfterLastCodeAge - kNotExecutedCodeAge - 1,
     kIsOldCodeAge = kSexagenarianCodeAge,
     kPreAgedCodeAge = kIsOldCodeAge - 1
   };
@@ -5334,6 +5366,9 @@
   static bool IsYoungSequence(byte* sequence);
   bool IsOld();
   Age GetAge();
+  // Gets the raw code age, including pseudo code-age values such as
+  // kNotExecutedCodeAge and kExecutedOnceCodeAge.
+  Age GetRawAge();
   static inline Code* GetPreAgedCodeAgeStub(Isolate* isolate) {
     return GetCodeAgeStub(isolate, kNotExecutedCodeAge, NO_MARKING_PARITY);
   }
@@ -7239,6 +7274,7 @@
   inline Map* initial_map();
   inline void set_initial_map(Map* value);
   inline bool has_initial_map();
+  static void EnsureHasInitialMap(Handle<JSFunction> function);
 
   // Get and set the prototype property on a JSFunction. If the
   // function has an initial map the prototype is set on the initial
@@ -7389,10 +7425,6 @@
     return answer;
   }
 
-  // Ensure that the global object has a cell for the given property name.
-  static Handle<PropertyCell> EnsurePropertyCell(Handle<GlobalObject> global,
-                                                 Handle<Name> name);
-
   // Casting.
   static inline GlobalObject* cast(Object* obj);
 
@@ -7414,6 +7446,10 @@
   // Casting.
   static inline JSGlobalObject* cast(Object* obj);
 
+  // Ensure that the global object has a cell for the given property name.
+  static Handle<PropertyCell> EnsurePropertyCell(Handle<JSGlobalObject> global,
+                                                 Handle<Name> name);
+
   // Dispatched behavior.
   DECLARE_PRINTER(JSGlobalObject)
   DECLARE_VERIFIER(JSGlobalObject)
@@ -9357,9 +9393,9 @@
                                                  uint32_t index,
                                                  DeleteMode mode);
 
-  MUST_USE_RESULT MaybeObject* GetIdentityHash(CreationFlag flag);
-  static Handle<Object> GetIdentityHash(Handle<JSProxy> proxy,
-                                        CreationFlag flag);
+  MUST_USE_RESULT Object* GetIdentityHash();
+
+  static Handle<Object> GetOrCreateIdentityHash(Handle<JSProxy> proxy);
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSProxy);
 };
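
The declarations above replace the old CreationFlag parameter with two entry points: GetHash() is a read-only query that reports a missing identity hash as undefined, while GetOrCreateHash() may allocate and therefore works on handles. A short sketch of the two sides, mirroring how Contains() and Add() in objects.cc use them (key_handle and isolate are assumed to be in scope):

// Sketch of the split contract declared above; key_handle and isolate are
// assumed names for a live handle and the current isolate.
Object* raw = *key_handle;
if (raw->GetHash()->IsUndefined()) {
  // No identity hash yet, so the key cannot appear in any hash table.
}
Handle<Object> hash = Object::GetOrCreateHash(key_handle, isolate);
int hash_value = Smi::cast(*hash)->value();  // always a Smi after creation
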
diff --git a/src/runtime.cc b/src/runtime.cc
index 15cfc85..dd36a53 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -575,32 +575,6 @@
 }
 
 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteralShallow) {
-  HandleScope scope(isolate);
-  ASSERT(args.length() == 3);
-  CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 0);
-  CONVERT_SMI_ARG_CHECKED(literals_index, 1);
-  CONVERT_ARG_HANDLE_CHECKED(FixedArray, elements, 2);
-
-  Handle<AllocationSite> site = GetLiteralAllocationSite(isolate, literals,
-      literals_index, elements);
-  RETURN_IF_EMPTY_HANDLE(isolate, site);
-
-  JSObject* boilerplate = JSObject::cast(site->transition_info());
-  if (boilerplate->elements()->map() ==
-      isolate->heap()->fixed_cow_array_map()) {
-    isolate->counters()->cow_arrays_created_runtime()->Increment();
-  }
-
-  if (AllocationSite::GetMode(boilerplate->GetElementsKind()) ==
-      TRACK_ALLOCATION_SITE) {
-    return isolate->heap()->CopyJSObject(boilerplate, *site);
-  }
-
-  return isolate->heap()->CopyJSObject(boilerplate);
-}
-
-
 RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateSymbol) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
@@ -1413,7 +1387,7 @@
   CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0);
   Handle<Object> key(args[1], isolate);
   Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
-  table = ObjectHashSetAdd(table, key);
+  table = ObjectHashSet::Add(table, key);
   holder->set_table(*table);
   return isolate->heap()->undefined_value();
 }
@@ -1435,7 +1409,7 @@
   CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0);
   Handle<Object> key(args[1], isolate);
   Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
-  table = ObjectHashSetRemove(table, key);
+  table = ObjectHashSet::Remove(table, key);
   holder->set_table(*table);
   return isolate->heap()->undefined_value();
 }
@@ -1490,7 +1464,7 @@
   Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
   Handle<Object> lookup(table->Lookup(*key), isolate);
   Handle<ObjectHashTable> new_table =
-      PutIntoObjectHashTable(table, key, isolate->factory()->the_hole_value());
+      ObjectHashTable::Put(table, key, isolate->factory()->the_hole_value());
   holder->set_table(*new_table);
   return isolate->heap()->ToBoolean(!lookup->IsTheHole());
 }
@@ -1503,7 +1477,7 @@
   CONVERT_ARG_HANDLE_CHECKED(Object, key, 1);
   CONVERT_ARG_HANDLE_CHECKED(Object, value, 2);
   Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
-  Handle<ObjectHashTable> new_table = PutIntoObjectHashTable(table, key, value);
+  Handle<ObjectHashTable> new_table = ObjectHashTable::Put(table, key, value);
   holder->set_table(*new_table);
   return isolate->heap()->undefined_value();
 }
@@ -1569,7 +1543,7 @@
       weak_collection->table()));
   Handle<Object> lookup(table->Lookup(*key), isolate);
   Handle<ObjectHashTable> new_table =
-      PutIntoObjectHashTable(table, key, isolate->factory()->the_hole_value());
+      ObjectHashTable::Put(table, key, isolate->factory()->the_hole_value());
   weak_collection->set_table(*new_table);
   return isolate->heap()->ToBoolean(!lookup->IsTheHole());
 }
@@ -1583,7 +1557,7 @@
   Handle<Object> value(args[2], isolate);
   Handle<ObjectHashTable> table(
       ObjectHashTable::cast(weak_collection->table()));
-  Handle<ObjectHashTable> new_table = PutIntoObjectHashTable(table, key, value);
+  Handle<ObjectHashTable> new_table = ObjectHashTable::Put(table, key, value);
   weak_collection->set_table(*new_table);
   return isolate->heap()->undefined_value();
 }
@@ -3000,30 +2974,28 @@
 
 
 RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateJSGeneratorObject) {
-  SealHandleScope shs(isolate);
+  HandleScope scope(isolate);
   ASSERT(args.length() == 0);
 
   JavaScriptFrameIterator it(isolate);
   JavaScriptFrame* frame = it.frame();
-  JSFunction* function = frame->function();
+  Handle<JSFunction> function(frame->function());
   RUNTIME_ASSERT(function->shared()->is_generator());
 
-  JSGeneratorObject* generator;
+  Handle<JSGeneratorObject> generator;
   if (frame->IsConstructor()) {
-    generator = JSGeneratorObject::cast(frame->receiver());
+    generator = handle(JSGeneratorObject::cast(frame->receiver()));
   } else {
-    MaybeObject* maybe_generator =
-        isolate->heap()->AllocateJSGeneratorObject(function);
-    if (!maybe_generator->To(&generator)) return maybe_generator;
+    generator = isolate->factory()->NewJSGeneratorObject(function);
   }
-  generator->set_function(function);
+  generator->set_function(*function);
   generator->set_context(Context::cast(frame->context()));
   generator->set_receiver(frame->receiver());
   generator->set_continuation(0);
   generator->set_operand_stack(isolate->heap()->empty_fixed_array());
   generator->set_stack_handler_index(-1);
 
-  return generator;
+  return *generator;
 }
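
The generator hunk above is a typical handlification: once the factory call can allocate (and so trigger a GC), every raw pointer taken from the frame is wrapped first and only dereferenced with * afterwards, which is also why SealHandleScope had to become a real HandleScope. The same steps in condensed form (frame and isolate as in the function above):

// Condensed restatement of the handlification pattern in the hunk above.
Handle<JSFunction> function(frame->function());          // protect before...
Handle<JSGeneratorObject> generator =
    isolate->factory()->NewJSGeneratorObject(function);  // ...this may GC
generator->set_function(*function);                      // handles survive GC
return *generator;                                       // raw value for the caller
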
 
 
diff --git a/src/runtime.h b/src/runtime.h
index 1b7e32e..55276f8 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -300,7 +300,6 @@
   F(MaterializeRegExpLiteral, 4, 1)\
   F(CreateObjectLiteral, 4, 1) \
   F(CreateArrayLiteral, 3, 1) \
-  F(CreateArrayLiteralShallow, 3, 1) \
   \
   /* Harmony generators */ \
   F(CreateJSGeneratorObject, 0, 1) \
diff --git a/src/serialize.cc b/src/serialize.cc
index 7ed3666..3cc7053 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -1668,12 +1668,12 @@
 
 
 void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
-  Object** current = rinfo->target_object_address();
+  Object* current = rinfo->target_object();
 
   int skip = OutputRawData(rinfo->target_address_address(),
                            kCanReturnSkipInsteadOfSkipping);
   HowToCode representation = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
-  serializer_->SerializeObject(*current, representation, kStartOfObject, skip);
+  serializer_->SerializeObject(current, representation, kStartOfObject, skip);
   bytes_processed_so_far_ += rinfo->target_address_size();
 }
 
@@ -1694,12 +1694,12 @@
   Address references_start = rinfo->target_address_address();
   int skip = OutputRawData(references_start, kCanReturnSkipInsteadOfSkipping);
 
-  Address* current = rinfo->target_reference_address();
+  Address current = rinfo->target_reference();
   int representation = rinfo->IsCodedSpecially() ?
                        kFromCode + kStartOfObject : kPlain + kStartOfObject;
   sink_->Put(kExternalReference + representation, "ExternalRef");
   sink_->PutInt(skip, "SkipB4ExternalRef");
-  int reference_id = serializer_->EncodeExternalReference(*current);
+  int reference_id = serializer_->EncodeExternalReference(current);
   sink_->PutInt(reference_id, "reference id");
   bytes_processed_so_far_ += rinfo->target_address_size();
 }
@@ -1776,10 +1776,29 @@
 }
 
 
+static Code* CloneCodeObject(HeapObject* code) {
+  Address copy = new byte[code->Size()];
+  OS::MemCopy(copy, code->address(), code->Size());
+  return Code::cast(HeapObject::FromAddress(copy));
+}
+
+
+static void WipeOutRelocations(Code* code) {
+  int mode_mask =
+      RelocInfo::kCodeTargetMask |
+      RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
+      RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
+      RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
+  for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
+    it.rinfo()->WipeOut();
+  }
+}
+
+
 int Serializer::ObjectSerializer::OutputRawData(
     Address up_to, Serializer::ObjectSerializer::ReturnSkip return_skip) {
   Address object_start = object_->address();
-  Address base = object_start + bytes_processed_so_far_;
+  int base = bytes_processed_so_far_;
   int up_to_offset = static_cast<int>(up_to - object_start);
   int to_skip = up_to_offset - bytes_processed_so_far_;
   int bytes_to_output = to_skip;
@@ -1809,10 +1828,22 @@
       sink_->Put(kRawData, "RawData");
       sink_->PutInt(bytes_to_output, "length");
     }
-    for (int i = 0; i < bytes_to_output; i++) {
-      unsigned int data = base[i];
-      sink_->PutSection(data, "Byte");
+
+    // To make snapshots reproducible, we need to wipe out all pointers in code.
+    if (code_object_) {
+      Code* code = CloneCodeObject(object_);
+      WipeOutRelocations(code);
+      // We need to wipe out the header fields *after* wiping out the
+      // relocations, because some of these fields are needed for the latter.
+      code->WipeOutHeader();
+      object_start = code->address();
     }
+
+    const char* description = code_object_ ? "Code" : "Byte";
+    for (int i = 0; i < bytes_to_output; i++) {
+      sink_->PutSection(object_start[base + i], description);
+    }
+    if (code_object_) delete[] object_start;
   }
   if (to_skip != 0 && return_skip == kIgnoringReturn) {
     sink_->Put(kSkip, "Skip");
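
The ordering in the hunk above is deliberate: WipeOutRelocations walks the clone with a RelocIterator, which still needs header fields that WipeOutHeader is about to clear, so relocations are scrubbed first. The sequence in condensed form (same helpers as above):

// Condensed restatement of the serializer path above: emit a scrubbed clone
// so mksnapshot's random allocation addresses never reach the snapshot bytes.
Code* clone = CloneCodeObject(object_);  // byte-for-byte copy on the C++ heap
WipeOutRelocations(clone);               // needs the header, so run this first
clone->WipeOutHeader();                  // then clear the to-be-relinked fields
// ...emit clone->address()[base + i] instead of the live object's bytes...
delete[] clone->address();               // frees the new[]'d copy

Only the emitted copy is scrubbed; the live code object is left untouched.
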
diff --git a/src/spaces.cc b/src/spaces.cc
index fe5eeb5..f35db69 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -1122,6 +1122,11 @@
 }
 
 
+void PagedSpace::IncreaseCapacity(int size) {
+  accounting_stats_.ExpandSpace(size);
+}
+
+
 void PagedSpace::ReleasePage(Page* page, bool unlink) {
   ASSERT(page->LiveBytes() == 0);
   ASSERT(AreaSize() == page->area_size());
@@ -1511,6 +1516,7 @@
   initial_capacity_ = RoundDown(initial_capacity, Page::kPageSize);
   capacity_ = initial_capacity;
   maximum_capacity_ = RoundDown(maximum_capacity, Page::kPageSize);
+  maximum_committed_ = 0;
   committed_ = false;
   start_ = start;
   address_mask_ = ~(maximum_capacity - 1);
@@ -1543,6 +1549,7 @@
     current = new_page;
   }
 
+  SetCapacity(capacity_);
   committed_ = true;
   Reset();
   return true;
@@ -1591,7 +1598,7 @@
       start_ + capacity_, delta, executable())) {
     return false;
   }
-  capacity_ = new_capacity;
+  SetCapacity(new_capacity);
   NewSpacePage* last_page = anchor()->prev_page();
   ASSERT(last_page != anchor());
   for (int i = pages_before; i < pages_after; i++) {
@@ -1631,7 +1638,7 @@
     ASSERT((current_page_ >= first_page()) && (current_page_ <= new_last_page));
   }
 
-  capacity_ = new_capacity;
+  SetCapacity(new_capacity);
 
   return true;
 }
@@ -1694,6 +1701,14 @@
 }
 
 
+void SemiSpace::SetCapacity(int new_capacity) {
+  capacity_ = new_capacity;
+  if (capacity_ > maximum_committed_) {
+    maximum_committed_ = capacity_;
+  }
+}
+
+
 void SemiSpace::set_age_mark(Address mark) {
   ASSERT(NewSpacePage::FromLimit(mark)->semi_space() == this);
   age_mark_ = mark;
@@ -2938,6 +2953,7 @@
 bool LargeObjectSpace::SetUp() {
   first_page_ = NULL;
   size_ = 0;
+  maximum_committed_ = 0;
   page_count_ = 0;
   objects_size_ = 0;
   chunk_map_.Clear();
@@ -2984,6 +3000,10 @@
   page->set_next_page(first_page_);
   first_page_ = page;
 
+  if (size_ > maximum_committed_) {
+    maximum_committed_ = size_;
+  }
+
   // Register all MemoryChunk::kAlignment-aligned chunks covered by
   // this large page in the chunk map.
   uintptr_t base = reinterpret_cast<uintptr_t>(page) / MemoryChunk::kAlignment;
diff --git a/src/spaces.h b/src/spaces.h
index 2cd92c5..db0415b 100644
--- a/src/spaces.h
+++ b/src/spaces.h
@@ -1388,6 +1388,7 @@
   // Zero out all the allocation statistics (i.e., no capacity).
   void Clear() {
     capacity_ = 0;
+    max_capacity_ = 0;
     size_ = 0;
     waste_ = 0;
   }
@@ -1406,6 +1407,7 @@
 
   // Accessors for the allocation statistics.
   intptr_t Capacity() { return capacity_; }
+  intptr_t MaxCapacity() { return max_capacity_; }
   intptr_t Size() { return size_; }
   intptr_t Waste() { return waste_; }
 
@@ -1415,6 +1417,9 @@
   void ExpandSpace(int size_in_bytes) {
     capacity_ += size_in_bytes;
     size_ += size_in_bytes;
+    if (capacity_ > max_capacity_) {
+      max_capacity_ = capacity_;
+    }
     ASSERT(size_ >= 0);
   }
 
@@ -1448,6 +1453,7 @@
 
  private:
   intptr_t capacity_;
+  intptr_t max_capacity_;
   intptr_t size_;
   intptr_t waste_;
 };
@@ -1689,6 +1695,9 @@
   // spaces this equals the capacity.
   intptr_t CommittedMemory() { return Capacity(); }
 
+  // The maximum amount of memory ever committed for this space.
+  intptr_t MaximumCommittedMemory() { return accounting_stats_.MaxCapacity(); }
+
   // Approximate amount of physical memory committed for this space.
   size_t CommittedPhysicalMemory();
 
@@ -1795,9 +1804,7 @@
     accounting_stats_.AllocateBytes(bytes);
   }
 
-  void IncreaseCapacity(int size) {
-    accounting_stats_.ExpandSpace(size);
-  }
+  void IncreaseCapacity(int size);
 
   // Releases an unused page and shrinks the space.
   void ReleasePage(Page* page, bool unlink);
@@ -2207,6 +2214,9 @@
 
   static void Swap(SemiSpace* from, SemiSpace* to);
 
+  // Returns the maximum amount of memory ever committed by the semi space.
+  size_t MaximumCommittedMemory() { return maximum_committed_; }
+
   // Approximate amount of physical memory committed for this space.
   size_t CommittedPhysicalMemory();
 
@@ -2215,6 +2225,9 @@
   // Copies the flags into the masked positions on all pages in the space.
   void FlipPages(intptr_t flags, intptr_t flag_mask);
 
+  // Updates Capacity and MaximumCommitted based on new capacity.
+  void SetCapacity(int new_capacity);
+
   NewSpacePage* anchor() { return &anchor_; }
 
   // The current and maximum capacity of the space.
@@ -2222,6 +2235,8 @@
   int maximum_capacity_;
   int initial_capacity_;
 
+  intptr_t maximum_committed_;
+
   // The start address of the space.
   Address start_;
   // Used to govern object promotion during mark-compact collection.
@@ -2407,6 +2422,12 @@
     return Capacity();
   }
 
+  // Return the maximum amount of memory ever committed for new space.
+  intptr_t MaximumCommittedMemory() {
+    return to_space_.MaximumCommittedMemory() +
+        from_space_.MaximumCommittedMemory();
+  }
+
   // Approximate amount of physical memory committed for this space.
   size_t CommittedPhysicalMemory();
 
@@ -2802,6 +2823,10 @@
     return objects_size_;
   }
 
+  intptr_t MaximumCommittedMemory() {
+    return maximum_committed_;
+  }
+
   intptr_t CommittedMemory() {
     return Size();
   }
@@ -2853,6 +2878,7 @@
 
  private:
   intptr_t max_capacity_;
+  intptr_t maximum_committed_;
   // The head of the linked list of large object chunks.
   LargePage* first_page_;
   intptr_t size_;  // allocated bytes
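
The spaces.cc and spaces.h hunks above all record the same statistic in the same way: a high-water mark that is raised whenever committed memory grows and never lowered when it shrinks, which is what the new MaximumCommittedMemory accessors report. A standalone sketch of the pattern (plain C++, not V8 code):

// Standalone sketch of the high-water-mark bookkeeping shared by
// AllocationStats::ExpandSpace, SemiSpace::SetCapacity and the large object
// space: committed memory can shrink, but the maximum only moves up.
#include <cstdio>

class CommittedTracker {
 public:
  CommittedTracker() : committed_(0), maximum_committed_(0) {}
  void SetCommitted(long bytes) {
    committed_ = bytes;
    if (committed_ > maximum_committed_) maximum_committed_ = committed_;
  }
  long MaximumCommitted() const { return maximum_committed_; }
 private:
  long committed_;
  long maximum_committed_;
};

int main() {
  CommittedTracker t;
  t.SetCommitted(4096);
  t.SetCommitted(1024);  // shrinking does not lower the maximum
  std::printf("%ld\n", t.MaximumCommitted());  // prints 4096
  return 0;
}
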
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 67002a3..1ec00d4 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -177,12 +177,12 @@
   Handle<Name> cache_name = factory()->empty_string();
   Handle<JSObject> current;
   Handle<Object> next = receiver;
-  Handle<GlobalObject> global;
+  Handle<JSGlobalObject> global;
   do {
     current = Handle<JSObject>::cast(next);
     next = Handle<Object>(current->GetPrototype(), isolate_);
-    if (current->IsGlobalObject()) {
-      global = Handle<GlobalObject>::cast(current);
+    if (current->IsJSGlobalObject()) {
+      global = Handle<JSGlobalObject>::cast(current);
       cache_name = name;
     } else if (!current->HasFastProperties()) {
       cache_name = name;
@@ -1208,6 +1208,40 @@
 }
 
 
+void LoadStubCompiler::NonexistentHandlerFrontend(
+    Handle<JSObject> object,
+    Handle<JSObject> last,
+    Handle<Name> name,
+    Label* success,
+    Handle<JSGlobalObject> global) {
+  Label miss;
+
+  Register holder =
+      HandlerFrontendHeader(object, receiver(), last, name, &miss);
+
+  if (!last->HasFastProperties() &&
+      !last->IsJSGlobalObject() &&
+      !last->IsJSGlobalProxy()) {
+    if (!name->IsUniqueName()) {
+      ASSERT(name->IsString());
+      name = factory()->InternalizeString(Handle<String>::cast(name));
+    }
+    ASSERT(last->property_dictionary()->FindEntry(*name) ==
+        NameDictionary::kNotFound);
+    GenerateDictionaryNegativeLookup(masm(), &miss, holder, name,
+                                     scratch2(), scratch3());
+  }
+
+  // If the last object in the prototype chain is a global object,
+  // check that the global property cell is empty.
+  if (!global.is_null()) {
+    GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
+  }
+
+  HandlerFrontendFooter(name, success, &miss);
+}
+
+
 Handle<Code> LoadStubCompiler::CompileLoadField(
     Handle<JSObject> object,
     Handle<JSObject> holder,
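
The negative dictionary lookup added to NonexistentHandlerFrontend above closes a gap: dictionary-mode objects keep their map when properties are added, so a map check alone cannot prove that a property is still absent, and the nonexistent-load handler now re-probes the last object's dictionary at runtime. The declaration added to stub-cache.h below spells out the contract: the fast check may return false negatives, which is only safe because its miss path runs the complete lookup. A standalone sketch of that contract (plain C++, not V8 code):

// Standalone sketch of the false-negative contract stated in the
// GenerateDictionaryNegativeLookup declaration below: the fast probe may give
// up ("maybe present"), but must never claim absence wrongly, because only
// the miss path performs the full, exact lookup.
#include <cstdio>

enum FastAnswer { DEFINITELY_ABSENT, MAYBE_PRESENT };

static bool SlowLookup(const char* name) { return false; }  // full, exact check
static FastAnswer FastNegativeLookup(const char* name) {
  // A bounded probe over the dictionary; giving up early is allowed.
  return MAYBE_PRESENT;
}

static bool PropertyExists(const char* name) {
  if (FastNegativeLookup(name) == DEFINITELY_ABSENT) return false;
  return SlowLookup(name);  // the "miss label" of the generated handler
}

int main() {
  std::printf("%d\n", PropertyExists("x") ? 1 : 0);  // prints 0
  return 0;
}
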
diff --git a/src/stub-cache.h b/src/stub-cache.h
index 38bc7a3..42685b2 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -434,6 +434,18 @@
                                                   int index,
                                                   Register prototype);
 
+  // Helper function used to check that the dictionary doesn't contain
+  // the property. This function may return false negatives, so miss_label
+  // must always call a backup property check that is complete.
+  // This function is safe to call if the receiver has fast properties.
+  // Name must be unique and receiver must be a heap object.
+  static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
+                                               Label* miss_label,
+                                               Register receiver,
+                                               Handle<Name> name,
+                                               Register r0,
+                                               Register r1);
+
   // Generates prototype loading code that uses the objects from the
   // context we were in when this function was called. If the context
   // has changed, a jump to miss is performed. This ties the generated
@@ -469,6 +481,24 @@
                                             Register scratch2,
                                             Label* miss_label);
 
+  // Generate code to check that a global property cell is empty. Create
+  // the property cell at compilation time if no cell exists for the
+  // property.
+  static void GenerateCheckPropertyCell(MacroAssembler* masm,
+                                        Handle<JSGlobalObject> global,
+                                        Handle<Name> name,
+                                        Register scratch,
+                                        Label* miss);
+
+  // Calls GenerateCheckPropertyCell for each global object in the prototype
+  // chain from object to (but not including) holder.
+  static void GenerateCheckPropertyCells(MacroAssembler* masm,
+                                         Handle<JSObject> object,
+                                         Handle<JSObject> holder,
+                                         Handle<Name> name,
+                                         Register scratch,
+                                         Label* miss);
+
   static void TailCallBuiltin(MacroAssembler* masm, Builtins::Name name);
 
   // Generates code that verifies that the property holder has not changed
@@ -673,7 +703,7 @@
   Handle<Code> CompileLoadNonexistent(Handle<JSObject> object,
                                       Handle<JSObject> last,
                                       Handle<Name> name,
-                                      Handle<GlobalObject> global);
+                                      Handle<JSGlobalObject> global);
 
   Handle<Code> CompileLoadGlobal(Handle<JSObject> object,
                                  Handle<GlobalObject> holder,
@@ -704,7 +734,7 @@
                                   Handle<JSObject> last,
                                   Handle<Name> name,
                                   Label* success,
-                                  Handle<GlobalObject> global);
+                                  Handle<JSGlobalObject> global);
 
   void GenerateLoadField(Register reg,
                          Handle<JSObject> holder,
diff --git a/src/typedarray.js b/src/typedarray.js
index 1e67bc3..f2b5d2d 100644
--- a/src/typedarray.js
+++ b/src/typedarray.js
@@ -34,59 +34,71 @@
 
 
 // --------------- Typed Arrays ---------------------
+macro TYPED_ARRAYS(FUNCTION)
+// arrayIds below should be synchronized with Runtime_TypedArrayInitialize.
+FUNCTION(1, Uint8Array, 1)
+FUNCTION(2, Int8Array, 1)
+FUNCTION(3, Uint16Array, 2)
+FUNCTION(4, Int16Array, 2)
+FUNCTION(5, Uint32Array, 4)
+FUNCTION(6, Int32Array, 4)
+FUNCTION(7, Float32Array, 4)
+FUNCTION(8, Float64Array, 8)
+FUNCTION(9, Uint8ClampedArray, 1)
+endmacro
 
-function CreateTypedArrayConstructor(name, elementSize, arrayId, constructor) {
-  function ConstructByArrayBuffer(obj, buffer, byteOffset, length) {
-    var offset = ToPositiveInteger(byteOffset, "invalid_typed_array_length")
+macro TYPED_ARRAY_CONSTRUCTOR(ARRAY_ID, NAME, ELEMENT_SIZE)
+  function NAMEConstructor(arg1, arg2, arg3) {
+    function ConstructByArrayBuffer(obj, buffer, byteOffset, length) {
+      var offset = ToPositiveInteger(byteOffset, "invalid_typed_array_length")
 
-    if (offset % elementSize !== 0) {
-      throw MakeRangeError("invalid_typed_array_alignment",
-          "start offset", name, elementSize);
-    }
-    var bufferByteLength = %ArrayBufferGetByteLength(buffer);
-    if (offset > bufferByteLength) {
-      throw MakeRangeError("invalid_typed_array_offset");
-    }
-
-    var newByteLength;
-    var newLength;
-    if (IS_UNDEFINED(length)) {
-      if (bufferByteLength % elementSize !== 0) {
+      if (offset % ELEMENT_SIZE !== 0) {
         throw MakeRangeError("invalid_typed_array_alignment",
-          "byte length", name, elementSize);
+            "start offset", "NAME", ELEMENT_SIZE);
       }
-      newByteLength = bufferByteLength - offset;
-      newLength = newByteLength / elementSize;
-    } else {
-      var newLength = ToPositiveInteger(length, "invalid_typed_array_length");
-      newByteLength = newLength * elementSize;
-    }
-    if (offset + newByteLength > bufferByteLength) {
-      throw MakeRangeError("invalid_typed_array_length");
-    }
-    %TypedArrayInitialize(obj, arrayId, buffer, offset, newByteLength);
-  }
+      var bufferByteLength = %ArrayBufferGetByteLength(buffer);
+      if (offset > bufferByteLength) {
+        throw MakeRangeError("invalid_typed_array_offset");
+      }
 
-  function ConstructByLength(obj, length) {
-    var l = ToPositiveInteger(length, "invalid_typed_array_length");
-    var byteLength = l * elementSize;
-    var buffer = new $ArrayBuffer(byteLength);
-    %TypedArrayInitialize(obj, arrayId, buffer, 0, byteLength);
-  }
+      var newByteLength;
+      var newLength;
+      if (IS_UNDEFINED(length)) {
+        if (bufferByteLength % ELEMENT_SIZE !== 0) {
+          throw MakeRangeError("invalid_typed_array_alignment",
+            "byte length", "NAME", ELEMENT_SIZE);
+        }
+        newByteLength = bufferByteLength - offset;
+        newLength = newByteLength / ELEMENT_SIZE;
+      } else {
+        var newLength = ToPositiveInteger(length, "invalid_typed_array_length");
+        newByteLength = newLength * ELEMENT_SIZE;
+      }
+      if (offset + newByteLength > bufferByteLength) {
+        throw MakeRangeError("invalid_typed_array_length");
+      }
+      %TypedArrayInitialize(obj, ARRAY_ID, buffer, offset, newByteLength);
+    }
 
-  function ConstructByArrayLike(obj, arrayLike) {
-    var length = arrayLike.length;
-    var l = ToPositiveInteger(length, "invalid_typed_array_length");
-    if(!%TypedArrayInitializeFromArrayLike(obj, arrayId, arrayLike, l)) {
-      for (var i = 0; i < l; i++) {
-        // It is crucial that we let any execptions from arrayLike[i]
-        // propagate outside the function.
-        obj[i] = arrayLike[i];
+    function ConstructByLength(obj, length) {
+      var l = ToPositiveInteger(length, "invalid_typed_array_length");
+      var byteLength = l * ELEMENT_SIZE;
+      var buffer = new $ArrayBuffer(byteLength);
+      %TypedArrayInitialize(obj, ARRAY_ID, buffer, 0, byteLength);
+    }
+
+    function ConstructByArrayLike(obj, arrayLike) {
+      var length = arrayLike.length;
+      var l = ToPositiveInteger(length, "invalid_typed_array_length");
+      if(!%TypedArrayInitializeFromArrayLike(obj, ARRAY_ID, arrayLike, l)) {
+        for (var i = 0; i < l; i++) {
+          // It is crucial that we let any exceptions from arrayLike[i]
+          // propagate outside the function.
+          obj[i] = arrayLike[i];
+        }
       }
     }
-  }
 
-  return function (arg1, arg2, arg3) {
     if (%_IsConstructCall()) {
       if (IS_ARRAYBUFFER(arg1)) {
         ConstructByArrayBuffer(this, arg1, arg2, arg3);
@@ -97,10 +109,12 @@
         ConstructByArrayLike(this, arg1);
       }
     } else {
-      throw MakeTypeError("constructor_not_function", [name])
+      throw MakeTypeError("constructor_not_function", ["NAME"])
     }
   }
-}
+endmacro
+
+TYPED_ARRAYS(TYPED_ARRAY_CONSTRUCTOR)
 
 function TypedArrayGetBuffer() {
   return %TypedArrayGetBuffer(this);
@@ -247,10 +261,8 @@
 
 // -------------------------------------------------------------------
 
-function SetupTypedArray(arrayId, name, constructor, elementSize) {
+function SetupTypedArray(constructor, fun, elementSize) {
   %CheckIsBootstrapping();
-  var fun = CreateTypedArrayConstructor(name, elementSize,
-                                        arrayId, constructor);
   %SetCode(constructor, fun);
   %FunctionSetPrototype(constructor, new $Object());
 
@@ -272,17 +284,12 @@
   ));
 }
 
-// arrayIds below should be synchronized with Runtime_TypedArrayInitialize.
-SetupTypedArray(1, "Uint8Array", global.Uint8Array, 1);
-SetupTypedArray(2, "Int8Array", global.Int8Array, 1);
-SetupTypedArray(3, "Uint16Array", global.Uint16Array, 2);
-SetupTypedArray(4, "Int16Array", global.Int16Array, 2);
-SetupTypedArray(5, "Uint32Array", global.Uint32Array, 4);
-SetupTypedArray(6, "Int32Array", global.Int32Array, 4);
-SetupTypedArray(7, "Float32Array", global.Float32Array, 4);
-SetupTypedArray(8, "Float64Array", global.Float64Array, 8);
-SetupTypedArray(9, "Uint8ClampedArray", global.Uint8ClampedArray, 1);
 
+macro SETUP_TYPED_ARRAY(ARRAY_ID, NAME, ELEMENT_SIZE)
+  SetupTypedArray(global.NAME, NAMEConstructor, ELEMENT_SIZE);
+endmacro
+
+TYPED_ARRAYS(SETUP_TYPED_ARRAY)
 
 // --------------------------- DataView -----------------------------
 
diff --git a/src/v8-counters.cc b/src/v8-counters.cc
index a0c3ebd..c899b28 100644
--- a/src/v8-counters.cc
+++ b/src/v8-counters.cc
@@ -82,7 +82,7 @@
         StatsCounter(isolate, "c:" "V8.CountOf_CODE_AGE-" #name); \
     size_of_CODE_AGE_##name##_ = \
         StatsCounter(isolate, "c:" "V8.SizeOf_CODE_AGE-" #name);
-    CODE_AGE_LIST_WITH_NO_AGE(SC)
+    CODE_AGE_LIST_COMPLETE(SC)
 #undef SC
 }
 
diff --git a/src/v8-counters.h b/src/v8-counters.h
index 476021c..55e1e5c 100644
--- a/src/v8-counters.h
+++ b/src/v8-counters.h
@@ -101,6 +101,8 @@
      V8.MemoryHeapSamplePropertyCellSpaceCommitted)                   \
   HM(heap_sample_code_space_committed,                                \
      V8.MemoryHeapSampleCodeSpaceCommitted)                           \
+  HM(heap_sample_maximum_committed,                                   \
+     V8.MemoryHeapSampleMaximumCommitted)                             \
 
 
 // WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
@@ -341,7 +343,7 @@
     { return &count_of_CODE_AGE_##name##_; } \
   StatsCounter* size_of_CODE_AGE_##name() \
     { return &size_of_CODE_AGE_##name##_; }
-  CODE_AGE_LIST_WITH_NO_AGE(SC)
+  CODE_AGE_LIST_COMPLETE(SC)
 #undef SC
 
   enum Id {
@@ -371,7 +373,7 @@
 #undef COUNTER_ID
 #define COUNTER_ID(name) kCountOfCODE_AGE__##name, \
     kSizeOfCODE_AGE__##name,
-    CODE_AGE_LIST_WITH_NO_AGE(COUNTER_ID)
+    CODE_AGE_LIST_COMPLETE(COUNTER_ID)
 #undef COUNTER_ID
     stats_counter_count
   };
@@ -421,7 +423,7 @@
 #define SC(name) \
   StatsCounter size_of_CODE_AGE_##name##_; \
   StatsCounter count_of_CODE_AGE_##name##_;
-  CODE_AGE_LIST_WITH_NO_AGE(SC)
+  CODE_AGE_LIST_COMPLETE(SC)
 #undef SC
 
   friend class Isolate;
diff --git a/src/v8globals.h b/src/v8globals.h
index 7fa2fd6..4910cb7 100644
--- a/src/v8globals.h
+++ b/src/v8globals.h
@@ -199,6 +199,11 @@
 // allows).
 enum PretenureFlag { NOT_TENURED, TENURED };
 
+enum MinimumCapacity {
+  USE_DEFAULT_MINIMUM_CAPACITY,
+  USE_CUSTOM_MINIMUM_CAPACITY
+};
+
 enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };
 
 enum Executability { NOT_EXECUTABLE, EXECUTABLE };
diff --git a/src/v8natives.js b/src/v8natives.js
index c42d5c4..995e7d8 100644
--- a/src/v8natives.js
+++ b/src/v8natives.js
@@ -1249,7 +1249,7 @@
     throw MakeTypeError("called_on_non_object", ["Object.freeze"]);
   }
   var isProxy = %IsJSProxy(obj);
-  if (isProxy || %HasNonStrictArgumentsElements(obj)) {
+  if (isProxy || %HasNonStrictArgumentsElements(obj) || %IsObserved(obj)) {
     if (isProxy) {
       ProxyFix(obj);
     }
diff --git a/src/version.cc b/src/version.cc
index 6d4efa2..83c0852 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -33,8 +33,8 @@
 // NOTE these macros are used by some of the tool scripts and the build
 // system so their names cannot be changed without changing the scripts.
 #define MAJOR_VERSION     3
-#define MINOR_VERSION     22
-#define BUILD_NUMBER      24
+#define MINOR_VERSION     23
+#define BUILD_NUMBER      0
 #define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h
index afac886..073fcbe 100644
--- a/src/x64/assembler-x64-inl.h
+++ b/src/x64/assembler-x64-inl.h
@@ -303,15 +303,9 @@
 }
 
 
-Object** RelocInfo::target_object_address() {
-  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
-  return reinterpret_cast<Object**>(pc_);
-}
-
-
-Address* RelocInfo::target_reference_address() {
+Address RelocInfo::target_reference() {
   ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
-  return reinterpret_cast<Address*>(pc_);
+  return Memory::Address_at(pc_);
 }
 
 
@@ -370,6 +364,18 @@
 }
 
 
+void RelocInfo::WipeOut() {
+  if (IsEmbeddedObject(rmode_) || IsExternalReference(rmode_)) {
+    Memory::Address_at(pc_) = NULL;
+  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
+    // Effectively write zero into the relocation.
+    Assembler::set_target_address_at(pc_, pc_ + sizeof(int32_t));
+  } else {
+    UNREACHABLE();
+  }
+}
+
+
 bool RelocInfo::IsPatchedReturnSequence() {
   // The recognized call sequence is:
   //  movq(kScratchRegister, address); call(kScratchRegister);
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index dcb9fa5..99e2ad4 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -2487,6 +2487,24 @@
 }
 
 
+void Assembler::orps(XMMRegister dst, XMMRegister src) {
+  EnsureSpace ensure_space(this);
+  emit_optional_rex_32(dst, src);
+  emit(0x0F);
+  emit(0x56);
+  emit_sse_operand(dst, src);
+}
+
+
+void Assembler::xorps(XMMRegister dst, XMMRegister src) {
+  EnsureSpace ensure_space(this);
+  emit_optional_rex_32(dst, src);
+  emit(0x0F);
+  emit(0x57);
+  emit_sse_operand(dst, src);
+}
+
+
 // SSE 2 operations.
 
 void Assembler::movd(XMMRegister dst, Register src) {
@@ -2918,15 +2936,6 @@
 }
 
 
-void Assembler::xorps(XMMRegister dst, XMMRegister src) {
-  EnsureSpace ensure_space(this);
-  emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0x57);
-  emit_sse_operand(dst, src);
-}
-
-
 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
   EnsureSpace ensure_space(this);
   emit(0xF2);
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 508c622..584b3a5 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -1355,8 +1355,9 @@
   void cvttss2si(Register dst, XMMRegister src);
   void cvtlsi2ss(XMMRegister dst, Register src);
 
-  void xorps(XMMRegister dst, XMMRegister src);
   void andps(XMMRegister dst, XMMRegister src);
+  void orps(XMMRegister dst, XMMRegister src);
+  void xorps(XMMRegister dst, XMMRegister src);
 
   void movmskps(Register dst, XMMRegister src);
 
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index b3ab8c1..93bc2b9 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -78,7 +78,7 @@
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
-      Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
+      Runtime::FunctionForId(Runtime::kCreateArrayLiteral)->entry;
 }
 
 
diff --git a/src/x64/disasm-x64.cc b/src/x64/disasm-x64.cc
index 7735b55..70d410d 100644
--- a/src/x64/disasm-x64.cc
+++ b/src/x64/disasm-x64.cc
@@ -1261,12 +1261,19 @@
     current += PrintOperands(idesc.mnem, idesc.op_order_, current);
 
   } else if (opcode == 0x54) {
-    // xorps xmm, xmm/m128
+    // andps xmm, xmm/m128
     int mod, regop, rm;
     get_modrm(*current, &mod, &regop, &rm);
     AppendToBuffer("andps %s,", NameOfXMMRegister(regop));
     current += PrintRightXMMOperand(current);
 
+  } else if (opcode == 0x56) {
+    // orps xmm, xmm/m128
+    int mod, regop, rm;
+    get_modrm(*current, &mod, &regop, &rm);
+    AppendToBuffer("orps %s,", NameOfXMMRegister(regop));
+    current += PrintRightXMMOperand(current);
+
   } else if (opcode == 0x57) {
     // xorps xmm, xmm/m128
     int mod, regop, rm;
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 02ba67b..4a99287 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1751,19 +1751,14 @@
         DONT_TRACK_ALLOCATION_SITE,
         length);
     __ CallStub(&stub);
-  } else if (expr->depth() > 1) {
+  } else if (expr->depth() > 1 ||
+             Serializer::enabled() ||
+             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
     __ Push(Smi::FromInt(expr->literal_index()));
     __ Push(constant_elements);
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
-  } else if (Serializer::enabled() ||
-      length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-    __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
-    __ Push(Smi::FromInt(expr->literal_index()));
-    __ Push(constant_elements);
-    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
     ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
            FLAG_smi_only_arrays);
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 7c70094..5c6a580 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -1771,7 +1771,7 @@
     __ j(not_equal, &return_left, Label::kNear);  // left == right != 0.
     // At this point, both left and right are either 0 or -0.
     if (operation == HMathMinMax::kMathMin) {
-      __ orpd(left_reg, right_reg);
+      __ orps(left_reg, right_reg);
     } else {
       // Since we operate on +0 and/or -0, addsd and andsd have the same effect.
       __ addsd(left_reg, right_reg);
@@ -1782,7 +1782,7 @@
     __ ucomisd(left_reg, left_reg);  // NaN check.
     __ j(parity_even, &return_left, Label::kNear);
     __ bind(&return_right);
-    __ movsd(left_reg, right_reg);
+    __ movaps(left_reg, right_reg);
 
     __ bind(&return_left);
   }
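
The switch from orpd to orps in the ±0 branch of MathMin performs the same bitwise OR but is one byte shorter to encode, and the movsd-to-movaps changes (here and in the gap resolver below) copy the whole XMM register instead of merging into its old upper half, avoiding a false dependency on the previous register contents. A standalone check (plain C++, not V8 code) of why a bitwise OR is the right operation for Math.min over signed zeros:

// Standalone check of the signed-zero trick used above: min(+0, -0) must be
// -0, and OR-ing the two doubles bit-for-bit (what orps/orpd do lane-wise on
// XMM registers) yields exactly that.
#include <cmath>
#include <cstdint>
#include <cstring>
#include <cstdio>

static double BitwiseOr(double a, double b) {
  std::uint64_t ua, ub, ur;
  std::memcpy(&ua, &a, sizeof ua);
  std::memcpy(&ub, &b, sizeof ub);
  ur = ua | ub;
  double r;
  std::memcpy(&r, &ur, sizeof r);
  return r;
}

int main() {
  double r = BitwiseOr(0.0, -0.0);
  std::printf("%g signbit=%d\n", r, std::signbit(r) ? 1 : 0);  // -0 signbit=1
  return 0;
}
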
diff --git a/src/x64/lithium-gap-resolver-x64.cc b/src/x64/lithium-gap-resolver-x64.cc
index 8d1c2a2..01cfb12 100644
--- a/src/x64/lithium-gap-resolver-x64.cc
+++ b/src/x64/lithium-gap-resolver-x64.cc
@@ -305,7 +305,7 @@
     Operand other_operand = cgen_->ToOperand(other);
     __ movsd(xmm0, other_operand);
     __ movsd(other_operand, reg);
-    __ movsd(reg, xmm0);
+    __ movaps(reg, xmm0);
 
   } else {
     // No other combinations are possible.
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 2a0c367..28e2a89 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -107,38 +107,34 @@
 }
 
 
-// Helper function used to check that the dictionary doesn't contain
-// the property. This function may return false negatives, so miss_label
-// must always call a backup property check that is complete.
-// This function is safe to call if the receiver has fast properties.
-// Name must be unique and receiver must be a heap object.
-static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
-                                             Label* miss_label,
-                                             Register receiver,
-                                             Handle<Name> name,
-                                             Register r0,
-                                             Register r1) {
+void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
+                                                    Label* miss_label,
+                                                    Register receiver,
+                                                    Handle<Name> name,
+                                                    Register scratch0,
+                                                    Register scratch1) {
   ASSERT(name->IsUniqueName());
+  ASSERT(!receiver.is(scratch0));
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->negative_lookups(), 1);
   __ IncrementCounter(counters->negative_lookups_miss(), 1);
 
-  __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));
+  __ movq(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
 
   const int kInterceptorOrAccessCheckNeededMask =
       (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
 
   // Bail out if the receiver has a named interceptor or requires access checks.
-  __ testb(FieldOperand(r0, Map::kBitFieldOffset),
+  __ testb(FieldOperand(scratch0, Map::kBitFieldOffset),
            Immediate(kInterceptorOrAccessCheckNeededMask));
   __ j(not_zero, miss_label);
 
   // Check that receiver is a JSObject.
-  __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE);
+  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
   __ j(below, miss_label);
 
   // Load properties array.
-  Register properties = r0;
+  Register properties = scratch0;
   __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
 
   // Check that the properties array is a dictionary.
@@ -152,7 +148,7 @@
                                                    &done,
                                                    properties,
                                                    name,
-                                                   r1);
+                                                   scratch1);
   __ bind(&done);
   __ DecrementCounter(counters->negative_lookups_miss(), 1);
 }
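
GenerateDictionaryNegativeLookup is promoted from a file-local helper to a StubCompiler method, with the scratch registers named explicitly and a new assert that the receiver register is not reused as scratch0, presumably so the helper can be shared rather than duplicated per stub. The contract it implements, either proving that the name is absent from the receiver's property dictionary or bailing out to the miss label for a complete check, can be modeled in plain C++ (not V8 API) as:

    #include <string>
    #include <unordered_map>

    enum class Lookup { kProvenAbsent, kMiss };

    // Model of the negative-lookup contract: anything that cannot be proven
    // absent cheaply (interceptors, access checks, a possible hit) must take
    // the miss path, which performs the complete property lookup.
    static Lookup NegativeLookup(const std::unordered_map<std::string, int>& dict,
                                 const std::string& name) {
      return dict.count(name) == 0 ? Lookup::kProvenAbsent : Lookup::kMiss;
    }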
@@ -777,16 +773,13 @@
 }
 
 
-// Generate code to check that a global property cell is empty. Create
-// the property cell at compilation time if no cell exists for the
-// property.
-static void GenerateCheckPropertyCell(MacroAssembler* masm,
-                                      Handle<GlobalObject> global,
-                                      Handle<Name> name,
-                                      Register scratch,
-                                      Label* miss) {
+void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
+                                             Handle<JSGlobalObject> global,
+                                             Handle<Name> name,
+                                             Register scratch,
+                                             Label* miss) {
   Handle<PropertyCell> cell =
-      GlobalObject::EnsurePropertyCell(global, name);
+      JSGlobalObject::EnsurePropertyCell(global, name);
   ASSERT(cell->value()->IsTheHole());
   __ Move(scratch, cell);
   __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
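
GenerateCheckPropertyCell likewise becomes a StubCompiler method, and its global parameter narrows from Handle<GlobalObject> to Handle<JSGlobalObject>. The underlying idea: at compile time make sure the global owns a property cell for the name (still holding the hole, i.e. the property does not exist), then emit a runtime compare of that cell against the hole so the stub misses as soon as the property is installed. A self-contained C++ model, using stand-in structs rather than V8's types:

    #include <memory>
    #include <string>
    #include <unordered_map>

    struct CellModel { bool is_hole = true; };

    struct GlobalModel {
      std::unordered_map<std::string, std::shared_ptr<CellModel>> cells;
      // Stand-in for JSGlobalObject::EnsurePropertyCell: create the cell at
      // compilation time if none exists yet for this property.
      std::shared_ptr<CellModel> EnsurePropertyCell(const std::string& name) {
        std::shared_ptr<CellModel>& cell = cells[name];
        if (!cell) cell = std::make_shared<CellModel>();
        return cell;
      }
    };

    // Returns true while the property is still absent; false means the
    // generated stub would jump to the miss label.
    static bool CheckPropertyCell(GlobalModel& global, const std::string& name) {
      std::shared_ptr<CellModel> cell = global.EnsurePropertyCell(name);
      return cell->is_hole;
    }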
@@ -803,7 +796,7 @@
     Label* miss) {
   if (holder->IsJSGlobalObject()) {
     GenerateCheckPropertyCell(
-        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
+        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
   } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
     GenerateDictionaryNegativeLookup(
         masm, miss, holder_reg, name, scratch1(), scratch2());
@@ -1054,19 +1047,17 @@
 }
 
 
-// Calls GenerateCheckPropertyCell for each global object in the prototype chain
-// from object to (but not including) holder.
-static void GenerateCheckPropertyCells(MacroAssembler* masm,
-                                       Handle<JSObject> object,
-                                       Handle<JSObject> holder,
-                                       Handle<Name> name,
-                                       Register scratch,
-                                       Label* miss) {
+void StubCompiler::GenerateCheckPropertyCells(MacroAssembler* masm,
+                                              Handle<JSObject> object,
+                                              Handle<JSObject> holder,
+                                              Handle<Name> name,
+                                              Register scratch,
+                                              Label* miss) {
   Handle<JSObject> current = object;
   while (!current.is_identical_to(holder)) {
-    if (current->IsGlobalObject()) {
+    if (current->IsJSGlobalObject()) {
       GenerateCheckPropertyCell(masm,
-                                Handle<GlobalObject>::cast(current),
+                                Handle<JSGlobalObject>::cast(current),
                                 name,
                                 scratch,
                                 miss);
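
GenerateCheckPropertyCells follows the same pattern: it becomes a StubCompiler method and now keys the per-object check on IsJSGlobalObject instead of the broader IsGlobalObject. It walks the prototype chain from the receiver up to, but not including, the holder and emits the cell check above for every global object it meets. A compact, self-contained C++ model of the walk (illustrative types only):

    #include <vector>

    // One entry per object on the chain from the receiver up to (excluding)
    // the holder; global_cell_is_hole is the outcome of the per-global check.
    struct ChainEntry {
      bool is_js_global_object;
      bool global_cell_is_hole;
    };

    static bool CheckPropertyCells(const std::vector<ChainEntry>& chain) {
      for (const ChainEntry& current : chain) {
        if (current.is_js_global_object && !current.global_cell_is_hole) {
          return false;  // corresponds to jumping to the miss label
        }
      }
      return true;
    }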
@@ -1282,30 +1273,10 @@
 }
 
 
-void LoadStubCompiler::NonexistentHandlerFrontend(
-    Handle<JSObject> object,
-    Handle<JSObject> last,
-    Handle<Name> name,
-    Label* success,
-    Handle<GlobalObject> global) {
-  Label miss;
-
-  HandlerFrontendHeader(object, receiver(), last, name, &miss);
-
-  // If the last object in the prototype chain is a global object,
-  // check that the global property cell is empty.
-  if (!global.is_null()) {
-    GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
-  }
-
-  HandlerFrontendFooter(name, success, &miss);
-}
-
-
 void LoadStubCompiler::GenerateLoadField(Register reg,
-                                             Handle<JSObject> holder,
-                                             PropertyIndex field,
-                                             Representation representation) {
+                                         Handle<JSObject> holder,
+                                         PropertyIndex field,
+                                         Representation representation) {
   if (!reg.is(receiver())) __ movq(receiver(), reg);
   if (kind() == Code::LOAD_IC) {
     LoadFieldStub stub(field.is_inobject(holder),
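
The x64-specific copy of LoadStubCompiler::NonexistentHandlerFrontend is deleted here, presumably because the logic now lives in shared code where the negative dictionary lookup and the global-cell check apply uniformly; the accompanying GenerateLoadField hunk only re-indents its parameter list. What the deleted body did, run the frontend header checks and, when the last object in the prototype chain is a global object, verify that its property cell is still empty, can be sketched in plain C++ (hypothetical names):

    #include <functional>

    struct NonexistentFrontendModel {
      std::function<bool()> header_checks;         // HandlerFrontendHeader
      bool last_is_global;                         // !global.is_null()
      std::function<bool()> global_cell_is_empty;  // GenerateCheckPropertyCell
    };

    // Returns true for the success label, false for the miss label.
    static bool RunNonexistentFrontend(const NonexistentFrontendModel& f) {
      if (!f.header_checks()) return false;
      if (f.last_is_global && !f.global_cell_is_empty()) return false;
      return true;
    }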
@@ -2957,7 +2928,7 @@
     Handle<JSObject> object,
     Handle<JSObject> last,
     Handle<Name> name,
-    Handle<GlobalObject> global) {
+    Handle<JSGlobalObject> global) {
   Label success;
 
   NonexistentHandlerFrontend(object, last, name, &success, global);