Version 3.7.1

Achieved 33% speedup in debug-mode tests.

Removed special casing of calls to RegExp test and exec methods with no
argument.  Now matches new JSC behaviour.  crbug.com/75740.
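
As a rough JavaScript illustration of the new behaviour (not part of this patch):

    /undefined/.test();      // true: a missing argument defaults to undefined,
                             // which is string-converted to "undefined"
    /undefined/.exec()[0];   // "undefined"
    /xyz/.test();            // false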

Return the empty string on cyclic references in toString (ES5
conformance).
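
A minimal sketch of the described behaviour (illustrative, not code from this commit):

    var a = [1, 2];
    a.push(a);       // the array now contains itself
    a.join();        // "1,2," - the cyclic element contributes the empty string
    String(a);       // same result via Array.prototype.toString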

Fixed bug triggered by JSBeautifier.  crbug.com/100409.

Made Math.random state per-context instead of per-process (issue 864).

Fixed stack traces to skip native functions.

Made snapshots (new contexts) smaller and faster.

Fixed handling of Function.apply for non-array arguments.
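
For instance (a sketch only; the exact case covered by the fix is not restated here),
apply accepts generic array-like objects, not just real arrays:

    function sum(a, b) { return a + b; }
    var args = { length: 2, 0: 3, 1: 4 };   // array-like, but not an Array
    sum.apply(null, args);                  // 7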

Fixed evaluation order in defineProperties to match Firefox.
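
One way the evaluation order becomes observable (a sketch, not the exact case from the
bug report): side effects while the descriptors are read.

    var log = [];
    var props = {
      x: { get value() { log.push("x"); return 1; } },
      y: { get value() { log.push("y"); return 2; } }
    };
    Object.defineProperties({}, props);
    log;   // ["x", "y"]: descriptors are converted in property order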

Fixed handling of non-object receivers for array builtins,
crbug.com/100702.
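
Illustrative only (the receivers covered by the fix are not listed here): ES5 array
builtins coerce their receiver with ToObject, so e.g. strings work:

    Array.prototype.indexOf.call("hello", "l");   // 2
    Array.prototype.join.call("abc", "-");        // "a-b-c"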

Multiple fixes to improve compliance with test262.

Fixed compatibility with older Android releases.

Fixed compilation with gcc-4.5.3.

Improved performance of WriteUtf8, issue 1665.

Made native syntax an early error in the preparser.
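
A sketch of what that means in practice (the runtime-function name below is made up):
without --allow-natives-syntax, a '%'-call is now reported as a syntax error already
during preparsing.

    function f() { return %DoSomething(1); }   // SyntaxError (unexpected '%')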

Fixed issues 793 and 893 relating to Function.prototype.bind.
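
A short bind example for context (which behaviours the two issues cover is not
restated here):

    function add(a, b) { return a + b; }
    var add1 = add.bind(null, 1);
    add1(2);        // 3
    add1.length;    // 1: remaining formal parameter count per ES5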

Improved let, const, Set and Map support and other Harmony features
(behind the --harmony flag).
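
For example, when run with the flag (e.g. d8 --harmony; illustrative only):

    var s = new Set();
    s.add("key");
    s.has("key");          // true
    var m = new Map();
    m.set("answer", 42);
    m.get("answer");       // 42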

Changed evaluation order for > and <= to match ES5 instead of ES3.
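
A sketch of the observable difference: for a > b, ES5 converts the left operand first,
while ES3 specified the right operand first.

    var log = [];
    var a = { valueOf: function() { log.push("a"); return 1; } };
    var b = { valueOf: function() { log.push("b"); return 2; } };
    a > b;
    log;   // ["a", "b"] under ES5 ordering (previously ["b", "a"])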

Bug fixes and performance improvements on all platforms.

Review URL: http://codereview.chromium.org/8404030

git-svn-id: http://v8.googlecode.com/svn/trunk@9809 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/ChangeLog b/ChangeLog
index a95f3cc..874fcba 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,48 @@
+2011-10-26: Version 3.7.1
+
+        Achieved 33% speedup in debug-mode tests.
+
+        Removed special casing of calls to RegExp test and exec methods with no
+        argument.  Now matches new JSC behaviour.  crbug.com/75740.
+
+        Return the empty string on cyclic references in toString (ES5
+        conformance).
+
+        Fixed bug triggered by JSBeautifier.  crbug.com/100409.
+
+        Made Math.random state per-context instead of per-process (issue 864).
+
+        Fixed stack traces to skip native functions.
+
+        Made snapshots (new contexts) smaller and faster.
+
+        Fixed handling of Function.apply for non-array arguments.
+
+        Fixed evaluation order in defineProperties to match Firefox.
+
+        Fixed handling of non-object receivers for array builtins,
+        crbug.com/100702.
+
+        Multiple fixes to improve compliance with test262.
+
+        Fixed compatibility with older Android releases.
+
+        Fixed compilation with gcc-4.5.3.
+
+        Improved performance of WriteUtf8, issue 1665.
+
+        Made native syntax an early error in the preparser.
+
+        Fixed issues 793 and 893 relating to Function.prototype.bind.
+
+        Improved let, const, Set and Map support and other Harmony features
+        (behind the --harmony flag).
+
+        Changed evaluation order for > and <= to match ES5 instead of ES3.
+
+        Bug fixes and performance improvements on all platforms.
+
+
 2011-10-13: Version 3.7.0
 
         Fixed array handling for Object.defineOwnProperty (ES5 conformance).
diff --git a/preparser/preparser-process.cc b/preparser/preparser-process.cc
index e67851c..b0aeb81 100644
--- a/preparser/preparser-process.cc
+++ b/preparser/preparser-process.cc
@@ -267,34 +267,22 @@
 
 
 ExceptionExpectation ParseExpectation(int argc, const char* argv[]) {
+  // Parse ["throws" [<exn-type> [<start> [<end>]]]].
   ExceptionExpectation expects;
-
-  // Parse exception expectations from (the remainder of) the command line.
   int arg_index = 0;
-  // Skip any flags.
-  while (argc > arg_index && IsFlag(argv[arg_index])) arg_index++;
+  while (argc > arg_index && strncmp("throws", argv[arg_index], 7)) {
+    arg_index++;
+  }
   if (argc > arg_index) {
-    if (strncmp("throws", argv[arg_index], 7)) {
-      // First argument after filename, if present, must be the verbatim
-      // "throws", marking that the preparsing should fail with an exception.
-      fail(NULL, "ERROR: Extra arguments not prefixed by \"throws\".\n");
-    }
     expects.throws = true;
-    do {
-      arg_index++;
-    } while (argc > arg_index && IsFlag(argv[arg_index]));
-    if (argc > arg_index) {
-      // Next argument is the exception type identifier.
+    arg_index++;
+    if (argc > arg_index && !IsFlag(argv[arg_index])) {
       expects.type = argv[arg_index];
-      do {
-        arg_index++;
-      } while (argc > arg_index && IsFlag(argv[arg_index]));
-      if (argc > arg_index) {
+      arg_index++;
+      if (argc > arg_index && !IsFlag(argv[arg_index])) {
         expects.beg_pos = atoi(argv[arg_index]);  // NOLINT
-        do {
-          arg_index++;
-        } while (argc > arg_index && IsFlag(argv[arg_index]));
-        if (argc > arg_index) {
+        arg_index++;
+        if (argc > arg_index && !IsFlag(argv[arg_index])) {
           expects.end_pos = atoi(argv[arg_index]);  // NOLINT
         }
       }
@@ -308,7 +296,8 @@
   // Parse command line.
   // Format:  preparser (<scriptfile> | -e "<source>")
   //                    ["throws" [<exn-type> [<start> [<end>]]]]
-  // Any flags (except an initial -s) are ignored.
+  // Any flags (except an initial -e) are ignored.
+  // Flags must not separate "throws" and its arguments.
 
   // Check for mandatory filename argument.
   int arg_index = 1;
diff --git a/src/SConscript b/src/SConscript
index f3ae807..be4a8f0 100755
--- a/src/SConscript
+++ b/src/SConscript
@@ -321,7 +321,7 @@
 
 EXPERIMENTAL_LIBRARY_FILES = '''
 proxy.js
-weakmap.js
+collection.js
 '''.split()
 
 
diff --git a/src/accessors.cc b/src/accessors.cc
index 951209d..02998f9 100644
--- a/src/accessors.cc
+++ b/src/accessors.cc
@@ -527,7 +527,9 @@
     // correctly yet. Compile it now and return the right length.
     HandleScope scope;
     Handle<JSFunction> handle(function);
-    if (!CompileLazy(handle, KEEP_EXCEPTION)) return Failure::Exception();
+    if (!JSFunction::CompileLazy(handle, KEEP_EXCEPTION)) {
+      return Failure::Exception();
+    }
     return Smi::FromInt(handle->shared()->length());
   } else {
     return Smi::FromInt(function->shared()->length());
@@ -759,7 +761,12 @@
     caller = potential_caller;
     potential_caller = it.next();
   }
-
+  // If caller is bound, return null. This is compatible with JSC, and
+  // allows us to make bound functions use the strict function map
+  // and its associated throwing caller and arguments.
+  if (caller->shared()->bound()) {
+    return isolate->heap()->null_value();
+  }
   return CheckNonStrictCallerOrThrow(isolate, caller);
 }
 
diff --git a/src/api.cc b/src/api.cc
index a03b741..ac4f07f 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -2794,7 +2794,7 @@
   ENTER_V8(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
   EXCEPTION_PREAMBLE(isolate);
-  i::Handle<i::Object> result = i::GetElement(self, index);
+  i::Handle<i::Object> result = i::Object::GetElement(self, index);
   has_pending_exception = result.is_null();
   EXCEPTION_BAILOUT_CHECK(isolate, Local<Value>());
   return Utils::ToLocal(result);
@@ -2874,8 +2874,10 @@
   ENTER_V8(isolate);
   i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
+  bool threw = false;
   i::Handle<i::FixedArray> value =
-      i::GetKeysInFixedArrayFor(self, i::INCLUDE_PROTOS);
+      i::GetKeysInFixedArrayFor(self, i::INCLUDE_PROTOS, &threw);
+  if (threw) return Local<v8::Array>();
   // Because we use caching to speed up enumeration it is important
   // to never change the result of the basic enumeration function so
   // we clone the result.
@@ -2893,8 +2895,10 @@
   ENTER_V8(isolate);
   i::HandleScope scope(isolate);
   i::Handle<i::JSObject> self = Utils::OpenHandle(this);
+  bool threw = false;
   i::Handle<i::FixedArray> value =
-      i::GetKeysInFixedArrayFor(self, i::LOCAL_ONLY);
+      i::GetKeysInFixedArrayFor(self, i::LOCAL_ONLY, &threw);
+  if (threw) return Local<v8::Array>();
   // Because we use caching to speed up enumeration it is important
   // to never change the result of the basic enumeration function so
   // we clone the result.
@@ -3093,7 +3097,10 @@
   // If the property being looked up is a callback, it can throw
   // an exception.
   EXCEPTION_PREAMBLE(isolate);
-  i::Handle<i::Object> result = i::GetProperty(receiver, name, lookup);
+  PropertyAttributes ignored;
+  i::Handle<i::Object> result =
+      i::Object::GetProperty(receiver, receiver, lookup, name,
+                             &ignored);
   has_pending_exception = result.is_null();
   EXCEPTION_BAILOUT_CHECK(isolate, Local<Value>());
 
@@ -3110,7 +3117,7 @@
   ENTER_V8(isolate);
   i::Handle<i::JSObject> self_obj = Utils::OpenHandle(this);
   i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
-  i::LookupResult lookup;
+  i::LookupResult lookup(isolate);
   self_obj->LookupRealNamedPropertyInPrototypes(*key_obj, &lookup);
   return GetPropertyByLookup(isolate, self_obj, key_obj, &lookup);
 }
@@ -3123,7 +3130,7 @@
   ENTER_V8(isolate);
   i::Handle<i::JSObject> self_obj = Utils::OpenHandle(this);
   i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
-  i::LookupResult lookup;
+  i::LookupResult lookup(isolate);
   self_obj->LookupRealNamedProperty(*key_obj, &lookup);
   return GetPropertyByLookup(isolate, self_obj, key_obj, &lookup);
 }
@@ -3634,13 +3641,30 @@
   if (IsDeadCheck(isolate, "v8::String::WriteUtf8()")) return 0;
   LOG_API(isolate, "String::WriteUtf8");
   ENTER_V8(isolate);
-  i::StringInputBuffer& write_input_buffer = *isolate->write_input_buffer();
   i::Handle<i::String> str = Utils::OpenHandle(this);
+  if (str->IsAsciiRepresentation()) {
+    int len;
+    if (capacity == -1) {
+      capacity = str->length() + 1;
+      len = str->length();
+    } else {
+      len = i::Min(capacity, str->length());
+    }
+    i::String::WriteToFlat(*str, buffer, 0, len);
+    if (nchars_ref != NULL) *nchars_ref = len;
+    if (!(options & NO_NULL_TERMINATION) && capacity > len) {
+      buffer[len] = '\0';
+      return len + 1;
+    }
+    return len;
+  }
+
+  i::StringInputBuffer& write_input_buffer = *isolate->write_input_buffer();
   isolate->string_tracker()->RecordWrite(str);
   if (options & HINT_MANY_WRITES_EXPECTED) {
     // Flatten the string for efficiency.  This applies whether we are
     // using StringInputBuffer or Get(i) to access the characters.
-    str->TryFlatten();
+    FlattenString(str);
   }
   write_input_buffer.Reset(0, *str);
   int len = str->length();
@@ -3961,6 +3985,15 @@
 
 
 void v8::V8::GetHeapStatistics(HeapStatistics* heap_statistics) {
+  if (!i::Isolate::Current()->IsInitialized()) {
+    // Isolate is uninitialized, thus the heap is not configured yet.
+    heap_statistics->set_total_heap_size(0);
+    heap_statistics->set_total_heap_size_executable(0);
+    heap_statistics->set_used_heap_size(0);
+    heap_statistics->set_heap_size_limit(0);
+    return;
+  }
+
   i::Heap* heap = i::Isolate::Current()->heap();
   heap_statistics->set_total_heap_size(heap->CommittedMemory());
   heap_statistics->set_total_heap_size_executable(
@@ -3973,14 +4006,15 @@
 bool v8::V8::IdleNotification() {
   // Returning true tells the caller that it need not
   // continue to call IdleNotification.
-  if (!i::Isolate::Current()->IsInitialized()) return true;
+  i::Isolate* isolate = i::Isolate::Current();
+  if (isolate == NULL || !isolate->IsInitialized()) return true;
   return i::V8::IdleNotification();
 }
 
 
 void v8::V8::LowMemoryNotification() {
   i::Isolate* isolate = i::Isolate::Current();
-  if (!isolate->IsInitialized()) return;
+  if (isolate == NULL || !isolate->IsInitialized()) return;
   isolate->heap()->CollectAllAvailableGarbage();
 }
 
@@ -4075,8 +4109,9 @@
   }
   // Leave V8.
 
-  if (env.is_null())
+  if (env.is_null()) {
     return Persistent<Context>();
+  }
   return Persistent<Context>(Utils::ToLocal(env));
 }
 
diff --git a/src/arm/assembler-arm-inl.h b/src/arm/assembler-arm-inl.h
index 93cecf5..7f9f4ce 100644
--- a/src/arm/assembler-arm-inl.h
+++ b/src/arm/assembler-arm-inl.h
@@ -74,10 +74,10 @@
 }
 
 
-void RelocInfo::set_target_address(Address target) {
+void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
   Assembler::set_target_address_at(pc_, target);
-  if (host() != NULL && IsCodeTarget(rmode_)) {
+  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
     Object* target_code = Code::GetCodeFromTargetAddress(target);
     host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
         host(), this, HeapObject::cast(target_code));
@@ -103,10 +103,12 @@
 }
 
 
-void RelocInfo::set_target_object(Object* target) {
+void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
-  if (host() != NULL && target->IsHeapObject()) {
+  if (mode == UPDATE_WRITE_BARRIER &&
+      host() != NULL &&
+      target->IsHeapObject()) {
     host()->GetHeap()->incremental_marking()->RecordWrite(
         host(), &Memory::Object_at(pc_), HeapObject::cast(target));
   }
@@ -136,11 +138,12 @@
 }
 
 
-void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell) {
+void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
+                                WriteBarrierMode mode) {
   ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
   Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
   Memory::Address_at(pc_) = address;
-  if (host() != NULL) {
+  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
     // TODO(1550) We are passing NULL as a slot because cell can never be on
     // evacuation candidate.
     host()->GetHeap()->incremental_marking()->RecordWrite(
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index d19b64d..247479d 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -304,9 +304,9 @@
 const DwVfpRegister d15 = { 15 };
 
 // Aliases for double registers.
-const DwVfpRegister kFirstCalleeSavedDoubleReg = d8;
-const DwVfpRegister kLastCalleeSavedDoubleReg = d15;
-const DwVfpRegister kDoubleRegZero = d14;
+static const DwVfpRegister& kFirstCalleeSavedDoubleReg = d8;
+static const DwVfpRegister& kLastCalleeSavedDoubleReg = d15;
+static const DwVfpRegister& kDoubleRegZero = d14;
 
 
 // Coprocessor register
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 32b7896..29bf190 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -86,12 +86,6 @@
 }
 
 
-// This constant has the same value as JSArray::kPreallocatedArrayElements and
-// if JSArray::kPreallocatedArrayElements is changed handling of loop unfolding
-// below should be reconsidered.
-static const int kLoopUnfoldLimit = 4;
-
-
 // Allocate an empty JSArray. The allocated array is put into the result
 // register. An elements backing store is allocated with size initial_capacity
 // and filled with the hole values.
@@ -101,9 +95,9 @@
                                  Register scratch1,
                                  Register scratch2,
                                  Register scratch3,
-                                 int initial_capacity,
                                  Label* gc_required) {
-  ASSERT(initial_capacity > 0);
+  const int initial_capacity = JSArray::kPreallocatedArrayElements;
+  STATIC_ASSERT(initial_capacity >= 0);
   // Load the initial map from the array function.
   __ ldr(scratch1, FieldMemOperand(array_function,
                                    JSFunction::kPrototypeOrInitialMapOffset));
@@ -153,12 +147,24 @@
   ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
   __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
 
-  // Fill the FixedArray with the hole value.
+  // Fill the FixedArray with the hole value. Inline the code if short.
+  if (initial_capacity == 0) return;
   ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
-  ASSERT(initial_capacity <= kLoopUnfoldLimit);
   __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
-  for (int i = 0; i < initial_capacity; i++) {
+  static const int kLoopUnfoldLimit = 4;
+  if (initial_capacity <= kLoopUnfoldLimit) {
+    for (int i = 0; i < initial_capacity; i++) {
+      __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
+    }
+  } else {
+    Label loop, entry;
+    __ add(scratch2, scratch1, Operand(initial_capacity * kPointerSize));
+    __ b(&entry);
+    __ bind(&loop);
     __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
+    __ bind(&entry);
+    __ cmp(scratch1, scratch2);
+    __ b(lt, &loop);
   }
 }
 
@@ -173,7 +179,7 @@
 // register elements_array_storage is scratched.
 static void AllocateJSArray(MacroAssembler* masm,
                             Register array_function,  // Array function.
-                            Register array_size,  // As a smi.
+                            Register array_size,  // As a smi, cannot be 0.
                             Register result,
                             Register elements_array_storage,
                             Register elements_array_end,
@@ -181,32 +187,18 @@
                             Register scratch2,
                             bool fill_with_hole,
                             Label* gc_required) {
-  Label not_empty, allocated;
-
   // Load the initial map from the array function.
   __ ldr(elements_array_storage,
          FieldMemOperand(array_function,
                          JSFunction::kPrototypeOrInitialMapOffset));
 
-  // Check whether an empty sized array is requested.
-  __ tst(array_size, array_size);
-  __ b(ne, &not_empty);
-
-  // If an empty array is requested allocate a small elements array anyway. This
-  // keeps the code below free of special casing for the empty array.
-  int size = JSArray::kSize +
-             FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
-  __ AllocateInNewSpace(size,
-                        result,
-                        elements_array_end,
-                        scratch1,
-                        gc_required,
-                        TAG_OBJECT);
-  __ jmp(&allocated);
+  if (FLAG_debug_code) {  // Assert that array size is not zero.
+    __ tst(array_size, array_size);
+    __ Assert(ne, "array size is unexpectedly 0");
+  }
 
   // Allocate the JSArray object together with space for a FixedArray with the
   // requested number of elements.
-  __ bind(&not_empty);
   STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
   __ mov(elements_array_end,
          Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
@@ -226,7 +218,6 @@
   // result: JSObject
   // elements_array_storage: initial map
   // array_size: size of array (smi)
-  __ bind(&allocated);
   __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
   __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
   __ str(elements_array_storage,
@@ -256,14 +247,6 @@
   ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
   __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
   STATIC_ASSERT(kSmiTag == 0);
-  __ tst(array_size, array_size);
-  // Length of the FixedArray is the number of pre-allocated elements if
-  // the actual JSArray has length 0 and the size of the JSArray for non-empty
-  // JSArrays. The length of a FixedArray is stored as a smi.
-  __ mov(array_size,
-         Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)),
-         LeaveCC,
-         eq);
   ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
   __ str(array_size,
          MemOperand(elements_array_storage, kPointerSize, PostIndex));
@@ -311,20 +294,20 @@
 static void ArrayNativeCode(MacroAssembler* masm,
                             Label* call_generic_code) {
   Counters* counters = masm->isolate()->counters();
-  Label argc_one_or_more, argc_two_or_more;
+  Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array;
 
   // Check for array construction with zero arguments or one.
   __ cmp(r0, Operand(0, RelocInfo::NONE));
   __ b(ne, &argc_one_or_more);
 
   // Handle construction of an empty array.
+  __ bind(&empty_array);
   AllocateEmptyJSArray(masm,
                        r1,
                        r2,
                        r3,
                        r4,
                        r5,
-                       JSArray::kPreallocatedArrayElements,
                        call_generic_code);
   __ IncrementCounter(counters->array_function_native(), 1, r3, r4);
   // Setup return value, remove receiver from stack and return.
@@ -339,6 +322,13 @@
   __ b(ne, &argc_two_or_more);
   STATIC_ASSERT(kSmiTag == 0);
   __ ldr(r2, MemOperand(sp));  // Get the argument from the stack.
+  __ tst(r2, r2);
+  __ b(ne, &not_empty_array);
+  __ Drop(1);  // Adjust stack.
+  __ mov(r0, Operand(0));  // Treat this as a call with argc of zero.
+  __ b(&empty_array);
+
+  __ bind(&not_empty_array);
   __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
   __ b(ne, call_generic_code);
 
@@ -1027,9 +1017,9 @@
     __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
 
     // Set up the roots register.
-    ExternalReference roots_address =
-        ExternalReference::roots_address(masm->isolate());
-    __ mov(r10, Operand(roots_address));
+    ExternalReference roots_array_start =
+        ExternalReference::roots_array_start(masm->isolate());
+    __ mov(r10, Operand(roots_array_start));
 
     // Push the function and the receiver onto the stack.
     __ push(r1);
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 44923a1..412ba00 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -263,7 +263,12 @@
   // [sp + (2 * kPointerSize)]: literals array.
 
   // All sizes here are multiples of kPointerSize.
-  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
+  int elements_size = 0;
+  if (length_ > 0) {
+    elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
+        ? FixedDoubleArray::SizeFor(length_)
+        : FixedArray::SizeFor(length_);
+  }
   int size = JSArray::kSize + elements_size;
 
   // Load boilerplate object into r3 and check if we need to create a
@@ -283,6 +288,9 @@
     if (mode_ == CLONE_ELEMENTS) {
       message = "Expected (writable) fixed array";
       expected_map_index = Heap::kFixedArrayMapRootIndex;
+    } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
+      message = "Expected (writable) fixed double array";
+      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
     } else {
       ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
       message = "Expected copy-on-write fixed array";
@@ -322,6 +330,7 @@
     __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));
 
     // Copy the elements array.
+    ASSERT((elements_size % kPointerSize) == 0);
     __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize);
   }
 
@@ -3913,7 +3922,7 @@
   }
 
   // Get the prototype of the function.
-  __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
+  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
 
   // Check that the function prototype is a JS object.
   __ JumpIfSmi(prototype, &slow);
@@ -6668,7 +6677,82 @@
 }
 
 
-MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
+void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
+                                                        Label* miss,
+                                                        Label* done,
+                                                        Register receiver,
+                                                        Register properties,
+                                                        Handle<String> name,
+                                                        Register scratch0) {
+  // If names of slots in range from 1 to kProbes - 1 for the hash value are
+  // not equal to the name and kProbes-th slot is not used (its name is the
+  // undefined value), it guarantees the hash table doesn't contain the
+  // property. It's true even if some slots represent deleted properties
+  // (their names are the null value).
+  for (int i = 0; i < kInlinedProbes; i++) {
+    // scratch0 points to properties hash.
+    // Compute the masked index: (hash + i + i * i) & mask.
+    Register index = scratch0;
+    // Capacity is smi 2^n.
+    __ ldr(index, FieldMemOperand(properties, kCapacityOffset));
+    __ sub(index, index, Operand(1));
+    __ and_(index, index, Operand(
+        Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));
+
+    // Scale the index by multiplying by the entry size.
+    ASSERT(StringDictionary::kEntrySize == 3);
+    __ add(index, index, Operand(index, LSL, 1));  // index *= 3.
+
+    Register entity_name = scratch0;
+    // Having undefined at this place means the name is not contained.
+    ASSERT_EQ(kSmiTagSize, 1);
+    Register tmp = properties;
+    __ add(tmp, properties, Operand(index, LSL, 1));
+    __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
+
+    ASSERT(!tmp.is(entity_name));
+    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
+    __ cmp(entity_name, tmp);
+    __ b(eq, done);
+
+    if (i != kInlinedProbes - 1) {
+      // Stop if found the property.
+      __ cmp(entity_name, Operand(Handle<String>(name)));
+      __ b(eq, miss);
+
+      // Check if the entry name is not a symbol.
+      __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
+      __ ldrb(entity_name,
+              FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
+      __ tst(entity_name, Operand(kIsSymbolMask));
+      __ b(eq, miss);
+
+      // Restore the properties.
+      __ ldr(properties,
+             FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+    }
+  }
+
+  const int spill_mask =
+      (lr.bit() | r6.bit() | r5.bit() | r4.bit() | r3.bit() |
+       r2.bit() | r1.bit() | r0.bit());
+
+  __ stm(db_w, sp, spill_mask);
+  __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+  __ mov(r1, Operand(Handle<String>(name)));
+  StringDictionaryLookupStub stub(NEGATIVE_LOOKUP);
+  __ CallStub(&stub);
+  __ tst(r0, Operand(r0));
+  __ ldm(ia_w, sp, spill_mask);
+
+  __ b(eq, done);
+  __ b(ne, miss);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup(
     MacroAssembler* masm,
     Label* miss,
     Label* done,
@@ -6927,6 +7011,13 @@
   { r3, r1, r2, EMIT_REMEMBERED_SET },
   // KeyedStoreStubCompiler::GenerateStoreFastElement.
   { r4, r2, r3, EMIT_REMEMBERED_SET },
+  // ElementsTransitionGenerator::GenerateSmiOnlyToObject
+  // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+  // and ElementsTransitionGenerator::GenerateDoubleToObject
+  { r2, r3, r9, EMIT_REMEMBERED_SET },
+  // ElementsTransitionGenerator::GenerateDoubleToObject
+  { r6, r2, r0, EMIT_REMEMBERED_SET },
+  { r2, r6, r9, EMIT_REMEMBERED_SET },
   // Null termination.
   { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
 };
@@ -7163,7 +7254,6 @@
   // Fall through when we need to inform the incremental marker.
 }
 
-
 #undef __
 
 } }  // namespace v8::internal
diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h
index 3ba75ba..647fc8d 100644
--- a/src/arm/code-stubs-arm.h
+++ b/src/arm/code-stubs-arm.h
@@ -799,7 +799,17 @@
 
   void Generate(MacroAssembler* masm);
 
-  MUST_USE_RESULT static MaybeObject* GenerateNegativeLookup(
+  static void GenerateNegativeLookup(MacroAssembler* masm,
+                                     Label* miss,
+                                     Label* done,
+                                     Register receiver,
+                                     Register properties,
+                                     Handle<String> name,
+                                     Register scratch0);
+
+  // TODO(kmillikin): Eliminate this function when the stub cache is fully
+  // handlified.
+  MUST_USE_RESULT static MaybeObject* TryGenerateNegativeLookup(
       MacroAssembler* masm,
       Label* miss,
       Label* done,
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 3993ed0..508d830 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -30,10 +30,13 @@
 #if defined(V8_TARGET_ARCH_ARM)
 
 #include "codegen.h"
+#include "macro-assembler.h"
 
 namespace v8 {
 namespace internal {
 
+#define __ ACCESS_MASM(masm)
+
 // -------------------------------------------------------------------------
 // Platform-specific RuntimeCallHelper functions.
 
@@ -51,6 +54,252 @@
 }
 
 
+// -------------------------------------------------------------------------
+// Code generators
+
+void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- r0    : value
+  //  -- r1    : key
+  //  -- r2    : receiver
+  //  -- lr    : return address
+  //  -- r3    : target map, scratch for subsequent call
+  //  -- r4    : scratch (elements)
+  // -----------------------------------
+  // Set transitioned map.
+  __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
+  __ RecordWriteField(r2,
+                      HeapObject::kMapOffset,
+                      r3,
+                      r9,
+                      kLRHasNotBeenSaved,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+}
+
+
+void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+    MacroAssembler* masm, Label* fail) {
+  // ----------- S t a t e -------------
+  //  -- r0    : value
+  //  -- r1    : key
+  //  -- r2    : receiver
+  //  -- lr    : return address
+  //  -- r3    : target map, scratch for subsequent call
+  //  -- r4    : scratch (elements)
+  // -----------------------------------
+  Label loop, entry, convert_hole, gc_required;
+  bool vfp3_supported = CpuFeatures::IsSupported(VFP3);
+  __ push(lr);
+
+  __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset));
+  __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
+  // r4: source FixedArray
+  // r5: number of elements (smi-tagged)
+
+  // Allocate new FixedDoubleArray.
+  __ mov(lr, Operand(FixedDoubleArray::kHeaderSize));
+  __ add(lr, lr, Operand(r5, LSL, 2));
+  __ AllocateInNewSpace(lr, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS);
+  // r6: destination FixedDoubleArray, not tagged as heap object
+  __ LoadRoot(r9, Heap::kFixedDoubleArrayMapRootIndex);
+  __ str(r9, MemOperand(r6, HeapObject::kMapOffset));
+  // Set destination FixedDoubleArray's length.
+  __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset));
+  // Update receiver's map.
+
+  __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
+  __ RecordWriteField(r2,
+                      HeapObject::kMapOffset,
+                      r3,
+                      r9,
+                      kLRHasBeenSaved,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  // Replace receiver's backing store with newly created FixedDoubleArray.
+  __ add(r3, r6, Operand(kHeapObjectTag));
+  __ str(r3, FieldMemOperand(r2, JSObject::kElementsOffset));
+  __ RecordWriteField(r2,
+                      JSObject::kElementsOffset,
+                      r3,
+                      r9,
+                      kLRHasBeenSaved,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+
+  // Prepare for conversion loop.
+  __ add(r3, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+  __ add(r7, r6, Operand(FixedDoubleArray::kHeaderSize));
+  __ add(r6, r7, Operand(r5, LSL, 2));
+  __ mov(r4, Operand(kHoleNanLower32));
+  __ mov(r5, Operand(kHoleNanUpper32));
+  // r3: begin of source FixedArray element fields, not tagged
+  // r4: kHoleNanLower32
+  // r5: kHoleNanUpper32
+  // r6: end of destination FixedDoubleArray, not tagged
+  // r7: begin of FixedDoubleArray element fields, not tagged
+  if (!vfp3_supported) __ Push(r1, r0);
+
+  __ b(&entry);
+
+  // Call into runtime if GC is required.
+  __ bind(&gc_required);
+  __ pop(lr);
+  __ b(fail);
+
+  // Convert and copy elements.
+  __ bind(&loop);
+  __ ldr(r9, MemOperand(r3, 4, PostIndex));
+  // r9: current element
+  __ JumpIfNotSmi(r9, &convert_hole);
+
+  // Normal smi, convert to double and store.
+  __ SmiUntag(r9);
+  if (vfp3_supported) {
+    CpuFeatures::Scope scope(VFP3);
+    __ vmov(s0, r9);
+    __ vcvt_f64_s32(d0, s0);
+    __ vstr(d0, r7, 0);
+    __ add(r7, r7, Operand(8));
+  } else {
+    FloatingPointHelper::ConvertIntToDouble(masm,
+                                            r9,
+                                            FloatingPointHelper::kCoreRegisters,
+                                            d0,
+                                            r0,
+                                            r1,
+                                            lr,
+                                            s0);
+    __ Strd(r0, r1, MemOperand(r7, 8, PostIndex));
+  }
+  __ b(&entry);
+
+  // Hole found, store the-hole NaN.
+  __ bind(&convert_hole);
+  __ Strd(r4, r5, MemOperand(r7, 8, PostIndex));
+
+  __ bind(&entry);
+  __ cmp(r7, r6);
+  __ b(lt, &loop);
+
+  if (!vfp3_supported) __ Pop(r1, r0);
+  __ pop(lr);
+}
+
+
+void ElementsTransitionGenerator::GenerateDoubleToObject(
+    MacroAssembler* masm, Label* fail) {
+  // ----------- S t a t e -------------
+  //  -- r0    : value
+  //  -- r1    : key
+  //  -- r2    : receiver
+  //  -- lr    : return address
+  //  -- r3    : target map, scratch for subsequent call
+  //  -- r4    : scratch (elements)
+  // -----------------------------------
+  Label entry, loop, convert_hole, gc_required;
+
+  __ push(lr);
+  __ Push(r3, r2, r1, r0);
+
+  __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset));
+  __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
+  // r4: source FixedDoubleArray
+  // r5: number of elements (smi-tagged)
+
+  // Allocate new FixedArray.
+  __ mov(r0, Operand(FixedDoubleArray::kHeaderSize));
+  __ add(r0, r0, Operand(r5, LSL, 1));
+  __ AllocateInNewSpace(r0, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS);
+  // r6: destination FixedArray, not tagged as heap object
+  __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
+  __ str(r9, MemOperand(r6, HeapObject::kMapOffset));
+  // Set destination FixedArray's length.
+  __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset));
+
+  // Prepare for conversion loop.
+  __ add(r4, r4, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4));
+  __ add(r3, r6, Operand(FixedArray::kHeaderSize));
+  __ add(r6, r6, Operand(kHeapObjectTag));
+  __ add(r5, r3, Operand(r5, LSL, 1));
+  __ LoadRoot(r7, Heap::kTheHoleValueRootIndex);
+  __ LoadRoot(r9, Heap::kHeapNumberMapRootIndex);
+  // Using offsetted addresses in r4 to fully take advantage of post-indexing.
+  // r3: begin of destination FixedArray element fields, not tagged
+  // r4: begin of source FixedDoubleArray element fields, not tagged, +4
+  // r5: end of destination FixedArray, not tagged
+  // r6: destination FixedArray
+  // r7: the-hole pointer
+  // r9: heap number map
+  __ b(&entry);
+
+  // Call into runtime if GC is required.
+  __ bind(&gc_required);
+  __ Pop(r3, r2, r1, r0);
+  __ pop(lr);
+  __ b(fail);
+
+  __ bind(&loop);
+  __ ldr(r1, MemOperand(r4, 8, PostIndex));
+  // r1: current element's upper 32 bit
+  // r4: address of next element's upper 32 bit
+  __ cmp(r1, Operand(kHoleNanUpper32));
+  __ b(eq, &convert_hole);
+
+  // Non-hole double, copy value into a heap number.
+  __ AllocateHeapNumber(r2, r0, lr, r9, &gc_required);
+  // r2: new heap number
+  __ ldr(r0, MemOperand(r4, 12, NegOffset));
+  __ Strd(r0, r1, FieldMemOperand(r2, HeapNumber::kValueOffset));
+  __ mov(r0, r3);
+  __ str(r2, MemOperand(r3, 4, PostIndex));
+  __ RecordWrite(r6,
+                 r0,
+                 r2,
+                 kLRHasBeenSaved,
+                 kDontSaveFPRegs,
+                 EMIT_REMEMBERED_SET,
+                 OMIT_SMI_CHECK);
+  __ b(&entry);
+
+  // Replace the-hole NaN with the-hole pointer.
+  __ bind(&convert_hole);
+  __ str(r7, MemOperand(r3, 4, PostIndex));
+
+  __ bind(&entry);
+  __ cmp(r3, r5);
+  __ b(lt, &loop);
+
+  __ Pop(r3, r2, r1, r0);
+  // Update receiver's map.
+  __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
+  __ RecordWriteField(r2,
+                      HeapObject::kMapOffset,
+                      r3,
+                      r9,
+                      kLRHasBeenSaved,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  // Replace receiver's backing store with newly created and filled FixedArray.
+  __ str(r6, FieldMemOperand(r2, JSObject::kElementsOffset));
+  __ RecordWriteField(r2,
+                      JSObject::kElementsOffset,
+                      r6,
+                      r9,
+                      kLRHasBeenSaved,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  __ pop(lr);
+}
+
+#undef __
+
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM
diff --git a/src/arm/codegen-arm.h b/src/arm/codegen-arm.h
index 1c0d508..f54231c 100644
--- a/src/arm/codegen-arm.h
+++ b/src/arm/codegen-arm.h
@@ -29,7 +29,6 @@
 #define V8_ARM_CODEGEN_ARM_H_
 
 #include "ast.h"
-#include "code-stubs-arm.h"
 #include "ic-inl.h"
 
 namespace v8 {
diff --git a/src/arm/deoptimizer-arm.cc b/src/arm/deoptimizer-arm.cc
index bb03d74..8505c7d 100644
--- a/src/arm/deoptimizer-arm.cc
+++ b/src/arm/deoptimizer-arm.cc
@@ -100,7 +100,6 @@
     }
   }
 
-
 #ifdef DEBUG
   // Destroy the code which is not supposed to be run again.
   int instructions =
@@ -178,16 +177,13 @@
   Memory::uint32_at(stack_check_address_pointer) =
       reinterpret_cast<uint32_t>(replacement_code->entry());
 
-  RelocInfo rinfo(pc_after - 2 * kInstrSize,
-                  RelocInfo::CODE_TARGET,
-                  0,
-                  unoptimized_code);
-  unoptimized_code->GetHeap()->incremental_marking()->RecordWriteIntoCode(
-      unoptimized_code, &rinfo, replacement_code);
+  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
+      unoptimized_code, pc_after - 2 * kInstrSize, replacement_code);
 }
 
 
-void Deoptimizer::RevertStackCheckCodeAt(Address pc_after,
+void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
+                                         Address pc_after,
                                          Code* check_code,
                                          Code* replacement_code) {
   const int kInstrSize = Assembler::kInstrSize;
@@ -209,8 +205,8 @@
   Memory::uint32_at(stack_check_address_pointer) =
       reinterpret_cast<uint32_t>(check_code->entry());
 
-  check_code->GetHeap()->incremental_marking()->
-      RecordCodeTargetPatch(pc_after - 2 * kInstrSize, check_code);
+  check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
+      unoptimized_code, pc_after - 2 * kInstrSize, check_code);
 }
 
 
@@ -727,7 +723,6 @@
   __ ldr(r3, MemOperand(r2, FrameDescription::frame_size_offset()));
   __ bind(&inner_push_loop);
   __ sub(r3, r3, Operand(sizeof(uint32_t)));
-  // __ add(r6, r2, Operand(r3, LSL, 1));
   __ add(r6, r2, Operand(r3));
   __ ldr(r7, MemOperand(r6, FrameDescription::frame_content_offset()));
   __ push(r7);
@@ -761,8 +756,9 @@
   __ pop(ip);  // remove lr
 
   // Set up the roots register.
-  ExternalReference roots_address = ExternalReference::roots_address(isolate);
-  __ mov(r10, Operand(roots_address));
+  ExternalReference roots_array_start =
+      ExternalReference::roots_array_start(isolate);
+  __ mov(r10, Operand(roots_array_start));
 
   __ pop(ip);  // remove pc
   __ pop(r7);  // get continuation, leave pc on stack
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 353ce5b..497a295 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -269,7 +269,10 @@
       // constant.
       if (scope()->is_function_scope() && scope()->function() != NULL) {
         int ignored = 0;
-        EmitDeclaration(scope()->function(), CONST, NULL, &ignored);
+        VariableProxy* proxy = scope()->function();
+        ASSERT(proxy->var()->mode() == CONST ||
+               proxy->var()->mode() == CONST_HARMONY);
+        EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
       }
       VisitDeclarations(scope()->declarations());
     }
@@ -718,6 +721,8 @@
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
   Variable* variable = proxy->var();
+  bool binding_needs_init =
+      mode == CONST || mode == CONST_HARMONY || mode == LET;
   switch (variable->location()) {
     case Variable::UNALLOCATED:
       ++(*global_count);
@@ -729,7 +734,7 @@
         Comment cmnt(masm_, "[ Declaration");
         VisitForAccumulatorValue(function);
         __ str(result_register(), StackOperand(variable));
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         Comment cmnt(masm_, "[ Declaration");
         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
         __ str(ip, StackOperand(variable));
@@ -763,7 +768,7 @@
                                   EMIT_REMEMBERED_SET,
                                   OMIT_SMI_CHECK);
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         Comment cmnt(masm_, "[ Declaration");
         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
         __ str(ip, ContextOperand(cp, variable->index()));
@@ -775,9 +780,13 @@
     case Variable::LOOKUP: {
       Comment cmnt(masm_, "[ Declaration");
       __ mov(r2, Operand(variable->name()));
-      // Declaration nodes are always introduced in one of three modes.
-      ASSERT(mode == VAR || mode == CONST || mode == LET);
-      PropertyAttributes attr = (mode == CONST) ? READ_ONLY : NONE;
+      // Declaration nodes are always introduced in one of four modes.
+      ASSERT(mode == VAR ||
+             mode == CONST ||
+             mode == CONST_HARMONY ||
+             mode == LET);
+      PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
+          ? READ_ONLY : NONE;
       __ mov(r1, Operand(Smi::FromInt(attr)));
       // Push initial value, if any.
       // Note: For variables we must not push an initial value (such as
@@ -787,7 +796,7 @@
         __ Push(cp, r2, r1);
         // Push initial value for function declaration.
         VisitForStackValue(function);
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
         __ Push(cp, r2, r1, r0);
       } else {
@@ -929,11 +938,17 @@
   __ bind(&done_convert);
   __ push(r0);
 
+  // Check for proxies.
+  Label call_runtime;
+  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
+  __ b(le, &call_runtime);
+
   // Check cache validity in generated code. This is a fast case for
   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
   // guarantee cache validity, call the runtime system to check cache
   // validity or get the property names in a fixed array.
-  Label next, call_runtime;
+  Label next;
   // Preload a couple of values used in the loop.
   Register  empty_fixed_array_value = r6;
   __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
@@ -1012,9 +1027,16 @@
   __ jmp(&loop);
 
   // We got a fixed array in register r0. Iterate through that.
+  Label non_proxy;
   __ bind(&fixed_array);
-  __ mov(r1, Operand(Smi::FromInt(0)));  // Map (0) - force slow check.
-  __ Push(r1, r0);
+  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
+  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
+  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
+  __ b(gt, &non_proxy);
+  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
+  __ bind(&non_proxy);
+  __ Push(r1, r0);  // Smi and array
   __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
   __ mov(r0, Operand(Smi::FromInt(0)));
   __ Push(r1, r0);  // Fixed array length (as smi) and initial index.
@@ -1031,18 +1053,23 @@
   __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
 
-  // Get the expected map from the stack or a zero map in the
+  // Get the expected map from the stack or a smi in the
   // permanent slow case into register r2.
   __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
 
   // Check if the expected map still matches that of the enumerable.
-  // If not, we have to filter the key.
+  // If not, we may have to filter the key.
   Label update_each;
   __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
   __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
   __ cmp(r4, Operand(r2));
   __ b(eq, &update_each);
 
+  // For proxies, no filtering is done.
+  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
+  __ cmp(r2, Operand(Smi::FromInt(0)));
+  __ b(eq, &update_each);
+
   // Convert the entry to a string or (smi) 0 if it isn't a property
   // any more. If the property has been removed while iterating, we
   // just skip it.
@@ -1097,7 +1124,7 @@
       !pretenure &&
       scope()->is_function_scope() &&
       info->num_literals() == 0) {
-    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
+    FastNewClosureStub stub(info->strict_mode_flag());
     __ mov(r0, Operand(info));
     __ push(r0);
     __ CallStub(&stub);
@@ -1128,7 +1155,7 @@
   Scope* s = scope();
   while (s != NULL) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
+      if (s->calls_non_strict_eval()) {
         // Check that extension is NULL.
         __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
         __ tst(temp, temp);
@@ -1141,7 +1168,7 @@
     }
     // If no outer scope calls eval, we do not need to check more
     // context extensions.
-    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
     s = s->outer_scope();
   }
 
@@ -1185,7 +1212,7 @@
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
+      if (s->calls_non_strict_eval()) {
         // Check that extension is NULL.
         __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
         __ tst(temp, temp);
@@ -1224,11 +1251,12 @@
     Variable* local = var->local_if_not_shadowed();
     __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
     if (local->mode() == CONST ||
+        local->mode() == CONST_HARMONY ||
         local->mode() == LET) {
       __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
       if (local->mode() == CONST) {
         __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
-      } else {  // LET
+      } else {  // LET || CONST_HARMONY
         __ b(ne, done);
         __ mov(r0, Operand(var->name()));
         __ push(r0);
@@ -1266,13 +1294,15 @@
       Comment cmnt(masm_, var->IsContextSlot()
                               ? "Context variable"
                               : "Stack variable");
-      if (var->mode() != LET && var->mode() != CONST) {
+      if (!var->binding_needs_init()) {
         context()->Plug(var);
       } else {
         // Let and const need a read barrier.
         GetVar(r0, var);
         __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
-        if (var->mode() == LET) {
+        if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+          // Throw a reference error when using an uninitialized let/const
+          // binding in harmony mode.
           Label done;
           __ b(ne, &done);
           __ mov(r0, Operand(var->name()));
@@ -1280,6 +1310,8 @@
           __ CallRuntime(Runtime::kThrowReferenceError, 1);
           __ bind(&done);
         } else {
+          // Uninitialized const bindings outside of harmony mode are unholed.
+          ASSERT(var->mode() == CONST);
           __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
         }
         context()->Plug(r0);
@@ -1467,13 +1499,19 @@
 
   ZoneList<Expression*>* subexprs = expr->values();
   int length = subexprs->length();
+  Handle<FixedArray> constant_elements = expr->constant_elements();
+  ASSERT_EQ(2, constant_elements->length());
+  ElementsKind constant_elements_kind =
+      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+  Handle<FixedArrayBase> constant_elements_values(
+      FixedArrayBase::cast(constant_elements->get(1)));
 
   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
-  __ mov(r1, Operand(expr->constant_elements()));
+  __ mov(r1, Operand(constant_elements));
   __ Push(r3, r2, r1);
-  if (expr->constant_elements()->map() ==
+  if (constant_elements_values->map() ==
       isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
@@ -1485,8 +1523,14 @@
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
+    ASSERT(constant_elements_kind == FAST_ELEMENTS ||
+           constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+           FLAG_smi_only_arrays);
+    FastCloneShallowArrayStub::Mode mode =
+        constant_elements_kind == FAST_DOUBLE_ELEMENTS
+        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, length);
     __ CallStub(&stub);
   }
 
@@ -1509,24 +1553,56 @@
     }
     VisitForAccumulatorValue(subexpr);
 
-    // Store the subexpression value in the array's elements.
     __ ldr(r6, MemOperand(sp));  // Copy of array literal.
     __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
+    __ ldr(r2, FieldMemOperand(r6, JSObject::kMapOffset));
     int offset = FixedArray::kHeaderSize + (i * kPointerSize);
-    __ str(result_register(), FieldMemOperand(r1, offset));
 
-    Label no_map_change;
-    __ JumpIfSmi(result_register(), &no_map_change);
-    // Update the write barrier for the array store with r0 as the scratch
-    // register.
+    Label element_done;
+    Label double_elements;
+    Label smi_element;
+    Label slow_elements;
+    Label fast_elements;
+    __ CheckFastElements(r2, r3, &double_elements);
+
+    // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+    __ JumpIfSmi(result_register(), &smi_element);
+    __ CheckFastSmiOnlyElements(r2, r3, &fast_elements);
+
+    // Storing into the array literal requires an elements transition. Call into
+    // the runtime.
+    __ bind(&slow_elements);
+    __ push(r6);  // Copy of array literal.
+    __ mov(r1, Operand(Smi::FromInt(i)));
+    __ mov(r2, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
+    __ mov(r3, Operand(Smi::FromInt(strict_mode_flag())));  // Strict mode.
+    __ Push(r1, result_register(), r2, r3);
+    __ CallRuntime(Runtime::kSetProperty, 5);
+    __ b(&element_done);
+
+    // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+    __ bind(&double_elements);
+    __ mov(r3, Operand(Smi::FromInt(i)));
+    __ StoreNumberToDoubleElements(result_register(), r3, r6, r1, r4, r5, r9,
+                                   r7, &slow_elements);
+    __ b(&element_done);
+
+    // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+    __ bind(&fast_elements);
+    __ str(result_register(), FieldMemOperand(r1, offset));
+    // Update the write barrier for the array store.
     __ RecordWriteField(
         r1, offset, result_register(), r2, kLRHasBeenSaved, kDontSaveFPRegs,
         EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-    __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
-    __ CheckFastSmiOnlyElements(r3, r2, &no_map_change);
-    __ push(r6);  // Copy of array literal.
-    __ CallRuntime(Runtime::kNonSmiElementStored, 1);
-    __ bind(&no_map_change);
+    __ b(&element_done);
+
+    // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
+    // FAST_ELEMENTS, and value is Smi.
+    __ bind(&smi_element);
+    __ str(result_register(), FieldMemOperand(r1, offset));
+    // Fall through
+
+    __ bind(&element_done);
 
     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }
@@ -1903,8 +1979,9 @@
       }
     }
 
-  } else if (var->mode() != CONST) {
-    // Assignment to var or initializing assignment to let.
+  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
+    // Assignment to var or initializing assignment to let/const
+    // in harmony mode.
     if (var->IsStackAllocated() || var->IsContextSlot()) {
       MemOperand location = VarOperand(var, r1);
       if (FLAG_debug_code && op == Token::INIT_LET) {
@@ -2784,7 +2861,8 @@
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
   if (CpuFeatures::IsSupported(VFP3)) {
     __ PrepareCallCFunction(1, r0);
-    __ mov(r0, Operand(ExternalReference::isolate_address()));
+    __ ldr(r0, ContextOperand(context_register(), Context::GLOBAL_INDEX));
+    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
     __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
 
     CpuFeatures::Scope scope(VFP3);
@@ -2804,8 +2882,9 @@
     __ mov(r0, r4);
   } else {
     __ PrepareCallCFunction(2, r0);
+    __ ldr(r1, ContextOperand(context_register(), Context::GLOBAL_INDEX));
     __ mov(r0, Operand(r4));
-    __ mov(r1, Operand(ExternalReference::isolate_address()));
+    __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
     __ CallCFunction(
         ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
   }
@@ -4071,33 +4150,25 @@
         case Token::EQ_STRICT:
         case Token::EQ:
           cond = eq;
-          __ pop(r1);
           break;
         case Token::LT:
           cond = lt;
-          __ pop(r1);
           break;
         case Token::GT:
-          // Reverse left and right sides to obtain ECMA-262 conversion order.
-          cond = lt;
-          __ mov(r1, result_register());
-          __ pop(r0);
+          cond = gt;
          break;
         case Token::LTE:
-          // Reverse left and right sides to obtain ECMA-262 conversion order.
-          cond = ge;
-          __ mov(r1, result_register());
-          __ pop(r0);
+          cond = le;
           break;
         case Token::GTE:
           cond = ge;
-          __ pop(r1);
           break;
         case Token::IN:
         case Token::INSTANCEOF:
         default:
           UNREACHABLE();
       }
+      __ pop(r1);
 
       bool inline_smi_code = ShouldInlineSmiCase(op);
       JumpPatchSite patch_site(masm_);
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index 6e0badc..18d4a9f 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -382,10 +382,10 @@
 
 // The generated code does not accept smi keys.
 // The generated code falls through if both probes miss.
-static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
-                                          int argc,
-                                          Code::Kind kind,
-                                          Code::ExtraICState extra_ic_state) {
+void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
+                                               int argc,
+                                               Code::Kind kind,
+                                               Code::ExtraICState extra_state) {
   // ----------- S t a t e -------------
   //  -- r1    : receiver
   //  -- r2    : name
@@ -395,7 +395,7 @@
   // Probe the stub cache.
   Code::Flags flags = Code::ComputeFlags(kind,
                                          MONOMORPHIC,
-                                         extra_ic_state,
+                                         extra_state,
                                          NORMAL,
                                          argc);
   Isolate::Current()->stub_cache()->GenerateProbe(
@@ -464,7 +464,7 @@
 }
 
 
-static void GenerateCallNormal(MacroAssembler* masm, int argc) {
+void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
   // ----------- S t a t e -------------
   //  -- r2    : name
   //  -- lr    : return address
@@ -486,10 +486,10 @@
 }
 
 
-static void GenerateCallMiss(MacroAssembler* masm,
-                             int argc,
-                             IC::UtilityId id,
-                             Code::ExtraICState extra_ic_state) {
+void CallICBase::GenerateMiss(MacroAssembler* masm,
+                              int argc,
+                              IC::UtilityId id,
+                              Code::ExtraICState extra_state) {
   // ----------- S t a t e -------------
   //  -- r2    : name
   //  -- lr    : return address
@@ -541,7 +541,7 @@
   }
 
   // Invoke the function.
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   ParameterCount actual(argc);
@@ -553,18 +553,6 @@
 }
 
 
-void CallIC::GenerateMiss(MacroAssembler* masm,
-                          int argc,
-                          Code::ExtraICState extra_ic_state) {
-  // ----------- S t a t e -------------
-  //  -- r2    : name
-  //  -- lr    : return address
-  // -----------------------------------
-
-  GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state);
-}
-
-
 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                  int argc,
                                  Code::ExtraICState extra_ic_state) {
@@ -580,27 +568,6 @@
 }
 
 
-void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
-  // ----------- S t a t e -------------
-  //  -- r2    : name
-  //  -- lr    : return address
-  // -----------------------------------
-
-  GenerateCallNormal(masm, argc);
-  GenerateMiss(masm, argc, Code::kNoExtraICState);
-}
-
-
-void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
-  // ----------- S t a t e -------------
-  //  -- r2    : name
-  //  -- lr    : return address
-  // -----------------------------------
-
-  GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState);
-}
-
-
 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   // ----------- S t a t e -------------
   //  -- r2    : name
@@ -718,7 +685,7 @@
   __ JumpIfSmi(r2, &miss);
   __ IsObjectJSStringType(r2, r0, &miss);
 
-  GenerateCallNormal(masm, argc);
+  CallICBase::GenerateNormal(masm, argc);
   __ bind(&miss);
   GenerateMiss(masm, argc);
 }
@@ -1244,6 +1211,47 @@
 }
 
 
+void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
+  // ---------- S t a t e --------------
+  //  -- r2     : receiver
+  //  -- r3     : target map
+  //  -- lr     : return address
+  // -----------------------------------
+  // Must return the modified receiver in r0.
+  if (!FLAG_trace_elements_transitions) {
+    Label fail;
+    ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+    __ mov(r0, r2);
+    __ Ret();
+    __ bind(&fail);
+  }
+
+  __ push(r2);
+  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
+}
+
+
+void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
+    MacroAssembler* masm) {
+  // ---------- S t a t e --------------
+  //  -- r2     : receiver
+  //  -- r3     : target map
+  //  -- lr     : return address
+  // -----------------------------------
+  // Must return the modified receiver in r0.
+  if (!FLAG_trace_elements_transitions) {
+    Label fail;
+    ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
+    __ mov(r0, r2);
+    __ Ret();
+    __ bind(&fail);
+  }
+
+  __ push(r2);
+  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
+}
+
+
 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                               StrictModeFlag strict_mode) {
   // ---------- S t a t e --------------
@@ -1559,11 +1567,9 @@
     case Token::LT:
       return lt;
     case Token::GT:
-      // Reverse left and right operands to obtain ECMA-262 conversion order.
-      return lt;
+      return gt;
     case Token::LTE:
-      // Reverse left and right operands to obtain ECMA-262 conversion order.
-      return ge;
+      return le;
     case Token::GTE:
       return ge;
     default:
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index 8495939..5197842 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -391,6 +391,12 @@
 }
 
 
+void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
+  object()->PrintTo(stream);
+  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
+}
+
+
 LChunk::LChunk(CompilationInfo* info, HGraph* graph)
     : spill_slot_count_(0),
       info_(info),
@@ -1404,12 +1410,10 @@
 
 
 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
-  Token::Value op = instr->token();
   ASSERT(instr->left()->representation().IsTagged());
   ASSERT(instr->right()->representation().IsTagged());
-  bool reversed = (op == Token::GT || op == Token::LTE);
-  LOperand* left = UseFixed(instr->left(), reversed ? r0 : r1);
-  LOperand* right = UseFixed(instr->right(), reversed ? r1 : r0);
+  LOperand* left = UseFixed(instr->left(), r1);
+  LOperand* right = UseFixed(instr->right(), r0);
   LCmpT* result = new LCmpT(left, right);
   return MarkAsCall(DefineFixed(result, r0), instr);
 }
@@ -1421,8 +1425,8 @@
   if (r.IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    LOperand* left = UseRegisterAtStart(instr->left());
-    LOperand* right = UseRegisterAtStart(instr->right());
+    LOperand* left = UseRegisterOrConstantAtStart(instr->left());
+    LOperand* right = UseRegisterOrConstantAtStart(instr->right());
     return new LCmpIDAndBranch(left, right);
   } else {
     ASSERT(r.IsDouble());
@@ -1970,6 +1974,26 @@
 }
 
 
+LInstruction* LChunkBuilder::DoTransitionElementsKind(
+    HTransitionElementsKind* instr) {
+  if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
+      instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+    LOperand* object = UseRegister(instr->object());
+    LOperand* new_map_reg = TempRegister();
+    LTransitionElementsKind* result =
+        new LTransitionElementsKind(object, new_map_reg, NULL);
+    return DefineSameAsFirst(result);
+  } else {
+    LOperand* object = UseFixed(instr->object(), r0);
+    LOperand* fixed_object_reg = FixedTemp(r2);
+    LOperand* new_map_reg = FixedTemp(r3);
+    LTransitionElementsKind* result =
+        new LTransitionElementsKind(object, new_map_reg, fixed_object_reg);
+    return MarkAsCall(DefineFixed(result, r0), instr);
+  }
+}
+
+
 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
   bool needs_write_barrier = instr->NeedsWriteBarrier();
 
diff --git a/src/arm/lithium-arm.h b/src/arm/lithium-arm.h
index 73c7e45..5733bd0 100644
--- a/src/arm/lithium-arm.h
+++ b/src/arm/lithium-arm.h
@@ -162,6 +162,7 @@
   V(ThisFunction)                               \
   V(Throw)                                      \
   V(ToFastProperties)                           \
+  V(TransitionElementsKind)                     \
   V(Typeof)                                     \
   V(TypeofIsAndBranch)                          \
   V(UnaryMathOperation)                         \
@@ -1260,7 +1261,6 @@
   LOperand* context() { return InputAt(0); }
   LOperand* value() { return InputAt(1); }
   int slot_index() { return hydrogen()->slot_index(); }
-  int needs_write_barrier() { return hydrogen()->NeedsWriteBarrier(); }
 
   virtual void PrintDataTo(StringStream* stream);
 };
@@ -1277,7 +1277,9 @@
 
 
 class LThisFunction: public LTemplateInstruction<1, 0, 0> {
+ public:
   DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function")
+  DECLARE_HYDROGEN_ACCESSOR(ThisFunction)
 };
 
 
@@ -1561,7 +1563,6 @@
   Handle<Object> name() const { return hydrogen()->name(); }
   bool is_in_object() { return hydrogen()->is_in_object(); }
   int offset() { return hydrogen()->offset(); }
-  bool needs_write_barrier() { return hydrogen()->NeedsWriteBarrier(); }
   Handle<Map> transition() const { return hydrogen()->transition(); }
 };
 
@@ -1581,7 +1582,8 @@
   LOperand* object() { return inputs_[0]; }
   LOperand* value() { return inputs_[1]; }
   Handle<Object> name() const { return hydrogen()->name(); }
-  bool strict_mode() { return hydrogen()->strict_mode(); }
+  StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
+  bool strict_mode() { return strict_mode_flag() == kStrictMode; }
 };
 
 
@@ -1669,6 +1671,30 @@
 };
 
 
+class LTransitionElementsKind: public LTemplateInstruction<1, 1, 2> {
+ public:
+  LTransitionElementsKind(LOperand* object,
+                          LOperand* new_map_temp,
+                          LOperand* temp_reg) {
+    inputs_[0] = object;
+    temps_[0] = new_map_temp;
+    temps_[1] = temp_reg;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind,
+                               "transition-elements-kind")
+  DECLARE_HYDROGEN_ACCESSOR(TransitionElementsKind)
+
+  virtual void PrintDataTo(StringStream* stream);
+
+  LOperand* object() { return inputs_[0]; }
+  LOperand* new_map_reg() { return temps_[0]; }
+  LOperand* temp_reg() { return temps_[1]; }
+  Handle<Map> original_map() { return hydrogen()->original_map(); }
+  Handle<Map> transitioned_map() { return hydrogen()->transitioned_map(); }
+};
+
+
 class LStringAdd: public LTemplateInstruction<1, 2, 0> {
  public:
   LStringAdd(LOperand* left, LOperand* right) {
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 70ef884..4cf7df4 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -410,6 +410,12 @@
 }
 
 
+double LCodeGen::ToDouble(LConstantOperand* op) const {
+  Handle<Object> value = chunk_->LookupLiteral(op);
+  return value->Number();
+}
+
+
 Operand LCodeGen::ToOperand(LOperand* op) {
   if (op->IsConstantOperand()) {
     LConstantOperand* const_op = LConstantOperand::cast(op);
@@ -1705,30 +1711,44 @@
 }
 
 
-void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
-  __ cmp(ToRegister(left), ToRegister(right));
-}
-
-
 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
   LOperand* left = instr->InputAt(0);
   LOperand* right = instr->InputAt(1);
   int false_block = chunk_->LookupDestination(instr->false_block_id());
   int true_block = chunk_->LookupDestination(instr->true_block_id());
+  Condition cond = TokenToCondition(instr->op(), false);
 
-  if (instr->is_double()) {
-    // Compare left and right as doubles and load the
-    // resulting flags into the normal status register.
-    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
-    // If a NaN is involved, i.e. the result is unordered (V set),
-    // jump to false block label.
-    __ b(vs, chunk_->GetAssemblyLabel(false_block));
+  if (left->IsConstantOperand() && right->IsConstantOperand()) {
+    // We can statically evaluate the comparison.
+    double left_val = ToDouble(LConstantOperand::cast(left));
+    double right_val = ToDouble(LConstantOperand::cast(right));
+    int next_block =
+      EvalComparison(instr->op(), left_val, right_val) ? true_block
+                                                       : false_block;
+    EmitGoto(next_block);
   } else {
-    EmitCmpI(left, right);
+    if (instr->is_double()) {
+      // Compare left and right operands as doubles and load the
+      // resulting flags into the normal status register.
+      __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
+      // If a NaN is involved, i.e. the result is unordered (V set),
+      // jump to false block label.
+      __ b(vs, chunk_->GetAssemblyLabel(false_block));
+    } else {
+      if (right->IsConstantOperand()) {
+        __ cmp(ToRegister(left),
+               Operand(ToInteger32(LConstantOperand::cast(right))));
+      } else if (left->IsConstantOperand()) {
+        __ cmp(ToRegister(right),
+               Operand(ToInteger32(LConstantOperand::cast(left))));
+        // We transposed the operands. Reverse the condition.
+        cond = ReverseCondition(cond);
+      } else {
+        __ cmp(ToRegister(left), ToRegister(right));
+      }
+    }
+    EmitBranch(true_block, false_block, cond);
   }
-
-  Condition cc = TokenToCondition(instr->op(), instr->is_double());
-  EmitBranch(true_block, false_block, cc);
 }
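
Note: when both inputs are constant operands, the new code above folds the
comparison at compile time through EvalComparison and jumps straight to the
taken block.  That helper is defined elsewhere in the tree; a minimal sketch
of the shape assumed here (plain double comparison keyed on the token):

    // Sketch only -- the real EvalComparison lives outside this hunk.
    static bool EvalComparison(Token::Value op, double left, double right) {
      switch (op) {
        case Token::EQ_STRICT:
        case Token::EQ:  return left == right;
        case Token::LT:  return left < right;
        case Token::GT:  return left > right;
        case Token::LTE: return left <= right;
        case Token::GTE: return left >= right;
        default:
          UNREACHABLE();
          return false;
      }
    }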
 
 
@@ -2176,9 +2196,6 @@
   __ cmp(r0, Operand(0));  // This instruction also signals no smi code inlined.
 
   Condition condition = ComputeCompareCondition(op);
-  if (op == Token::GT || op == Token::LTE) {
-    condition = ReverseCondition(condition);
-  }
   __ LoadRoot(ToRegister(instr->result()),
               Heap::kTrueValueRootIndex,
               condition);
@@ -2251,13 +2268,19 @@
   __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
 
   // Cells are always in the remembered set.
-  __ RecordWriteField(scratch,
-                      JSGlobalPropertyCell::kValueOffset,
-                      value,
-                      scratch2,
-                      kLRHasBeenSaved,
-                      kSaveFPRegs,
-                      OMIT_REMEMBERED_SET);
+  if (instr->hydrogen()->NeedsWriteBarrier()) {
+    HType type = instr->hydrogen()->value()->type();
+    SmiCheck check_needed =
+        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
+    __ RecordWriteField(scratch,
+                        JSGlobalPropertyCell::kValueOffset,
+                        value,
+                        scratch2,
+                        kLRHasBeenSaved,
+                        kSaveFPRegs,
+                        OMIT_REMEMBERED_SET,
+                        check_needed);
+  }
 }
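
Note: this store and the following ones share one pattern -- when the HType of
the stored value proves it is a heap object, the write barrier may omit its
inline smi check, because a smi is never a pointer the barrier has to record.
A simplified sketch of what the check_needed flag controls inside
MacroAssembler::RecordWriteField (the real code also filters on the remembered
set and incremental-marking state):

    Label done;
    if (check_needed == INLINE_SMI_CHECK) {
      JumpIfSmi(value, &done);  // smi values carry no pointer; skip the barrier
    }
    // ... remembered-set / marking work ...
    bind(&done);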
 
 
@@ -2285,13 +2308,18 @@
   Register value = ToRegister(instr->value());
   MemOperand target = ContextOperand(context, instr->slot_index());
   __ str(value, target);
-  if (instr->needs_write_barrier()) {
+  if (instr->hydrogen()->NeedsWriteBarrier()) {
+    HType type = instr->hydrogen()->value()->type();
+    SmiCheck check_needed =
+        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
     __ RecordWriteContextSlot(context,
                               target.offset(),
                               value,
                               scratch0(),
                               kLRHasBeenSaved,
-                              kSaveFPRegs);
+                              kSaveFPRegs,
+                              EMIT_REMEMBERED_SET,
+                              check_needed);
   }
 }
 
@@ -2312,7 +2340,7 @@
                                                Register object,
                                                Handle<Map> type,
                                                Handle<String> name) {
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   type->LookupInDescriptors(NULL, *name, &lookup);
   ASSERT(lookup.IsProperty() &&
          (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
@@ -2778,7 +2806,7 @@
 
 void LCodeGen::DoThisFunction(LThisFunction* instr) {
   Register result = ToRegister(instr->result());
-  __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  LoadHeapObject(result, instr->hydrogen()->closure());
 }
 
 
@@ -3297,21 +3325,36 @@
   }
 
   // Do the store.
+  HType type = instr->hydrogen()->value()->type();
+  SmiCheck check_needed =
+      type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
   if (instr->is_in_object()) {
     __ str(value, FieldMemOperand(object, offset));
-    if (instr->needs_write_barrier()) {
+    if (instr->hydrogen()->NeedsWriteBarrier()) {
       // Update the write barrier for the object for in-object properties.
-      __ RecordWriteField(
-          object, offset, value, scratch, kLRHasBeenSaved, kSaveFPRegs);
+      __ RecordWriteField(object,
+                          offset,
+                          value,
+                          scratch,
+                          kLRHasBeenSaved,
+                          kSaveFPRegs,
+                          EMIT_REMEMBERED_SET,
+                          check_needed);
     }
   } else {
     __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
     __ str(value, FieldMemOperand(scratch, offset));
-    if (instr->needs_write_barrier()) {
+    if (instr->hydrogen()->NeedsWriteBarrier()) {
       // Update the write barrier for the properties array.
       // object is used as a scratch register.
-      __ RecordWriteField(
-          scratch, offset, value, object, kLRHasBeenSaved, kSaveFPRegs);
+      __ RecordWriteField(scratch,
+                          offset,
+                          value,
+                          object,
+                          kLRHasBeenSaved,
+                          kSaveFPRegs,
+                          EMIT_REMEMBERED_SET,
+                          check_needed);
     }
   }
 }
@@ -3362,9 +3405,18 @@
   }
 
   if (instr->hydrogen()->NeedsWriteBarrier()) {
+    HType type = instr->hydrogen()->value()->type();
+    SmiCheck check_needed =
+        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
     // Compute address of modified element and store it into key register.
     __ add(key, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-    __ RecordWrite(elements, key, value, kLRHasBeenSaved, kSaveFPRegs);
+    __ RecordWrite(elements,
+                   key,
+                   value,
+                   kLRHasBeenSaved,
+                   kSaveFPRegs,
+                   EMIT_REMEMBERED_SET,
+                   check_needed);
   }
 }
 
@@ -3487,6 +3539,48 @@
 }
 
 
+void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
+  Register object_reg = ToRegister(instr->object());
+  Register new_map_reg = ToRegister(instr->new_map_reg());
+  Register scratch = scratch0();
+
+  Handle<Map> from_map = instr->original_map();
+  Handle<Map> to_map = instr->transitioned_map();
+  ElementsKind from_kind = from_map->elements_kind();
+  ElementsKind to_kind = to_map->elements_kind();
+
+  Label not_applicable;
+  __ ldr(scratch, FieldMemOperand(object_reg, HeapObject::kMapOffset));
+  __ cmp(scratch, Operand(from_map));
+  __ b(ne, &not_applicable);
+  __ mov(new_map_reg, Operand(to_map));
+  if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+    __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
+    // Write barrier.
+    __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
+                        scratch, kLRHasBeenSaved, kDontSaveFPRegs);
+  } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
+      to_kind == FAST_DOUBLE_ELEMENTS) {
+    Register fixed_object_reg = ToRegister(instr->temp_reg());
+    ASSERT(fixed_object_reg.is(r2));
+    ASSERT(new_map_reg.is(r3));
+    __ mov(fixed_object_reg, object_reg);
+    CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
+             RelocInfo::CODE_TARGET, instr);
+  } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+    Register fixed_object_reg = ToRegister(instr->temp_reg());
+    ASSERT(fixed_object_reg.is(r2));
+    ASSERT(new_map_reg.is(r3));
+    __ mov(fixed_object_reg, object_reg);
+    CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
+             RelocInfo::CODE_TARGET, instr);
+  } else {
+    UNREACHABLE();
+  }
+  __ bind(&not_applicable);
+}
+
+
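Note: DoTransitionElementsKind above handles exactly three transitions; any
other pair of maps is unreachable.  Summarized as a predicate (a sketch for
orientation, not code from this patch):

    // FAST_SMI_ONLY -> FAST rewrites only the map (plus a write barrier on
    // the map field); the transitions to or from double elements go through
    // a stub or the runtime because the backing store must be rewritten.
    static bool IsHandledElementsTransition(ElementsKind from, ElementsKind to) {
      return (from == FAST_SMI_ONLY_ELEMENTS && to == FAST_ELEMENTS) ||
             (from == FAST_SMI_ONLY_ELEMENTS && to == FAST_DOUBLE_ELEMENTS) ||
             (from == FAST_DOUBLE_ELEMENTS && to == FAST_ELEMENTS);
    }
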
 void LCodeGen::DoStringAdd(LStringAdd* instr) {
   __ push(ToRegister(instr->left()));
   __ push(ToRegister(instr->right()));
@@ -4203,10 +4297,15 @@
 
 
 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
+  Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements();
+  ASSERT_EQ(2, constant_elements->length());
+  ElementsKind constant_elements_kind =
+      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+
   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
-  __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
+  __ mov(r1, Operand(constant_elements));
   __ Push(r3, r2, r1);
 
   // Pick the right runtime function or stub to call.
@@ -4223,7 +4322,9 @@
     CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
   } else {
     FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ELEMENTS;
+        constant_elements_kind == FAST_DOUBLE_ELEMENTS
+        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, length);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   }
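
Note: the ASSERT_EQ(2, ...) above relies on constant_elements being a
two-element pair -- the boilerplate's elements kind as a smi, followed (it is
assumed here) by the constant values themselves.  The decoding done inline
above could be written as a small helper (hypothetical name):

    static ElementsKind BoilerplateElementsKind(
        Handle<FixedArray> constant_elements) {
      ASSERT_EQ(2, constant_elements->length());
      return static_cast<ElementsKind>(
          Smi::cast(constant_elements->get(0))->value());
    }
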
@@ -4315,8 +4416,7 @@
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
   if (!pretenure && shared_info->num_literals() == 0) {
-    FastNewClosureStub stub(
-        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
+    FastNewClosureStub stub(shared_info->strict_mode_flag());
     __ mov(r1, Operand(shared_info));
     __ push(r1);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
@@ -4349,8 +4449,9 @@
                                                   false_label,
                                                   input,
                                                   instr->type_literal());
-
-  EmitBranch(true_block, false_block, final_branch_condition);
+  if (final_branch_condition != kNoCondition) {
+    EmitBranch(true_block, false_block, final_branch_condition);
+  }
 }
 
 
@@ -4420,9 +4521,7 @@
     final_branch_condition = eq;
 
   } else {
-    final_branch_condition = ne;
     __ b(false_label);
-    // A dead branch instruction will be generated after this point.
   }
 
   return final_branch_condition;
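
Note: with the dead `ne` assignment gone, an unrecognized type literal simply
branches to the false label and EmitTypeofIs is expected to return
kNoCondition -- presumably its initial value, set outside this hunk -- which
DoTypeofIsAndBranch now checks before emitting the final branch.  Sketch of
the assumed control flow:

    Condition final_branch_condition = kNoCondition;  // assumed initial value
    // Known literals ("number", "string", ...) emit their check and set
    // final_branch_condition; everything else falls through to:
    __ b(false_label);                // unknown literal: always false
    return final_branch_condition;    // kNoCondition => caller skips EmitBranch
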
diff --git a/src/arm/lithium-codegen-arm.h b/src/arm/lithium-codegen-arm.h
index 711e459..b01e496 100644
--- a/src/arm/lithium-codegen-arm.h
+++ b/src/arm/lithium-codegen-arm.h
@@ -86,6 +86,7 @@
                                         SwVfpRegister flt_scratch,
                                         DoubleRegister dbl_scratch);
   int ToInteger32(LConstantOperand* op) const;
+  double ToDouble(LConstantOperand* op) const;
   Operand ToOperand(LOperand* op);
   MemOperand ToMemOperand(LOperand* op) const;
   // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
@@ -139,8 +140,8 @@
   bool is_done() const { return status_ == DONE; }
   bool is_aborted() const { return status_ == ABORTED; }
 
-  int strict_mode_flag() const {
-    return info()->is_strict_mode() ? kStrictMode : kNonStrictMode;
+  StrictModeFlag strict_mode_flag() const {
+    return info()->strict_mode_flag();
   }
 
   LChunk* chunk() const { return chunk_; }
@@ -206,7 +207,7 @@
                                LInstruction* instr);
 
   // Generate a direct call to a known function.  Expects the function
-  // to be in edi.
+  // to be in r1.
   void CallKnownFunction(Handle<JSFunction> function,
                          int arity,
                          LInstruction* instr,
@@ -263,7 +264,6 @@
   static Condition TokenToCondition(Token::Value op, bool is_unsigned);
   void EmitGoto(int block);
   void EmitBranch(int left_block, int right_block, Condition cc);
-  void EmitCmpI(LOperand* left, LOperand* right);
   void EmitNumberUntagD(Register input,
                         DoubleRegister result,
                         bool deoptimize_on_undefined,
@@ -272,8 +272,10 @@
   // Emits optimized code for typeof x == "y".  Modifies input register.
   // Returns the condition on which a final split to
   // true and false label should be made, to optimize fallthrough.
-  Condition EmitTypeofIs(Label* true_label, Label* false_label,
-                         Register input, Handle<String> type_name);
+  Condition EmitTypeofIs(Label* true_label,
+                         Label* false_label,
+                         Register input,
+                         Handle<String> type_name);
 
   // Emits optimized code for %_IsObject(x).  Preserves input register.
   // Returns the condition on which a final split to
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 918f9eb..cf4258c 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -1101,24 +1101,16 @@
   // You can't call a function without a valid frame.
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
-  ASSERT(function->is_compiled());
-
   // Get the function and setup the context.
   mov(r1, Operand(Handle<JSFunction>(function)));
   ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
 
-  // Invoke the cached code.
-  Handle<Code> code(function->code());
   ParameterCount expected(function->shared()->formal_parameter_count());
-  if (V8::UseCrankshaft()) {
-    // TODO(kasperl): For now, we always call indirectly through the
-    // code field in the function to allow recompilation to take effect
-    // without changing any of the call sites.
-    ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
-    InvokeCode(r3, expected, actual, flag, NullCallWrapper(), call_kind);
-  } else {
-    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag, call_kind);
-  }
+  // We call indirectly through the code field in the function to
+  // allow recompilation to take effect without changing any of the
+  // call sites.
+  ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
+  InvokeCode(r3, expected, actual, flag, NullCallWrapper(), call_kind);
 }
 
 
@@ -1602,6 +1594,7 @@
   ASSERT(!result.is(scratch1));
   ASSERT(!result.is(scratch2));
   ASSERT(!scratch1.is(scratch2));
+  ASSERT(!object_size.is(ip));
   ASSERT(!result.is(ip));
   ASSERT(!scratch1.is(ip));
   ASSERT(!scratch2.is(ip));
@@ -2030,7 +2023,8 @@
 void MacroAssembler::TryGetFunctionPrototype(Register function,
                                              Register result,
                                              Register scratch,
-                                             Label* miss) {
+                                             Label* miss,
+                                             bool miss_on_bound_function) {
   // Check that the receiver isn't a smi.
   JumpIfSmi(function, miss);
 
@@ -2038,6 +2032,16 @@
   CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
   b(ne, miss);
 
+  if (miss_on_bound_function) {
+    ldr(scratch,
+        FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
+    ldr(scratch,
+        FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
+    tst(scratch,
+        Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));
+    b(ne, miss);
+  }
+
   // Make sure that the function has an instance prototype.
   Label non_instance;
   ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
@@ -3147,8 +3151,10 @@
 #ifdef CAN_USE_ARMV5_INSTRUCTIONS
   clz(zeros, source);  // This instruction is only supported after ARM5.
 #else
-  mov(zeros, Operand(0, RelocInfo::NONE));
+  // The order of the next two lines is important: the zeros register
+  // can be the same as the source register.
   Move(scratch, source);
+  mov(zeros, Operand(0, RelocInfo::NONE));
   // Top 16.
   tst(scratch, Operand(0xffff0000));
   add(zeros, zeros, Operand(16), LeaveCC, eq);
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index 8ee468a..90c4b37 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -320,8 +320,11 @@
   }
 
   // Push four registers.  Pushes leftmost register first (to highest address).
-  void Push(Register src1, Register src2,
-            Register src3, Register src4, Condition cond = al) {
+  void Push(Register src1,
+            Register src2,
+            Register src3,
+            Register src4,
+            Condition cond = al) {
     ASSERT(!src1.is(src2));
     ASSERT(!src2.is(src3));
     ASSERT(!src1.is(src3));
@@ -360,6 +363,57 @@
     }
   }
 
+  // Pop three registers.  Pops rightmost register first (from lower address).
+  void Pop(Register src1, Register src2, Register src3, Condition cond = al) {
+    ASSERT(!src1.is(src2));
+    ASSERT(!src2.is(src3));
+    ASSERT(!src1.is(src3));
+    if (src1.code() > src2.code()) {
+      if (src2.code() > src3.code()) {
+        ldm(ia_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
+      } else {
+        ldr(src3, MemOperand(sp, 4, PostIndex), cond);
+        ldm(ia_w, sp, src1.bit() | src2.bit(), cond);
+      }
+    } else {
+      Pop(src2, src3, cond);
+      str(src1, MemOperand(sp, 4, PostIndex), cond);
+    }
+  }
+
+  // Pop four registers.  Pops rightmost register first (from lower address).
+  void Pop(Register src1,
+           Register src2,
+           Register src3,
+           Register src4,
+           Condition cond = al) {
+    ASSERT(!src1.is(src2));
+    ASSERT(!src2.is(src3));
+    ASSERT(!src1.is(src3));
+    ASSERT(!src1.is(src4));
+    ASSERT(!src2.is(src4));
+    ASSERT(!src3.is(src4));
+    if (src1.code() > src2.code()) {
+      if (src2.code() > src3.code()) {
+        if (src3.code() > src4.code()) {
+          ldm(ia_w,
+              sp,
+              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
+              cond);
+        } else {
+          ldr(src4, MemOperand(sp, 4, PostIndex), cond);
+          ldm(ia_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
+        }
+      } else {
+        Pop(src3, src4, cond);
+        ldm(ia_w, sp, src1.bit() | src2.bit(), cond);
+      }
+    } else {
+      Pop(src2, src3, src4, cond);
+      ldr(src1, MemOperand(sp, 4, PostIndex), cond);
+    }
+  }
+
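Note: the new Pop overloads mirror the existing Push overloads, popping in the
opposite order so that a matching Push/Pop pair restores every register.  A
usage sketch (registers chosen for illustration only; descending register
codes let the Pop above collapse into a single ldm):

    __ Push(r4, r3, r2, r1);   // r4 ends up at the highest address
    // ... code that clobbers r1-r4 ...
    __ Pop(r4, r3, r2, r1);    // r1 is popped first, r4 last
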
   // Push and pop the registers that can hold pointers, as defined by the
   // RegList constant kSafepointSavedRegisters.
   void PushSafepointRegisters();
@@ -672,7 +726,8 @@
   void TryGetFunctionPrototype(Register function,
                                Register result,
                                Register scratch,
-                               Label* miss);
+                               Label* miss,
+                               bool miss_on_bound_function = false);
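
Note: the new parameter defaults to false, so existing call sites keep their
behaviour.  A caller that must not treat a bound function like an ordinary
function opts in explicitly; hypothetical usage, not taken from this patch:

    // Inside a stub that walks the prototype chain of `function`:
    __ TryGetFunctionPrototype(function, prototype, scratch, &slow,
                               true);  // miss_on_bound_function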
 
   // Compare object type for heap object.  heap_object contains a non-Smi
   // whose object type should be compared with the given type.  This both
diff --git a/src/arm/regexp-macro-assembler-arm.cc b/src/arm/regexp-macro-assembler-arm.cc
index c876467..b212f9f 100644
--- a/src/arm/regexp-macro-assembler-arm.cc
+++ b/src/arm/regexp-macro-assembler-arm.cc
@@ -1111,6 +1111,11 @@
     frame_entry<const String*>(re_frame, kInputString) = *subject;
     frame_entry<const byte*>(re_frame, kInputStart) = new_address;
     frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
+  } else if (frame_entry<const String*>(re_frame, kInputString) != *subject) {
+    // Subject string might have been a ConsString that underwent
+    // short-circuiting during GC. That will not change start_address but
+    // will change the pointer inside the subject handle.
+    frame_entry<const String*>(re_frame, kInputString) = *subject;
   }
 
   return 0;
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index 5704202..542cc30 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -1268,9 +1268,9 @@
 
 // Returns the limit of the stack area to enable checking for stack overflows.
 uintptr_t Simulator::StackLimit() const {
-  // Leave a safety margin of 256 bytes to prevent overrunning the stack when
+  // Leave a safety margin of 512 bytes to prevent overrunning the stack when
   // pushing values.
-  return reinterpret_cast<uintptr_t>(stack_) + 256;
+  return reinterpret_cast<uintptr_t>(stack_) + 512;
 }
 
 
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 4558afe..f9a10c4 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -95,7 +95,63 @@
 // must always call a backup property check that is complete.
 // This function is safe to call if the receiver has fast properties.
 // Name must be a symbol and receiver must be a heap object.
-MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup(
+static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
+                                             Label* miss_label,
+                                             Register receiver,
+                                             Handle<String> name,
+                                             Register scratch0,
+                                             Register scratch1) {
+  ASSERT(name->IsSymbol());
+  Counters* counters = masm->isolate()->counters();
+  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
+  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
+
+  Label done;
+
+  const int kInterceptorOrAccessCheckNeededMask =
+      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
+
+  // Bail out if the receiver has a named interceptor or requires access checks.
+  Register map = scratch1;
+  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
+  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
+  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
+  __ b(ne, miss_label);
+
+  // Check that receiver is a JSObject.
+  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
+  __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
+  __ b(lt, miss_label);
+
+  // Load properties array.
+  Register properties = scratch0;
+  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+  // Check that the properties array is a dictionary.
+  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
+  Register tmp = properties;
+  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
+  __ cmp(map, tmp);
+  __ b(ne, miss_label);
+
+  // Restore the temporarily used register.
+  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+
+
+  StringDictionaryLookupStub::GenerateNegativeLookup(masm,
+                                                     miss_label,
+                                                     &done,
+                                                     receiver,
+                                                     properties,
+                                                     name,
+                                                     scratch1);
+  __ bind(&done);
+  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateDictionaryNegativeLookup(
     MacroAssembler* masm,
     Label* miss_label,
     Register receiver,
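
Note: the rename above is the pattern used throughout the rest of this file
while the stub cache is being handlified -- each raw-pointer helper returning
MaybeObject* gains a Handle<>-based twin returning void (no Failure to
propagate), and the old version survives under a Try* name until every caller
is converted.  Schematically (signatures abridged from the surrounding diff):

    // New, handlified entry point:
    static void GenerateDictionaryNegativeLookup(
        MacroAssembler* masm, Label* miss_label, Register receiver,
        Handle<String> name, Register scratch0, Register scratch1);

    // Old entry point, renamed until its remaining callers are handlified:
    MUST_USE_RESULT static MaybeObject* TryGenerateDictionaryNegativeLookup(
        MacroAssembler* masm, Label* miss_label, Register receiver,
        String* name, Register scratch0, Register scratch1);
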
@@ -138,7 +194,7 @@
   __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
 
 
-  MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup(
+  MaybeObject* result = StringDictionaryLookupStub::TryGenerateNegativeLookup(
       masm,
       miss_label,
       &done,
@@ -259,8 +315,10 @@
 // are loaded directly otherwise the property is loaded from the properties
 // fixed array.
 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
-                                            Register dst, Register src,
-                                            JSObject* holder, int index) {
+                                            Register dst,
+                                            Register src,
+                                            Handle<JSObject> holder,
+                                            int index) {
   // Adjust for the number of properties stored in the holder.
   index -= holder->map()->inobject_properties();
   if (index < 0) {
@@ -367,9 +425,9 @@
 // may be clobbered.  Upon branch to miss_label, the receiver and name
 // registers have their original values.
 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
-                                      JSObject* object,
+                                      Handle<JSObject> object,
                                       int index,
-                                      Map* transition,
+                                      Handle<Map> transition,
                                       Register receiver_reg,
                                       Register name_reg,
                                       Register scratch,
@@ -395,11 +453,11 @@
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
 
   // Perform map transition for the receiver if necessary.
-  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
+  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
     // The properties must be extended before we can store the value.
     // We jump to a runtime call that extends the properties array.
     __ push(receiver_reg);
-    __ mov(r2, Operand(Handle<Map>(transition)));
+    __ mov(r2, Operand(transition));
     __ Push(r2, r0);
     __ TailCallExternalReference(
         ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
@@ -409,10 +467,10 @@
     return;
   }
 
-  if (transition != NULL) {
+  if (!transition.is_null()) {
     // Update the map of the object; no write barrier updating is
     // needed because the map is never in new space.
-    __ mov(ip, Operand(Handle<Map>(transition)));
+    __ mov(ip, Operand(transition));
     __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
   }
 
@@ -467,20 +525,15 @@
 
 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
   ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
-  Code* code = NULL;
-  if (kind == Code::LOAD_IC) {
-    code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
-  } else {
-    code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
-  }
-
-  Handle<Code> ic(code);
-  __ Jump(ic, RelocInfo::CODE_TARGET);
+  Handle<Code> code = (kind == Code::LOAD_IC)
+      ? masm->isolate()->builtins()->LoadIC_Miss()
+      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
+  __ Jump(code, RelocInfo::CODE_TARGET);
 }
 
 
 static void GenerateCallFunction(MacroAssembler* masm,
-                                 Object* object,
+                                 Handle<Object> object,
                                  const ParameterCount& arguments,
                                  Label* miss,
                                  Code::ExtraICState extra_ic_state) {
@@ -868,7 +921,26 @@
 // Generate code to check that a global property cell is empty. Create
 // the property cell at compilation time if no cell exists for the
 // property.
-MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
+static void GenerateCheckPropertyCell(MacroAssembler* masm,
+                                      Handle<GlobalObject> global,
+                                      Handle<String> name,
+                                      Register scratch,
+                                      Label* miss) {
+  Handle<JSGlobalPropertyCell> cell =
+      GlobalObject::EnsurePropertyCell(global, name);
+  ASSERT(cell->value()->IsTheHole());
+  __ mov(scratch, Operand(cell));
+  __ ldr(scratch,
+         FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
+  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+  __ cmp(scratch, ip);
+  __ b(ne, miss);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell(
     MacroAssembler* masm,
     GlobalObject* global,
     String* name,
@@ -889,9 +961,32 @@
   return cell;
 }
 
+
 // Calls GenerateCheckPropertyCell for each global object in the prototype chain
 // from object to (but not including) holder.
-MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
+static void GenerateCheckPropertyCells(MacroAssembler* masm,
+                                       Handle<JSObject> object,
+                                       Handle<JSObject> holder,
+                                       Handle<String> name,
+                                       Register scratch,
+                                       Label* miss) {
+  Handle<JSObject> current = object;
+  while (!current.is_identical_to(holder)) {
+    if (current->IsGlobalObject()) {
+      GenerateCheckPropertyCell(masm,
+                                Handle<GlobalObject>::cast(current),
+                                name,
+                                scratch,
+                                miss);
+    }
+    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
+  }
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells(
     MacroAssembler* masm,
     JSObject* object,
     JSObject* holder,
@@ -902,7 +997,7 @@
   while (current != holder) {
     if (current->IsGlobalObject()) {
       // Returns a cell or a failure.
-      MaybeObject* result = GenerateCheckPropertyCell(
+      MaybeObject* result = TryGenerateCheckPropertyCell(
           masm,
           GlobalObject::cast(current),
           name,
@@ -1027,6 +1122,112 @@
 #define __ ACCESS_MASM(masm())
 
 
+Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
+                                       Register object_reg,
+                                       Handle<JSObject> holder,
+                                       Register holder_reg,
+                                       Register scratch1,
+                                       Register scratch2,
+                                       Handle<String> name,
+                                       int save_at_depth,
+                                       Label* miss) {
+  // Make sure there's no overlap between holder and object registers.
+  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
+  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
+         && !scratch2.is(scratch1));
+
+  // Keep track of the current object in register reg.
+  Register reg = object_reg;
+  int depth = 0;
+
+  if (save_at_depth == depth) {
+    __ str(reg, MemOperand(sp));
+  }
+
+  // Check the maps in the prototype chain.
+  // Traverse the prototype chain from the object and do map checks.
+  Handle<JSObject> current = object;
+  while (!current.is_identical_to(holder)) {
+    ++depth;
+
+    // Only global objects and objects that do not require access
+    // checks are allowed in stubs.
+    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
+
+    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
+    if (!current->HasFastProperties() &&
+        !current->IsJSGlobalObject() &&
+        !current->IsJSGlobalProxy()) {
+      if (!name->IsSymbol()) {
+        name = factory()->LookupSymbol(name);
+      }
+      ASSERT(current->property_dictionary()->FindEntry(*name) ==
+             StringDictionary::kNotFound);
+
+      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
+                                       scratch1, scratch2);
+
+      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+      reg = holder_reg;  // From now on the object will be in holder_reg.
+      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
+    } else {
+      Handle<Map> current_map(current->map());
+      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+      __ cmp(scratch1, Operand(current_map));
+      // Branch on the result of the map check.
+      __ b(ne, miss);
+      // Check access rights to the global object.  This has to happen after
+      // the map check so that we know that the object is actually a global
+      // object.
+      if (current->IsJSGlobalProxy()) {
+        __ CheckAccessGlobalProxy(reg, scratch2, miss);
+      }
+      reg = holder_reg;  // From now on the object will be in holder_reg.
+
+      if (heap()->InNewSpace(*prototype)) {
+        // The prototype is in new space; we cannot store a reference to it
+        // in the code.  Load it from the map.
+        __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
+      } else {
+        // The prototype is in old space; load it directly.
+        __ mov(reg, Operand(prototype));
+      }
+    }
+
+    if (save_at_depth == depth) {
+      __ str(reg, MemOperand(sp));
+    }
+
+    // Go to the next object in the prototype chain.
+    current = prototype;
+  }
+
+  // Log the check depth.
+  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
+
+  // Check the holder map.
+  __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+  __ cmp(scratch1, Operand(Handle<Map>(current->map())));
+  __ b(ne, miss);
+
+  // Perform security check for access to the global object.
+  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
+  if (holder->IsJSGlobalProxy()) {
+    __ CheckAccessGlobalProxy(reg, scratch1, miss);
+  }
+
+  // If we've skipped any global objects, it's not enough to verify that
+  // their maps haven't changed.  We also need to check that the property
+  // cell for the property is still empty.
+  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
+
+  // Return the register containing the holder.
+  return reg;
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
 Register StubCompiler::CheckPrototypes(JSObject* object,
                                        Register object_reg,
                                        JSObject* holder,
@@ -1076,12 +1277,13 @@
       ASSERT(current->property_dictionary()->FindEntry(name) ==
              StringDictionary::kNotFound);
 
-      MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
-                                                                      miss,
-                                                                      reg,
-                                                                      name,
-                                                                      scratch1,
-                                                                      scratch2);
+      MaybeObject* negative_lookup =
+          TryGenerateDictionaryNegativeLookup(masm(),
+                                              miss,
+                                              reg,
+                                              name,
+                                              scratch1,
+                                              scratch2);
       if (negative_lookup->IsFailure()) {
         set_failure(Failure::cast(negative_lookup));
         return reg;
@@ -1150,17 +1352,17 @@
   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
   if (holder->IsJSGlobalProxy()) {
     __ CheckAccessGlobalProxy(reg, scratch1, miss);
-  };
+  }
 
   // If we've skipped any global objects, it's not enough to verify
   // that their maps haven't changed.  We also need to check that the
   // property cell for the property is still empty.
-  MaybeObject* result = GenerateCheckPropertyCells(masm(),
-                                                   object,
-                                                   holder,
-                                                   name,
-                                                   scratch1,
-                                                   miss);
+  MaybeObject* result = TryGenerateCheckPropertyCells(masm(),
+                                                      object,
+                                                      holder,
+                                                      name,
+                                                      scratch1,
+                                                      miss);
   if (result->IsFailure()) set_failure(Failure::cast(result));
 
   // Return the register containing the holder.
@@ -1168,45 +1370,44 @@
 }
 
 
-void StubCompiler::GenerateLoadField(JSObject* object,
-                                     JSObject* holder,
+void StubCompiler::GenerateLoadField(Handle<JSObject> object,
+                                     Handle<JSObject> holder,
                                      Register receiver,
                                      Register scratch1,
                                      Register scratch2,
                                      Register scratch3,
                                      int index,
-                                     String* name,
+                                     Handle<String> name,
                                      Label* miss) {
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, miss);
 
   // Check that the maps haven't changed.
-  Register reg =
-      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
-                      name, miss);
+  Register reg = CheckPrototypes(
+      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
   GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
   __ Ret();
 }
 
 
-void StubCompiler::GenerateLoadConstant(JSObject* object,
-                                        JSObject* holder,
+void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
+                                        Handle<JSObject> holder,
                                         Register receiver,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
-                                        Object* value,
-                                        String* name,
+                                        Handle<Object> value,
+                                        Handle<String> name,
                                         Label* miss) {
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, miss);
 
   // Check that the maps haven't changed.
-  CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, name,
-                  miss);
+  CheckPrototypes(
+      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
 
   // Return the constant value.
-  __ mov(r0, Operand(Handle<Object>(value)));
+  __ mov(r0, Operand(value));
   __ Ret();
 }
 
@@ -1365,7 +1566,8 @@
       // We found FIELD property in prototype chain of interceptor's holder.
       // Retrieve a field from field's holder.
       GenerateFastPropertyLoad(masm(), r0, holder_reg,
-                               lookup->holder(), lookup->GetFieldIndex());
+                               Handle<JSObject>(lookup->holder()),
+                               lookup->GetFieldIndex());
       __ Ret();
     } else {
       // We found CALLBACKS property in prototype chain of interceptor's
@@ -1416,9 +1618,9 @@
 }
 
 
-void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
+void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
-    __ cmp(r2, Operand(Handle<String>(name)));
+    __ cmp(r2, Operand(name));
     __ b(ne, miss);
   }
 }
@@ -1478,11 +1680,22 @@
 }
 
 
-MaybeObject* CallStubCompiler::GenerateMissBranch() {
-  MaybeObject* maybe_obj =
+void CallStubCompiler::GenerateMissBranch() {
+  Handle<Code> code =
       isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                                kind_,
-                                               extra_ic_state_);
+                                               extra_state_);
+  __ Jump(code, RelocInfo::CODE_TARGET);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* CallStubCompiler::TryGenerateMissBranch() {
+  MaybeObject* maybe_obj =
+      isolate()->stub_cache()->TryComputeCallMiss(arguments().immediate(),
+                                                  kind_,
+                                                  extra_state_);
   Object* obj;
   if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
@@ -1490,10 +1703,10 @@
 }
 
 
-MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
-                                                JSObject* holder,
+Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
+                                                Handle<JSObject> holder,
                                                 int index,
-                                                String* name) {
+                                                Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- r2    : name
   //  -- lr    : return address
@@ -1513,12 +1726,11 @@
   Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
   GenerateFastPropertyLoad(masm(), r1, reg, holder, index);
 
-  GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_);
+  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
-  if (maybe_result->IsFailure()) return maybe_result;
+  GenerateMissBranch();
 
   // Return the generated code.
   return GetCode(FIELD, name);
@@ -1543,7 +1755,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   Register receiver = r1;
 
@@ -1619,7 +1831,7 @@
       __ bind(&with_write_barrier);
 
       __ ldr(r6, FieldMemOperand(receiver, HeapObject::kMapOffset));
-      __ CheckFastSmiOnlyElements(r6, r6, &call_builtin);
+      __ CheckFastObjectElements(r6, r6, &call_builtin);
 
       // Save new length.
       __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
@@ -1709,11 +1921,11 @@
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1738,7 +1950,7 @@
   Register receiver = r1;
   Register elements = r3;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the receiver from the stack
   const int argc = arguments().immediate();
@@ -1798,11 +2010,11 @@
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1831,12 +2043,12 @@
   Label* index_out_of_range_label = &index_out_of_range;
 
   if (kind_ == Code::CALL_IC &&
-      (CallICBase::StringStubState::decode(extra_ic_state_) ==
+      (CallICBase::StringStubState::decode(extra_state_) ==
        DEFAULT_STRING_STUB)) {
     index_out_of_range_label = &miss;
   }
 
-  GenerateNameCheck(name, &name_miss);
+  GenerateNameCheck(Handle<String>(name), &name_miss);
 
   // Check that the maps starting from the prototype haven't changed.
   GenerateDirectLoadGlobalFunctionPrototype(masm(),
@@ -1884,11 +2096,11 @@
   // Restore function name in r2.
   __ Move(r2, Handle<String>(name));
   __ bind(&name_miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1917,12 +2129,12 @@
   Label* index_out_of_range_label = &index_out_of_range;
 
   if (kind_ == Code::CALL_IC &&
-      (CallICBase::StringStubState::decode(extra_ic_state_) ==
+      (CallICBase::StringStubState::decode(extra_state_) ==
        DEFAULT_STRING_STUB)) {
     index_out_of_range_label = &miss;
   }
 
-  GenerateNameCheck(name, &name_miss);
+  GenerateNameCheck(Handle<String>(name), &name_miss);
 
   // Check that the maps starting from the prototype haven't changed.
   GenerateDirectLoadGlobalFunctionPrototype(masm(),
@@ -1972,11 +2184,11 @@
   // Restore function name in r2.
   __ Move(r2, Handle<String>(name));
   __ bind(&name_miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -2001,7 +2213,7 @@
   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
 
   Label miss;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
@@ -2044,11 +2256,11 @@
 
   __ bind(&miss);
   // r2: function name.
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -2078,7 +2290,7 @@
   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
 
   Label miss, slow;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
@@ -2192,11 +2404,11 @@
 
   __ bind(&miss);
   // r2: function name.
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -2220,7 +2432,7 @@
   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
 
   Label miss;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
@@ -2293,11 +2505,11 @@
 
   __ bind(&miss);
   // r2: function name.
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -2322,7 +2534,7 @@
 
   Label miss, miss_before_stack_reserved;
 
-  GenerateNameCheck(name, &miss_before_stack_reserved);
+  GenerateNameCheck(Handle<String>(name), &miss_before_stack_reserved);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -2347,11 +2559,11 @@
   FreeSpaceForFastApiCall(masm());
 
   __ bind(&miss_before_stack_reserved);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -2375,7 +2587,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the receiver from the stack
   const int argc = arguments().immediate();
@@ -2474,18 +2686,18 @@
       UNREACHABLE();
   }
 
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(function, arguments(), JUMP_FUNCTION, call_kind);
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -2499,18 +2711,18 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the number of arguments.
   const int argc = arguments().immediate();
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
 
   // Get the receiver from the stack.
   __ ldr(r1, MemOperand(sp, argc * kPointerSize));
 
-  CallInterceptorCompiler compiler(this, arguments(), r2, extra_ic_state_);
+  CallInterceptorCompiler compiler(this, arguments(), r2, extra_state_);
   MaybeObject* result = compiler.Compile(masm(),
                                          object,
                                          holder,
@@ -2530,15 +2742,16 @@
   // Restore receiver.
   __ ldr(r0, MemOperand(sp, argc * kPointerSize));
 
-  GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_);
+  GenerateCallFunction(masm(), Handle<Object>(object), arguments(), &miss,
+                       extra_state_);
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
@@ -2563,7 +2776,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the number of arguments.
   const int argc = arguments().immediate();
@@ -2585,39 +2798,33 @@
   // Jump to the cached code (tail call).
   Counters* counters = masm()->isolate()->counters();
   __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
-  ASSERT(function->is_compiled());
   Handle<Code> code(function->code());
   ParameterCount expected(function->shared()->formal_parameter_count());
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
-  if (V8::UseCrankshaft()) {
-    // TODO(kasperl): For now, we always call indirectly through the
-    // code field in the function to allow recompilation to take effect
-    // without changing any of the call sites.
-    __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
-    __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
-                  NullCallWrapper(), call_kind);
-  } else {
-    __ InvokeCode(code, expected, arguments(), RelocInfo::CODE_TARGET,
-                  JUMP_FUNCTION, call_kind);
-  }
+  // We call indirectly through the code field in the function to
+  // allow recompilation to take effect without changing any of the
+  // call sites.
+  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
+  __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
+                NullCallWrapper(), call_kind);
 
   // Handle call cache miss.
   __ bind(&miss);
   __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(NORMAL, name);
+  return TryGetCode(NORMAL, name);
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
+Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                   int index,
-                                                  Map* transition,
-                                                  String* name) {
+                                                  Handle<Map> transition,
+                                                  Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- r0    : value
   //  -- r1    : receiver
@@ -2626,24 +2833,20 @@
   // -----------------------------------
   Label miss;
 
-  GenerateStoreField(masm(),
-                     object,
-                     index,
-                     transition,
-                     r1, r2, r3,
-                     &miss);
+  GenerateStoreField(masm(), object, index, transition, r1, r2, r3, &miss);
   __ bind(&miss);
   Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
+  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
-                                                     AccessorInfo* callback,
-                                                     String* name) {
+Handle<Code> StoreStubCompiler::CompileStoreCallback(
+    Handle<JSObject> object,
+    Handle<AccessorInfo> callback,
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- r0    : value
   //  -- r1    : receiver
@@ -2670,7 +2873,7 @@
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
 
   __ push(r1);  // receiver
-  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback info
+  __ mov(ip, Operand(callback));  // callback info
   __ Push(ip, r2, r0);
 
   // Do tail-call to the runtime system.
@@ -2689,8 +2892,9 @@
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
-                                                        String* name) {
+Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
+    Handle<JSObject> receiver,
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- r0    : value
   //  -- r1    : receiver
@@ -2737,9 +2941,10 @@
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
-                                                   JSGlobalPropertyCell* cell,
-                                                   String* name) {
+Handle<Code> StoreStubCompiler::CompileStoreGlobal(
+    Handle<GlobalObject> object,
+    Handle<JSGlobalPropertyCell> cell,
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- r0    : value
   //  -- r1    : receiver
@@ -2757,7 +2962,7 @@
   // cell could have been deleted and reintroducing the global needs
   // to update the property details in the property dictionary of the
   // global object. We bail out to the runtime system to do that.
-  __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell)));
+  __ mov(r4, Operand(cell));
   __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
   __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
   __ cmp(r5, r6);
@@ -2790,9 +2995,9 @@
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
-                                                      JSObject* object,
-                                                      JSObject* last) {
+Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
+                                                      Handle<JSObject> object,
+                                                      Handle<JSObject> last) {
   // ----------- S t a t e -------------
   //  -- r0    : receiver
   //  -- lr    : return address
@@ -2808,15 +3013,8 @@
   // If the last object in the prototype chain is a global object,
   // check that the global property cell is empty.
   if (last->IsGlobalObject()) {
-    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
-                                                  GlobalObject::cast(last),
-                                                  name,
-                                                  r1,
-                                                  &miss);
-    if (cell->IsFailure()) {
-      miss.Unuse();
-      return cell;
-    }
+    GenerateCheckPropertyCell(
+        masm(), Handle<GlobalObject>::cast(last), name, r1, &miss);
   }
 
   // Return undefined if maps of the full prototype chain are still the
@@ -2828,14 +3026,14 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NONEXISTENT, heap()->empty_string());
+  return GetCode(NONEXISTENT, factory()->empty_string());
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
-                                                JSObject* holder,
+Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
+                                                Handle<JSObject> holder,
                                                 int index,
-                                                String* name) {
+                                                Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- r0    : receiver
   //  -- r2    : name
@@ -2874,14 +3072,14 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(CALLBACKS, name);
+  return TryGetCode(CALLBACKS, name);
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
-                                                   JSObject* holder,
-                                                   Object* value,
-                                                   String* name) {
+Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
+                                                   Handle<JSObject> holder,
+                                                   Handle<Object> value,
+                                                   Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- r0    : receiver
   //  -- r2    : name
@@ -2908,7 +3106,7 @@
   // -----------------------------------
   Label miss;
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
   GenerateLoadInterceptor(object,
                           holder,
@@ -2924,15 +3122,16 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
-                                                 GlobalObject* holder,
-                                                 JSGlobalPropertyCell* cell,
-                                                 String* name,
-                                                 bool is_dont_delete) {
+Handle<Code> LoadStubCompiler::CompileLoadGlobal(
+    Handle<JSObject> object,
+    Handle<GlobalObject> holder,
+    Handle<JSGlobalPropertyCell> cell,
+    Handle<String> name,
+    bool is_dont_delete) {
   // ----------- S t a t e -------------
   //  -- r0    : receiver
   //  -- r2    : name
@@ -2943,7 +3142,7 @@
   // If the object is the holder then we know that it's a global
   // object which can only happen for contextual calls. In this case,
   // the receiver cannot be a smi.
-  if (object != holder) {
+  if (!object.is_identical_to(holder)) {
     __ JumpIfSmi(r0, &miss);
   }
 
@@ -2951,7 +3150,7 @@
   CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);
 
   // Get the value from the cell.
-  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
+  __ mov(r3, Operand(cell));
   __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
 
   // Check for deleted property if property can actually be deleted.
@@ -2975,9 +3174,9 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
-                                                     JSObject* receiver,
-                                                     JSObject* holder,
+Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
+                                                     Handle<JSObject> receiver,
+                                                     Handle<JSObject> holder,
                                                      int index) {
   // ----------- S t a t e -------------
   //  -- lr    : return address
@@ -2987,7 +3186,7 @@
   Label miss;
 
   // Check the key is the cached one.
-  __ cmp(r0, Operand(Handle<String>(name)));
+  __ cmp(r0, Operand(name));
   __ b(ne, &miss);
 
   GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss);
@@ -3024,14 +3223,15 @@
   __ bind(&miss);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
-  return GetCode(CALLBACKS, name);
+  return TryGetCode(CALLBACKS, name);
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
-                                                        JSObject* receiver,
-                                                        JSObject* holder,
-                                                        Object* value) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
+    Handle<String> name,
+    Handle<JSObject> receiver,
+    Handle<JSObject> holder,
+    Handle<Object> value) {
   // ----------- S t a t e -------------
   //  -- lr    : return address
   //  -- r0    : key
@@ -3040,7 +3240,7 @@
   Label miss;
 
   // Check the key is the cached one.
-  __ cmp(r0, Operand(Handle<String>(name)));
+  __ cmp(r0, Operand(name));
   __ b(ne, &miss);
 
   GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss);
@@ -3066,7 +3266,7 @@
   __ cmp(r0, Operand(Handle<String>(name)));
   __ b(ne, &miss);
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
   GenerateLoadInterceptor(receiver,
                           holder,
@@ -3081,11 +3281,12 @@
   __ bind(&miss);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- lr    : return address
   //  -- r0    : key
@@ -3094,7 +3295,7 @@
   Label miss;
 
   // Check the key is the cached one.
-  __ cmp(r0, Operand(Handle<String>(name)));
+  __ cmp(r0, Operand(name));
   __ b(ne, &miss);
 
   GenerateLoadArrayLength(masm(), r1, r2, &miss);
@@ -3105,7 +3306,8 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- lr    : return address
   //  -- r0    : key
@@ -3117,7 +3319,7 @@
   __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
 
   // Check the key is the cached one.
-  __ cmp(r0, Operand(Handle<String>(name)));
+  __ cmp(r0, Operand(name));
   __ b(ne, &miss);
 
   GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
@@ -3130,7 +3332,8 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- lr    : return address
   //  -- r0    : key
@@ -3142,7 +3345,7 @@
   __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
 
   // Check the name hasn't changed.
-  __ cmp(r0, Operand(Handle<String>(name)));
+  __ cmp(r0, Operand(name));
   __ b(ne, &miss);
 
   GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
@@ -3154,33 +3357,29 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
+    Handle<Map> receiver_map) {
   // ----------- S t a t e -------------
   //  -- lr    : return address
   //  -- r0    : key
   //  -- r1    : receiver
   // -----------------------------------
-  Code* stub;
   ElementsKind elements_kind = receiver_map->elements_kind();
-  MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode();
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  __ DispatchMap(r1,
-                 r2,
-                 Handle<Map>(receiver_map),
-                 Handle<Code>(stub),
-                 DO_SMI_CHECK);
+  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
+
+  __ DispatchMap(r1, r2, receiver_map, stub, DO_SMI_CHECK);
 
   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL);
+  return GetCode(NORMAL, factory()->empty_string());
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadPolymorphic(
-    MapList* receiver_maps,
-    CodeList* handler_ics) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
+    MapHandleList* receiver_maps,
+    CodeHandleList* handler_ics) {
   // ----------- S t a t e -------------
   //  -- lr    : return address
   //  -- r0    : key
@@ -3192,11 +3391,9 @@
   int receiver_count = receiver_maps->length();
   __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
   for (int current = 0; current < receiver_count; ++current) {
-    Handle<Map> map(receiver_maps->at(current));
-    Handle<Code> code(handler_ics->at(current));
-    __ mov(ip, Operand(map));
+    __ mov(ip, Operand(receiver_maps->at(current)));
     __ cmp(r2, ip);
-    __ Jump(code, RelocInfo::CODE_TARGET, eq);
+    __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET, eq);
   }
 
   __ bind(&miss);
@@ -3204,14 +3401,14 @@
   __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL, MEGAMORPHIC);
+  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
+Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                        int index,
-                                                       Map* transition,
-                                                       String* name) {
+                                                       Handle<Map> transition,
+                                                       Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- r0    : value
   //  -- r1    : name
@@ -3224,17 +3421,12 @@
   __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);
 
   // Check that the name has not changed.
-  __ cmp(r1, Operand(Handle<String>(name)));
+  __ cmp(r1, Operand(name));
   __ b(ne, &miss);
 
   // r3 is used as scratch register. r1 and r2 keep their values if a jump to
   // the miss label is generated.
-  GenerateStoreField(masm(),
-                     object,
-                     index,
-                     transition,
-                     r2, r1, r3,
-                     &miss);
+  GenerateStoreField(masm(), object, index, transition, r2, r1, r3, &miss);
   __ bind(&miss);
 
   __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
@@ -3242,11 +3434,12 @@
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
+  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
+Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
+    Handle<Map> receiver_map) {
   // ----------- S t a t e -------------
   //  -- r0    : value
   //  -- r1    : key
@@ -3254,30 +3447,25 @@
   //  -- lr    : return address
   //  -- r3    : scratch
   // -----------------------------------
-  Code* stub;
   ElementsKind elements_kind = receiver_map->elements_kind();
   bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
-  MaybeObject* maybe_stub =
-      KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode();
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  __ DispatchMap(r2,
-                 r3,
-                 Handle<Map>(receiver_map),
-                 Handle<Code>(stub),
-                 DO_SMI_CHECK);
+  Handle<Code> stub =
+      KeyedStoreElementStub(is_js_array, elements_kind).GetCode();
+
+  __ DispatchMap(r2, r3, receiver_map, stub, DO_SMI_CHECK);
 
   Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL);
+  return GetCode(NORMAL, factory()->empty_string());
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStorePolymorphic(
-    MapList* receiver_maps,
-    CodeList* handler_stubs,
-    MapList* transitioned_maps) {
+Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
+    MapHandleList* receiver_maps,
+    CodeHandleList* handler_stubs,
+    MapHandleList* transitioned_maps) {
   // ----------- S t a t e -------------
   //  -- r0    : value
   //  -- r1    : key
@@ -3291,17 +3479,15 @@
   int receiver_count = receiver_maps->length();
   __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
   for (int i = 0; i < receiver_count; ++i) {
-    Handle<Map> map(receiver_maps->at(i));
-    Handle<Code> code(handler_stubs->at(i));
-    __ mov(ip, Operand(map));
+    __ mov(ip, Operand(receiver_maps->at(i)));
     __ cmp(r3, ip);
-    if (transitioned_maps->at(i) == NULL) {
-      __ Jump(code, RelocInfo::CODE_TARGET, eq);
+    if (transitioned_maps->at(i).is_null()) {
+      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq);
     } else {
       Label next_map;
-      __ b(eq, &next_map);
-      __ mov(r4, Operand(Handle<Map>(transitioned_maps->at(i))));
-      __ Jump(code, RelocInfo::CODE_TARGET, al);
+      __ b(ne, &next_map);
+      __ mov(r3, Operand(transitioned_maps->at(i)));
+      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);
       __ bind(&next_map);
     }
   }
@@ -3311,7 +3497,7 @@
   __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL, MEGAMORPHIC);
+  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
 }
 
 
diff --git a/src/array.js b/src/array.js
index e1d7c20..214065c 100644
--- a/src/array.js
+++ b/src/array.js
@@ -1013,18 +1013,22 @@
   }
   if (IS_NULL_OR_UNDEFINED(receiver)) {
     receiver = %GetDefaultReceiver(f) || receiver;
+  } else if (!IS_SPEC_OBJECT(receiver)) {
+    receiver = ToObject(receiver);
   }
 
-  var result = [];
-  var result_length = 0;
+  var result = new $Array();
+  var accumulator = new InternalArray();
+  var accumulator_length = 0;
   for (var i = 0; i < length; i++) {
     var current = array[i];
     if (!IS_UNDEFINED(current) || i in array) {
       if (%_CallFunction(receiver, current, i, array, f)) {
-        result[result_length++] = current;
+        accumulator[accumulator_length++] = current;
       }
     }
   }
+  %MoveArrayContents(accumulator, result);
   return result;
 }
 
@@ -1045,6 +1049,8 @@
   }
   if (IS_NULL_OR_UNDEFINED(receiver)) {
     receiver = %GetDefaultReceiver(f) || receiver;
+  } else if (!IS_SPEC_OBJECT(receiver)) {
+    receiver = ToObject(receiver);
   }
 
   for (var i = 0; i < length; i++) {
@@ -1074,6 +1080,8 @@
   }
   if (IS_NULL_OR_UNDEFINED(receiver)) {
     receiver = %GetDefaultReceiver(f) || receiver;
+  } else if (!IS_SPEC_OBJECT(receiver)) {
+    receiver = ToObject(receiver);
   }
 
   for (var i = 0; i < length; i++) {
@@ -1102,6 +1110,8 @@
   }
   if (IS_NULL_OR_UNDEFINED(receiver)) {
     receiver = %GetDefaultReceiver(f) || receiver;
+  } else if (!IS_SPEC_OBJECT(receiver)) {
+    receiver = ToObject(receiver);
   }
 
   for (var i = 0; i < length; i++) {
@@ -1129,6 +1139,8 @@
   }
   if (IS_NULL_OR_UNDEFINED(receiver)) {
     receiver = %GetDefaultReceiver(f) || receiver;
+  } else if (!IS_SPEC_OBJECT(receiver)) {
+    receiver = ToObject(receiver);
   }
 
   var result = new $Array();
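
The array.js hunks above make the callback-taking Array builtins wrap primitive receivers with ToObject, and switch the filter path to accumulate matches in an InternalArray that is moved into the result array in one step. A minimal JavaScript sketch of the observable behaviour, assuming a non-strict callback on a build with this patch:

var types = [];
[1, 2, 3].forEach(function () { types.push(typeof this); }, "abc");
// types is ["object", "object", "object"]: the primitive receiver "abc"
// is wrapped via ToObject before the callback sees it as `this`.

var odd = [1, 2, 3, 4].filter(function (x) { return x % 2 === 1; });
// odd is [1, 3]; matches are collected in an InternalArray and moved
// into the freshly allocated result array by %MoveArrayContents.
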
diff --git a/src/assembler.cc b/src/assembler.cc
index bda85e6..4dc2394 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -834,8 +834,8 @@
 }
 
 
-ExternalReference ExternalReference::roots_address(Isolate* isolate) {
-  return ExternalReference(isolate->heap()->roots_address());
+ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
+  return ExternalReference(isolate->heap()->roots_array_start());
 }
 
 
@@ -1137,6 +1137,23 @@
 }
 
 
+bool EvalComparison(Token::Value op, double op1, double op2) {
+  ASSERT(Token::IsCompareOp(op));
+  switch (op) {
+    case Token::EQ:
+    case Token::EQ_STRICT: return (op1 == op2);
+    case Token::NE: return (op1 != op2);
+    case Token::LT: return (op1 < op2);
+    case Token::GT: return (op1 > op2);
+    case Token::LTE: return (op1 <= op2);
+    case Token::GTE: return (op1 >= op2);
+    default:
+      UNREACHABLE();
+      return false;
+  }
+}
+
+
 ExternalReference ExternalReference::double_fp_operation(
     Token::Value operation, Isolate* isolate) {
   typedef double BinaryFPOperation(double x, double y);
diff --git a/src/assembler.h b/src/assembler.h
index e5661c9..5b71363 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -279,14 +279,17 @@
   // this relocation applies to;
   // can only be called if IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
   INLINE(Address target_address());
-  INLINE(void set_target_address(Address target));
+  INLINE(void set_target_address(Address target,
+                                 WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
   INLINE(Object* target_object());
   INLINE(Handle<Object> target_object_handle(Assembler* origin));
   INLINE(Object** target_object_address());
-  INLINE(void set_target_object(Object* target));
+  INLINE(void set_target_object(Object* target,
+                                WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
   INLINE(JSGlobalPropertyCell* target_cell());
   INLINE(Handle<JSGlobalPropertyCell> target_cell_handle());
-  INLINE(void set_target_cell(JSGlobalPropertyCell* cell));
+  INLINE(void set_target_cell(JSGlobalPropertyCell* cell,
+                              WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
 
 
   // Read the address of the word containing the target_address in an
@@ -593,8 +596,8 @@
   static ExternalReference keyed_lookup_cache_keys(Isolate* isolate);
   static ExternalReference keyed_lookup_cache_field_offsets(Isolate* isolate);
 
-  // Static variable Heap::roots_address()
-  static ExternalReference roots_address(Isolate* isolate);
+  // Static variable Heap::roots_array_start()
+  static ExternalReference roots_array_start(Isolate* isolate);
 
   // Static variable StackGuard::address_of_jslimit()
   static ExternalReference address_of_stack_limit(Isolate* isolate);
@@ -847,6 +850,8 @@
   return num_bits_set;
 }
 
+bool EvalComparison(Token::Value op, double op1, double op2);
+
 // Computes pow(x, y) with the special cases in the spec for Math.pow.
 double power_double_int(double x, int y);
 double power_double_double(double x, double y);
diff --git a/src/ast-inl.h b/src/ast-inl.h
index 731ad2f..f8b460d 100644
--- a/src/ast-inl.h
+++ b/src/ast-inl.h
@@ -111,8 +111,18 @@
 }
 
 
-bool FunctionLiteral::strict_mode() const {
-  return scope()->is_strict_mode();
+int FunctionLiteral::start_position() const {
+  return scope()->start_position();
+}
+
+
+int FunctionLiteral::end_position() const {
+  return scope()->end_position();
+}
+
+
+StrictModeFlag FunctionLiteral::strict_mode_flag() const {
+  return scope()->strict_mode_flag();
 }
 
 
diff --git a/src/ast.cc b/src/ast.cc
index d493814..9e34bc0 100644
--- a/src/ast.cc
+++ b/src/ast.cc
@@ -66,7 +66,6 @@
       name_(var->name()),
       var_(NULL),  // Will be set by the call to BindTo.
       is_this_(var->is_this()),
-      inside_with_(false),
       is_trivial_(false),
       position_(RelocInfo::kNoPosition) {
   BindTo(var);
@@ -76,13 +75,11 @@
 VariableProxy::VariableProxy(Isolate* isolate,
                              Handle<String> name,
                              bool is_this,
-                             bool inside_with,
                              int position)
     : Expression(isolate),
       name_(name),
       var_(NULL),
       is_this_(is_this),
-      inside_with_(inside_with),
       is_trivial_(false),
       position_(position) {
   // Names must be canonicalized for fast equality checks.
@@ -468,7 +465,7 @@
 
 
 bool ThisFunction::IsInlineable() const {
-  return false;
+  return true;
 }
 
 
@@ -723,7 +720,7 @@
     holder_ = Handle<JSObject>::null();
   }
   while (true) {
-    LookupResult lookup;
+    LookupResult lookup(type->GetIsolate());
     type->LookupInDescriptors(NULL, *name, &lookup);
     // If the function wasn't found directly in the map, we start
     // looking upwards through the prototype chain.
diff --git a/src/ast.h b/src/ast.h
index 0efc483..3de00ef 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -405,7 +405,10 @@
         mode_(mode),
         fun_(fun),
         scope_(scope) {
-    ASSERT(mode == VAR || mode == CONST || mode == LET);
+    ASSERT(mode == VAR ||
+           mode == CONST ||
+           mode == CONST_HARMONY ||
+           mode == LET);
     // At the moment there are no "const functions"'s in JavaScript...
     ASSERT(fun == NULL || mode == VAR || mode == LET);
   }
@@ -1128,7 +1131,6 @@
   Handle<String> name() const { return name_; }
   Variable* var() const { return var_; }
   bool is_this() const { return is_this_; }
-  bool inside_with() const { return inside_with_; }
   int position() const { return position_; }
 
   void MarkAsTrivial() { is_trivial_ = true; }
@@ -1140,14 +1142,12 @@
   Handle<String> name_;
   Variable* var_;  // resolved variable, or NULL
   bool is_this_;
-  bool inside_with_;
   bool is_trivial_;
   int position_;
 
   VariableProxy(Isolate* isolate,
                 Handle<String> name,
                 bool is_this,
-                bool inside_with,
                 int position = RelocInfo::kNoPosition);
 
   friend class Scope;
@@ -1620,8 +1620,6 @@
                   bool has_only_simple_this_property_assignments,
                   Handle<FixedArray> this_property_assignments,
                   int num_parameters,
-                  int start_position,
-                  int end_position,
                   Type type,
                   bool has_duplicate_parameters)
       : Expression(isolate),
@@ -1634,8 +1632,6 @@
             has_only_simple_this_property_assignments),
         this_property_assignments_(this_property_assignments),
         num_parameters_(num_parameters),
-        start_position_(start_position),
-        end_position_(end_position),
         function_token_position_(RelocInfo::kNoPosition),
         inferred_name_(HEAP->empty_string()),
         is_expression_(type != DECLARATION),
@@ -1651,11 +1647,12 @@
   ZoneList<Statement*>* body() const { return body_; }
   void set_function_token_position(int pos) { function_token_position_ = pos; }
   int function_token_position() const { return function_token_position_; }
-  int start_position() const { return start_position_; }
-  int end_position() const { return end_position_; }
+  int start_position() const;
+  int end_position() const;
   bool is_expression() const { return is_expression_; }
   bool is_anonymous() const { return is_anonymous_; }
-  bool strict_mode() const;
+  bool strict_mode() const { return strict_mode_flag() == kStrictMode; }
+  StrictModeFlag strict_mode_flag() const;
 
   int materialized_literal_count() { return materialized_literal_count_; }
   int expected_property_count() { return expected_property_count_; }
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index dc722cb..6735ff4 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -38,6 +38,7 @@
 #include "macro-assembler.h"
 #include "natives.h"
 #include "objects-visiting.h"
+#include "platform.h"
 #include "snapshot.h"
 #include "extensions/externalize-string-extension.h"
 #include "extensions/gc-extension.h"
@@ -362,6 +363,7 @@
   if (is_ecma_native) {
     function->shared()->set_instance_class_name(*symbol);
   }
+  function->shared()->set_native(true);
   return function;
 }
 
@@ -375,26 +377,28 @@
   PropertyAttributes attributes =
       static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
 
+  DescriptorArray::WhitenessWitness witness(*descriptors);
+
   {  // Add length.
     Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionLength);
     CallbacksDescriptor d(*factory()->length_symbol(), *foreign, attributes);
-    descriptors->Set(0, &d);
+    descriptors->Set(0, &d, witness);
   }
   {  // Add name.
     Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionName);
     CallbacksDescriptor d(*factory()->name_symbol(), *foreign, attributes);
-    descriptors->Set(1, &d);
+    descriptors->Set(1, &d, witness);
   }
   {  // Add arguments.
     Handle<Foreign> foreign =
         factory()->NewForeign(&Accessors::FunctionArguments);
     CallbacksDescriptor d(*factory()->arguments_symbol(), *foreign, attributes);
-    descriptors->Set(2, &d);
+    descriptors->Set(2, &d, witness);
   }
   {  // Add caller.
     Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionCaller);
     CallbacksDescriptor d(*factory()->caller_symbol(), *foreign, attributes);
-    descriptors->Set(3, &d);
+    descriptors->Set(3, &d, witness);
   }
   if (prototypeMode != DONT_ADD_PROTOTYPE) {
     // Add prototype.
@@ -404,9 +408,9 @@
     Handle<Foreign> foreign =
         factory()->NewForeign(&Accessors::FunctionPrototype);
     CallbacksDescriptor d(*factory()->prototype_symbol(), *foreign, attributes);
-    descriptors->Set(4, &d);
+    descriptors->Set(4, &d, witness);
   }
-  descriptors->Sort();
+  descriptors->Sort(witness);
   return descriptors;
 }
 
@@ -522,41 +526,43 @@
                                     ? 4
                                     : 5);
   PropertyAttributes attributes = static_cast<PropertyAttributes>(
-      DONT_ENUM | DONT_DELETE | READ_ONLY);
+      DONT_ENUM | DONT_DELETE);
+
+  DescriptorArray::WhitenessWitness witness(*descriptors);
 
   {  // length
     Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionLength);
     CallbacksDescriptor d(*factory()->length_symbol(), *foreign, attributes);
-    descriptors->Set(0, &d);
+    descriptors->Set(0, &d, witness);
   }
   {  // name
     Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionName);
     CallbacksDescriptor d(*factory()->name_symbol(), *foreign, attributes);
-    descriptors->Set(1, &d);
+    descriptors->Set(1, &d, witness);
   }
   {  // arguments
     CallbacksDescriptor d(*factory()->arguments_symbol(),
                           *arguments,
                           attributes);
-    descriptors->Set(2, &d);
+    descriptors->Set(2, &d, witness);
   }
   {  // caller
     CallbacksDescriptor d(*factory()->caller_symbol(), *caller, attributes);
-    descriptors->Set(3, &d);
+    descriptors->Set(3, &d, witness);
   }
 
   // prototype
   if (prototypeMode != DONT_ADD_PROTOTYPE) {
-    if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) {
-      attributes = static_cast<PropertyAttributes>(attributes & ~READ_ONLY);
+    if (prototypeMode != ADD_WRITEABLE_PROTOTYPE) {
+      attributes = static_cast<PropertyAttributes>(attributes | READ_ONLY);
     }
     Handle<Foreign> foreign =
         factory()->NewForeign(&Accessors::FunctionPrototype);
     CallbacksDescriptor d(*factory()->prototype_symbol(), *foreign, attributes);
-    descriptors->Set(4, &d);
+    descriptors->Set(4, &d, witness);
   }
 
-  descriptors->Sort();
+  descriptors->Sort(witness);
   return descriptors;
 }
 
@@ -941,6 +947,7 @@
     ASSERT_EQ(0, initial_map->inobject_properties());
 
     Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(5);
+    DescriptorArray::WhitenessWitness witness(*descriptors);
     PropertyAttributes final =
         static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
     int enum_index = 0;
@@ -950,7 +957,7 @@
                             JSRegExp::kSourceFieldIndex,
                             final,
                             enum_index++);
-      descriptors->Set(0, &field);
+      descriptors->Set(0, &field, witness);
     }
     {
       // ECMA-262, section 15.10.7.2.
@@ -958,7 +965,7 @@
                             JSRegExp::kGlobalFieldIndex,
                             final,
                             enum_index++);
-      descriptors->Set(1, &field);
+      descriptors->Set(1, &field, witness);
     }
     {
       // ECMA-262, section 15.10.7.3.
@@ -966,7 +973,7 @@
                             JSRegExp::kIgnoreCaseFieldIndex,
                             final,
                             enum_index++);
-      descriptors->Set(2, &field);
+      descriptors->Set(2, &field, witness);
     }
     {
       // ECMA-262, section 15.10.7.4.
@@ -974,7 +981,7 @@
                             JSRegExp::kMultilineFieldIndex,
                             final,
                             enum_index++);
-      descriptors->Set(3, &field);
+      descriptors->Set(3, &field, witness);
     }
     {
       // ECMA-262, section 15.10.7.5.
@@ -984,10 +991,10 @@
                             JSRegExp::kLastIndexFieldIndex,
                             writable,
                             enum_index++);
-      descriptors->Set(4, &field);
+      descriptors->Set(4, &field, witness);
     }
     descriptors->SetNextEnumerationIndex(enum_index);
-    descriptors->Sort();
+    descriptors->Sort(witness);
 
     initial_map->set_inobject_properties(5);
     initial_map->set_pre_allocated_property_fields(5);
@@ -1065,7 +1072,7 @@
                             DONT_ENUM);
 
 #ifdef DEBUG
-    LookupResult lookup;
+    LookupResult lookup(isolate);
     result->LocalLookup(heap->callee_symbol(), &lookup);
     ASSERT(lookup.IsProperty() && (lookup.type() == FIELD));
     ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsCalleeIndex);
@@ -1084,11 +1091,6 @@
   }
 
   {  // --- aliased_arguments_boilerplate_
-    Handle<Map> old_map(global_context()->arguments_boilerplate()->map());
-    Handle<Map> new_map = factory->CopyMapDropTransitions(old_map);
-    new_map->set_pre_allocated_property_fields(2);
-    Handle<JSObject> result = factory->NewJSObjectFromMap(new_map);
-    new_map->set_elements_kind(NON_STRICT_ARGUMENTS_ELEMENTS);
     // Set up a well-formed parameter map to make assertions happy.
     Handle<FixedArray> elements = factory->NewFixedArray(2);
     elements->set_map(heap->non_strict_arguments_elements_map());
@@ -1097,12 +1099,16 @@
     elements->set(0, *array);
     array = factory->NewFixedArray(0);
     elements->set(1, *array);
-    Handle<Map> non_strict_arguments_elements_map =
-        factory->GetElementsTransitionMap(result,
-                                          NON_STRICT_ARGUMENTS_ELEMENTS);
-    result->set_map(*non_strict_arguments_elements_map);
-    ASSERT(result->HasNonStrictArgumentsElements());
+
+    Handle<Map> old_map(global_context()->arguments_boilerplate()->map());
+    Handle<Map> new_map = factory->CopyMapDropTransitions(old_map);
+    new_map->set_pre_allocated_property_fields(2);
+    Handle<JSObject> result = factory->NewJSObjectFromMap(new_map);
+    // Set elements kind after allocating the object because
+    // NewJSObjectFromMap assumes a fast elements map.
+    new_map->set_elements_kind(NON_STRICT_ARGUMENTS_ELEMENTS);
     result->set_elements(*elements);
+    ASSERT(result->HasNonStrictArgumentsElements());
     global_context()->set_aliased_arguments_boilerplate(*result);
   }
 
@@ -1125,19 +1131,20 @@
 
     // Create the descriptor array for the arguments object.
     Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(3);
+    DescriptorArray::WhitenessWitness witness(*descriptors);
     {  // length
       FieldDescriptor d(*factory->length_symbol(), 0, DONT_ENUM);
-      descriptors->Set(0, &d);
+      descriptors->Set(0, &d, witness);
     }
     {  // callee
       CallbacksDescriptor d(*factory->callee_symbol(), *callee, attributes);
-      descriptors->Set(1, &d);
+      descriptors->Set(1, &d, witness);
     }
     {  // caller
       CallbacksDescriptor d(*factory->caller_symbol(), *caller, attributes);
-      descriptors->Set(2, &d);
+      descriptors->Set(2, &d, witness);
     }
-    descriptors->Sort();
+    descriptors->Sort(witness);
 
     // Create the map. Allocate one in-object field for length.
     Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE,
@@ -1162,7 +1169,7 @@
                             DONT_ENUM);
 
 #ifdef DEBUG
-    LookupResult lookup;
+    LookupResult lookup(isolate);
     result->LocalLookup(heap->length_symbol(), &lookup);
     ASSERT(lookup.IsProperty() && (lookup.type() == FIELD));
     ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsLengthIndex);
@@ -1221,6 +1228,14 @@
 
   // Initialize the data slot.
   global_context()->set_data(heap->undefined_value());
+
+  {
+    // Initialize the random seed slot.
+    Handle<ByteArray> zeroed_byte_array(
+        factory->NewByteArray(kRandomStateSize));
+    global_context()->set_random_seed(*zeroed_byte_array);
+    memset(zeroed_byte_array->GetDataStartAddress(), 0, kRandomStateSize);
+  }
 }
 
 
@@ -1228,12 +1243,26 @@
   Handle<JSObject> global = Handle<JSObject>(global_context()->global());
 
   // TODO(mstarzinger): Move this into Genesis::InitializeGlobal once we no
-  // longer need to live behind a flag, so WeakMap gets added to the snapshot.
-  if (FLAG_harmony_weakmaps) {  // -- W e a k M a p
-    Handle<JSObject> prototype =
-        factory()->NewJSObject(isolate()->object_function(), TENURED);
-    InstallFunction(global, "WeakMap", JS_WEAK_MAP_TYPE, JSWeakMap::kSize,
-                    prototype, Builtins::kIllegal, true);
+  // longer need to live behind a flag, so functions get added to the snapshot.
+  if (FLAG_harmony_collections) {
+    {  // -- S e t
+      Handle<JSObject> prototype =
+          factory()->NewJSObject(isolate()->object_function(), TENURED);
+      InstallFunction(global, "Set", JS_SET_TYPE, JSSet::kSize,
+                      prototype, Builtins::kIllegal, true);
+    }
+    {  // -- M a p
+      Handle<JSObject> prototype =
+          factory()->NewJSObject(isolate()->object_function(), TENURED);
+      InstallFunction(global, "Map", JS_MAP_TYPE, JSMap::kSize,
+                      prototype, Builtins::kIllegal, true);
+    }
+    {  // -- W e a k M a p
+      Handle<JSObject> prototype =
+          factory()->NewJSObject(isolate()->object_function(), TENURED);
+      InstallFunction(global, "WeakMap", JS_WEAK_MAP_TYPE, JSWeakMap::kSize,
+                      prototype, Builtins::kIllegal, true);
+    }
   }
 }
 
@@ -1362,6 +1391,7 @@
     INSTALL_NATIVE(JSFunction, "DerivedHasTrap", derived_has_trap);
     INSTALL_NATIVE(JSFunction, "DerivedGetTrap", derived_get_trap);
     INSTALL_NATIVE(JSFunction, "DerivedSetTrap", derived_set_trap);
+    INSTALL_NATIVE(JSFunction, "ProxyEnumerate", proxy_enumerate);
   }
 }
 
@@ -1696,7 +1726,9 @@
     Handle<DescriptorArray> reresult_descriptors =
         factory()->NewDescriptorArray(3);
 
-    reresult_descriptors->CopyFrom(0, *array_descriptors, 0);
+    DescriptorArray::WhitenessWitness witness(*reresult_descriptors);
+
+    reresult_descriptors->CopyFrom(0, *array_descriptors, 0, witness);
 
     int enum_index = 0;
     {
@@ -1704,7 +1736,7 @@
                                   JSRegExpResult::kIndexIndex,
                                   NONE,
                                   enum_index++);
-      reresult_descriptors->Set(1, &index_field);
+      reresult_descriptors->Set(1, &index_field, witness);
     }
 
     {
@@ -1712,9 +1744,9 @@
                                   JSRegExpResult::kInputIndex,
                                   NONE,
                                   enum_index++);
-      reresult_descriptors->Set(2, &input_field);
+      reresult_descriptors->Set(2, &input_field, witness);
     }
-    reresult_descriptors->Sort();
+    reresult_descriptors->Sort(witness);
 
     initial_map->set_inobject_properties(2);
     initial_map->set_pre_allocated_property_fields(2);
@@ -1741,9 +1773,9 @@
                "native proxy.js") == 0) {
       if (!CompileExperimentalBuiltin(isolate(), i)) return false;
     }
-    if (FLAG_harmony_weakmaps &&
+    if (FLAG_harmony_collections &&
         strcmp(ExperimentalNatives::GetScriptName(i).start(),
-               "native weakmap.js") == 0) {
+               "native collection.js") == 0) {
       if (!CompileExperimentalBuiltin(isolate(), i)) return false;
     }
   }
@@ -1989,6 +2021,12 @@
       false);
   ASSERT(isolate->has_pending_exception() != result);
   if (!result) {
+    // We print out the name of the extension that failed to install.
+    // When an error is thrown during bootstrapping, the isolate's error
+    // throwing functionality automatically prints the line number at which
+    // this happened to the console.
+    OS::PrintError("Error installing extension '%s'.\n",
+                   current->extension()->name());
     isolate->clear_pending_exception();
   }
   current->set_state(v8::INSTALLED);
@@ -2008,7 +2046,9 @@
     builtins->set_javascript_builtin(id, *function);
     Handle<SharedFunctionInfo> shared
         = Handle<SharedFunctionInfo>(function->shared());
-    if (!EnsureCompiled(shared, CLEAR_EXCEPTION)) return false;
+    if (!SharedFunctionInfo::EnsureCompiled(shared, CLEAR_EXCEPTION)) {
+      return false;
+    }
     // Set the code object on the function object.
     function->ReplaceCode(function->shared()->code());
     builtins->set_javascript_builtin_code(id, shared->code());
@@ -2088,7 +2128,7 @@
           break;
         }
         case CALLBACKS: {
-          LookupResult result;
+          LookupResult result(isolate());
           to->LocalLookup(descs->GetKey(i), &result);
           // If the property is already there we skip it
           if (result.IsProperty()) continue;
@@ -2126,7 +2166,7 @@
       if (properties->IsKey(raw_key)) {
         ASSERT(raw_key->IsString());
         // If the property is already there we skip it.
-        LookupResult result;
+        LookupResult result(isolate());
         to->LocalLookup(String::cast(raw_key), &result);
         if (result.IsProperty()) continue;
         // Set the property.
diff --git a/src/builtins.cc b/src/builtins.cc
index d513200..e758b9a 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -1507,6 +1507,14 @@
   KeyedStoreIC::GenerateNonStrictArguments(masm);
 }
 
+static void Generate_TransitionElementsSmiToDouble(MacroAssembler* masm) {
+  KeyedStoreIC::GenerateTransitionElementsSmiToDouble(masm);
+}
+
+static void Generate_TransitionElementsDoubleToObject(MacroAssembler* masm) {
+  KeyedStoreIC::GenerateTransitionElementsDoubleToObject(masm);
+}
+
 #ifdef ENABLE_DEBUGGER_SUPPORT
 static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
   Debug::GenerateLoadICDebugBreak(masm);
diff --git a/src/builtins.h b/src/builtins.h
index 31090d3..24059e7 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -167,6 +167,10 @@
                                     kStrictMode)                        \
   V(KeyedStoreIC_NonStrictArguments, KEYED_STORE_IC, MEGAMORPHIC,       \
                                      Code::kNoExtraICState)             \
+  V(TransitionElementsSmiToDouble,  BUILTIN, UNINITIALIZED,             \
+                                    Code::kNoExtraICState)              \
+  V(TransitionElementsDoubleToObject, BUILTIN, UNINITIALIZED,           \
+                                      Code::kNoExtraICState)            \
                                                                         \
   /* Uses KeyedLoadIC_Initialize; must be after in list. */             \
   V(FunctionCall,                   BUILTIN, UNINITIALIZED,             \
@@ -234,7 +238,6 @@
   V(DELETE, 2)                           \
   V(IN, 1)                               \
   V(INSTANCE_OF, 1)                      \
-  V(GET_KEYS, 0)                         \
   V(FILTER_KEY, 1)                       \
   V(CALL_NON_FUNCTION, 0)                \
   V(CALL_NON_FUNCTION_AS_CONSTRUCTOR, 0) \
diff --git a/src/checks.h b/src/checks.h
index 2f359f6..832f778 100644
--- a/src/checks.h
+++ b/src/checks.h
@@ -63,7 +63,9 @@
 
 // The CHECK macro checks that the given condition is true; if not, it
 // prints a message to stderr and aborts.
-#define CHECK(condition) CheckHelper(__FILE__, __LINE__, #condition, condition)
+#define CHECK(condition) do {                                             \
+    if (!(condition)) CheckHelper(__FILE__, __LINE__, #condition, false); \
+  } while (0)
 
 
 // Helper function used by the CHECK_EQ function when given int
@@ -257,11 +259,8 @@
     SEMI_STATIC_JOIN(__StaticAssertTypedef__, __LINE__)
 
 
-namespace v8 { namespace internal {
+extern bool FLAG_enable_slow_asserts;
 
-bool EnableSlowAsserts();
-
-} }  // namespace v8::internal
 
 // The ASSERT macro is equivalent to CHECK except that it only
 // generates code in debug builds.
@@ -273,7 +272,7 @@
 #define ASSERT_GE(v1, v2)    CHECK_GE(v1, v2)
 #define ASSERT_LT(v1, v2)    CHECK_LT(v1, v2)
 #define ASSERT_LE(v1, v2)    CHECK_LE(v1, v2)
-#define SLOW_ASSERT(condition) if (EnableSlowAsserts()) CHECK(condition)
+#define SLOW_ASSERT(condition) if (FLAG_enable_slow_asserts) CHECK(condition)
 #else
 #define ASSERT_RESULT(expr)     (expr)
 #define ASSERT(condition)      ((void) 0)
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 4bc2603..b437436 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -415,4 +415,29 @@
 }
 
 
+void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) {
+  Label fail;
+  if (!FLAG_trace_elements_transitions) {
+    if (to_ == FAST_ELEMENTS) {
+      if (from_ == FAST_SMI_ONLY_ELEMENTS) {
+        ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+      } else if (from_ == FAST_DOUBLE_ELEMENTS) {
+        ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
+      } else {
+        UNREACHABLE();
+      }
+      KeyedStoreStubCompiler::GenerateStoreFastElement(masm,
+                                                       is_jsarray_,
+                                                       FAST_ELEMENTS);
+    } else if (from_ == FAST_SMI_ONLY_ELEMENTS && to_ == FAST_DOUBLE_ELEMENTS) {
+      ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+      KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(masm, is_jsarray_);
+    } else {
+      UNREACHABLE();
+    }
+  }
+  masm->bind(&fail);
+  KeyedStoreIC::GenerateRuntimeSetProperty(masm, strict_mode_);
+}
+
 } }  // namespace v8::internal
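
For context on the new ElementsTransitionAndStoreStub (and the TransitionElementsSmiToDouble / TransitionElementsDoubleToObject builtins declared in builtins.h above): they cover stores that force an array's elements kind to change before the value can be written. A rough JavaScript sketch of stores that trigger each generator, assuming smi-only arrays are enabled in this build (the kinds themselves are internal and not observable from plain script):

var a = [1, 2, 3];   // starts as FAST_SMI_ONLY_ELEMENTS (all small integers)
a[0] = 1.5;          // heap-number store: GenerateSmiOnlyToDouble runs and
                     // the backing store becomes FAST_DOUBLE_ELEMENTS
a[1] = "x";          // non-number store: GenerateDoubleToObject runs and
                     // the backing store becomes FAST_ELEMENTS, then the
                     // regular fast element store completes
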
diff --git a/src/code-stubs.h b/src/code-stubs.h
index acfbd46..fc7000b 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -30,6 +30,7 @@
 
 #include "allocation.h"
 #include "globals.h"
+#include "codegen.h"
 
 namespace v8 {
 namespace internal {
@@ -69,7 +70,8 @@
   V(KeyedLoadElement)                    \
   V(KeyedStoreElement)                   \
   V(DebuggerStatement)                   \
-  V(StringDictionaryLookup)
+  V(StringDictionaryLookup)              \
+  V(ElementsTransitionAndStore)
 
 // List of code stubs only used on ARM platforms.
 #ifdef V8_TARGET_ARCH_ARM
@@ -362,6 +364,7 @@
 
   enum Mode {
     CLONE_ELEMENTS,
+    CLONE_DOUBLE_ELEMENTS,
     COPY_ON_WRITE_ELEMENTS
   };
 
@@ -380,8 +383,8 @@
 
   Major MajorKey() { return FastCloneShallowArray; }
   int MinorKey() {
-    ASSERT(mode_ == 0 || mode_ == 1);
-    return (length_ << 1) | mode_;
+    ASSERT(mode_ == 0 || mode_ == 1 || mode_ == 2);
+    return length_ * 3 + mode_;
   }
 };
 
@@ -1025,6 +1028,42 @@
   Types types_;
 };
 
+
+class ElementsTransitionAndStoreStub : public CodeStub {
+ public:
+  ElementsTransitionAndStoreStub(ElementsKind from,
+                                 ElementsKind to,
+                                 bool is_jsarray,
+                                 StrictModeFlag strict_mode)
+      : from_(from),
+        to_(to),
+        is_jsarray_(is_jsarray),
+        strict_mode_(strict_mode) {}
+
+ private:
+  class FromBits:       public BitField<ElementsKind,    0, 8> {};
+  class ToBits:         public BitField<ElementsKind,    8, 8> {};
+  class IsJSArrayBits:  public BitField<bool,           16, 8> {};
+  class StrictModeBits: public BitField<StrictModeFlag, 24, 8> {};
+
+  Major MajorKey() { return ElementsTransitionAndStore; }
+  int MinorKey() {
+    return FromBits::encode(from_) |
+        ToBits::encode(to_) |
+        IsJSArrayBits::encode(is_jsarray_) |
+        StrictModeBits::encode(strict_mode_);
+  }
+
+  void Generate(MacroAssembler* masm);
+
+  ElementsKind from_;
+  ElementsKind to_;
+  bool is_jsarray_;
+  StrictModeFlag strict_mode_;
+
+  DISALLOW_COPY_AND_ASSIGN(ElementsTransitionAndStoreStub);
+};
+
 } }  // namespace v8::internal
 
 #endif  // V8_CODE_STUBS_H_
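The MinorKey values above are plain integer packing: FastCloneShallowArrayStub now folds three clone modes into length_ * 3 + mode_, and ElementsTransitionAndStoreStub packs its four constructor arguments into 8-bit fields at bit offsets 0, 8, 16 and 24. The following standalone sketch shows the latter encoding with explicit shifts in place of V8's BitField template; the enum values are placeholders, not V8's real ElementsKind numbering.

    #include <cstdint>
    #include <cstdio>

    // Placeholder enums; the real ElementsKind/StrictModeFlag values live in
    // V8's headers and are not reproduced here.
    enum ElementsKind {
      FAST_SMI_ONLY_ELEMENTS = 0,
      FAST_ELEMENTS = 1,
      FAST_DOUBLE_ELEMENTS = 2
    };
    enum StrictModeFlag { kNonStrictMode = 0, kStrictMode = 1 };

    // Same field layout as the BitField declarations in the stub: from_ at
    // bit 0, to_ at bit 8, is_jsarray_ at bit 16, strict_mode_ at bit 24.
    static uint32_t MinorKey(ElementsKind from, ElementsKind to,
                             bool is_jsarray, StrictModeFlag strict_mode) {
      return (static_cast<uint32_t>(from) << 0) |
             (static_cast<uint32_t>(to) << 8) |
             (static_cast<uint32_t>(is_jsarray ? 1 : 0) << 16) |
             (static_cast<uint32_t>(strict_mode) << 24);
    }

    int main() {
      // A smi-only -> double transition on a JSArray in strict mode;
      // prints 0x01010200 with the placeholder values above.
      std::printf("0x%08x\n",
                  static_cast<unsigned>(MinorKey(FAST_SMI_ONLY_ELEMENTS,
                                                 FAST_DOUBLE_ELEMENTS,
                                                 true, kStrictMode)));
      return 0;
    }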
diff --git a/src/codegen.h b/src/codegen.h
index e551abf..5360d3e 100644
--- a/src/codegen.h
+++ b/src/codegen.h
@@ -81,4 +81,19 @@
 #error Unsupported target architecture.
 #endif
 
+namespace v8 {
+namespace internal {
+
+class ElementsTransitionGenerator : public AllStatic {
+ public:
+  static void GenerateSmiOnlyToObject(MacroAssembler* masm);
+  static void GenerateSmiOnlyToDouble(MacroAssembler* masm, Label* fail);
+  static void GenerateDoubleToObject(MacroAssembler* masm, Label* fail);
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(ElementsTransitionGenerator);
+};
+
+} }  // namespace v8::internal
+
 #endif  // V8_CODEGEN_H_
diff --git a/src/weakmap.js b/src/collection.js
similarity index 65%
rename from src/weakmap.js
rename to src/collection.js
index 5fb5151..4e45885 100644
--- a/src/weakmap.js
+++ b/src/collection.js
@@ -26,12 +26,69 @@
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 
-// This file relies on the fact that the following declaration has been made
-// in runtime.js:
-// const $Object = global.Object;
+const $Set = global.Set;
+const $Map = global.Map;
 const $WeakMap = global.WeakMap;
 
-// -------------------------------------------------------------------
+//-------------------------------------------------------------------
+
+function SetConstructor() {
+  if (%_IsConstructCall()) {
+    %SetInitialize(this);
+  } else {
+    return new $Set();
+  }
+}
+
+
+function SetAdd(key) {
+  return %SetAdd(this, key);
+}
+
+
+function SetHas(key) {
+  return %SetHas(this, key);
+}
+
+
+function SetDelete(key) {
+  return %SetDelete(this, key);
+}
+
+
+function MapConstructor() {
+  if (%_IsConstructCall()) {
+    %MapInitialize(this);
+  } else {
+    return new $Map();
+  }
+}
+
+
+function MapGet(key) {
+  return %MapGet(this, key);
+}
+
+
+function MapSet(key, value) {
+  return %MapSet(this, key, value);
+}
+
+
+function MapHas(key) {
+  return !IS_UNDEFINED(%MapGet(this, key));
+}
+
+
+function MapDelete(key) {
+  if (!IS_UNDEFINED(%MapGet(this, key))) {
+    %MapSet(this, key, void 0);
+    return true;
+  } else {
+    return false;
+  }
+}
+
 
 function WeakMapConstructor() {
   if (%_IsConstructCall()) {
@@ -82,6 +139,30 @@
 
 (function () {
   %CheckIsBootstrapping();
+
+  // Set up the Set and Map constructor functions.
+  %SetCode($Set, SetConstructor);
+  %SetCode($Map, MapConstructor);
+
+  // Set up the constructor property on the Set and Map prototype objects.
+  %SetProperty($Set.prototype, "constructor", $Set, DONT_ENUM);
+  %SetProperty($Map.prototype, "constructor", $Map, DONT_ENUM);
+
+  // Set up the non-enumerable functions on the Set prototype object.
+  InstallFunctionsOnHiddenPrototype($Set.prototype, DONT_ENUM, $Array(
+    "add", SetAdd,
+    "has", SetHas,
+    "delete", SetDelete
+  ));
+
+  // Set up the non-enumerable functions on the Map prototype object.
+  InstallFunctionsOnHiddenPrototype($Map.prototype, DONT_ENUM, $Array(
+    "get", MapGet,
+    "set", MapSet,
+    "has", MapHas,
+    "delete", MapDelete
+  ));
+
   // Set up the WeakMap constructor function.
   %SetCode($WeakMap, WeakMapConstructor);
 
diff --git a/src/compiler.cc b/src/compiler.cc
index 544d190..88db467 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -59,7 +59,6 @@
       script_(script),
       extension_(NULL),
       pre_parse_data_(NULL),
-      supports_deoptimization_(false),
       osr_ast_id_(AstNode::kNoNumber) {
   Initialize(NONOPT);
 }
@@ -74,7 +73,6 @@
       script_(Handle<Script>(Script::cast(shared_info->script()))),
       extension_(NULL),
       pre_parse_data_(NULL),
-      supports_deoptimization_(false),
       osr_ast_id_(AstNode::kNoNumber) {
   Initialize(BASE);
 }
@@ -90,7 +88,6 @@
       script_(Handle<Script>(Script::cast(shared_info_->script()))),
       extension_(NULL),
       pre_parse_data_(NULL),
-      supports_deoptimization_(false),
       osr_ast_id_(AstNode::kNoNumber) {
   Initialize(BASE);
 }
@@ -309,9 +306,9 @@
 
 
 static bool GenerateCode(CompilationInfo* info) {
-  return V8::UseCrankshaft() ?
-    MakeCrankshaftCode(info) :
-    FullCodeGenerator::MakeCode(info);
+  return info->IsCompilingForDebugging() || !V8::UseCrankshaft() ?
+      FullCodeGenerator::MakeCode(info) :
+      MakeCrankshaftCode(info);
 }
 
 
@@ -480,20 +477,22 @@
     // that would be compiled lazily anyway, so we skip the preparse step
     // in that case too.
     ScriptDataImpl* pre_data = input_pre_data;
-    bool harmony_scoping = natives != NATIVES_CODE && FLAG_harmony_scoping;
+    int flags = kNoParsingFlags;
+    if ((natives == NATIVES_CODE) || FLAG_allow_natives_syntax) {
+      flags |= kAllowNativesSyntax;
+    }
+    if (natives != NATIVES_CODE && FLAG_harmony_scoping) {
+      flags |= kHarmonyScoping;
+    }
     if (pre_data == NULL
         && source_length >= FLAG_min_preparse_length) {
       if (source->IsExternalTwoByteString()) {
         ExternalTwoByteStringUC16CharacterStream stream(
             Handle<ExternalTwoByteString>::cast(source), 0, source->length());
-        pre_data = ParserApi::PartialPreParse(&stream,
-                                              extension,
-                                              harmony_scoping);
+        pre_data = ParserApi::PartialPreParse(&stream, extension, flags);
       } else {
         GenericStringUC16CharacterStream stream(source, 0, source->length());
-        pre_data = ParserApi::PartialPreParse(&stream,
-                                              extension,
-                                              harmony_scoping);
+        pre_data = ParserApi::PartialPreParse(&stream, extension, flags);
       }
     }
 
@@ -559,7 +558,7 @@
     CompilationInfo info(script);
     info.MarkAsEval();
     if (is_global) info.MarkAsGlobal();
-    if (strict_mode == kStrictMode) info.MarkAsStrictMode();
+    info.SetStrictModeFlag(strict_mode);
     info.SetCallingContext(context);
     result = MakeFunctionInfo(&info);
     if (!result.is_null()) {
@@ -567,6 +566,7 @@
       // If caller is strict mode, the result must be strict as well,
       // but not the other way around. Consider:
       // eval("'use strict'; ...");
+      // TODO(keuchel): adapt this for extended mode.
       ASSERT(strict_mode == kNonStrictMode || result->strict_mode());
       compilation_cache->PutEval(source, context, is_global, result);
     }
@@ -598,10 +598,13 @@
     HistogramTimerScope timer(isolate->counters()->compile_lazy());
 
     // After parsing we know function's strict mode. Remember it.
-    if (info->function()->strict_mode()) {
-      shared->set_strict_mode(true);
-      info->MarkAsStrictMode();
-    }
+    StrictModeFlag strict_mode = info->function()->strict_mode_flag();
+    ASSERT(info->strict_mode_flag() == kNonStrictMode ||
+           info->strict_mode_flag() == strict_mode);
+    ASSERT(shared->strict_mode_flag() == kNonStrictMode ||
+           shared->strict_mode_flag() == strict_mode);
+    info->SetStrictModeFlag(strict_mode);
+    shared->set_strict_mode_flag(strict_mode);
 
     // Compile the code.
     if (!MakeCode(info)) {
@@ -681,7 +684,7 @@
   CompilationInfo info(script);
   info.SetFunction(literal);
   info.SetScope(literal->scope());
-  if (literal->scope()->is_strict_mode()) info.MarkAsStrictMode();
+  info.SetStrictModeFlag(literal->scope()->strict_mode_flag());
 
   LiveEditFunctionTracker live_edit_tracker(info.isolate(), literal);
   // Determine if the function can be lazily compiled. This is necessary to
@@ -747,7 +750,7 @@
       lit->has_only_simple_this_property_assignments(),
       *lit->this_property_assignments());
   function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
-  function_info->set_strict_mode(lit->strict_mode());
+  function_info->set_strict_mode_flag(lit->strict_mode_flag());
   function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
   function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
 }
diff --git a/src/compiler.h b/src/compiler.h
index 69ab27d..bedf5ee 100644
--- a/src/compiler.h
+++ b/src/compiler.h
@@ -52,7 +52,10 @@
   bool is_lazy() const { return IsLazy::decode(flags_); }
   bool is_eval() const { return IsEval::decode(flags_); }
   bool is_global() const { return IsGlobal::decode(flags_); }
-  bool is_strict_mode() const { return IsStrictMode::decode(flags_); }
+  bool is_strict_mode() const { return strict_mode_flag() == kStrictMode; }
+  StrictModeFlag strict_mode_flag() const {
+    return StrictModeFlagField::decode(flags_);
+  }
   bool is_in_loop() const { return IsInLoop::decode(flags_); }
   FunctionLiteral* function() const { return function_; }
   Scope* scope() const { return scope_; }
@@ -73,11 +76,10 @@
     ASSERT(!is_lazy());
     flags_ |= IsGlobal::encode(true);
   }
-  void MarkAsStrictMode() {
-    flags_ |= IsStrictMode::encode(true);
-  }
-  StrictModeFlag StrictMode() {
-    return is_strict_mode() ? kStrictMode : kNonStrictMode;
+  void SetStrictModeFlag(StrictModeFlag strict_mode_flag) {
+    ASSERT(StrictModeFlagField::decode(flags_) == kNonStrictMode ||
+           StrictModeFlagField::decode(flags_) == strict_mode_flag);
+    flags_ = StrictModeFlagField::update(flags_, strict_mode_flag);
   }
   void MarkAsInLoop() {
     ASSERT(is_lazy());
@@ -114,6 +116,19 @@
     ASSERT(IsOptimizing());
     osr_ast_id_ = osr_ast_id;
   }
+  void MarkCompilingForDebugging(Handle<Code> current_code) {
+    ASSERT(mode_ != OPTIMIZE);
+    ASSERT(current_code->kind() == Code::FUNCTION);
+    flags_ |= IsCompilingForDebugging::encode(true);
+    if (current_code->is_compiled_optimizable()) {
+      EnableDeoptimizationSupport();
+    } else {
+      mode_ = CompilationInfo::NONOPT;
+    }
+  }
+  bool IsCompilingForDebugging() {
+    return IsCompilingForDebugging::decode(flags_);
+  }
 
   bool has_global_object() const {
     return !closure().is_null() && (closure()->context()->global() != NULL);
@@ -133,10 +148,12 @@
   void DisableOptimization();
 
   // Deoptimization support.
-  bool HasDeoptimizationSupport() const { return supports_deoptimization_; }
+  bool HasDeoptimizationSupport() const {
+    return SupportsDeoptimization::decode(flags_);
+  }
   void EnableDeoptimizationSupport() {
     ASSERT(IsOptimizable());
-    supports_deoptimization_ = true;
+    flags_ |= SupportsDeoptimization::encode(true);
   }
 
   // Determine whether or not we can adaptively optimize.
@@ -171,8 +188,9 @@
     if (script_->type()->value() == Script::TYPE_NATIVE) {
       MarkAsNative();
     }
-    if (!shared_info_.is_null() && shared_info_->strict_mode()) {
-      MarkAsStrictMode();
+    if (!shared_info_.is_null()) {
+      ASSERT(strict_mode_flag() == kNonStrictMode);
+      SetStrictModeFlag(shared_info_->strict_mode_flag());
     }
   }
 
@@ -192,9 +210,14 @@
   // Flags that can be set for lazy compilation.
   class IsInLoop: public BitField<bool, 3, 1> {};
   // Strict mode - used in eager compilation.
-  class IsStrictMode: public BitField<bool, 4, 1> {};
+  class StrictModeFlagField: public BitField<StrictModeFlag, 4, 1> {};
   // Is this a function from our natives.
   class IsNative: public BitField<bool, 6, 1> {};
+  // Is this code being compiled with support for deoptimization?
+  class SupportsDeoptimization: public BitField<bool, 7, 1> {};
+  // If compiling for debugging, produce only full code matching the
+  // initial mode setting.
+  class IsCompilingForDebugging: public BitField<bool, 8, 1> {};
 
 
   unsigned flags_;
@@ -223,7 +246,6 @@
 
   // Compilation mode flag and whether deoptimization is allowed.
   Mode mode_;
-  bool supports_deoptimization_;
   int osr_ast_id_;
 
   DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
diff --git a/src/contexts.cc b/src/contexts.cc
index 0cda430..b25ffac 100644
--- a/src/contexts.cc
+++ b/src/contexts.cc
@@ -174,6 +174,10 @@
             *attributes = READ_ONLY;
             *binding_flags = IMMUTABLE_CHECK_INITIALIZED;
             break;
+          case CONST_HARMONY:
+            *attributes = READ_ONLY;
+            *binding_flags = IMMUTABLE_CHECK_INITIALIZED_HARMONY;
+            break;
           case DYNAMIC:
           case DYNAMIC_GLOBAL:
           case DYNAMIC_LOCAL:
@@ -187,7 +191,8 @@
       // Check the slot corresponding to the intermediate context holding
       // only the function name variable.
       if (follow_context_chain && context->IsFunctionContext()) {
-        int function_index = scope_info->FunctionContextSlotIndex(*name);
+        VariableMode mode;
+        int function_index = scope_info->FunctionContextSlotIndex(*name, &mode);
         if (function_index >= 0) {
           if (FLAG_trace_contexts) {
             PrintF("=> found intermediate function in context slot %d\n",
@@ -195,7 +200,9 @@
           }
           *index = function_index;
           *attributes = READ_ONLY;
-          *binding_flags = IMMUTABLE_IS_INITIALIZED;
+          ASSERT(mode == CONST || mode == CONST_HARMONY);
+          *binding_flags = (mode == CONST)
+              ? IMMUTABLE_IS_INITIALIZED : IMMUTABLE_IS_INITIALIZED_HARMONY;
           return context;
         }
       }
@@ -255,7 +262,7 @@
     if (param_index >= 0) return false;
 
     // Check context only holding the function name variable.
-    index = scope_info->FunctionContextSlotIndex(*name);
+    index = scope_info->FunctionContextSlotIndex(*name, NULL);
     if (index >= 0) return false;
     context = context->previous();
   }
@@ -266,8 +273,7 @@
 }
 
 
-void Context::ComputeEvalScopeInfo(bool* outer_scope_calls_eval,
-                                   bool* outer_scope_calls_non_strict_eval) {
+void Context::ComputeEvalScopeInfo(bool* outer_scope_calls_non_strict_eval) {
   // Skip up the context chain checking all the function contexts to see
   // whether they call eval.
   Context* context = this;
@@ -275,14 +281,11 @@
     if (context->IsFunctionContext()) {
       Handle<SerializedScopeInfo> scope_info(
           context->closure()->shared()->scope_info());
-      if (scope_info->CallsEval()) {
-        *outer_scope_calls_eval = true;
-        if (!scope_info->IsStrictMode()) {
-          // No need to go further since the answers will not change from
-          // here.
-          *outer_scope_calls_non_strict_eval = true;
-          return;
-        }
+      if (scope_info->CallsEval() && !scope_info->IsStrictMode()) {
+        // No need to go further since the answers will not change from
+        // here.
+        *outer_scope_calls_non_strict_eval = true;
+        return;
       }
     }
     context = context->previous();
diff --git a/src/contexts.h b/src/contexts.h
index b80475f..7021ff8 100644
--- a/src/contexts.h
+++ b/src/contexts.h
@@ -46,24 +46,43 @@
 
 // ES5 10.2 defines lexical environments with mutable and immutable bindings.
 // Immutable bindings have two states, initialized and uninitialized, and
-// their state is changed by the InitializeImmutableBinding method.
+// their state is changed by the InitializeImmutableBinding method. The
+// BindingFlags enum indicates whether a binding has definitely been
+// initialized. A mutable binding does not need to be checked and thus has
+// the BindingFlag MUTABLE_IS_INITIALIZED.
+//
+// There are two possibilities for immutable bindings:
+//  * 'const' declared variables. They are initialized when evaluating the
+//    corresponding declaration statement. They need to be checked for being
+//    initialized and thus get the flag IMMUTABLE_CHECK_INITIALIZED.
+//  * The function name of a named function literal. The binding is immediately
+//    initialized when entering the function and thus does not need to be
+//    checked. It gets the BindingFlag IMMUTABLE_IS_INITIALIZED.
+// Accessing an uninitialized binding produces the undefined value.
 //
 // The harmony proposal for block scoped bindings also introduces the
-// uninitialized state for mutable bindings. A 'let' declared variable
-// is a mutable binding that is created uninitalized upon activation of its
-// lexical environment and it is initialized when evaluating its declaration
-// statement. Var declared variables are mutable bindings that are
-// immediately initialized upon creation. The BindingFlags enum represents
-// information if a binding has definitely been initialized. 'const' declared
-// variables are created as uninitialized immutable bindings.
-
-// In harmony mode accessing an uninitialized binding produces a reference
-// error.
+// uninitialized state for mutable bindings.
+//  * A 'let' declared variable. It is initialized when evaluating the
+//    corresponding declaration statement. It needs to be checked for
+//    being initialized and thus gets the flag MUTABLE_CHECK_INITIALIZED.
+//  * A 'var' declared variable. It is initialized immediately upon creation
+//    and thus doesn't need to be checked. It gets the flag
+//    MUTABLE_IS_INITIALIZED.
+//  * Catch bound variables, function parameters and variables introduced by
+//    function declarations are initialized immediately and do not need to be
+//    checked. Thus they get the flag MUTABLE_IS_INITIALIZED.
+// Immutable bindings in harmony mode get the _HARMONY flag variants. Accessing
+// an uninitialized binding produces a reference error.
+//
+// In V8 uninitialized bindings are set to the hole value upon creation and set
+// to a different value upon initialization.
 enum BindingFlags {
   MUTABLE_IS_INITIALIZED,
   MUTABLE_CHECK_INITIALIZED,
   IMMUTABLE_IS_INITIALIZED,
   IMMUTABLE_CHECK_INITIALIZED,
+  IMMUTABLE_IS_INITIALIZED_HARMONY,
+  IMMUTABLE_CHECK_INITIALIZED_HARMONY,
   MISSING_BINDING
 };
 
@@ -138,7 +157,9 @@
     to_complete_property_descriptor) \
   V(DERIVED_HAS_TRAP_INDEX, JSFunction, derived_has_trap) \
   V(DERIVED_GET_TRAP_INDEX, JSFunction, derived_get_trap) \
-  V(DERIVED_SET_TRAP_INDEX, JSFunction, derived_set_trap)
+  V(DERIVED_SET_TRAP_INDEX, JSFunction, derived_set_trap) \
+  V(PROXY_ENUMERATE, JSFunction, proxy_enumerate) \
+  V(RANDOM_SEED_INDEX, ByteArray, random_seed)
 
 // JSFunctions are pairs (context, function code), sometimes also called
 // closures. A Context object is used to represent function contexts and
@@ -194,7 +215,8 @@
     PREVIOUS_INDEX,
     // The extension slot is used for either the global object (in global
     // contexts), eval extension object (function contexts), subject of with
-    // (with contexts), or the variable name (catch contexts).
+    // (with contexts), or the variable name (catch contexts), the serialized
+    // scope info (block contexts).
     EXTENSION_INDEX,
     GLOBAL_INDEX,
     MIN_CONTEXT_SLOTS,
@@ -258,6 +280,8 @@
     DERIVED_HAS_TRAP_INDEX,
     DERIVED_GET_TRAP_INDEX,
     DERIVED_SET_TRAP_INDEX,
+    PROXY_ENUMERATE,
+    RANDOM_SEED_INDEX,
 
     // Properties from here are treated as weak references by the full GC.
     // Scavenge treats them as strong references.
@@ -385,8 +409,7 @@
 
   // Determine if any function scope in the context calls eval and if
   // any of those calls are in non-strict mode.
-  void ComputeEvalScopeInfo(bool* outer_scope_calls_eval,
-                            bool* outer_scope_calls_non_strict_eval);
+  void ComputeEvalScopeInfo(bool* outer_scope_calls_non_strict_eval);
 
   // Code generation support.
   static int SlotOffset(int index) {
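The BindingFlags comment above pairs each binding kind with a check behaviour. Here is a standalone model of what a context load does for each flag, assuming (as described above) that uninitialized slots hold the hole value; the types below are illustrative, not V8's.

    #include <cstdio>

    enum BindingFlags {
      MUTABLE_IS_INITIALIZED,
      MUTABLE_CHECK_INITIALIZED,
      IMMUTABLE_IS_INITIALIZED,
      IMMUTABLE_CHECK_INITIALIZED,
      IMMUTABLE_IS_INITIALIZED_HARMONY,
      IMMUTABLE_CHECK_INITIALIZED_HARMONY,
      MISSING_BINDING
    };

    enum LoadResult { LOAD_VALUE, LOAD_UNDEFINED, LOAD_REFERENCE_ERROR };

    // is_hole means "the slot still holds the hole value", i.e. the binding
    // has not been initialized yet.
    static LoadResult LoadBinding(bool is_hole, BindingFlags flags) {
      if (!is_hole) return LOAD_VALUE;
      switch (flags) {
        case IMMUTABLE_CHECK_INITIALIZED:           // classic 'const'
          return LOAD_UNDEFINED;
        case MUTABLE_CHECK_INITIALIZED:             // harmony 'let'
        case IMMUTABLE_CHECK_INITIALIZED_HARMONY:   // harmony 'const'
          return LOAD_REFERENCE_ERROR;
        default:
          // The *_IS_INITIALIZED flavours are initialized on creation and
          // never hold the hole; MISSING_BINDING is out of scope here.
          return LOAD_VALUE;
      }
    }

    int main() {
      std::printf("classic const before init: %d\n",
                  LoadBinding(true, IMMUTABLE_CHECK_INITIALIZED));
      std::printf("harmony let before init:   %d\n",
                  LoadBinding(true, MUTABLE_CHECK_INITIALIZED));
      return 0;
    }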
diff --git a/src/d8.cc b/src/d8.cc
index a516576..64ada2c 100644
--- a/src/d8.cc
+++ b/src/d8.cc
@@ -178,7 +178,8 @@
         // If all went well and the result wasn't undefined then print
         // the returned value.
         v8::String::Utf8Value str(result);
-        fwrite(*str, sizeof(**str), str.length(), stdout);
+        size_t count = fwrite(*str, sizeof(**str), str.length(), stdout);
+        (void) count;  // Silence GCC-4.5.x "unused result" warning.
         printf("\n");
       }
       return true;
diff --git a/src/debug.cc b/src/debug.cc
index 3d79485..dc9f297 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -87,19 +87,13 @@
 
 static Handle<Code> ComputeCallDebugBreak(int argc, Code::Kind kind) {
   Isolate* isolate = Isolate::Current();
-  CALL_HEAP_FUNCTION(
-      isolate,
-      isolate->stub_cache()->ComputeCallDebugBreak(argc, kind),
-      Code);
+  return isolate->stub_cache()->ComputeCallDebugBreak(argc, kind);
 }
 
 
 static Handle<Code> ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind) {
   Isolate* isolate = Isolate::Current();
-  CALL_HEAP_FUNCTION(
-      isolate,
-      isolate->stub_cache()->ComputeCallDebugPrepareStepIn(argc, kind),
-      Code);
+  return isolate->stub_cache()->ComputeCallDebugPrepareStepIn(argc, kind);
 }
 
 
@@ -1727,50 +1721,203 @@
 }
 
 
+// Helper function to compile full code for debugging. This code will
+// have debug break slots and deoptimization information.
+// Deoptimization information is required in case an optimized
+// version of this function is still activated on the stack. It also
+// makes sure that the full code is compiled with the same flags as
+// the previous version, that is, flags which can change the
+// generated code. The current method of mapping from already
+// compiled full code without debug break slots to full code with
+// debug break slots depends on the generated code being otherwise
+// exactly the same.
+static bool CompileFullCodeForDebugging(Handle<SharedFunctionInfo> shared,
+                                        Handle<Code> current_code) {
+  ASSERT(!current_code->has_debug_break_slots());
+
+  CompilationInfo info(shared);
+  info.MarkCompilingForDebugging(current_code);
+  ASSERT(!info.shared_info()->is_compiled());
+  ASSERT(!info.isolate()->has_pending_exception());
+
+  // Use compile lazy which will end up compiling the full code in the
+  // configuration configured above.
+  bool result = Compiler::CompileLazy(&info);
+  ASSERT(result != Isolate::Current()->has_pending_exception());
+  info.isolate()->clear_pending_exception();
+#if DEBUG
+  if (result) {
+    Handle<Code> new_code(shared->code());
+    ASSERT(new_code->has_debug_break_slots());
+    ASSERT(current_code->is_compiled_optimizable() ==
+           new_code->is_compiled_optimizable());
+    ASSERT(current_code->instruction_size() <= new_code->instruction_size());
+  }
+#endif
+  return result;
+}
+
+
 void Debug::PrepareForBreakPoints() {
   // If preparing for the first break point make sure to deoptimize all
   // functions as debugging does not work with optimized code.
   if (!has_break_points_) {
     Deoptimizer::DeoptimizeAll();
 
-    // We are going to iterate heap to find all functions without
-    // debug break slots.
-    isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+    Handle<Code> lazy_compile =
+        Handle<Code>(isolate_->builtins()->builtin(Builtins::kLazyCompile));
 
-    AssertNoAllocation no_allocation;
-    Builtins* builtins = isolate_->builtins();
-    Code* lazy_compile = builtins->builtin(Builtins::kLazyCompile);
+    // Keep the list of activated functions in a handlified list as it
+    // is used both in GC and non-GC code.
+    List<Handle<JSFunction> > active_functions(100);
 
-    // Find all non-optimized code functions with activation frames on
-    // the stack.
-    List<JSFunction*> active_functions(100);
-    for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) {
-      JavaScriptFrame* frame = it.frame();
-      if (frame->function()->IsJSFunction()) {
-        JSFunction* function = JSFunction::cast(frame->function());
-        if (function->code()->kind() == Code::FUNCTION)
-          active_functions.Add(function);
+    {
+      // We are going to iterate heap to find all functions without
+      // debug break slots.
+      isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+
+      // Ensure no GC in this scope as we are comparing raw pointer
+      // values and performing a heap iteration.
+      AssertNoAllocation no_allocation;
+
+      // Find all non-optimized code functions with activation frames on
+      // the stack.
+      for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) {
+        JavaScriptFrame* frame = it.frame();
+        if (frame->function()->IsJSFunction()) {
+          JSFunction* function = JSFunction::cast(frame->function());
+          if (function->code()->kind() == Code::FUNCTION &&
+              !function->code()->has_debug_break_slots())
+            active_functions.Add(Handle<JSFunction>(function));
+        }
+      }
+      // Sort the functions on the object pointer value to prepare for
+      // the binary search below.
+      active_functions.Sort(HandleObjectPointerCompare<JSFunction>);
+
+      // Scan the heap for all non-optimized functions which have no
+      // debug break slots.
+      HeapIterator iterator;
+      HeapObject* obj = NULL;
+      while (((obj = iterator.next()) != NULL)) {
+        if (obj->IsJSFunction()) {
+          JSFunction* function = JSFunction::cast(obj);
+          if (function->shared()->allows_lazy_compilation() &&
+              function->shared()->script()->IsScript() &&
+              function->code()->kind() == Code::FUNCTION &&
+              !function->code()->has_debug_break_slots()) {
+            bool has_activation =
+                SortedListBSearch<Handle<JSFunction> >(
+                    active_functions,
+                    Handle<JSFunction>(function),
+                    HandleObjectPointerCompare<JSFunction>) != -1;
+            if (!has_activation) {
+              function->set_code(*lazy_compile);
+              function->shared()->set_code(*lazy_compile);
+            }
+          }
+        }
       }
     }
-    active_functions.Sort();
 
-    // Scan the heap for all non-optimized functions which has no
-    // debug break slots.
-    HeapIterator iterator;
-    HeapObject* obj = NULL;
-    while (((obj = iterator.next()) != NULL)) {
-      if (obj->IsJSFunction()) {
-        JSFunction* function = JSFunction::cast(obj);
-        if (function->shared()->allows_lazy_compilation() &&
-            function->shared()->script()->IsScript() &&
-            function->code()->kind() == Code::FUNCTION &&
-            !function->code()->has_debug_break_slots()) {
-          bool has_activation =
-              SortedListBSearch<JSFunction*>(active_functions, function) != -1;
-          if (!has_activation) {
-            function->set_code(lazy_compile);
-            function->shared()->set_code(lazy_compile);
+    // Now the non-GC scope has been left, and the sorting of the
+    // functions in active_functions is no longer guaranteed. The code
+    // below does not rely on it.
+
+    // Now recompile all functions with activation frames and
+    // patch the return address to run in the new compiled code.
+    for (int i = 0; i < active_functions.length(); i++) {
+      Handle<JSFunction> function = active_functions[i];
+      Handle<SharedFunctionInfo> shared(function->shared());
+      // If recompilation is not possible just skip it.
+      if (shared->is_toplevel() ||
+          !shared->allows_lazy_compilation() ||
+          shared->code()->kind() == Code::BUILTIN) {
+        continue;
+      }
+
+      // Make sure that the shared full code is compiled with debug
+      // break slots.
+      Handle<Code> current_code(function->code());
+      if (shared->code()->has_debug_break_slots()) {
+        // If the code has already been recompiled to have break slots,
+        // skip recompilation.
+        ASSERT(!function->code()->has_debug_break_slots());
+      } else {
+        // Try to compile the full code with debug break slots. If it
+        // fails just keep the current code.
+        ASSERT(shared->code() == *current_code);
+        ZoneScope zone_scope(isolate_, DELETE_ON_EXIT);
+        shared->set_code(*lazy_compile);
+        bool prev_force_debugger_active =
+            isolate_->debugger()->force_debugger_active();
+        isolate_->debugger()->set_force_debugger_active(true);
+        CompileFullCodeForDebugging(shared, current_code);
+        isolate_->debugger()->set_force_debugger_active(
+            prev_force_debugger_active);
+        if (!shared->is_compiled()) {
+          shared->set_code(*current_code);
+          continue;
+        }
+      }
+      Handle<Code> new_code(shared->code());
+
+      // Find the function and patch return address.
+      for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) {
+        JavaScriptFrame* frame = it.frame();
+        // If the current frame is for this function in its
+        // non-optimized form rewrite the return address to continue
+        // in the newly compiled full code with debug break slots.
+        if (frame->function()->IsJSFunction() &&
+            frame->function() == *function &&
+            frame->LookupCode()->kind() == Code::FUNCTION) {
+          intptr_t delta = frame->pc() - current_code->instruction_start();
+          int debug_break_slot_count = 0;
+          int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT);
+          for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
+            // Check if the pc in the new code with debug break
+            // slots is before this slot.
+            RelocInfo* info = it.rinfo();
+            int debug_break_slot_bytes =
+                debug_break_slot_count * Assembler::kDebugBreakSlotLength;
+            intptr_t new_delta =
+                info->pc() -
+                new_code->instruction_start() -
+                debug_break_slot_bytes;
+            if (new_delta > delta) {
+              break;
+            }
+
+            // Passed a debug break slot in the full code with debug
+            // break slots.
+            debug_break_slot_count++;
           }
+          int debug_break_slot_bytes =
+              debug_break_slot_count * Assembler::kDebugBreakSlotLength;
+          if (FLAG_trace_deopt) {
+            PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
+                   "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
+                   "for debugging, "
+                   "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n",
+                   reinterpret_cast<intptr_t>(
+                       current_code->instruction_start()),
+                   reinterpret_cast<intptr_t>(
+                       current_code->instruction_start()) +
+                       current_code->instruction_size(),
+                   current_code->instruction_size(),
+                   reinterpret_cast<intptr_t>(new_code->instruction_start()),
+                   reinterpret_cast<intptr_t>(new_code->instruction_start()) +
+                       new_code->instruction_size(),
+                   new_code->instruction_size(),
+                   reinterpret_cast<intptr_t>(frame->pc()),
+                   reinterpret_cast<intptr_t>(new_code->instruction_start()) +
+                       delta + debug_break_slot_bytes);
+          }
+
+          // Patch the return address to return into the code with
+          // debug break slots.
+          frame->set_pc(
+              new_code->instruction_start() + delta + debug_break_slot_bytes);
         }
       }
     }
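The return-address rewrite above reduces to counting how many debug break slots the new code emits before the old pc's position, then shifting the pc by that many slot lengths. Below is a standalone model of that arithmetic, with made-up offsets and a placeholder constant standing in for Assembler::kDebugBreakSlotLength.

    #include <cstdio>
    #include <vector>

    static const int kSlotLength = 5;  // placeholder, not the real constant

    // delta is the pc offset into the old full code (no debug break slots);
    // slot_offsets are the DEBUG_BREAK_SLOT positions in the new code, in
    // ascending order, as the RelocIterator would visit them.
    static int RemapPcOffset(int delta, const std::vector<int>& slot_offsets) {
      int slot_count = 0;
      for (size_t i = 0; i < slot_offsets.size(); ++i) {
        // Where this slot would sit if the earlier slots were removed.
        int new_delta = slot_offsets[i] - slot_count * kSlotLength;
        if (new_delta > delta) break;
        slot_count++;
      }
      return delta + slot_count * kSlotLength;
    }

    int main() {
      std::vector<int> slots;
      slots.push_back(12);
      slots.push_back(40);
      slots.push_back(77);
      // Two slots precede old offset 50, so it maps to 50 + 2 * 5 = 60.
      std::printf("old offset 50 -> new offset %d\n", RemapPcOffset(50, slots));
      return 0;
    }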
@@ -1787,7 +1934,9 @@
   }
 
   // Ensure shared is compiled. Return false if this failed.
-  if (!EnsureCompiled(shared, CLEAR_EXCEPTION)) return false;
+  if (!SharedFunctionInfo::EnsureCompiled(shared, CLEAR_EXCEPTION)) {
+    return false;
+  }
 
   // Create the debug info object.
   Handle<DebugInfo> debug_info = FACTORY->NewDebugInfo(shared);
@@ -2077,6 +2226,7 @@
       compiling_natives_(false),
       is_loading_debugger_(false),
       never_unload_debugger_(false),
+      force_debugger_active_(false),
       message_handler_(NULL),
       debugger_unload_pending_(false),
       host_dispatch_handler_(NULL),
@@ -2844,7 +2994,9 @@
 bool Debugger::IsDebuggerActive() {
   ScopedLock with(debugger_access_);
 
-  return message_handler_ != NULL || !event_listener_.is_null();
+  return message_handler_ != NULL ||
+      !event_listener_.is_null() ||
+      force_debugger_active_;
 }
 
 
diff --git a/src/debug.h b/src/debug.h
index f01ef39..3c37186 100644
--- a/src/debug.h
+++ b/src/debug.h
@@ -810,11 +810,15 @@
   }
 
   void set_compiling_natives(bool compiling_natives) {
-    Debugger::compiling_natives_ = compiling_natives;
+    compiling_natives_ = compiling_natives;
   }
   bool compiling_natives() const { return compiling_natives_; }
   void set_loading_debugger(bool v) { is_loading_debugger_ = v; }
   bool is_loading_debugger() const { return is_loading_debugger_; }
+  void set_force_debugger_active(bool force_debugger_active) {
+    force_debugger_active_ = force_debugger_active;
+  }
+  bool force_debugger_active() const { return force_debugger_active_; }
 
   bool IsDebuggerActive();
 
@@ -840,6 +844,7 @@
   bool compiling_natives_;  // Are we compiling natives?
   bool is_loading_debugger_;  // Are we loading the debugger?
   bool never_unload_debugger_;  // Can we unload the debugger?
+  bool force_debugger_active_;  // Activate debugger without event listeners.
   v8::Debug::MessageHandler2 message_handler_;
   bool debugger_unload_pending_;  // Was message handler cleared?
   v8::Debug::HostDispatchHandler host_dispatch_handler_;
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index b052275..a83493d 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -882,10 +882,12 @@
       unsigned output_offset =
           output->GetOffsetFromSlotIndex(this, output_index);
       if (FLAG_trace_osr) {
-        PrintF("    [sp + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d]\n",
+        PrintF("    [sp + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
                output_offset,
                input_value,
                *input_offset);
+        reinterpret_cast<Object*>(input_value)->ShortPrint();
+        PrintF("\n");
       }
       output->SetFrameSlot(output_offset, input_value);
       break;
@@ -1007,7 +1009,10 @@
   for (uint32_t i = 0; i < table_length; ++i) {
     uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize);
     Address pc_after = unoptimized_code->instruction_start() + pc_offset;
-    RevertStackCheckCodeAt(pc_after, check_code, replacement_code);
+    RevertStackCheckCodeAt(unoptimized_code,
+                           pc_after,
+                           check_code,
+                           replacement_code);
     stack_check_cursor += 2 * kIntSize;
   }
 }
diff --git a/src/deoptimizer.h b/src/deoptimizer.h
index 3cf7046..33580a1 100644
--- a/src/deoptimizer.h
+++ b/src/deoptimizer.h
@@ -186,7 +186,8 @@
 
   // Change all patched stack guard checks in the unoptimized code
   // back to a normal stack guard check.
-  static void RevertStackCheckCodeAt(Address pc_after,
+  static void RevertStackCheckCodeAt(Code* unoptimized_code,
+                                     Address pc_after,
                                      Code* check_code,
                                      Code* replacement_code);
 
diff --git a/src/factory.cc b/src/factory.cc
index 143b342..15f640e 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -59,13 +59,13 @@
 }
 
 
-Handle<FixedArray> Factory::NewFixedDoubleArray(int size,
-                                                PretenureFlag pretenure) {
+Handle<FixedDoubleArray> Factory::NewFixedDoubleArray(int size,
+                                                      PretenureFlag pretenure) {
   ASSERT(0 <= size);
   CALL_HEAP_FUNCTION(
       isolate(),
       isolate()->heap()->AllocateUninitializedFixedDoubleArray(size, pretenure),
-      FixedArray);
+      FixedDoubleArray);
 }
 
 
@@ -85,6 +85,14 @@
 }
 
 
+Handle<ObjectHashSet> Factory::NewObjectHashSet(int at_least_space_for) {
+  ASSERT(0 <= at_least_space_for);
+  CALL_HEAP_FUNCTION(isolate(),
+                     ObjectHashSet::Allocate(at_least_space_for),
+                     ObjectHashSet);
+}
+
+
 Handle<ObjectHashTable> Factory::NewObjectHashTable(int at_least_space_for) {
   ASSERT(0 <= at_least_space_for);
   CALL_HEAP_FUNCTION(isolate(),
@@ -471,6 +479,12 @@
 }
 
 
+Handle<FixedDoubleArray> Factory::CopyFixedDoubleArray(
+    Handle<FixedDoubleArray> array) {
+  CALL_HEAP_FUNCTION(isolate(), array->Copy(), FixedDoubleArray);
+}
+
+
 Handle<JSFunction> Factory::BaseNewFunctionFromSharedFunctionInfo(
     Handle<SharedFunctionInfo> function_info,
     Handle<Map> function_map,
@@ -497,16 +511,20 @@
       pretenure);
 
   result->set_context(*context);
-  int number_of_literals = function_info->num_literals();
-  Handle<FixedArray> literals = NewFixedArray(number_of_literals, pretenure);
-  if (number_of_literals > 0) {
-    // Store the object, regexp and array functions in the literals
-    // array prefix.  These functions will be used when creating
-    // object, regexp and array literals in this function.
-    literals->set(JSFunction::kLiteralGlobalContextIndex,
-                  context->global_context());
+  if (!function_info->bound()) {
+    int number_of_literals = function_info->num_literals();
+    Handle<FixedArray> literals = NewFixedArray(number_of_literals, pretenure);
+    if (number_of_literals > 0) {
+      // Store the object, regexp and array functions in the literals
+      // array prefix.  These functions will be used when creating
+      // object, regexp and array literals in this function.
+      literals->set(JSFunction::kLiteralGlobalContextIndex,
+                    context->global_context());
+    }
+    result->set_literals(*literals);
+  } else {
+    result->set_function_bindings(isolate()->heap()->empty_fixed_array());
   }
-  result->set_literals(*literals);
   result->set_next_function_link(isolate()->heap()->undefined_value());
 
   if (V8::UseCrankshaft() &&
@@ -821,10 +839,13 @@
   // Number of descriptors added to the result so far.
   int descriptor_count = 0;
 
+  // Ensure that marking will not progress and change the color of objects.
+  DescriptorArray::WhitenessWitness witness(*result);
+
   // Copy the descriptors from the array.
   for (int i = 0; i < array->number_of_descriptors(); i++) {
     if (array->GetType(i) != NULL_DESCRIPTOR) {
-      result->CopyFrom(descriptor_count++, *array, i);
+      result->CopyFrom(descriptor_count++, *array, i, witness);
     }
   }
 
@@ -844,7 +865,7 @@
     if (result->LinearSearch(*key, descriptor_count) ==
         DescriptorArray::kNotFound) {
       CallbacksDescriptor desc(*key, *entry, entry->property_attributes());
-      result->Set(descriptor_count, &desc);
+      result->Set(descriptor_count, &desc, witness);
       descriptor_count++;
     } else {
       duplicates++;
@@ -858,13 +879,13 @@
     Handle<DescriptorArray> new_result =
         NewDescriptorArray(number_of_descriptors);
     for (int i = 0; i < number_of_descriptors; i++) {
-      new_result->CopyFrom(i, *result, i);
+      new_result->CopyFrom(i, *result, i, witness);
     }
     result = new_result;
   }
 
   // Sort the result before returning.
-  result->Sort();
+  result->Sort(witness);
   return result;
 }
 
diff --git a/src/factory.h b/src/factory.h
index a3615f2..2073ce3 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -50,7 +50,7 @@
       PretenureFlag pretenure = NOT_TENURED);
 
   // Allocate a new uninitialized fixed double array.
-  Handle<FixedArray> NewFixedDoubleArray(
+  Handle<FixedDoubleArray> NewFixedDoubleArray(
       int size,
       PretenureFlag pretenure = NOT_TENURED);
 
@@ -58,6 +58,8 @@
 
   Handle<StringDictionary> NewStringDictionary(int at_least_space_for);
 
+  Handle<ObjectHashSet> NewObjectHashSet(int at_least_space_for);
+
   Handle<ObjectHashTable> NewObjectHashTable(int at_least_space_for);
 
   Handle<DescriptorArray> NewDescriptorArray(int number_of_descriptors);
@@ -222,6 +224,9 @@
 
   Handle<FixedArray> CopyFixedArray(Handle<FixedArray> array);
 
+  Handle<FixedDoubleArray> CopyFixedDoubleArray(
+      Handle<FixedDoubleArray> array);
+
   // Numbers (eg, literals) are pretenured by the parser.
   Handle<Object> NewNumber(double value,
                            PretenureFlag pretenure = NOT_TENURED);
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 58fab14..ee6ef01 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -100,7 +100,8 @@
 DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof")
 DEFINE_bool(harmony_scoping, false, "enable harmony block scoping")
 DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
-DEFINE_bool(harmony_weakmaps, false, "enable harmony weak maps")
+DEFINE_bool(harmony_collections, false,
+            "enable harmony collections (sets, maps, and weak maps)")
 DEFINE_bool(harmony, false, "enable all harmony features")
 
 // Flags for experimental implementation features.
@@ -186,6 +187,8 @@
 DEFINE_bool(expose_externalize_string, false,
             "expose externalize string extension")
 DEFINE_int(stack_trace_limit, 10, "number of stack frames to capture")
+DEFINE_bool(builtins_in_stack_traces, false,
+            "show built-in functions in stack traces")
 DEFINE_bool(disable_native_files, false, "disable builtin natives files")
 
 // builtins-ia32.cc
@@ -527,6 +530,9 @@
 #define FLAG FLAG_READONLY
 #endif
 
+// elements.cc
+DEFINE_bool(trace_elements_transitions, false, "trace elements transitions")
+
 // code-stubs.cc
 DEFINE_bool(print_code_stubs, false, "print code stubs")
 
diff --git a/src/frames.cc b/src/frames.cc
index 412a59c..7c4c573 100644
--- a/src/frames.cc
+++ b/src/frames.cc
@@ -711,6 +711,69 @@
 }
 
 
+void JavaScriptFrame::PrintTop(FILE* file,
+                               bool print_args,
+                               bool print_line_number) {
+  // constructor calls
+  HandleScope scope;
+  AssertNoAllocation no_allocation;
+  JavaScriptFrameIterator it;
+  while (!it.done()) {
+    if (it.frame()->is_java_script()) {
+      JavaScriptFrame* frame = it.frame();
+      if (frame->IsConstructor()) PrintF(file, "new ");
+      // function name
+      Object* fun = frame->function();
+      if (fun->IsJSFunction()) {
+        SharedFunctionInfo* shared = JSFunction::cast(fun)->shared();
+        shared->DebugName()->ShortPrint(file);
+        if (print_line_number) {
+          Address pc = frame->pc();
+          Code* code = Code::cast(
+              v8::internal::Isolate::Current()->heap()->FindCodeObject(pc));
+          int source_pos = code->SourcePosition(pc);
+          Object* maybe_script = shared->script();
+          if (maybe_script->IsScript()) {
+            Handle<Script> script(Script::cast(maybe_script));
+            int line = GetScriptLineNumberSafe(script, source_pos) + 1;
+            Object* script_name_raw = script->name();
+            if (script_name_raw->IsString()) {
+              String* script_name = String::cast(script->name());
+              SmartArrayPointer<char> c_script_name =
+                  script_name->ToCString(DISALLOW_NULLS,
+                                         ROBUST_STRING_TRAVERSAL);
+              PrintF(file, " at %s:%d", *c_script_name, line);
+            } else {
+              PrintF(file, "at <unknown>:%d", line);
+            }
+          } else {
+            PrintF(file, " at <unknown>:<unknown>");
+          }
+        }
+      } else {
+        fun->ShortPrint(file);
+      }
+
+      if (print_args) {
+        // function arguments
+        // (we intentionally print only the actually supplied arguments,
+        // not the full list of formal parameters)
+        PrintF(file, "(this=");
+        frame->receiver()->ShortPrint(file);
+        const int length = frame->ComputeParametersCount();
+        for (int i = 0; i < length; i++) {
+          PrintF(file, ", ");
+          frame->GetParameter(i)->ShortPrint(file);
+        }
+        PrintF(file, ")");
+      }
+      break;
+    }
+    it.Advance();
+  }
+}
+
+
 void FrameSummary::Print() {
   PrintF("receiver: ");
   receiver_->ShortPrint();
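For reference, with both print_args and print_line_number set, the PrintTop helper added above emits a single line shaped roughly like new Point at geometry.js:42(this=<a Point>, 1, 2). The function name, script and argument values here are invented; note that the argument list trails the source position because it is printed last.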
diff --git a/src/frames.h b/src/frames.h
index ca19b05..778b803 100644
--- a/src/frames.h
+++ b/src/frames.h
@@ -512,6 +512,8 @@
     return static_cast<JavaScriptFrame*>(frame);
   }
 
+  static void PrintTop(FILE* file, bool print_args, bool print_line_number);
+
  protected:
   inline explicit JavaScriptFrame(StackFrameIterator* iterator);
 
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index 083675d..27c509f 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -289,11 +289,12 @@
 #ifdef ENABLE_DEBUGGER_SUPPORT
   code->set_has_debug_break_slots(
       info->isolate()->debugger()->IsDebuggerActive());
+  code->set_compiled_optimizable(info->IsOptimizable());
 #endif  // ENABLE_DEBUGGER_SUPPORT
   code->set_allow_osr_at_loop_nesting_level(0);
   code->set_stack_check_table_offset(table_offset);
   CodeGenerator::PrintCode(code, info);
-  info->SetCode(code);  // may be an empty handle.
+  info->SetCode(code);  // May be an empty handle.
 #ifdef ENABLE_GDB_JIT_INTERFACE
   if (FLAG_gdbjit && !code.is_null()) {
     GDBJITLineInfo* lineinfo =
@@ -520,8 +521,8 @@
       if (var->IsUnallocated()) {
         array->set(j++, *(var->name()));
         if (decl->fun() == NULL) {
-          if (var->mode() == CONST) {
-            // In case this is const property use the hole.
+          if (var->binding_needs_init()) {
+            // In case this binding needs initialization use the hole.
             array->set_the_hole(j++);
           } else {
             array->set_undefined(j++);
@@ -546,11 +547,10 @@
 
 
 int FullCodeGenerator::DeclareGlobalsFlags() {
-  int flags = 0;
-  if (is_eval()) flags |= kDeclareGlobalsEvalFlag;
-  if (is_strict_mode()) flags |= kDeclareGlobalsStrictModeFlag;
-  if (is_native()) flags |= kDeclareGlobalsNativeFlag;
-  return flags;
+  ASSERT(DeclareGlobalsStrictModeFlag::is_valid(strict_mode_flag()));
+  return DeclareGlobalsEvalFlag::encode(is_eval()) |
+      DeclareGlobalsStrictModeFlag::encode(strict_mode_flag()) |
+      DeclareGlobalsNativeFlag::encode(is_native());
 }
 
 
diff --git a/src/full-codegen.h b/src/full-codegen.h
index 081192a..9132502 100644
--- a/src/full-codegen.h
+++ b/src/full-codegen.h
@@ -577,9 +577,11 @@
   Handle<Script> script() { return info_->script(); }
   bool is_eval() { return info_->is_eval(); }
   bool is_native() { return info_->is_native(); }
-  bool is_strict_mode() { return function()->strict_mode(); }
+  bool is_strict_mode() {
+    return strict_mode_flag() == kStrictMode;
+  }
   StrictModeFlag strict_mode_flag() {
-    return is_strict_mode() ? kStrictMode : kNonStrictMode;
+    return function()->strict_mode_flag();
   }
   FunctionLiteral* function() { return info_->function(); }
   Scope* scope() { return scope_; }
diff --git a/src/globals.h b/src/globals.h
index d0c78d6..cbe7abd 100644
--- a/src/globals.h
+++ b/src/globals.h
@@ -230,6 +230,9 @@
 
 const int kDoubleSizeLog2 = 3;
 
+// Size of the state of the random number generator.
+const int kRandomStateSize = 2 * kIntSize;
+
 #if V8_HOST_ARCH_64_BIT
 const int kPointerSizeLog2 = 3;
 const intptr_t kIntptrSignBit = V8_INT64_C(0x8000000000000000);
diff --git a/src/handles.cc b/src/handles.cc
index 57f5d1b..62851f3 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -376,24 +376,6 @@
 }
 
 
-Handle<Object> GetProperty(Handle<JSReceiver> obj,
-                           Handle<String> name,
-                           LookupResult* result) {
-  PropertyAttributes attributes;
-  Isolate* isolate = Isolate::Current();
-  CALL_HEAP_FUNCTION(isolate,
-                     obj->GetProperty(*obj, result, *name, &attributes),
-                     Object);
-}
-
-
-Handle<Object> GetElement(Handle<Object> obj,
-                          uint32_t index) {
-  Isolate* isolate = Isolate::Current();
-  CALL_HEAP_FUNCTION(isolate, Runtime::GetElement(obj, index), Object);
-}
-
-
 Handle<Object> GetPropertyWithInterceptor(Handle<JSObject> receiver,
                                           Handle<JSObject> holder,
                                           Handle<String> name,
@@ -504,6 +486,14 @@
 }
 
 
+Handle<Object> TransitionElementsKind(Handle<JSObject> object,
+                                      ElementsKind to_kind) {
+  CALL_HEAP_FUNCTION(object->GetIsolate(),
+                     object->TransitionElementsKind(to_kind),
+                     Object);
+}
+
+
 Handle<JSObject> Copy(Handle<JSObject> obj) {
   Isolate* isolate = obj->GetIsolate();
   CALL_HEAP_FUNCTION(isolate,
@@ -701,7 +691,7 @@
 
 
 // Compute the property keys from the interceptor.
-v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSObject> receiver,
+v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSReceiver> receiver,
                                                  Handle<JSObject> object) {
   Isolate* isolate = receiver->GetIsolate();
   Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
@@ -723,7 +713,7 @@
 
 
 // Compute the element keys from the interceptor.
-v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSObject> receiver,
+v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSReceiver> receiver,
                                                    Handle<JSObject> object) {
   Isolate* isolate = receiver->GetIsolate();
   Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
@@ -754,8 +744,9 @@
 }
 
 
-Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object,
-                                          KeyCollectionType type) {
+Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSReceiver> object,
+                                          KeyCollectionType type,
+                                          bool* threw) {
   USE(ContainsOnlyValidKeys);
   Isolate* isolate = object->GetIsolate();
   Handle<FixedArray> content = isolate->factory()->empty_fixed_array();
@@ -770,6 +761,16 @@
   for (Handle<Object> p = object;
        *p != isolate->heap()->null_value();
        p = Handle<Object>(p->GetPrototype(), isolate)) {
+    if (p->IsJSProxy()) {
+      Handle<JSProxy> proxy(JSProxy::cast(*p), isolate);
+      Handle<Object> args[] = { proxy };
+      Handle<Object> names = Execution::Call(
+          isolate->proxy_enumerate(), object, ARRAY_SIZE(args), args, threw);
+      if (*threw) return content;
+      content = AddKeysFromJSArray(content, Handle<JSArray>::cast(names));
+      break;
+    }
+
     Handle<JSObject> current(JSObject::cast(*p), isolate);
 
     // Check access rights if required.
@@ -836,11 +837,11 @@
 }
 
 
-Handle<JSArray> GetKeysFor(Handle<JSObject> object) {
+Handle<JSArray> GetKeysFor(Handle<JSReceiver> object, bool* threw) {
   Isolate* isolate = object->GetIsolate();
   isolate->counters()->for_in()->Increment();
-  Handle<FixedArray> elements = GetKeysInFixedArrayFor(object,
-                                                       INCLUDE_PROTOS);
+  Handle<FixedArray> elements =
+      GetKeysInFixedArrayFor(object, INCLUDE_PROTOS, threw);
   return isolate->factory()->NewJSArrayWithElements(elements);
 }
 
@@ -890,8 +891,24 @@
 }
 
 
+Handle<ObjectHashSet> ObjectHashSetAdd(Handle<ObjectHashSet> table,
+                                       Handle<Object> key) {
+  CALL_HEAP_FUNCTION(table->GetIsolate(),
+                     table->Add(*key),
+                     ObjectHashSet);
+}
+
+
+Handle<ObjectHashSet> ObjectHashSetRemove(Handle<ObjectHashSet> table,
+                                          Handle<Object> key) {
+  CALL_HEAP_FUNCTION(table->GetIsolate(),
+                     table->Remove(*key),
+                     ObjectHashSet);
+}
+
+
 Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table,
-                                               Handle<JSReceiver> key,
+                                               Handle<Object> key,
                                                Handle<Object> value) {
   CALL_HEAP_FUNCTION(table->GetIsolate(),
                      table->Put(*key, *value),
@@ -899,53 +916,4 @@
 }
 
 
-bool EnsureCompiled(Handle<SharedFunctionInfo> shared,
-                    ClearExceptionFlag flag) {
-  return shared->is_compiled() || CompileLazyShared(shared, flag);
-}
-
-
-static bool CompileLazyHelper(CompilationInfo* info,
-                              ClearExceptionFlag flag) {
-  // Compile the source information to a code object.
-  ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled());
-  ASSERT(!info->isolate()->has_pending_exception());
-  bool result = Compiler::CompileLazy(info);
-  ASSERT(result != Isolate::Current()->has_pending_exception());
-  if (!result && flag == CLEAR_EXCEPTION) {
-    info->isolate()->clear_pending_exception();
-  }
-  return result;
-}
-
-
-bool CompileLazyShared(Handle<SharedFunctionInfo> shared,
-                       ClearExceptionFlag flag) {
-  CompilationInfo info(shared);
-  return CompileLazyHelper(&info, flag);
-}
-
-
-bool CompileLazy(Handle<JSFunction> function, ClearExceptionFlag flag) {
-  bool result = true;
-  if (function->shared()->is_compiled()) {
-    function->ReplaceCode(function->shared()->code());
-    function->shared()->set_code_age(0);
-  } else {
-    CompilationInfo info(function);
-    result = CompileLazyHelper(&info, flag);
-    ASSERT(!result || function->is_compiled());
-  }
-  return result;
-}
-
-
-bool CompileOptimized(Handle<JSFunction> function,
-                      int osr_ast_id,
-                      ClearExceptionFlag flag) {
-  CompilationInfo info(function);
-  info.SetOptimizing(osr_ast_id);
-  return CompileLazyHelper(&info, flag);
-}
-
 } }  // namespace v8::internal
diff --git a/src/handles.h b/src/handles.h
index d5521f8..06e47fc 100644
--- a/src/handles.h
+++ b/src/handles.h
@@ -240,20 +240,15 @@
                              Handle<Object> value,
                              StrictModeFlag strict_mode);
 
+Handle<Object> TransitionElementsKind(Handle<JSObject> object,
+                                      ElementsKind to_kind);
+
 Handle<Object> GetProperty(Handle<JSReceiver> obj,
                            const char* name);
 
 Handle<Object> GetProperty(Handle<Object> obj,
                            Handle<Object> key);
 
-Handle<Object> GetProperty(Handle<JSReceiver> obj,
-                           Handle<String> name,
-                           LookupResult* result);
-
-
-Handle<Object> GetElement(Handle<Object> obj,
-                          uint32_t index);
-
 Handle<Object> GetPropertyWithInterceptor(Handle<JSObject> receiver,
                                           Handle<JSObject> holder,
                                           Handle<String> name,
@@ -300,18 +295,19 @@
 
 // Computes the enumerable keys from interceptors. Used for debug mirrors and
 // by GetKeysInFixedArrayFor below.
-v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSObject> receiver,
+v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSReceiver> receiver,
                                                  Handle<JSObject> object);
-v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSObject> receiver,
+v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSReceiver> receiver,
                                                    Handle<JSObject> object);
 
 enum KeyCollectionType { LOCAL_ONLY, INCLUDE_PROTOS };
 
 // Computes the enumerable keys for a JSObject. Used for implementing
 // "for (n in object) { }".
-Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object,
-                                          KeyCollectionType type);
-Handle<JSArray> GetKeysFor(Handle<JSObject> object);
+Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSReceiver> object,
+                                          KeyCollectionType type,
+                                          bool* threw);
+Handle<JSArray> GetKeysFor(Handle<JSReceiver> object, bool* threw);
 Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
                                        bool cache_result);
 
@@ -346,26 +342,16 @@
 
 Handle<Object> PreventExtensions(Handle<JSObject> object);
 
+Handle<ObjectHashSet> ObjectHashSetAdd(Handle<ObjectHashSet> table,
+                                       Handle<Object> key);
+
+Handle<ObjectHashSet> ObjectHashSetRemove(Handle<ObjectHashSet> table,
+                                          Handle<Object> key);
+
 Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table,
-                                               Handle<JSReceiver> key,
+                                               Handle<Object> key,
                                                Handle<Object> value);
 
-// Does lazy compilation of the given function. Returns true on success and
-// false if the compilation resulted in a stack overflow.
-enum ClearExceptionFlag { KEEP_EXCEPTION, CLEAR_EXCEPTION };
-
-bool EnsureCompiled(Handle<SharedFunctionInfo> shared,
-                    ClearExceptionFlag flag);
-
-bool CompileLazyShared(Handle<SharedFunctionInfo> shared,
-                       ClearExceptionFlag flag);
-
-bool CompileLazy(Handle<JSFunction> function, ClearExceptionFlag flag);
-
-bool CompileOptimized(Handle<JSFunction> function,
-                      int osr_ast_id,
-                      ClearExceptionFlag flag);
-
 class NoHandleAllocation BASE_EMBEDDED {
  public:
 #ifndef DEBUG
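
GetKeysInFixedArrayFor and GetKeysFor above now accept a Handle<JSReceiver>
and a bool* threw out-parameter, presumably so callers can detect when key
collection itself raised an exception (the receiver may have interceptors or
be a proxy whose traps run user code).  A minimal sketch of that reporting
convention, with invented names rather than the V8 handle types:

    #include <string>
    #include <vector>

    // Illustrative only: a key-collection routine that reports failure through
    // a bool* out-parameter, in the spirit of the new
    // GetKeysInFixedArrayFor(object, type, bool* threw) signature.
    static std::vector<std::string> CollectKeys(bool trap_throws, bool* threw) {
      std::vector<std::string> keys;
      keys.push_back("length");
      if (trap_throws) {  // stand-in for "user code raised an exception"
        *threw = true;
        return std::vector<std::string>();
      }
      keys.push_back("0");
      *threw = false;
      return keys;
    }

    int main() {
      bool threw = false;
      std::vector<std::string> keys = CollectKeys(false, &threw);
      return (!threw && keys.size() == 2) ? 0 : 1;
    }
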
diff --git a/src/heap-inl.h b/src/heap-inl.h
index 4bd893e..aaf2927 100644
--- a/src/heap-inl.h
+++ b/src/heap-inl.h
@@ -359,7 +359,6 @@
 
 
 void Heap::CopyBlock(Address dst, Address src, int byte_size) {
-  ASSERT(IsAligned(byte_size, kPointerSize));
   CopyWords(reinterpret_cast<Object**>(dst),
             reinterpret_cast<Object**>(src),
             byte_size / kPointerSize);
@@ -591,7 +590,9 @@
 
 void ExternalStringTable::ShrinkNewStrings(int position) {
   new_space_strings_.Rewind(position);
-  Verify();
+  if (FLAG_verify_heap) {
+    Verify();
+  }
 }
 
 
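
The heap-inl.h hunk above gates ExternalStringTable verification behind the
--verify-heap flag instead of running it on every mutation in debug builds;
the heap.cc hunks below apply the same treatment to symbol table, string and
code verification.  A stand-alone sketch of the pattern (the flag and the
function bodies here are illustrative, not the real V8 flag machinery):

    #include <cstdio>

    static bool FLAG_verify_heap = false;  // stand-in for V8's --verify-heap

    static void Verify() {
      // Heavyweight invariant checks would live here.
      std::printf("verifying external string table\n");
    }

    static void ShrinkNewStrings() {
      // ... rewind the table ...
      if (FLAG_verify_heap) {  // opt-in, so debug-mode test runs stay fast
        Verify();
      }
    }

    int main(int argc, char** /*argv*/) {
      FLAG_verify_heap = (argc > 1);  // any extra argument enables the checks
      ShrinkNewStrings();
      return 0;
    }
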
diff --git a/src/heap.cc b/src/heap.cc
index c6efd62..bbb9d3e 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -693,7 +693,9 @@
     PROFILE(isolate_, CodeMovingGCEvent());
   }
 
-  VerifySymbolTable();
+  if (FLAG_verify_heap) {
+    VerifySymbolTable();
+  }
   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
     ASSERT(!allocation_allowed_);
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
@@ -789,7 +791,9 @@
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_epilogue_callback_();
   }
-  VerifySymbolTable();
+  if (FLAG_verify_heap) {
+    VerifySymbolTable();
+  }
 
   return next_gc_likely_to_collect_more;
 }
@@ -983,7 +987,7 @@
 
 void Heap::Scavenge() {
 #ifdef DEBUG
-  if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
+  if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
 #endif
 
   gc_state_ = SCAVENGE;
@@ -1112,7 +1116,9 @@
 
 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
     ExternalStringTableUpdaterCallback updater_func) {
-  external_string_table_.Verify();
+  if (FLAG_verify_heap) {
+    external_string_table_.Verify();
+  }
 
   if (external_string_table_.new_space_strings_.is_empty()) return;
 
@@ -1443,9 +1449,9 @@
                                     HeapObject** slot,
                                     HeapObject* object,
                                     int object_size) {
-    ASSERT((size_restriction != SMALL) ||
-           (object_size <= Page::kMaxHeapObjectSize));
-    ASSERT(object->Size() == object_size);
+    SLOW_ASSERT((size_restriction != SMALL) ||
+                (object_size <= Page::kMaxHeapObjectSize));
+    SLOW_ASSERT(object->Size() == object_size);
 
     Heap* heap = map->GetHeap();
     if (heap->ShouldBePromoted(object->address(), object_size)) {
@@ -1678,9 +1684,9 @@
 
 
 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  ASSERT(HEAP->InFromSpace(object));
+  SLOW_ASSERT(HEAP->InFromSpace(object));
   MapWord first_word = object->map_word();
-  ASSERT(!first_word.IsForwardingAddress());
+  SLOW_ASSERT(!first_word.IsForwardingAddress());
   Map* map = first_word.ToMap();
   map->GetHeap()->DoScavengeObject(map, p, object);
 }
@@ -2910,7 +2916,9 @@
 
   ASSERT(buffer->IsFlat());
 #if DEBUG
-  buffer->StringVerify();
+  if (FLAG_verify_heap) {
+    buffer->StringVerify();
+  }
 #endif
 
   Object* result;
@@ -3156,7 +3164,9 @@
   code->CopyFrom(desc);
 
 #ifdef DEBUG
-  code->Verify();
+  if (FLAG_verify_heap) {
+    code->Verify();
+  }
 #endif
   return code;
 }
@@ -3236,7 +3246,9 @@
   new_code->Relocate(new_addr - old_addr);
 
 #ifdef DEBUG
-  code->Verify();
+  if (FLAG_verify_heap) {
+    code->Verify();
+  }
 #endif
   return new_code;
 }
@@ -3269,7 +3281,7 @@
   function->set_code(shared->code());
   function->set_prototype_or_initial_map(prototype);
   function->set_context(undefined_value());
-  function->set_literals(empty_fixed_array());
+  function->set_literals_or_bindings(empty_fixed_array());
   function->set_next_function_link(undefined_value());
 }
 
@@ -3434,22 +3446,22 @@
       // Inline constructor can only handle inobject properties.
       fun->shared()->ForbidInlineConstructor();
     } else {
-      Object* descriptors_obj;
+      DescriptorArray* descriptors;
       { MaybeObject* maybe_descriptors_obj = DescriptorArray::Allocate(count);
-        if (!maybe_descriptors_obj->ToObject(&descriptors_obj)) {
+        if (!maybe_descriptors_obj->To<DescriptorArray>(&descriptors)) {
           return maybe_descriptors_obj;
         }
       }
-      DescriptorArray* descriptors = DescriptorArray::cast(descriptors_obj);
+      DescriptorArray::WhitenessWitness witness(descriptors);
       for (int i = 0; i < count; i++) {
         String* name = fun->shared()->GetThisPropertyAssignmentName(i);
         ASSERT(name->IsSymbol());
         FieldDescriptor field(name, i, NONE);
         field.SetEnumerationIndex(i);
-        descriptors->Set(i, &field);
+        descriptors->Set(i, &field, witness);
       }
       descriptors->SetNextEnumerationIndex(count);
-      descriptors->SortUnchecked();
+      descriptors->SortUnchecked(witness);
 
       // The descriptors may contain duplicates because the compiler does not
       // guarantee the uniqueness of property names (it would have required
@@ -3688,13 +3700,15 @@
 MaybeObject* Heap::CopyJSObject(JSObject* source) {
   // Never used to copy functions.  If functions need to be copied we
   // have to be careful to clear the literals array.
-  ASSERT(!source->IsJSFunction());
+  SLOW_ASSERT(!source->IsJSFunction());
 
   // Make the clone.
   Map* map = source->map();
   int object_size = map->instance_size();
   Object* clone;
 
+  WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
+
   // If we're forced to always allocate, we use the general allocation
   // functions which may leave us with an object in old space.
   if (always_allocate()) {
@@ -3711,10 +3725,11 @@
                  JSObject::kHeaderSize,
                  (object_size - JSObject::kHeaderSize) / kPointerSize);
   } else {
+    wb_mode = SKIP_WRITE_BARRIER;
     { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
       if (!maybe_clone->ToObject(&clone)) return maybe_clone;
     }
-    ASSERT(InNewSpace(clone));
+    SLOW_ASSERT(InNewSpace(clone));
     // Since we know the clone is allocated in new space, we can copy
     // the contents without worrying about updating the write barrier.
     CopyBlock(HeapObject::cast(clone)->address(),
@@ -3722,7 +3737,8 @@
               object_size);
   }
 
-  ASSERT(JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
+  SLOW_ASSERT(
+      JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
   FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
   FixedArray* properties = FixedArray::cast(source->properties());
   // Update elements if necessary.
@@ -3738,7 +3754,7 @@
       }
       if (!maybe_elem->ToObject(&elem)) return maybe_elem;
     }
-    JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem));
+    JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode);
   }
   // Update properties if necessary.
   if (properties->length() > 0) {
@@ -3746,7 +3762,7 @@
     { MaybeObject* maybe_prop = CopyFixedArray(properties);
       if (!maybe_prop->ToObject(&prop)) return maybe_prop;
     }
-    JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
+    JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode);
   }
   // Return the new clone.
   return clone;
@@ -4802,12 +4818,12 @@
                  HeapObject::cast(object));
         Object* new_object = *slot;
         if (InNewSpace(new_object)) {
-          ASSERT(Heap::InToSpace(new_object));
-          ASSERT(new_object->IsHeapObject());
+          SLOW_ASSERT(Heap::InToSpace(new_object));
+          SLOW_ASSERT(new_object->IsHeapObject());
           store_buffer_.EnterDirectlyIntoStoreBuffer(
               reinterpret_cast<Address>(slot));
         }
-        ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
+        SLOW_ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
       } else if (record_slots &&
                  MarkCompactCollector::IsOnEvacuationCandidate(object)) {
         mark_compact_collector()->RecordSlot(slot, slot, object);
@@ -5361,6 +5377,7 @@
 
 bool Heap::Setup(bool create_heap_objects) {
 #ifdef DEBUG
+  allocation_timeout_ = FLAG_gc_interval;
   debug_utils_ = new HeapDebugUtils(this);
 #endif
 
@@ -5446,7 +5463,7 @@
   // The large object code space may contain code or data.  We set the memory
   // to be non-executable here for safety, but this means we need to enable it
   // explicitly when allocating large code objects.
-  lo_space_ = new LargeObjectSpace(this, LO_SPACE);
+  lo_space_ = new LargeObjectSpace(this, max_old_generation_size_, LO_SPACE);
   if (lo_space_ == NULL) return false;
   if (!lo_space_->Setup()) return false;
   if (create_heap_objects) {
@@ -5762,56 +5779,51 @@
 class UnreachableObjectsFilter : public HeapObjectsFilter {
  public:
   UnreachableObjectsFilter() {
-    MarkUnreachableObjects();
+    MarkReachableObjects();
+  }
+
+  ~UnreachableObjectsFilter() {
+    Isolate::Current()->heap()->mark_compact_collector()->ClearMarkbits();
   }
 
   bool SkipObject(HeapObject* object) {
-    if (IntrusiveMarking::IsMarked(object)) {
-      IntrusiveMarking::ClearMark(object);
-      return true;
-    } else {
-      return false;
-    }
+    MarkBit mark_bit = Marking::MarkBitFrom(object);
+    return !mark_bit.Get();
   }
 
  private:
-  class UnmarkingVisitor : public ObjectVisitor {
+  class MarkingVisitor : public ObjectVisitor {
    public:
-    UnmarkingVisitor() : list_(10) {}
+    MarkingVisitor() : marking_stack_(10) {}
 
     void VisitPointers(Object** start, Object** end) {
       for (Object** p = start; p < end; p++) {
         if (!(*p)->IsHeapObject()) continue;
         HeapObject* obj = HeapObject::cast(*p);
-        if (IntrusiveMarking::IsMarked(obj)) {
-          IntrusiveMarking::ClearMark(obj);
-          list_.Add(obj);
+        MarkBit mark_bit = Marking::MarkBitFrom(obj);
+        if (!mark_bit.Get()) {
+          mark_bit.Set();
+          marking_stack_.Add(obj);
         }
       }
     }
 
-    bool can_process() { return !list_.is_empty(); }
-
-    void ProcessNext() {
-      HeapObject* obj = list_.RemoveLast();
-      obj->Iterate(this);
+    void TransitiveClosure() {
+      while (!marking_stack_.is_empty()) {
+        HeapObject* obj = marking_stack_.RemoveLast();
+        obj->Iterate(this);
+      }
     }
 
    private:
-    List<HeapObject*> list_;
+    List<HeapObject*> marking_stack_;
   };
 
-  void MarkUnreachableObjects() {
-    HeapIterator iterator;
-    for (HeapObject* obj = iterator.next();
-         obj != NULL;
-         obj = iterator.next()) {
-      IntrusiveMarking::SetMark(obj);
-    }
-    UnmarkingVisitor visitor;
-    HEAP->IterateRoots(&visitor, VISIT_ALL);
-    while (visitor.can_process())
-      visitor.ProcessNext();
+  void MarkReachableObjects() {
+    Heap* heap = Isolate::Current()->heap();
+    MarkingVisitor visitor;
+    heap->IterateRoots(&visitor, VISIT_ALL);
+    visitor.TransitiveClosure();
   }
 
   AssertNoAllocation no_alloc;
@@ -5839,13 +5851,8 @@
 
 void HeapIterator::Init() {
   // Start the iteration.
-  space_iterator_ = filtering_ == kNoFiltering ? new SpaceIterator :
-      new SpaceIterator(Isolate::Current()->heap()->
-                        GcSafeSizeOfOldObjectFunction());
+  space_iterator_ = new SpaceIterator;
   switch (filtering_) {
-    case kFilterFreeListNodes:
-      // TODO(gc): Not handled.
-      break;
     case kFilterUnreachable:
       filter_ = new UnreachableObjectsFilter;
       break;
@@ -6350,7 +6357,9 @@
     old_space_strings_[last++] = old_space_strings_[i];
   }
   old_space_strings_.Rewind(last);
-  Verify();
+  if (FLAG_verify_heap) {
+    Verify();
+  }
 }
 
 
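
The UnreachableObjectsFilter rewrite above replaces "mark every object, then
unmark what the roots reach" with marking only the transitive closure of the
roots on an explicit marking stack and skipping whatever stays unmarked (the
destructor then clears the mark bits again).  A self-contained sketch of that
reachability marking, using an ordinary pointer set in place of V8's
per-object mark bits:

    #include <unordered_set>
    #include <vector>

    // Illustrative object graph; V8 walks HeapObjects via ObjectVisitor.
    struct Node {
      std::vector<Node*> children;
    };

    // Mark everything reachable from the roots with an explicit work list.
    static std::unordered_set<Node*> MarkReachable(const std::vector<Node*>& roots) {
      std::unordered_set<Node*> marked;
      std::vector<Node*> marking_stack;
      for (Node* root : roots) {
        if (marked.insert(root).second) marking_stack.push_back(root);
      }
      while (!marking_stack.empty()) {
        Node* current = marking_stack.back();
        marking_stack.pop_back();
        for (Node* child : current->children) {
          if (marked.insert(child).second) {  // first visit: mark and queue
            marking_stack.push_back(child);
          }
        }
      }
      return marked;
    }

    // Counterpart of UnreachableObjectsFilter::SkipObject.
    static bool SkipObject(const std::unordered_set<Node*>& marked, Node* object) {
      return marked.count(object) == 0;  // unmarked == unreachable from roots
    }

    int main() {
      Node a, b, c;                 // a -> b; c is not reachable from any root
      a.children.push_back(&b);
      std::vector<Node*> roots(1, &a);
      std::unordered_set<Node*> marked = MarkReachable(roots);
      return (!SkipObject(marked, &b) && SkipObject(marked, &c)) ? 0 : 1;
    }
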
diff --git a/src/heap.h b/src/heap.h
index 6fb2d18..7c0b0ea 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -64,18 +64,31 @@
   V(Oddball, null_value, NullValue)                                            \
   V(Oddball, true_value, TrueValue)                                            \
   V(Oddball, false_value, FalseValue)                                          \
-  V(Oddball, arguments_marker, ArgumentsMarker)                                \
-  V(Oddball, frame_alignment_marker, FrameAlignmentMarker)                     \
+  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
+  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
+  V(Map, meta_map, MetaMap)                                                    \
+  V(Map, ascii_symbol_map, AsciiSymbolMap)                                     \
+  V(Map, ascii_string_map, AsciiStringMap)                                     \
   V(Map, heap_number_map, HeapNumberMap)                                       \
   V(Map, global_context_map, GlobalContextMap)                                 \
   V(Map, fixed_array_map, FixedArrayMap)                                       \
+  V(Map, code_map, CodeMap)                                                    \
   V(Map, serialized_scope_info_map, SerializedScopeInfoMap)                    \
   V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
   V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
   V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
-  V(Map, meta_map, MetaMap)                                                    \
   V(Map, hash_table_map, HashTableMap)                                         \
+  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
+  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
+  V(FixedDoubleArray, empty_fixed_double_array, EmptyFixedDoubleArray)         \
+  V(String, empty_string, EmptyString)                                         \
+  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
   V(Smi, stack_limit, StackLimit)                                              \
+  V(Oddball, frame_alignment_marker, FrameAlignmentMarker)                     \
+  V(Oddball, arguments_marker, ArgumentsMarker)                                \
+  /* The first 32 roots above this line should be boring from a GC point of */ \
+  /* view.  This means they are never in new space and never on a page that */ \
+  /* is being compacted.                                                    */ \
   V(FixedArray, number_string_cache, NumberStringCache)                        \
   V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
   V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
@@ -83,19 +96,12 @@
   V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
   V(FixedArray, string_split_cache, StringSplitCache)                          \
   V(Object, termination_exception, TerminationException)                       \
-  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
-  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
-  V(FixedDoubleArray, empty_fixed_double_array, EmptyFixedDoubleArray)         \
-  V(String, empty_string, EmptyString)                                         \
-  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
   V(Map, string_map, StringMap)                                                \
-  V(Map, ascii_string_map, AsciiStringMap)                                     \
   V(Map, symbol_map, SymbolMap)                                                \
   V(Map, cons_string_map, ConsStringMap)                                       \
   V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
   V(Map, sliced_string_map, SlicedStringMap)                                   \
   V(Map, sliced_ascii_string_map, SlicedAsciiStringMap)                        \
-  V(Map, ascii_symbol_map, AsciiSymbolMap)                                     \
   V(Map, cons_symbol_map, ConsSymbolMap)                                       \
   V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap)                            \
   V(Map, external_symbol_map, ExternalSymbolMap)                               \
@@ -120,10 +126,7 @@
   V(Map, catch_context_map, CatchContextMap)                                   \
   V(Map, with_context_map, WithContextMap)                                     \
   V(Map, block_context_map, BlockContextMap)                                   \
-  V(Map, code_map, CodeMap)                                                    \
   V(Map, oddball_map, OddballMap)                                              \
-  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
-  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
   V(Map, message_object_map, JSMessageObjectMap)                               \
   V(Map, foreign_map, ForeignMap)                                              \
   V(HeapNumber, nan_value, NanValue)                                           \
@@ -1097,7 +1100,7 @@
   inline void SetLastScriptId(Object* last_script_id);
 
   // Generated code can embed this address to get access to the roots.
-  Object** roots_address() { return roots_; }
+  Object** roots_array_start() { return roots_; }
 
   Address* store_buffer_top_address() {
     return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
@@ -1419,6 +1422,9 @@
   // around a GC).
   inline void CompletelyClearInstanceofCache();
 
+  // The roots that have an index less than this are always in old space.
+  static const int kOldSpaceRoots = 0x20;
+
  private:
   Heap();
 
@@ -1474,7 +1480,10 @@
   int unflattened_strings_length_;
 
 #define ROOT_ACCESSOR(type, name, camel_name)                                  \
-  inline void set_##name(type* value) {                                 \
+  inline void set_##name(type* value) {                                        \
+    /* The deserializer makes use of the fact that these common roots are */   \
+    /* never in new space and never on a page that is being compacted.    */   \
+    ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value));  \
     roots_[k##camel_name##RootIndex] = value;                                  \
   }
   ROOT_LIST(ROOT_ACCESSOR)
@@ -1954,7 +1963,6 @@
  public:
   enum HeapObjectsFiltering {
     kNoFiltering,
-    kFilterFreeListNodes,
     kFilterUnreachable
   };
 
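
The heap.h changes above reorder the root list so that the first 32 entries
(kOldSpaceRoots = 0x20) are roots that are never in new space and never on a
page being compacted, and the generated set_##name accessors now assert it.
A compilable sketch of the underlying X-macro technique, with made-up root
names, a two-entry "old space" prefix, and int standing in for object
pointers:

    #include <cassert>

    // Each list entry is V(type, name, CamelName), as in V8's ROOT_LIST.
    #define DEMO_ROOT_LIST(V)        \
      V(int, meta_map, MetaMap)      \
      V(int, code_map, CodeMap)      \
      V(int, nan_value, NanValue)

    enum RootIndex {
    #define ROOT_INDEX(type, name, camel_name) k##camel_name##RootIndex,
      DEMO_ROOT_LIST(ROOT_INDEX)
    #undef ROOT_INDEX
      kRootListLength
    };

    class DemoHeap {
     public:
      // Stand-in for Heap::kOldSpaceRoots: roots below this index must stay
      // "boring" from the GC's point of view.
      enum { kOldSpaceRoots = 2 };

      bool InNewSpace(int value) { return value < 0; }  // illustrative predicate

    #define ROOT_ACCESSOR(type, name, camel_name)                               \
      type name() { return roots_[k##camel_name##RootIndex]; }                  \
      void set_##name(type value) {                                             \
        /* Mirrors the assertion added to the real setters in heap.h. */        \
        assert(static_cast<int>(k##camel_name##RootIndex) >= kOldSpaceRoots ||  \
               !InNewSpace(value));                                             \
        roots_[k##camel_name##RootIndex] = value;                               \
      }
      DEMO_ROOT_LIST(ROOT_ACCESSOR)
    #undef ROOT_ACCESSOR

     private:
      int roots_[kRootListLength];
    };

    int main() {
      DemoHeap heap;
      heap.set_meta_map(1);    // index 0 < kOldSpaceRoots: the assertion applies
      heap.set_nan_value(-1);  // index 2 >= kOldSpaceRoots: unconstrained
      return heap.meta_map() == 1 ? 0 : 1;
    }
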
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index fd0c3bb..6f46509 100644
--- a/src/hydrogen-instructions.cc
+++ b/src/hydrogen-instructions.cc
@@ -587,11 +587,10 @@
     HBasicBlock* other_block = other_operand->block();
     if (cur_block == other_block) {
       if (!other_operand->IsPhi()) {
-        HInstruction* cur = cur_block->first();
+        HInstruction* cur = this->previous();
         while (cur != NULL) {
-          ASSERT(cur != this);  // We should reach other_operand before!
           if (cur == other_operand) break;
-          cur = cur->next();
+          cur = cur->previous();
         }
         // Must reach other operand in the same block!
         ASSERT(cur == other_operand);
@@ -783,12 +782,21 @@
 
 void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
   value()->PrintNameTo(stream);
-  stream->Add(" == ");
-  stream->Add(type_literal_->GetFlatContent().ToAsciiVector());
+  stream->Add(" == %o", *type_literal_);
   HControlInstruction::PrintDataTo(stream);
 }
 
 
+HValue* HConstant::Canonicalize() {
+  return HasNoUses() && !IsBlockEntry() ? NULL : this;
+}
+
+
+HValue* HTypeof::Canonicalize() {
+  return HasNoUses() && !IsBlockEntry() ? NULL : this;
+}
+
+
 void HTypeof::PrintDataTo(StringStream* stream) {
   value()->PrintNameTo(stream);
 }
@@ -1138,15 +1146,16 @@
 
 
 void HSimulate::PrintDataTo(StringStream* stream) {
-  stream->Add("id=%d ", ast_id());
-  if (pop_count_ > 0) stream->Add("pop %d", pop_count_);
+  stream->Add("id=%d", ast_id());
+  if (pop_count_ > 0) stream->Add(" pop %d", pop_count_);
   if (values_.length() > 0) {
     if (pop_count_ > 0) stream->Add(" /");
     for (int i = 0; i < values_.length(); ++i) {
-      if (!HasAssignedIndexAt(i)) {
-        stream->Add(" push ");
-      } else {
+      if (i > 0) stream->Add(",");
+      if (HasAssignedIndexAt(i)) {
         stream->Add(" var[%d] = ", GetAssignedIndexAt(i));
+      } else {
+        stream->Add(" push ");
       }
       values_[i]->PrintNameTo(stream);
     }
@@ -1227,7 +1236,10 @@
 
 
 bool HArrayLiteral::IsCopyOnWrite() const {
-  return constant_elements()->map() == HEAP->fixed_cow_array_map();
+  Handle<FixedArray> constant_elements = this->constant_elements();
+  FixedArrayBase* constant_elements_values =
+      FixedArrayBase::cast(constant_elements->get(1));
+  return constant_elements_values->map() == HEAP->fixed_cow_array_map();
 }
 
 
@@ -1392,7 +1404,7 @@
        i < types->length() && types_.length() < kMaxLoadPolymorphism;
        ++i) {
     Handle<Map> map = types->at(i);
-    LookupResult lookup;
+    LookupResult lookup(map->GetIsolate());
     map->LookupInDescriptors(NULL, *name, &lookup);
     if (lookup.IsProperty()) {
       switch (lookup.type()) {
@@ -1445,14 +1457,14 @@
 
 void HLoadNamedFieldPolymorphic::PrintDataTo(StringStream* stream) {
   object()->PrintNameTo(stream);
-  stream->Add(" .");
+  stream->Add(".");
   stream->Add(*String::cast(*name())->ToCString());
 }
 
 
 void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
   object()->PrintNameTo(stream);
-  stream->Add(" .");
+  stream->Add(".");
   stream->Add(*String::cast(*name())->ToCString());
 }
 
@@ -1549,10 +1561,10 @@
 void HStoreNamedField::PrintDataTo(StringStream* stream) {
   object()->PrintNameTo(stream);
   stream->Add(".");
-  ASSERT(name()->IsString());
   stream->Add(*String::cast(*name())->ToCString());
   stream->Add(" = ");
   value()->PrintNameTo(stream);
+  stream->Add(" @%d%s", offset(), is_in_object() ? "[in-object]" : "");
   if (!transition().is_null()) {
     stream->Add(" (transition map %p)", *transition());
   }
@@ -1633,6 +1645,12 @@
 }
 
 
+void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
+  object()->PrintNameTo(stream);
+  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
+}
+
+
 void HLoadGlobalCell::PrintDataTo(StringStream* stream) {
   stream->Add("[%p]", *cell());
   if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
@@ -1746,6 +1764,12 @@
 }
 
 
+HType HChange::CalculateInferredType() {
+  if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
+  return type();
+}
+
+
 HType HBitwiseBinaryOperation::CalculateInferredType() {
   return HType::TaggedNumber();
 }
@@ -1801,6 +1825,31 @@
 }
 
 
+HType HStringCharFromCode::CalculateInferredType() {
+  return HType::String();
+}
+
+
+HType HArrayLiteral::CalculateInferredType() {
+  return HType::JSArray();
+}
+
+
+HType HObjectLiteral::CalculateInferredType() {
+  return HType::JSObject();
+}
+
+
+HType HRegExpLiteral::CalculateInferredType() {
+  return HType::JSObject();
+}
+
+
+HType HFunctionLiteral::CalculateInferredType() {
+  return HType::JSObject();
+}
+
+
 HValue* HUnaryMathOperation::EnsureAndPropagateNotMinusZero(
     BitVector* visited) {
   visited->Add(id());
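
The new HConstant::Canonicalize and HTypeof::Canonicalize above return NULL
for instructions that have no uses (and are not block entries), which lets the
canonicalization pass delete them; that is also why the typeof comparisons in
hydrogen.cc further down can consume the HTypeof operand directly and rely on
the leftover instruction being cleaned up.  A rough sketch of the idea, with a
deliberately simplified instruction type rather than the Hydrogen classes:

    #include <cstddef>
    #include <list>

    struct Instr {
      int use_count;
      bool is_block_entry;
      Instr(int uses, bool entry) : use_count(uses), is_block_entry(entry) {}

      bool HasNoUses() const { return use_count == 0; }

      // A pure, side-effect-free instruction with no uses canonicalizes to
      // NULL, which the pass below reads as "remove this instruction".
      Instr* Canonicalize() {
        return (HasNoUses() && !is_block_entry) ? NULL : this;
      }
    };

    // Canonicalization pass: drop instructions whose canonical form is NULL.
    static void CanonicalizeBlock(std::list<Instr*>* block) {
      for (std::list<Instr*>::iterator it = block->begin(); it != block->end();) {
        if ((*it)->Canonicalize() == NULL) {
          delete *it;
          it = block->erase(it);
        } else {
          ++it;
        }
      }
    }

    int main() {
      std::list<Instr*> block;
      block.push_back(new Instr(0, false));  // dead constant: removed
      block.push_back(new Instr(2, false));  // value with uses: kept
      CanonicalizeBlock(&block);
      bool ok = (block.size() == 1);
      while (!block.empty()) { delete block.back(); block.pop_back(); }
      return ok ? 0 : 1;
    }
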
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index 6b43f53..65fc4df 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -171,6 +171,7 @@
   V(Throw)                                     \
   V(ToFastProperties)                          \
   V(ToInt32)                                   \
+  V(TransitionElementsKind)                    \
   V(Typeof)                                    \
   V(TypeofIsAndBranch)                         \
   V(UnaryMathOperation)                        \
@@ -397,6 +398,11 @@
     return type_ == kUninitialized;
   }
 
+  bool IsHeapObject() {
+    ASSERT(type_ != kUninitialized);
+    return IsHeapNumber() || IsString() || IsNonPrimitive();
+  }
+
   static HType TypeFromValue(Handle<Object> value);
 
   const char* ToString();
@@ -1101,12 +1107,14 @@
     ASSERT(!value->representation().IsNone() && !to.IsNone());
     ASSERT(!value->representation().Equals(to));
     set_representation(to);
+    set_type(HType::TaggedNumber());
     SetFlag(kUseGVN);
     if (deoptimize_on_undefined) SetFlag(kDeoptimizeOnUndefined);
     if (is_truncating) SetFlag(kTruncatingToInt32);
   }
 
   virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
+  virtual HType CalculateInferredType();
 
   Representation from() { return value()->representation(); }
   Representation to() { return representation(); }
@@ -1340,7 +1348,7 @@
 
 class HThisFunction: public HTemplateInstruction<0> {
  public:
-  HThisFunction() {
+  explicit HThisFunction(Handle<JSFunction> closure) : closure_(closure) {
     set_representation(Representation::Tagged());
     SetFlag(kUseGVN);
   }
@@ -1349,10 +1357,18 @@
     return Representation::None();
   }
 
+  Handle<JSFunction> closure() const { return closure_; }
+
   DECLARE_CONCRETE_INSTRUCTION(ThisFunction)
 
  protected:
-  virtual bool DataEquals(HValue* other) { return true; }
+  virtual bool DataEquals(HValue* other) {
+    HThisFunction* b = HThisFunction::cast(other);
+    return *closure() == *b->closure();
+  }
+
+ private:
+  Handle<JSFunction> closure_;
 };
 
 
@@ -2280,6 +2296,7 @@
   }
 
   virtual bool EmitAtUses() { return !representation().IsDouble(); }
+  virtual HValue* Canonicalize();
   virtual void PrintDataTo(StringStream* stream);
   virtual HType CalculateInferredType();
   bool IsInteger() const { return handle_->IsSmi(); }
@@ -3260,6 +3277,13 @@
 };
 
 
+static inline bool StoringValueNeedsWriteBarrier(HValue* value) {
+  return !value->type().IsBoolean()
+      && !value->type().IsSmi()
+      && !(value->IsConstant() && HConstant::cast(value)->ImmortalImmovable());
+}
+
+
 class HStoreGlobalCell: public HUnaryOperation {
  public:
   HStoreGlobalCell(HValue* value,
@@ -3275,6 +3299,9 @@
   bool RequiresHoleCheck() {
     return !details_.IsDontDelete() || details_.IsReadOnly();
   }
+  bool NeedsWriteBarrier() {
+    return StoringValueNeedsWriteBarrier(value());
+  }
 
   virtual Representation RequiredInputRepresentation(int index) {
     return Representation::Tagged();
@@ -3355,13 +3382,6 @@
 };
 
 
-static inline bool StoringValueNeedsWriteBarrier(HValue* value) {
-  return !value->type().IsBoolean()
-      && !value->type().IsSmi()
-      && !(value->IsConstant() && HConstant::cast(value)->ImmortalImmovable());
-}
-
-
 class HStoreContextSlot: public HTemplateInstruction<2> {
  public:
   HStoreContextSlot(HValue* context, int slot_index, HValue* value)
@@ -3700,9 +3720,9 @@
                      HValue* object,
                      Handle<String> name,
                      HValue* value,
-                     bool strict_mode)
+                     StrictModeFlag strict_mode_flag)
       : name_(name),
-        strict_mode_(strict_mode) {
+        strict_mode_flag_(strict_mode_flag) {
     SetOperandAt(0, object);
     SetOperandAt(1, value);
     SetOperandAt(2, context);
@@ -3713,7 +3733,7 @@
   HValue* value() { return OperandAt(1); }
   HValue* context() { return OperandAt(2); }
   Handle<String> name() { return name_; }
-  bool strict_mode() { return strict_mode_; }
+  StrictModeFlag strict_mode_flag() { return strict_mode_flag_; }
 
   virtual void PrintDataTo(StringStream* stream);
 
@@ -3725,7 +3745,7 @@
 
  private:
   Handle<String> name_;
-  bool strict_mode_;
+  StrictModeFlag strict_mode_flag_;
 };
 
 
@@ -3886,6 +3906,44 @@
 };
 
 
+class HTransitionElementsKind: public HTemplateInstruction<1> {
+ public:
+  HTransitionElementsKind(HValue* object,
+                          Handle<Map> original_map,
+                          Handle<Map> transitioned_map)
+      : original_map_(original_map),
+        transitioned_map_(transitioned_map) {
+    SetOperandAt(0, object);
+    SetFlag(kUseGVN);
+    SetFlag(kDependsOnMaps);
+    set_representation(Representation::Tagged());
+  }
+
+  virtual Representation RequiredInputRepresentation(int index) {
+    return Representation::Tagged();
+  }
+
+  HValue* object() { return OperandAt(0); }
+  Handle<Map> original_map() { return original_map_; }
+  Handle<Map> transitioned_map() { return transitioned_map_; }
+
+  virtual void PrintDataTo(StringStream* stream);
+
+  DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind)
+
+ protected:
+  virtual bool DataEquals(HValue* other) {
+    HTransitionElementsKind* instr = HTransitionElementsKind::cast(other);
+    return original_map_.is_identical_to(instr->original_map()) &&
+        transitioned_map_.is_identical_to(instr->transitioned_map());
+  }
+
+ private:
+  Handle<Map> original_map_;
+  Handle<Map> transitioned_map_;
+};
+
+
 class HStringAdd: public HBinaryOperation {
  public:
   HStringAdd(HValue* context, HValue* left, HValue* right)
@@ -3948,7 +4006,7 @@
   HStringCharFromCode(HValue* context, HValue* char_code) {
     SetOperandAt(0, context);
     SetOperandAt(1, char_code);
-     set_representation(Representation::Tagged());
+    set_representation(Representation::Tagged());
     SetFlag(kUseGVN);
   }
 
@@ -3957,6 +4015,7 @@
         ? Representation::Tagged()
         : Representation::Integer32();
   }
+  virtual HType CalculateInferredType();
 
   HValue* context() { return OperandAt(0); }
   HValue* value() { return OperandAt(1); }
@@ -4034,6 +4093,7 @@
   virtual Representation RequiredInputRepresentation(int index) {
     return Representation::Tagged();
   }
+  virtual HType CalculateInferredType();
 
   DECLARE_CONCRETE_INSTRUCTION(ArrayLiteral)
 
@@ -4068,6 +4128,7 @@
   virtual Representation RequiredInputRepresentation(int index) {
     return Representation::Tagged();
   }
+  virtual HType CalculateInferredType();
 
   DECLARE_CONCRETE_INSTRUCTION(ObjectLiteral)
 
@@ -4097,6 +4158,7 @@
   virtual Representation RequiredInputRepresentation(int index) {
     return Representation::Tagged();
   }
+  virtual HType CalculateInferredType();
 
   DECLARE_CONCRETE_INSTRUCTION(RegExpLiteral)
 
@@ -4121,6 +4183,7 @@
   virtual Representation RequiredInputRepresentation(int index) {
     return Representation::Tagged();
   }
+  virtual HType CalculateInferredType();
 
   DECLARE_CONCRETE_INSTRUCTION(FunctionLiteral)
 
@@ -4144,6 +4207,7 @@
   HValue* context() { return OperandAt(0); }
   HValue* value() { return OperandAt(1); }
 
+  virtual HValue* Canonicalize();
   virtual void PrintDataTo(StringStream* stream);
 
   virtual Representation RequiredInputRepresentation(int index) {
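
StoringValueNeedsWriteBarrier is moved above HStoreGlobalCell so that the new
NeedsWriteBarrier() predicate can use it: a store needs no write barrier when
the stored value is a smi, a boolean, or an immortal immovable constant, since
none of those can introduce a pointer the garbage collector has to track.  A
small illustration of the predicate on a made-up value descriptor (the
Hydrogen version asks the HValue's inferred type instead):

    #include <cstdio>

    struct ValueInfo {
      bool is_smi;                 // small integer encoded in the "pointer" itself
      bool is_boolean;             // true/false oddballs live in old space
      bool is_immortal_immovable;  // roots the collector never moves or frees
    };

    static bool StoringValueNeedsWriteBarrier(const ValueInfo& value) {
      return !value.is_boolean &&
             !value.is_smi &&
             !value.is_immortal_immovable;
    }

    int main() {
      ValueInfo smi = {true, false, false};
      ValueInfo heap_object = {false, false, false};
      std::printf("smi: %d, heap object: %d\n",
                  StoringValueNeedsWriteBarrier(smi),
                  StoringValueNeedsWriteBarrier(heap_object));
      return 0;
    }
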
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 2d471cc..1460db8 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -164,10 +164,11 @@
 }
 
 
-void HBasicBlock::Goto(HBasicBlock* block) {
+void HBasicBlock::Goto(HBasicBlock* block, bool drop_extra) {
   if (block->IsInlineReturnTarget()) {
     AddInstruction(new(zone()) HLeaveInlined);
     last_environment_ = last_environment()->outer();
+    if (drop_extra) last_environment_->Drop(1);
   }
   AddSimulate(AstNode::kNoNumber);
   HGoto* instr = new(zone()) HGoto(block);
@@ -175,11 +176,14 @@
 }
 
 
-void HBasicBlock::AddLeaveInlined(HValue* return_value, HBasicBlock* target) {
+void HBasicBlock::AddLeaveInlined(HValue* return_value,
+                                  HBasicBlock* target,
+                                  bool drop_extra) {
   ASSERT(target->IsInlineReturnTarget());
   ASSERT(return_value != NULL);
   AddInstruction(new(zone()) HLeaveInlined);
   last_environment_ = last_environment()->outer();
+  if (drop_extra) last_environment_->Drop(1);
   last_environment()->Push(return_value);
   AddSimulate(AstNode::kNoNumber);
   HGoto* instr = new(zone()) HGoto(target);
@@ -541,7 +545,7 @@
 HGraphBuilder::HGraphBuilder(CompilationInfo* info,
                              TypeFeedbackOracle* oracle)
     : function_state_(NULL),
-      initial_function_state_(this, info, oracle),
+      initial_function_state_(this, info, oracle, false),
       ast_context_(NULL),
       break_scope_(NULL),
       graph_(NULL),
@@ -1499,6 +1503,9 @@
         block->block_id() < dominated->block_id() &&
         visited_on_paths_.Add(block->block_id())) {
       side_effects |= block_side_effects_[block->block_id()];
+      if (block->IsLoopHeader()) {
+        side_effects |= loop_side_effects_[block->block_id()];
+      }
       side_effects |= CollectSideEffectsOnPathsToDominatedBlock(
           dominator, block);
     }
@@ -2005,11 +2012,13 @@
 // a (possibly inlined) function.
 FunctionState::FunctionState(HGraphBuilder* owner,
                              CompilationInfo* info,
-                             TypeFeedbackOracle* oracle)
+                             TypeFeedbackOracle* oracle,
+                             bool drop_extra)
     : owner_(owner),
       compilation_info_(info),
       oracle_(oracle),
       call_context_(NULL),
+      drop_extra_(drop_extra),
       function_return_(NULL),
       test_context_(NULL),
       outer_(owner->function_state()) {
@@ -2168,8 +2177,8 @@
   instr->SetSuccessorAt(0, empty_true);
   instr->SetSuccessorAt(1, empty_false);
   owner()->current_block()->Finish(instr);
-  empty_true->Goto(if_true());
-  empty_false->Goto(if_false());
+  empty_true->Goto(if_true(), owner()->function_state()->drop_extra());
+  empty_false->Goto(if_false(), owner()->function_state()->drop_extra());
   owner()->set_current_block(NULL);
 }
 
@@ -2190,8 +2199,8 @@
   HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected);
   builder->current_block()->Finish(test);
 
-  empty_true->Goto(if_true());
-  empty_false->Goto(if_false());
+  empty_true->Goto(if_true(), owner()->function_state()->drop_extra());
+  empty_false->Goto(if_false(), owner()->function_state()->drop_extra());
   builder->set_current_block(NULL);
 }
 
@@ -2652,12 +2661,14 @@
                       test->if_false());
     } else if (context->IsEffect()) {
       CHECK_ALIVE(VisitForEffect(stmt->expression()));
-      current_block()->Goto(function_return());
+      current_block()->Goto(function_return(), function_state()->drop_extra());
     } else {
       ASSERT(context->IsValue());
       CHECK_ALIVE(VisitForValue(stmt->expression()));
       HValue* return_value = environment()->Pop();
-      current_block()->AddLeaveInlined(return_value, function_return());
+      current_block()->AddLeaveInlined(return_value,
+                                       function_return(),
+                                       function_state()->drop_extra());
     }
     set_current_block(NULL);
   }
@@ -3156,7 +3167,7 @@
         return ast_context()->ReturnInstruction(instr, expr->id());
       }
 
-      LookupResult lookup;
+      LookupResult lookup(isolate());
       GlobalPropertyAccess type =
           LookupGlobalProperty(variable, &lookup, false);
 
@@ -3276,7 +3287,7 @@
                                 literal,
                                 name,
                                 value,
-                                function_strict_mode());
+                                function_strict_mode_flag());
             AddInstruction(store);
             AddSimulate(key->id());
           } else {
@@ -3337,11 +3348,8 @@
     HValue* value = Pop();
     if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
 
-    // Load the elements array before the first store.
-    if (elements == NULL)  {
-      elements = new(zone()) HLoadElements(literal);
-      AddInstruction(elements);
-    }
+    elements = new(zone()) HLoadElements(literal);
+    AddInstruction(elements);
 
     HValue* key = AddInstruction(
         new(zone()) HConstant(Handle<Object>(Smi::FromInt(i)),
@@ -3365,10 +3373,10 @@
     set_current_block(check_smi_only_elements);
     HCompareConstantEqAndBranch* smi_elements_check =
         new(zone()) HCompareConstantEqAndBranch(elements_kind,
-                                                FAST_SMI_ONLY_ELEMENTS,
+                                                FAST_ELEMENTS,
                                                 Token::EQ_STRICT);
-    smi_elements_check->SetSuccessorAt(0, store_generic);
-    smi_elements_check->SetSuccessorAt(1, store_fast_edgesplit2);
+    smi_elements_check->SetSuccessorAt(0, store_fast_edgesplit2);
+    smi_elements_check->SetSuccessorAt(1, store_generic);
     current_block()->Finish(smi_elements_check);
     store_fast_edgesplit2->Finish(new(zone()) HGoto(store_fast));
 
@@ -3457,7 +3465,7 @@
                          object,
                          name,
                          value,
-                         function_strict_mode());
+                         function_strict_mode_flag());
 }
 
 
@@ -3471,7 +3479,7 @@
   Handle<String> name = Handle<String>::cast(key->handle());
   ASSERT(!name.is_null());
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   SmallMapList* types = expr->GetReceiverTypes();
   bool is_monomorphic = expr->IsMonomorphic() &&
       ComputeStoredField(types->first(), name, &lookup);
@@ -3495,7 +3503,7 @@
   HBasicBlock* join = NULL;
   for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) {
     Handle<Map> map = types->at(i);
-    LookupResult lookup;
+    LookupResult lookup(isolate());
     if (ComputeStoredField(map, name, &lookup)) {
       if (count == 0) {
         AddInstruction(new(zone()) HCheckNonSmi(object));  // Only needed once.
@@ -3578,7 +3586,7 @@
     ASSERT(!name.is_null());
 
     SmallMapList* types = expr->GetReceiverTypes();
-    LookupResult lookup;
+    LookupResult lookup(isolate());
 
     if (expr->IsMonomorphic()) {
       instr = BuildStoreNamed(object, value, expr);
@@ -3623,7 +3631,7 @@
                                                    HValue* value,
                                                    int position,
                                                    int ast_id) {
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, true);
   if (type == kUseCell) {
     Handle<GlobalObject> global(info()->global_object());
@@ -3642,7 +3650,7 @@
                                         global_object,
                                         var->name(),
                                         value,
-                                        function_strict_mode());
+                                        function_strict_mode_flag());
     instr->set_position(position);
     AddInstruction(instr);
     ASSERT(instr->HasSideEffects());
@@ -3938,7 +3946,7 @@
                                             Property* expr,
                                             Handle<Map> map,
                                             Handle<String> name) {
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   map->LookupInDescriptors(NULL, *name, &lookup);
   if (lookup.IsProperty() && lookup.type() == FIELD) {
     return BuildLoadNamedField(obj,
@@ -4037,11 +4045,8 @@
 HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
                                                            HValue* key,
                                                            HValue* val,
-                                                           Expression* expr,
+                                                           Handle<Map> map,
                                                            bool is_store) {
-  ASSERT(expr->IsMonomorphic());
-  Handle<Map> map = expr->GetMonomorphicReceiverType();
-  AddInstruction(new(zone()) HCheckNonSmi(object));
   HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMap(object, map));
   bool fast_smi_only_elements = map->has_fast_smi_only_elements();
   bool fast_elements = map->has_fast_elements();
@@ -4091,7 +4096,6 @@
                                                       bool* has_side_effects) {
   *has_side_effects = false;
   AddInstruction(new(zone()) HCheckNonSmi(object));
-  AddInstruction(HCheckInstanceType::NewIsSpecObject(object));
   SmallMapList* maps = prop->GetReceiverTypes();
   bool todo_external_array = false;
 
@@ -4101,15 +4105,55 @@
     type_todo[i] = false;
   }
 
+  // Elements_kind transition support.
+  MapHandleList transition_target(maps->length());
+  // Collect possible transition targets.
+  MapHandleList possible_transitioned_maps(maps->length());
   for (int i = 0; i < maps->length(); ++i) {
-    ASSERT(maps->at(i)->IsMap());
-    type_todo[maps->at(i)->elements_kind()] = true;
-    if (maps->at(i)->elements_kind()
-        >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND) {
-      todo_external_array = true;
+    Handle<Map> map = maps->at(i);
+    ElementsKind elements_kind = map->elements_kind();
+    if (elements_kind == FAST_DOUBLE_ELEMENTS ||
+        elements_kind == FAST_ELEMENTS) {
+      possible_transitioned_maps.Add(map);
+    }
+  }
+  // Get transition target for each map (NULL == no transition).
+  for (int i = 0; i < maps->length(); ++i) {
+    Handle<Map> map = maps->at(i);
+    Handle<Map> transitioned_map =
+        map->FindTransitionedMap(&possible_transitioned_maps);
+    transition_target.Add(transitioned_map);
+  }
+
+  int num_untransitionable_maps = 0;
+  Handle<Map> untransitionable_map;
+  for (int i = 0; i < maps->length(); ++i) {
+    Handle<Map> map = maps->at(i);
+    ASSERT(map->IsMap());
+    if (!transition_target.at(i).is_null()) {
+      object = AddInstruction(new(zone()) HTransitionElementsKind(
+          object, map, transition_target.at(i)));
+    } else {
+      type_todo[map->elements_kind()] = true;
+      if (map->elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND) {
+        todo_external_array = true;
+      }
+      num_untransitionable_maps++;
+      untransitionable_map = map;
     }
   }
 
+  // If only one map is left after transitioning, handle this case
+  // monomorphically.
+  if (num_untransitionable_maps == 1) {
+    HInstruction* instr = AddInstruction(BuildMonomorphicElementAccess(
+        object, key, val, untransitionable_map, is_store));
+    *has_side_effects |= instr->HasSideEffects();
+    instr->set_position(position);
+    return is_store ? NULL : instr;
+  }
+
+  AddInstruction(HCheckInstanceType::NewIsSpecObject(object));
   HBasicBlock* join = graph()->CreateBasicBlock();
 
   HInstruction* elements_kind_instr =
@@ -4241,7 +4285,9 @@
   ASSERT(!expr->IsPropertyName());
   HInstruction* instr = NULL;
   if (expr->IsMonomorphic()) {
-    instr = BuildMonomorphicElementAccess(obj, key, val, expr, is_store);
+    Handle<Map> map = expr->GetMonomorphicReceiverType();
+    AddInstruction(new(zone()) HCheckNonSmi(obj));
+    instr = BuildMonomorphicElementAccess(obj, key, val, map, is_store);
   } else if (expr->GetReceiverTypes() != NULL &&
              !expr->GetReceiverTypes()->is_empty()) {
     return HandlePolymorphicElementAccess(
@@ -4269,7 +4315,7 @@
                          object,
                          key,
                          value,
-                         function_strict_mode());
+                         function_strict_mode_flag());
 }
 
 bool HGraphBuilder::TryArgumentsAccess(Property* expr) {
@@ -4511,7 +4557,7 @@
 }
 
 
-bool HGraphBuilder::TryInline(Call* expr) {
+bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
   if (!FLAG_use_inlining) return false;
 
   // The function call we are inlining is a method call if the call
@@ -4539,9 +4585,9 @@
     return false;
   }
 
-  CompilationInfo* outer_info = info();
 #if !defined(V8_TARGET_ARCH_IA32)
   // Target must be able to use caller's context.
+  CompilationInfo* outer_info = info();
   if (target->context() != outer_info->closure()->context() ||
       outer_info->scope()->contains_with() ||
       outer_info->scope()->num_heap_slots() > 0) {
@@ -4555,9 +4601,7 @@
   HEnvironment* env = environment();
   int current_level = 1;
   while (env->outer() != NULL) {
-    if (current_level == (FLAG_limit_inlining
-                          ? Compiler::kMaxInliningLevels
-                          : 2 * Compiler::kMaxInliningLevels)) {
+    if (current_level == Compiler::kMaxInliningLevels) {
       TraceInline(target, caller, "inline depth limit reached");
       return false;
     }
@@ -4566,9 +4610,13 @@
   }
 
   // Don't inline recursive functions.
-  if (*target_shared == outer_info->closure()->shared()) {
-    TraceInline(target, caller, "target is recursive");
-    return false;
+  for (FunctionState* state = function_state();
+       state != NULL;
+       state = state->outer()) {
+    if (state->compilation_info()->closure()->shared() == *target_shared) {
+      TraceInline(target, caller, "target is recursive");
+      return false;
+    }
   }
 
   // We don't want to add more than a certain number of nodes from inlining.
@@ -4665,7 +4713,10 @@
       Handle<Code>(target_shared->code()),
       Handle<Context>(target->context()->global_context()),
       isolate());
-  FunctionState target_state(this, &target_info, &target_oracle);
+  // The function state is new-allocated because we need to delete it
+  // in two different places.
+  FunctionState* target_state =
+      new FunctionState(this, &target_info, &target_oracle, drop_extra);
 
   HConstant* undefined = graph()->GetConstantUndefined();
   HEnvironment* inner_env =
@@ -4699,6 +4750,7 @@
     TraceInline(target, caller, "inline graph construction failed");
     target_shared->DisableOptimization(*target);
     inline_bailout_ = true;
+    delete target_state;
     return true;
   }
 
@@ -4714,9 +4766,11 @@
       ASSERT(function_return() != NULL);
       ASSERT(call_context()->IsEffect() || call_context()->IsValue());
       if (call_context()->IsEffect()) {
-        current_block()->Goto(function_return());
+        current_block()->Goto(function_return(), drop_extra);
       } else {
-        current_block()->AddLeaveInlined(undefined, function_return());
+        current_block()->AddLeaveInlined(undefined,
+                                         function_return(),
+                                         drop_extra);
       }
     } else {
       // The graph builder assumes control can reach both branches of a
@@ -4724,13 +4778,14 @@
       // simply jumping to the false target.
       //
       // TODO(3168478): refactor to avoid this.
+      ASSERT(call_context()->IsTest());
       HBasicBlock* empty_true = graph()->CreateBasicBlock();
       HBasicBlock* empty_false = graph()->CreateBasicBlock();
       HBranch* test = new(zone()) HBranch(undefined, empty_true, empty_false);
       current_block()->Finish(test);
 
-      empty_true->Goto(inlined_test_context()->if_true());
-      empty_false->Goto(inlined_test_context()->if_false());
+      empty_true->Goto(inlined_test_context()->if_true(), drop_extra);
+      empty_false->Goto(inlined_test_context()->if_false(), drop_extra);
     }
   }
 
@@ -4742,19 +4797,21 @@
     // Pop the return test context from the expression context stack.
     ASSERT(ast_context() == inlined_test_context());
     ClearInlinedTestContext();
+    delete target_state;
 
     // Forward to the real test context.
     if (if_true->HasPredecessor()) {
       if_true->SetJoinId(expr->id());
       HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
-      if_true->Goto(true_target);
+      if_true->Goto(true_target, function_state()->drop_extra());
     }
     if (if_false->HasPredecessor()) {
       if_false->SetJoinId(expr->id());
       HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
-      if_false->Goto(false_target);
+      if_false->Goto(false_target, function_state()->drop_extra());
     }
     set_current_block(NULL);
+    return true;
 
   } else if (function_return()->HasPredecessor()) {
     function_return()->SetJoinId(expr->id());
@@ -4762,7 +4819,7 @@
   } else {
     set_current_block(NULL);
   }
-
+  delete target_state;
   return true;
 }
 
@@ -5014,7 +5071,7 @@
       // If there is a global property cell for the name at compile time and
       // access check is not enabled we assume that the function will not change
       // and generate optimized code for calling the function.
-      LookupResult lookup;
+      LookupResult lookup(isolate());
       GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, false);
       if (type == kUseCell &&
           !info()->global_object()->IsAccessCheckNeeded()) {
@@ -5069,32 +5126,17 @@
       PushAndAdd(receiver);
       CHECK_ALIVE(VisitExpressions(expr->arguments()));
       AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
-      if (TryInline(expr)) {
-        // The function is lingering in the deoptimization environment.
-        // Handle it by case analysis on the AST context.
-        if (ast_context()->IsEffect()) {
-          Drop(1);
-        } else if (ast_context()->IsValue()) {
-          HValue* result = Pop();
-          Drop(1);
-          Push(result);
-        } else if (ast_context()->IsTest()) {
-          TestContext* context = TestContext::cast(ast_context());
-          if (context->if_true()->HasPredecessor()) {
-            context->if_true()->last_environment()->Drop(1);
-          }
-          if (context->if_false()->HasPredecessor()) {
-            context->if_true()->last_environment()->Drop(1);
-          }
-        } else {
-          UNREACHABLE();
-        }
+      if (TryInline(expr, true)) {   // Drop function from environment.
         return;
       } else {
         call = PreProcessCall(new(zone()) HInvokeFunction(context,
                                                           function,
                                                           argument_count));
+        call->set_position(expr->position());
+        AddInstruction(call);
+        AddSimulate(expr->id());
         Drop(1);  // The function.
+        return ast_context()->ReturnValue(call);
       }
 
     } else {
@@ -5304,7 +5346,6 @@
 
 
 void HGraphBuilder::VisitNot(UnaryOperation* expr) {
-  // TODO(svenpanne) Perhaps a switch/virtual function is nicer here.
   if (ast_context()->IsTest()) {
     TestContext* context = TestContext::cast(ast_context());
     VisitForControl(expr->expression(),
@@ -5791,38 +5832,68 @@
 
 
 void HGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
-                                               Expression* sub_expr,
+                                               HTypeof* typeof_expr,
                                                Handle<String> check) {
-  CHECK_ALIVE(VisitForTypeOf(sub_expr));
-  HValue* value = Pop();
+  // Note: The HTypeof itself is removed during canonicalization, if possible.
+  HValue* value = typeof_expr->value();
   HTypeofIsAndBranch* instr = new(zone()) HTypeofIsAndBranch(value, check);
   instr->set_position(expr->position());
   return ast_context()->ReturnControl(instr, expr->id());
 }
 
 
-bool HGraphBuilder::TryLiteralCompare(CompareOperation* expr) {
-  Expression *sub_expr;
-  Handle<String> check;
-  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
-    HandleLiteralCompareTypeof(expr, sub_expr, check);
+static bool MatchLiteralCompareNil(HValue* left,
+                                   Token::Value op,
+                                   HValue* right,
+                                   Handle<Object> nil,
+                                   HValue** expr) {
+  if (left->IsConstant() &&
+      HConstant::cast(left)->handle().is_identical_to(nil) &&
+      Token::IsEqualityOp(op)) {
+    *expr = right;
     return true;
   }
-
-  if (expr->IsLiteralCompareUndefined(&sub_expr)) {
-    HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
-    return true;
-  }
-
-  if (expr->IsLiteralCompareNull(&sub_expr)) {
-    HandleLiteralCompareNil(expr, sub_expr, kNullValue);
-    return true;
-  }
-
   return false;
 }
 
 
+static bool MatchLiteralCompareTypeof(HValue* left,
+                                      Token::Value op,
+                                      HValue* right,
+                                      HTypeof** typeof_expr,
+                                      Handle<String>* check) {
+  if (left->IsTypeof() &&
+      Token::IsEqualityOp(op) &&
+      right->IsConstant() &&
+      HConstant::cast(right)->HasStringValue()) {
+    *typeof_expr = HTypeof::cast(left);
+    *check = Handle<String>::cast(HConstant::cast(right)->handle());
+    return true;
+  }
+  return false;
+}
+
+
+static bool IsLiteralCompareTypeof(HValue* left,
+                                   Token::Value op,
+                                   HValue* right,
+                                   HTypeof** typeof_expr,
+                                   Handle<String>* check) {
+  return MatchLiteralCompareTypeof(left, op, right, typeof_expr, check) ||
+      MatchLiteralCompareTypeof(right, op, left, typeof_expr, check);
+}
+
+
+static bool IsLiteralCompareNil(HValue* left,
+                                Token::Value op,
+                                HValue* right,
+                                Handle<Object> nil,
+                                HValue** expr) {
+  return MatchLiteralCompareNil(left, op, right, nil, expr) ||
+      MatchLiteralCompareNil(right, op, left, nil, expr);
+}
+
+
 void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
@@ -5840,11 +5911,9 @@
     return ast_context()->ReturnControl(instr, expr->id());
   }
 
-  // Check for special cases that compare against literals.
-  if (TryLiteralCompare(expr)) return;
-
   TypeInfo type_info = oracle()->CompareType(expr);
   // Check if this expression was ever executed according to type feedback.
+  // Note that for the special typeof/null/undefined cases we get unknown here.
   if (type_info.IsUninitialized()) {
     AddInstruction(new(zone()) HSoftDeoptimize);
     current_block()->MarkAsDeoptimizing();
@@ -5859,6 +5928,20 @@
   HValue* left = Pop();
   Token::Value op = expr->op();
 
+  HTypeof* typeof_expr = NULL;
+  Handle<String> check;
+  if (IsLiteralCompareTypeof(left, op, right, &typeof_expr, &check)) {
+    return HandleLiteralCompareTypeof(expr, typeof_expr, check);
+  }
+  HValue* sub_expr = NULL;
+  Factory* f = graph()->isolate()->factory();
+  if (IsLiteralCompareNil(left, op, right, f->undefined_value(), &sub_expr)) {
+    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
+  }
+  if (IsLiteralCompareNil(left, op, right, f->null_value(), &sub_expr)) {
+    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
+  }
+
   if (op == Token::INSTANCEOF) {
     // Check to see if the rhs of the instanceof is a global function not
     // residing in new space. If it is we assume that the function will stay the
@@ -5871,7 +5954,7 @@
         !info()->global_object()->IsAccessCheckNeeded()) {
       Handle<String> name = proxy->name();
       Handle<GlobalObject> global(info()->global_object());
-      LookupResult lookup;
+      LookupResult lookup(isolate());
       global->Lookup(*name, &lookup);
       if (lookup.IsProperty() &&
           lookup.type() == NORMAL &&
@@ -5947,13 +6030,11 @@
 
 
 void HGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
-                                            Expression* sub_expr,
+                                            HValue* value,
                                             NilValue nil) {
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
-  CHECK_ALIVE(VisitForValue(sub_expr));
-  HValue* value = Pop();
   EqualityKind kind =
       expr->op() == Token::EQ_STRICT ? kStrictEquality : kNonStrictEquality;
   HIsNilAndBranch* instr = new(zone()) HIsNilAndBranch(value, kind, nil);
@@ -5966,7 +6047,8 @@
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
-  HThisFunction* self = new(zone()) HThisFunction;
+  HThisFunction* self = new(zone()) HThisFunction(
+      function_state()->compilation_info()->closure());
   return ast_context()->ReturnInstruction(self, expr->id());
 }
 
@@ -5979,7 +6061,9 @@
 void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
                                       VariableMode mode,
                                       FunctionLiteral* function) {
-  if (mode == LET) return Bailout("unsupported let declaration");
+  if (mode == LET || mode == CONST_HARMONY) {
+    return Bailout("unsupported harmony declaration");
+  }
   Variable* var = proxy->var();
   switch (var->location()) {
     case Variable::UNALLOCATED:
diff --git a/src/hydrogen.h b/src/hydrogen.h
index b66042c..2d08dc8 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -121,7 +121,7 @@
 
   void Finish(HControlInstruction* last);
   void FinishExit(HControlInstruction* instruction);
-  void Goto(HBasicBlock* block);
+  void Goto(HBasicBlock* block, bool drop_extra = false);
 
   int PredecessorIndexOf(HBasicBlock* predecessor) const;
   void AddSimulate(int ast_id) { AddInstruction(CreateSimulate(ast_id)); }
@@ -133,7 +133,9 @@
 
   // Add the inlined function exit sequence, adding an HLeaveInlined
   // instruction and updating the bailout environment.
-  void AddLeaveInlined(HValue* return_value, HBasicBlock* target);
+  void AddLeaveInlined(HValue* return_value,
+                       HBasicBlock* target,
+                       bool drop_extra = false);
 
   // If a target block is tagged as an inline function return, all
   // predecessors should contain the inlined exit sequence:
@@ -603,16 +605,18 @@
 };
 
 
-class FunctionState BASE_EMBEDDED {
+class FunctionState {
  public:
   FunctionState(HGraphBuilder* owner,
                 CompilationInfo* info,
-                TypeFeedbackOracle* oracle);
+                TypeFeedbackOracle* oracle,
+                bool drop_extra);
   ~FunctionState();
 
   CompilationInfo* compilation_info() { return compilation_info_; }
   TypeFeedbackOracle* oracle() { return oracle_; }
   AstContext* call_context() { return call_context_; }
+  bool drop_extra() { return drop_extra_; }
   HBasicBlock* function_return() { return function_return_; }
   TestContext* test_context() { return test_context_; }
   void ClearInlinedTestContext() {
@@ -632,6 +636,10 @@
   // inlined. NULL when not inlining.
   AstContext* call_context_;
 
+  // Indicates whether we have to drop an extra value from the environment
+  // on return from inlined functions.
+  bool drop_extra_;
+
   // When inlining in an effect of value context, this is the return block.
   // It is NULL otherwise.  When inlining in a test context, there are a
   // pair of return blocks in the context.  When not inlining, there is no
@@ -728,6 +736,8 @@
 
   TypeFeedbackOracle* oracle() const { return function_state()->oracle(); }
 
+  FunctionState* function_state() const { return function_state_; }
+
  private:
   // Type of a member function that generates inline code for a native function.
   typedef void (HGraphBuilder::*InlineFunctionGenerator)(CallRuntime* call);
@@ -746,7 +756,6 @@
   static const int kMaxSourceSize = 600;
 
   // Simple accessors.
-  FunctionState* function_state() const { return function_state_; }
   void set_function_state(FunctionState* state) { function_state_ = state; }
 
   AstContext* ast_context() const { return ast_context_; }
@@ -769,8 +778,8 @@
   void ClearInlinedTestContext() {
     function_state()->ClearInlinedTestContext();
   }
-  bool function_strict_mode() {
-    return function_state()->compilation_info()->is_strict_mode();
+  StrictModeFlag function_strict_mode_flag() {
+    return function_state()->compilation_info()->strict_mode_flag();
   }
 
   // Generators for inline runtime functions.
@@ -883,7 +892,7 @@
   // Try to optimize fun.apply(receiver, arguments) pattern.
   bool TryCallApply(Call* expr);
 
-  bool TryInline(Call* expr);
+  bool TryInline(Call* expr, bool drop_extra = false);
   bool TryInlineBuiltinFunction(Call* expr,
                                 HValue* receiver,
                                 Handle<Map> receiver_map,
@@ -912,12 +921,11 @@
                                   HValue* receiver,
                                   SmallMapList* types,
                                   Handle<String> name);
-  bool TryLiteralCompare(CompareOperation* expr);
   void HandleLiteralCompareTypeof(CompareOperation* expr,
-                                  Expression* sub_expr,
+                                  HTypeof* typeof_expr,
                                   Handle<String> check);
   void HandleLiteralCompareNil(CompareOperation* expr,
-                               Expression* sub_expr,
+                               HValue* value,
                                NilValue nil);
 
   HStringCharCodeAt* BuildStringCharCodeAt(HValue* context,
@@ -951,7 +959,7 @@
   HInstruction* BuildMonomorphicElementAccess(HValue* object,
                                               HValue* key,
                                               HValue* val,
-                                              Expression* expr,
+                                              Handle<Map> map,
                                               bool is_store);
   HValue* HandlePolymorphicElementAccess(HValue* object,
                                          HValue* key,
diff --git a/src/ia32/assembler-ia32-inl.h b/src/ia32/assembler-ia32-inl.h
index 446aa3e..2e9fcb6 100644
--- a/src/ia32/assembler-ia32-inl.h
+++ b/src/ia32/assembler-ia32-inl.h
@@ -88,10 +88,10 @@
 }
 
 
-void RelocInfo::set_target_address(Address target) {
+void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
   Assembler::set_target_address_at(pc_, target);
   ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
-  if (host() != NULL && IsCodeTarget(rmode_)) {
+  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
     Object* target_code = Code::GetCodeFromTargetAddress(target);
     host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
         host(), this, HeapObject::cast(target_code));
@@ -117,11 +117,13 @@
 }
 
 
-void RelocInfo::set_target_object(Object* target) {
+void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   Memory::Object_at(pc_) = target;
   CPU::FlushICache(pc_, sizeof(Address));
-  if (host() != NULL && target->IsHeapObject()) {
+  if (mode == UPDATE_WRITE_BARRIER &&
+      host() != NULL &&
+      target->IsHeapObject()) {
     host()->GetHeap()->incremental_marking()->RecordWrite(
         host(), &Memory::Object_at(pc_), HeapObject::cast(target));
   }
@@ -151,12 +153,13 @@
 }
 
 
-void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell) {
+void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
+                                WriteBarrierMode mode) {
   ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
   Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
   Memory::Address_at(pc_) = address;
   CPU::FlushICache(pc_, sizeof(Address));
-  if (host() != NULL) {
+  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
     // TODO(1550) We are passing NULL as a slot because cell can never be on
     // evacuation candidate.
     host()->GetHeap()->incremental_marking()->RecordWrite(
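The new WriteBarrierMode parameter defaults to UPDATE_WRITE_BARRIER, so existing call sites keep their current behaviour; only callers that know the incremental-marking record is unnecessary pass SKIP_WRITE_BARRIER explicitly. Illustrative call sites (assuming a RelocInfo* rinfo and Object* target in scope):

  rinfo->set_target_object(target);                      // barrier recorded
  rinfo->set_target_object(target, SKIP_WRITE_BARRIER);  // barrier skipped
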
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index 53ade3a..70e342d 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -915,10 +915,6 @@
 }
 
 
-// Number of empty elements to allocate for an empty array.
-static const int kPreallocatedArrayElements = 4;
-
-
 // Allocate an empty JSArray. The allocated array is put into the result
 // register. If the parameter initial_capacity is larger than zero an elements
 // backing store is allocated with this size and filled with the hole values.
@@ -929,10 +925,9 @@
                                  Register scratch1,
                                  Register scratch2,
                                  Register scratch3,
-                                 int initial_capacity,
                                  Label* gc_required) {
-  ASSERT(initial_capacity >= 0);
-
+  const int initial_capacity = JSArray::kPreallocatedArrayElements;
+  STATIC_ASSERT(initial_capacity >= 0);
   // Load the initial map from the array function.
   __ mov(scratch1, FieldOperand(array_function,
                                 JSFunction::kPrototypeOrInitialMapOffset));
@@ -990,7 +985,6 @@
   // Fill the FixedArray with the hole value. Inline the code if short.
   // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
   static const int kLoopUnfoldLimit = 4;
-  STATIC_ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
   if (initial_capacity <= kLoopUnfoldLimit) {
     // Use a scratch register here to have only one reloc info when unfolding
     // the loop.
@@ -1153,7 +1147,6 @@
                        ebx,
                        ecx,
                        edi,
-                       kPreallocatedArrayElements,
                        &prepare_generic_code_call);
   __ IncrementCounter(masm->isolate()->counters()->array_function_native(), 1);
   __ pop(ebx);
@@ -1182,7 +1175,7 @@
     __ mov(eax, Operand(esp, i * kPointerSize));
     __ mov(Operand(esp, (i + 1) * kPointerSize), eax);
   }
-  __ add(esp, Immediate(2 * kPointerSize));  // Drop two stack slots.
+  __ Drop(2);  // Drop two stack slots.
   __ push(Immediate(0));  // Treat this as a call with argc of zero.
   __ jmp(&empty_array);
 
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 1e886e2..d7d1d9c 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -34,6 +34,7 @@
 #include "isolate.h"
 #include "jsregexp.h"
 #include "regexp-macro-assembler.h"
+#include "stub-cache.h"
 
 namespace v8 {
 namespace internal {
@@ -238,7 +239,12 @@
   // [esp + (3 * kPointerSize)]: literals array.
 
   // All sizes here are multiples of kPointerSize.
-  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
+  int elements_size = 0;
+  if (length_ > 0) {
+    elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
+        ? FixedDoubleArray::SizeFor(length_)
+        : FixedArray::SizeFor(length_);
+  }
   int size = JSArray::kSize + elements_size;
 
   // Load boilerplate object into ecx and check if we need to create a
@@ -261,6 +267,9 @@
     if (mode_ == CLONE_ELEMENTS) {
       message = "Expected (writable) fixed array";
       expected_map = factory->fixed_array_map();
+    } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
+      message = "Expected (writable) fixed double array";
+      expected_map = factory->fixed_double_array_map();
     } else {
       ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
       message = "Expected copy-on-write fixed array";
@@ -293,9 +302,24 @@
     __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);
 
     // Copy the elements array.
-    for (int i = 0; i < elements_size; i += kPointerSize) {
-      __ mov(ebx, FieldOperand(ecx, i));
-      __ mov(FieldOperand(edx, i), ebx);
+    if (mode_ == CLONE_ELEMENTS) {
+      for (int i = 0; i < elements_size; i += kPointerSize) {
+        __ mov(ebx, FieldOperand(ecx, i));
+        __ mov(FieldOperand(edx, i), ebx);
+      }
+    } else {
+      ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS);
+      int i;
+      for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
+        __ mov(ebx, FieldOperand(ecx, i));
+        __ mov(FieldOperand(edx, i), ebx);
+      }
+      while (i < elements_size) {
+        __ fld_d(FieldOperand(ecx, i));
+        __ fstp_d(FieldOperand(edx, i));
+        i += kDoubleSize;
+      }
+      ASSERT(i == elements_size);
     }
   }
 
@@ -3858,11 +3882,11 @@
   Register scratch = scratch2;
 
   // Load the number string cache.
-  ExternalReference roots_address =
-      ExternalReference::roots_address(masm->isolate());
+  ExternalReference roots_array_start =
+      ExternalReference::roots_array_start(masm->isolate());
   __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
   __ mov(number_string_cache,
-         Operand::StaticArray(scratch, times_pointer_size, roots_address));
+         Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
   // Make the hash mask from the length of the number string cache. It
   // contains two elements (number and string) for each cache entry.
   __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
@@ -4830,8 +4854,8 @@
   static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
   static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
 
-  ExternalReference roots_address =
-      ExternalReference::roots_address(masm->isolate());
+  ExternalReference roots_array_start =
+      ExternalReference::roots_array_start(masm->isolate());
 
   ASSERT_EQ(object.code(), InstanceofStub::left().code());
   ASSERT_EQ(function.code(), InstanceofStub::right().code());
@@ -4853,22 +4877,23 @@
     // Look up the function and the map in the instanceof cache.
     Label miss;
     __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
-    __ cmp(function,
-           Operand::StaticArray(scratch, times_pointer_size, roots_address));
+    __ cmp(function, Operand::StaticArray(scratch,
+                                          times_pointer_size,
+                                          roots_array_start));
     __ j(not_equal, &miss, Label::kNear);
     __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
     __ cmp(map, Operand::StaticArray(
-        scratch, times_pointer_size, roots_address));
+        scratch, times_pointer_size, roots_array_start));
     __ j(not_equal, &miss, Label::kNear);
     __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
     __ mov(eax, Operand::StaticArray(
-        scratch, times_pointer_size, roots_address));
+        scratch, times_pointer_size, roots_array_start));
     __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
     __ bind(&miss);
   }
 
   // Get the prototype of the function.
-  __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
+  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
 
   // Check that the function prototype is a JS object.
   __ JumpIfSmi(prototype, &slow);
@@ -4878,9 +4903,10 @@
   // map and function. The cached answer will be set when it is known below.
   if (!HasCallSiteInlineCheck()) {
   __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
-  __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
+  __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
+         map);
   __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
-  __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
+  __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
          function);
   } else {
     // The constants for the code patching are based on no push instructions
@@ -4917,7 +4943,7 @@
     __ Set(eax, Immediate(0));
     __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
     __ mov(Operand::StaticArray(scratch,
-                                times_pointer_size, roots_address), eax);
+                                times_pointer_size, roots_array_start), eax);
   } else {
     // Get return address and delta to inlined map check.
     __ mov(eax, factory->true_value());
@@ -4939,7 +4965,7 @@
     __ Set(eax, Immediate(Smi::FromInt(1)));
     __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
     __ mov(Operand::StaticArray(
-        scratch, times_pointer_size, roots_address), eax);
+        scratch, times_pointer_size, roots_array_start), eax);
   } else {
     // Get return address and delta to inlined map check.
     __ mov(eax, factory->false_value());
@@ -5728,11 +5754,11 @@
 
   // Load the symbol table.
   Register symbol_table = c2;
-  ExternalReference roots_address =
-      ExternalReference::roots_address(masm->isolate());
+  ExternalReference roots_array_start =
+      ExternalReference::roots_array_start(masm->isolate());
   __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
   __ mov(symbol_table,
-         Operand::StaticArray(scratch, times_pointer_size, roots_address));
+         Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
 
   // Calculate capacity mask from the symbol table capacity.
   Register mask = scratch2;
@@ -6515,7 +6541,67 @@
 // must always call a backup property check that is complete.
 // This function is safe to call if the receiver has fast properties.
 // Name must be a symbol and receiver must be a heap object.
-MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
+void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
+                                                        Label* miss,
+                                                        Label* done,
+                                                        Register properties,
+                                                        Handle<String> name,
+                                                        Register r0) {
+  ASSERT(name->IsSymbol());
+
+  // If the names of the slots in the range from 1 to kProbes - 1 for the
+  // hash value are not equal to the name and the kProbes-th slot is not
+  // used (its name is the undefined value), the hash table is guaranteed
+  // not to contain the property.  This holds even if some slots represent
+  // deleted properties (their names are the null value).
+  for (int i = 0; i < kInlinedProbes; i++) {
+    // Compute the masked index: (hash + i + i * i) & mask.
+    Register index = r0;
+    // Capacity is smi 2^n.
+    __ mov(index, FieldOperand(properties, kCapacityOffset));
+    __ dec(index);
+    __ and_(index,
+            Immediate(Smi::FromInt(name->Hash() +
+                                   StringDictionary::GetProbeOffset(i))));
+
+    // Scale the index by multiplying by the entry size.
+    ASSERT(StringDictionary::kEntrySize == 3);
+    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
+    Register entity_name = r0;
+    // Having undefined at this place means the name is not contained.
+    ASSERT_EQ(kSmiTagSize, 1);
+    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
+                                kElementsStartOffset - kHeapObjectTag));
+    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
+    __ j(equal, done);
+
+    // Stop if found the property.
+    __ cmp(entity_name, Handle<String>(name));
+    __ j(equal, miss);
+
+    // Check if the entry name is not a symbol.
+    __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
+    __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
+              kIsSymbolMask);
+    __ j(zero, miss);
+  }
+
+  StringDictionaryLookupStub stub(properties,
+                                  r0,
+                                  r0,
+                                  StringDictionaryLookupStub::NEGATIVE_LOOKUP);
+  __ push(Immediate(Handle<Object>(name)));
+  __ push(Immediate(name->Hash()));
+  __ CallStub(&stub);
+  __ test(r0, r0);
+  __ j(not_zero, miss);
+  __ jmp(done);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup(
     MacroAssembler* masm,
     Label* miss,
     Label* done,
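The inlined probes above implement the usual open-addressing lookup over a power-of-two table. In plain C++ the same check reads roughly as follows (sketch only; KeyAt, Capacity and GetProbeOffset are the existing StringDictionary/HashTable helpers, the surrounding function and its use of kInlinedProbes are hypothetical glue):

  static bool MayContain(StringDictionary* dict, String* name) {
    uint32_t mask = dict->Capacity() - 1;  // capacity is a power of two
    for (int i = 0; i < kInlinedProbes; i++) {
      uint32_t index =
          (name->Hash() + StringDictionary::GetProbeOffset(i)) & mask;
      Object* key = dict->KeyAt(index);       // entries are 3 words apart
      if (key->IsUndefined()) return false;   // free slot: name is absent
      if (key == name) return true;           // found the property
    }
    return true;  // inconclusive: fall back to the full lookup stub
  }
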
@@ -6749,6 +6835,13 @@
   { ebx, edx, ecx, EMIT_REMEMBERED_SET},
   // KeyedStoreStubCompiler::GenerateStoreFastElement.
   { edi, edx, ecx, EMIT_REMEMBERED_SET},
+  // ElementsTransitionGenerator::GenerateSmiOnlyToObject
+  // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+  // and ElementsTransitionGenerator::GenerateDoubleToObject
+  { edx, ebx, edi, EMIT_REMEMBERED_SET},
+  // ElementsTransitionGenerator::GenerateDoubleToObject
+  { eax, edx, esi, EMIT_REMEMBERED_SET},
+  { edx, eax, edi, EMIT_REMEMBERED_SET},
   // Null termination.
   { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
 };
@@ -6991,7 +7084,6 @@
   // Fall through when we need to inform the incremental marker.
 }
 
-
 #undef __
 
 } }  // namespace v8::internal
diff --git a/src/ia32/code-stubs-ia32.h b/src/ia32/code-stubs-ia32.h
index 2a7d316..8775344 100644
--- a/src/ia32/code-stubs-ia32.h
+++ b/src/ia32/code-stubs-ia32.h
@@ -421,7 +421,16 @@
 
   void Generate(MacroAssembler* masm);
 
-  MUST_USE_RESULT static MaybeObject* GenerateNegativeLookup(
+  static void GenerateNegativeLookup(MacroAssembler* masm,
+                                     Label* miss,
+                                     Label* done,
+                                     Register properties,
+                                     Handle<String> name,
+                                     Register r0);
+
+  // TODO(kmillikin): Eliminate this function when the stub cache is fully
+  // handlified.
+  MUST_USE_RESULT static MaybeObject* TryGenerateNegativeLookup(
       MacroAssembler* masm,
       Label* miss,
       Label* done,
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index f901b6f..7dc302b 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -30,6 +30,7 @@
 #if defined(V8_TARGET_ARCH_IA32)
 
 #include "codegen.h"
+#include "macro-assembler.h"
 
 namespace v8 {
 namespace internal {
@@ -265,6 +266,263 @@
 
 #undef __
 
+// -------------------------------------------------------------------------
+// Code generators
+
+#define __ ACCESS_MASM(masm)
+
+void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- eax    : value
+  //  -- ebx    : target map
+  //  -- ecx    : key
+  //  -- edx    : receiver
+  //  -- esp[0] : return address
+  // -----------------------------------
+  // Set transitioned map.
+  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
+  __ RecordWriteField(edx,
+                      HeapObject::kMapOffset,
+                      ebx,
+                      edi,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+}
+
+
+void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+    MacroAssembler* masm, Label* fail) {
+  // ----------- S t a t e -------------
+  //  -- eax    : value
+  //  -- ebx    : target map
+  //  -- ecx    : key
+  //  -- edx    : receiver
+  //  -- esp[0] : return address
+  // -----------------------------------
+  Label loop, entry, convert_hole, gc_required;
+  __ push(eax);
+  __ push(ebx);
+
+  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
+  __ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));
+
+  // Allocate new FixedDoubleArray.
+  // edx: receiver
+  // edi: length of source FixedArray (smi-tagged)
+  __ lea(esi, Operand(edi, times_4, FixedDoubleArray::kHeaderSize));
+  __ AllocateInNewSpace(esi, eax, ebx, no_reg, &gc_required, TAG_OBJECT);
+
+  // eax: destination FixedDoubleArray
+  // edi: number of elements
+  // edx: receiver
+  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
+         Immediate(masm->isolate()->factory()->fixed_double_array_map()));
+  __ mov(FieldOperand(eax, FixedDoubleArray::kLengthOffset), edi);
+  __ mov(esi, FieldOperand(edx, JSObject::kElementsOffset));
+  // Replace receiver's backing store with newly created FixedDoubleArray.
+  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
+  __ mov(ebx, eax);
+  __ RecordWriteField(edx,
+                      JSObject::kElementsOffset,
+                      ebx,
+                      edi,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+
+  __ mov(edi, FieldOperand(esi, FixedArray::kLengthOffset));
+
+  // Prepare for conversion loop.
+  ExternalReference canonical_the_hole_nan_reference =
+      ExternalReference::address_of_the_hole_nan();
+  XMMRegister the_hole_nan = xmm1;
+  if (CpuFeatures::IsSupported(SSE2)) {
+    CpuFeatures::Scope use_sse2(SSE2);
+    __ movdbl(the_hole_nan,
+              Operand::StaticVariable(canonical_the_hole_nan_reference));
+  }
+  __ jmp(&entry);
+
+  // Call into runtime if GC is required.
+  __ bind(&gc_required);
+  // Restore registers before jumping into runtime.
+  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+  __ pop(ebx);
+  __ pop(eax);
+  __ jmp(fail);
+
+  // Convert and copy elements
+  // esi: source FixedArray
+  // edi: number of elements to convert/copy
+  __ bind(&loop);
+  __ sub(edi, Immediate(Smi::FromInt(1)));
+  __ mov(ebx, FieldOperand(esi, edi, times_2, FixedArray::kHeaderSize));
+  // ebx: current element from source
+  // edi: index of current element
+  __ JumpIfNotSmi(ebx, &convert_hole);
+
+  // Normal smi, convert it to double and store.
+  __ SmiUntag(ebx);
+  if (CpuFeatures::IsSupported(SSE2)) {
+    CpuFeatures::Scope fscope(SSE2);
+    __ cvtsi2sd(xmm0, ebx);
+    __ movdbl(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
+              xmm0);
+  } else {
+    __ push(ebx);
+    __ fild_s(Operand(esp, 0));
+    __ pop(ebx);
+    __ fstp_d(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize));
+  }
+  __ jmp(&entry);
+
+  // Found hole, store hole_nan_as_double instead.
+  __ bind(&convert_hole);
+  if (CpuFeatures::IsSupported(SSE2)) {
+    CpuFeatures::Scope use_sse2(SSE2);
+    __ movdbl(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
+              the_hole_nan);
+  } else {
+    __ fld_d(Operand::StaticVariable(canonical_the_hole_nan_reference));
+    __ fstp_d(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize));
+  }
+
+  __ bind(&entry);
+  __ test(edi, edi);
+  __ j(not_zero, &loop);
+
+  __ pop(ebx);
+  __ pop(eax);
+  // eax: value
+  // ebx: target map
+  // Set transitioned map.
+  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
+  __ RecordWriteField(edx,
+                      HeapObject::kMapOffset,
+                      ebx,
+                      edi,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  // Restore esi.
+  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+}
+
+
+void ElementsTransitionGenerator::GenerateDoubleToObject(
+    MacroAssembler* masm, Label* fail) {
+  // ----------- S t a t e -------------
+  //  -- eax    : value
+  //  -- ebx    : target map
+  //  -- ecx    : key
+  //  -- edx    : receiver
+  //  -- esp[0] : return address
+  // -----------------------------------
+  Label loop, entry, convert_hole, gc_required;
+  __ push(eax);
+  __ push(edx);
+  __ push(ebx);
+
+  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
+  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
+
+  // Allocate new FixedArray.
+  // ebx: length of source FixedDoubleArray (smi-tagged)
+  __ lea(edi, Operand(ebx, times_2, FixedArray::kHeaderSize));
+  __ AllocateInNewSpace(edi, eax, esi, no_reg, &gc_required, TAG_OBJECT);
+
+  // eax: destination FixedArray
+  // ebx: number of elements
+  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
+         Immediate(masm->isolate()->factory()->fixed_array_map()));
+  __ mov(FieldOperand(eax, FixedArray::kLengthOffset), ebx);
+  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
+
+  __ jmp(&entry);
+
+  // Call into runtime if GC is required.
+  __ bind(&gc_required);
+  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+  __ pop(ebx);
+  __ pop(edx);
+  __ pop(eax);
+  __ jmp(fail);
+
+  // Box doubles into heap numbers.
+  // edi: source FixedDoubleArray
+  // eax: destination FixedArray
+  __ bind(&loop);
+  __ sub(ebx, Immediate(Smi::FromInt(1)));
+  // ebx: index of current element (smi-tagged)
+  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
+  __ cmp(FieldOperand(edi, ebx, times_4, offset), Immediate(kHoleNanUpper32));
+  __ j(equal, &convert_hole);
+
+  // Non-hole double, copy value into a heap number.
+  __ AllocateHeapNumber(edx, esi, no_reg, &gc_required);
+  // edx: new heap number
+  if (CpuFeatures::IsSupported(SSE2)) {
+    CpuFeatures::Scope fscope(SSE2);
+    __ movdbl(xmm0,
+              FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize));
+    __ movdbl(FieldOperand(edx, HeapNumber::kValueOffset), xmm0);
+  } else {
+    __ mov(esi, FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize));
+    __ mov(FieldOperand(edx, HeapNumber::kValueOffset), esi);
+    __ mov(esi, FieldOperand(edi, ebx, times_4, offset));
+    __ mov(FieldOperand(edx, HeapNumber::kValueOffset + kPointerSize), esi);
+  }
+  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), edx);
+  __ mov(esi, ebx);
+  __ RecordWriteArray(eax,
+                      edx,
+                      esi,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  __ jmp(&entry, Label::kNear);
+
+  // Replace the-hole NaN with the-hole pointer.
+  __ bind(&convert_hole);
+  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
+         masm->isolate()->factory()->the_hole_value());
+
+  __ bind(&entry);
+  __ test(ebx, ebx);
+  __ j(not_zero, &loop);
+
+  __ pop(ebx);
+  __ pop(edx);
+  // ebx: target map
+  // edx: receiver
+  // Set transitioned map.
+  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
+  __ RecordWriteField(edx,
+                      HeapObject::kMapOffset,
+                      ebx,
+                      edi,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  // Replace receiver's backing store with newly created and filled FixedArray.
+  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
+  __ RecordWriteField(edx,
+                      JSObject::kElementsOffset,
+                      eax,
+                      edi,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+
+  // Restore registers.
+  __ pop(eax);
+  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+}
+
+#undef __
+
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_IA32
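At the C++ level, the smi-only-to-double transition emitted above amounts to reboxing each element. A sketch (illustrative only, using the existing FixedArray/FixedDoubleArray accessors; the emitted assembly additionally handles allocation failure and the non-SSE2 path):

  void ConvertSmiOnlyToDouble(FixedArray* old_elements,
                              FixedDoubleArray* new_elements,
                              int length) {
    for (int i = length - 1; i >= 0; i--) {
      Object* value = old_elements->get(i);
      if (value->IsSmi()) {
        new_elements->set(i, static_cast<double>(Smi::cast(value)->value()));
      } else {
        // Only holes can occur in a FAST_SMI_ONLY_ELEMENTS backing store;
        // they are stored as the canonical hole NaN in the double array.
        new_elements->set_the_hole(i);
      }
    }
  }
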
diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc
index 02cc4eb..db6c16b 100644
--- a/src/ia32/deoptimizer-ia32.cc
+++ b/src/ia32/deoptimizer-ia32.cc
@@ -258,16 +258,13 @@
   Assembler::set_target_address_at(call_target_address,
                                    replacement_code->entry());
 
-  RelocInfo rinfo(call_target_address,
-                  RelocInfo::CODE_TARGET,
-                  0,
-                  unoptimized_code);
-  unoptimized_code->GetHeap()->incremental_marking()->RecordWriteIntoCode(
-      unoptimized_code, &rinfo, replacement_code);
+  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
+      unoptimized_code, call_target_address, replacement_code);
 }
 
 
-void Deoptimizer::RevertStackCheckCodeAt(Address pc_after,
+void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
+                                         Address pc_after,
                                          Code* check_code,
                                          Code* replacement_code) {
   Address call_target_address = pc_after - kIntSize;
@@ -283,8 +280,8 @@
   Assembler::set_target_address_at(call_target_address,
                                    check_code->entry());
 
-  check_code->GetHeap()->incremental_marking()->
-      RecordCodeTargetPatch(call_target_address, check_code);
+  check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
+      unoptimized_code, call_target_address, check_code);
 }
 
 
diff --git a/src/ia32/disasm-ia32.cc b/src/ia32/disasm-ia32.cc
index 04edc5f..da22390 100644
--- a/src/ia32/disasm-ia32.cc
+++ b/src/ia32/disasm-ia32.cc
@@ -179,6 +179,10 @@
  public:
   InstructionTable();
   const InstructionDesc& Get(byte x) const { return instructions_[x]; }
+  static InstructionTable* get_instance() {
+    static InstructionTable table;
+    return &table;
+  }
 
  private:
   InstructionDesc instructions_[256];
@@ -259,15 +263,13 @@
 }
 
 
-static InstructionTable instruction_table;
-
-
 // The IA32 disassembler implementation.
 class DisassemblerIA32 {
  public:
   DisassemblerIA32(const NameConverter& converter,
                    bool abort_on_unimplemented = true)
       : converter_(converter),
+        instruction_table_(InstructionTable::get_instance()),
         tmp_buffer_pos_(0),
         abort_on_unimplemented_(abort_on_unimplemented) {
     tmp_buffer_[0] = '\0';
@@ -281,11 +283,11 @@
 
  private:
   const NameConverter& converter_;
+  InstructionTable* instruction_table_;
   v8::internal::EmbeddedVector<char, 128> tmp_buffer_;
   unsigned int tmp_buffer_pos_;
   bool abort_on_unimplemented_;
 
-
   enum {
     eax = 0,
     ecx = 1,
@@ -884,7 +886,7 @@
   }
   bool processed = true;  // Will be set to false if the current instruction
                           // is not in 'instructions' table.
-  const InstructionDesc& idesc = instruction_table.Get(*data);
+  const InstructionDesc& idesc = instruction_table_->Get(*data);
   switch (idesc.type) {
     case ZERO_OPERANDS_INSTR:
       AppendToBuffer(idesc.mnem);
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 33d5cab..de5dc06 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -266,7 +266,10 @@
       // constant.
       if (scope()->is_function_scope() && scope()->function() != NULL) {
         int ignored = 0;
-        EmitDeclaration(scope()->function(), CONST, NULL, &ignored);
+        VariableProxy* proxy = scope()->function();
+        ASSERT(proxy->var()->mode() == CONST ||
+               proxy->var()->mode() == CONST_HARMONY);
+        EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
       }
       VisitDeclarations(scope()->declarations());
     }
@@ -711,6 +714,8 @@
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
   Variable* variable = proxy->var();
+  bool binding_needs_init =
+      mode == CONST || mode == CONST_HARMONY || mode == LET;
   switch (variable->location()) {
     case Variable::UNALLOCATED:
       ++(*global_count);
@@ -722,7 +727,7 @@
         Comment cmnt(masm_, "[ Declaration");
         VisitForAccumulatorValue(function);
         __ mov(StackOperand(variable), result_register());
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         Comment cmnt(masm_, "[ Declaration");
         __ mov(StackOperand(variable),
                Immediate(isolate()->factory()->the_hole_value()));
@@ -754,7 +759,7 @@
                                   EMIT_REMEMBERED_SET,
                                   OMIT_SMI_CHECK);
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         Comment cmnt(masm_, "[ Declaration");
         __ mov(ContextOperand(esi, variable->index()),
                Immediate(isolate()->factory()->the_hole_value()));
@@ -767,9 +772,13 @@
       Comment cmnt(masm_, "[ Declaration");
       __ push(esi);
       __ push(Immediate(variable->name()));
-      // Declaration nodes are always introduced in one of three modes.
-      ASSERT(mode == VAR || mode == CONST || mode == LET);
-      PropertyAttributes attr = (mode == CONST) ? READ_ONLY : NONE;
+      // Declaration nodes are always introduced in one of four modes.
+      ASSERT(mode == VAR ||
+             mode == CONST ||
+             mode == CONST_HARMONY ||
+             mode == LET);
+      PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
+          ? READ_ONLY : NONE;
       __ push(Immediate(Smi::FromInt(attr)));
       // Push initial value, if any.
       // Note: For variables we must not push an initial value (such as
@@ -778,7 +787,7 @@
       increment_stack_height(3);
       if (function != NULL) {
         VisitForStackValue(function);
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         __ push(Immediate(isolate()->factory()->the_hole_value()));
         increment_stack_height();
       } else {
@@ -920,11 +929,17 @@
   __ push(eax);
   increment_stack_height();
 
+  // Check for proxies.
+  Label call_runtime;
+  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
+  __ j(below_equal, &call_runtime);
+
   // Check cache validity in generated code. This is a fast case for
   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
   // guarantee cache validity, call the runtime system to check cache
   // validity or get the property names in a fixed array.
-  Label next, call_runtime;
+  Label next;
   __ mov(ecx, eax);
   __ bind(&next);
 
@@ -995,9 +1010,17 @@
   __ jmp(&loop);
 
   // We got a fixed array in register eax. Iterate through that.
+  Label non_proxy;
   __ bind(&fixed_array);
-  __ push(Immediate(Smi::FromInt(0)));  // Map (0) - force slow check.
-  __ push(eax);
+  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
+  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
+  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
+  __ j(above, &non_proxy);
+  __ mov(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
+  __ bind(&non_proxy);
+  __ push(ebx);  // Smi
+  __ push(eax);  // Array
   __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
   __ push(eax);  // Fixed array length (as smi).
   __ push(Immediate(Smi::FromInt(0)));  // Initial index.
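For orientation, the for-in state pushed above lays out the stack like this while the loop body runs (offsets as read back further down in this function):

  //  -- esp[0 * kPointerSize] : current index (smi, starts at 0)
  //  -- esp[1 * kPointerSize] : length of the key array (smi)
  //  -- esp[2 * kPointerSize] : FixedArray of keys
  //  -- esp[3 * kPointerSize] : map of the enumerable, or a smi
  //                             (1 = slow check needed, 0 = JS proxy)
  //  -- esp[4 * kPointerSize] : the enumerated object itself
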
@@ -1014,17 +1037,23 @@
   __ mov(ebx, Operand(esp, 2 * kPointerSize));
   __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
 
-  // Get the expected map from the stack or a zero map in the
+  // Get the expected map from the stack or a smi in the
   // permanent slow case into register edx.
   __ mov(edx, Operand(esp, 3 * kPointerSize));
 
   // Check if the expected map still matches that of the enumerable.
-  // If not, we have to filter the key.
+  // If not, we may have to filter the key.
   Label update_each;
   __ mov(ecx, Operand(esp, 4 * kPointerSize));
   __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
   __ j(equal, &update_each, Label::kNear);
 
+  // For proxies, no filtering is done.
+  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
+  ASSERT(Smi::FromInt(0) == 0);
+  __ test(edx, edx);
+  __ j(zero, &update_each);
+
   // Convert the entry to a string or null if it isn't a property
   // anymore. If the property has been removed while iterating, we
   // just skip it.
@@ -1079,7 +1108,7 @@
       !pretenure &&
       scope()->is_function_scope() &&
       info->num_literals() == 0) {
-    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
+    FastNewClosureStub stub(info->strict_mode_flag());
     __ push(Immediate(info));
     __ CallStub(&stub);
   } else {
@@ -1109,7 +1138,7 @@
   Scope* s = scope();
   while (s != NULL) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
+      if (s->calls_non_strict_eval()) {
         // Check that extension is NULL.
         __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
@@ -1123,7 +1152,7 @@
     // If no outer scope calls eval, we do not need to check more
     // context extensions.  If we have reached an eval scope, we check
     // all extensions from this point.
-    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
     s = s->outer_scope();
   }
 
@@ -1168,7 +1197,7 @@
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
+      if (s->calls_non_strict_eval()) {
         // Check that extension is NULL.
         __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
@@ -1206,12 +1235,13 @@
     Variable* local = var->local_if_not_shadowed();
     __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
     if (local->mode() == CONST ||
+        local->mode() == CONST_HARMONY ||
         local->mode() == LET) {
       __ cmp(eax, isolate()->factory()->the_hole_value());
       __ j(not_equal, done);
       if (local->mode() == CONST) {
         __ mov(eax, isolate()->factory()->undefined_value());
-      } else {  // LET
+      } else {  // LET || CONST_HARMONY
         __ push(Immediate(var->name()));
         __ CallRuntime(Runtime::kThrowReferenceError, 1);
       }
@@ -1247,7 +1277,7 @@
       Comment cmnt(masm_, var->IsContextSlot()
                               ? "Context variable"
                               : "Stack variable");
-      if (var->mode() != LET && var->mode() != CONST) {
+      if (!var->binding_needs_init()) {
         context()->Plug(var);
       } else {
         // Let and const need a read barrier.
@@ -1255,10 +1285,14 @@
         GetVar(eax, var);
         __ cmp(eax, isolate()->factory()->the_hole_value());
         __ j(not_equal, &done, Label::kNear);
-        if (var->mode() == LET) {
+        if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+          // Throw a reference error when using an uninitialized let/const
+          // binding in harmony mode.
           __ push(Immediate(var->name()));
           __ CallRuntime(Runtime::kThrowReferenceError, 1);
-        } else {  // CONST
+        } else {
+          // Uninitialized const bindings outside of harmony mode are unholed.
+          ASSERT(var->mode() == CONST);
           __ mov(eax, isolate()->factory()->undefined_value());
         }
         __ bind(&done);
@@ -1448,12 +1482,18 @@
 
   ZoneList<Expression*>* subexprs = expr->values();
   int length = subexprs->length();
+  Handle<FixedArray> constant_elements = expr->constant_elements();
+  ASSERT_EQ(2, constant_elements->length());
+  ElementsKind constant_elements_kind =
+      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+  Handle<FixedArrayBase> constant_elements_values(
+      FixedArrayBase::cast(constant_elements->get(1)));
 
   __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
   __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
   __ push(Immediate(Smi::FromInt(expr->literal_index())));
-  __ push(Immediate(expr->constant_elements()));
-  if (expr->constant_elements()->map() ==
+  __ push(Immediate(constant_elements));
+  if (constant_elements_values->map() ==
       isolate()->heap()->fixed_cow_array_map()) {
     ASSERT(expr->depth() == 1);
     FastCloneShallowArrayStub stub(
@@ -1465,8 +1505,14 @@
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
+    ASSERT(constant_elements_kind == FAST_ELEMENTS ||
+           constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+           FLAG_smi_only_arrays);
+    FastCloneShallowArrayStub::Mode mode =
+        constant_elements_kind == FAST_DOUBLE_ELEMENTS
+        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, length);
     __ CallStub(&stub);
   }
 
@@ -1492,22 +1538,61 @@
 
     // Store the subexpression value in the array's elements.
     __ mov(ebx, Operand(esp, 0));  // Copy of array literal.
+    __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
     __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
     int offset = FixedArray::kHeaderSize + (i * kPointerSize);
-    __ mov(FieldOperand(ebx, offset), result_register());
 
-    Label no_map_change;
-    __ JumpIfSmi(result_register(), &no_map_change);
+    Label element_done;
+    Label double_elements;
+    Label smi_element;
+    Label slow_elements;
+    Label fast_elements;
+    __ CheckFastElements(edi, &double_elements);
+
+    // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+    __ JumpIfSmi(result_register(), &smi_element);
+    __ CheckFastSmiOnlyElements(edi, &fast_elements, Label::kNear);
+
+    // Storing into the array literal requires an elements transition. Call into
+    // the runtime.
+    __ bind(&slow_elements);
+    __ push(Operand(esp, 0));  // Copy of array literal.
+    __ push(Immediate(Smi::FromInt(i)));
+    __ push(result_register());
+    __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
+    __ push(Immediate(Smi::FromInt(strict_mode_flag())));  // Strict mode.
+    __ CallRuntime(Runtime::kSetProperty, 5);
+    __ jmp(&element_done);
+
+    // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+    __ bind(&double_elements);
+    __ mov(ecx, Immediate(Smi::FromInt(i)));
+    __ StoreNumberToDoubleElements(result_register(),
+                                   ebx,
+                                   ecx,
+                                   edx,
+                                   xmm0,
+                                   &slow_elements,
+                                   false);
+    __ jmp(&element_done);
+
+    // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+    __ bind(&fast_elements);
+    __ mov(FieldOperand(ebx, offset), result_register());
     // Update the write barrier for the array store.
     __ RecordWriteField(ebx, offset, result_register(), ecx,
                         kDontSaveFPRegs,
                         EMIT_REMEMBERED_SET,
                         OMIT_SMI_CHECK);
-    __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
-    __ CheckFastSmiOnlyElements(edi, &no_map_change, Label::kNear);
-    __ push(Operand(esp, 0));
-    __ CallRuntime(Runtime::kNonSmiElementStored, 1);
-    __ bind(&no_map_change);
+    __ jmp(&element_done);
+
+    // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
+    // FAST_ELEMENTS, and value is Smi.
+    __ bind(&smi_element);
+    __ mov(FieldOperand(ebx, offset), result_register());
+    // Fall through
+
+    __ bind(&element_done);
 
     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }
@@ -1890,8 +1975,9 @@
       }
     }
 
-  } else if (var->mode() != CONST) {
-    // Assignment to var or initializing assignment to let.
+  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
+    // Assignment to var or initializing assignment to let/const
+    // in harmony mode.
     if (var->IsStackAllocated() || var->IsContextSlot()) {
       MemOperand location = VarOperand(var, ecx);
       if (FLAG_debug_code && op == Token::INIT_LET) {
@@ -2104,7 +2190,7 @@
     flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
   }
   CallFunctionStub stub(arg_count, flags);
-  __ CallStub(&stub);
+  __ CallStub(&stub, expr->id());
   if (record_call_target) {
     // There is a one element cache in the instruction stream.
 #ifdef DEBUG
@@ -2781,9 +2867,10 @@
   __ bind(&heapnumber_allocated);
 
   __ PrepareCallCFunction(1, ebx);
-  __ mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
-  __ CallCFunction(ExternalReference::random_uint32_function(isolate()),
-                   1);
+  __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_INDEX));
+  __ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset));
+  __ mov(Operand(esp, 0), eax);
+  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
 
   // Convert 32 random bits in eax to 0.(32 random bits) in a double
   // by computing:
@@ -4147,33 +4234,25 @@
         case Token::EQ_STRICT:
         case Token::EQ:
           cc = equal;
-          __ pop(edx);
           break;
         case Token::LT:
           cc = less;
-          __ pop(edx);
           break;
         case Token::GT:
-          // Reverse left and right sizes to obtain ECMA-262 conversion order.
-          cc = less;
-          __ mov(edx, result_register());
-          __ pop(eax);
+          cc = greater;
          break;
         case Token::LTE:
-          // Reverse left and right sizes to obtain ECMA-262 conversion order.
-          cc = greater_equal;
-          __ mov(edx, result_register());
-          __ pop(eax);
+          cc = less_equal;
           break;
         case Token::GTE:
           cc = greater_equal;
-          __ pop(edx);
           break;
         case Token::IN:
         case Token::INSTANCEOF:
         default:
           UNREACHABLE();
       }
+      __ pop(edx);
       decrement_stack_height();
 
       bool inline_smi_code = ShouldInlineSmiCase(op);
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index 8a98b17..1168932 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -860,10 +860,10 @@
 
 // The generated code does not accept smi keys.
 // The generated code falls through if both probes miss.
-static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
-                                          int argc,
-                                          Code::Kind kind,
-                                          Code::ExtraICState extra_ic_state) {
+void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
+                                               int argc,
+                                               Code::Kind kind,
+                                               Code::ExtraICState extra_state) {
   // ----------- S t a t e -------------
   //  -- ecx                 : name
   //  -- edx                 : receiver
@@ -873,11 +873,11 @@
   // Probe the stub cache.
   Code::Flags flags = Code::ComputeFlags(kind,
                                          MONOMORPHIC,
-                                         extra_ic_state,
+                                         extra_state,
                                          NORMAL,
                                          argc);
-  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
-                                                  eax);
+  Isolate* isolate = masm->isolate();
+  isolate->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, eax);
 
   // If the stub cache probing failed, the receiver might be a value.
   // For value objects, we use the map of the prototype objects for
@@ -903,9 +903,9 @@
 
   // Check for boolean.
   __ bind(&non_string);
-  __ cmp(edx, FACTORY->true_value());
+  __ cmp(edx, isolate->factory()->true_value());
   __ j(equal, &boolean);
-  __ cmp(edx, FACTORY->false_value());
+  __ cmp(edx, isolate->factory()->false_value());
   __ j(not_equal, &miss);
   __ bind(&boolean);
   StubCompiler::GenerateLoadGlobalFunctionPrototype(
@@ -913,8 +913,7 @@
 
   // Probe the stub cache for the value object.
   __ bind(&probe);
-  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
-                                                  no_reg);
+  isolate->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
   __ bind(&miss);
 }
 
@@ -944,8 +943,9 @@
                     NullCallWrapper(), CALL_AS_METHOD);
 }
 
+
 // The generated code falls through if the call should be handled by runtime.
-static void GenerateCallNormal(MacroAssembler* masm, int argc) {
+void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
   // ----------- S t a t e -------------
   //  -- ecx                 : name
   //  -- esp[0]              : return address
@@ -969,10 +969,10 @@
 }
 
 
-static void GenerateCallMiss(MacroAssembler* masm,
-                             int argc,
-                             IC::UtilityId id,
-                             Code::ExtraICState extra_ic_state) {
+void CallICBase::GenerateMiss(MacroAssembler* masm,
+                              int argc,
+                              IC::UtilityId id,
+                              Code::ExtraICState extra_state) {
   // ----------- S t a t e -------------
   //  -- ecx                 : name
   //  -- esp[0]              : return address
@@ -1029,7 +1029,7 @@
   }
 
   // Invoke the function.
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   ParameterCount actual(argc);
@@ -1043,7 +1043,7 @@
 
 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                  int argc,
-                                 Code::ExtraICState extra_ic_state) {
+                                 Code::ExtraICState extra_state) {
   // ----------- S t a t e -------------
   //  -- ecx                 : name
   //  -- esp[0]              : return address
@@ -1054,38 +1054,10 @@
 
   // Get the receiver of the function from the stack; 1 ~ return address.
   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
-  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
+  CallICBase::GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC,
+                                            extra_state);
 
-  GenerateMiss(masm, argc, extra_ic_state);
-}
-
-
-void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
-  // ----------- S t a t e -------------
-  //  -- ecx                 : name
-  //  -- esp[0]              : return address
-  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
-  //  -- ...
-  //  -- esp[(argc + 1) * 4] : receiver
-  // -----------------------------------
-
-  GenerateCallNormal(masm, argc);
-  GenerateMiss(masm, argc, Code::kNoExtraICState);
-}
-
-
-void CallIC::GenerateMiss(MacroAssembler* masm,
-                          int argc,
-                          Code::ExtraICState extra_ic_state) {
-  // ----------- S t a t e -------------
-  //  -- ecx                 : name
-  //  -- esp[0]              : return address
-  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
-  //  -- ...
-  //  -- esp[(argc + 1) * 4] : receiver
-  // -----------------------------------
-
-  GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state);
+  GenerateMiss(masm, argc, extra_state);
 }
 
 
@@ -1187,10 +1159,8 @@
 
   __ bind(&lookup_monomorphic_cache);
   __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
-  GenerateMonomorphicCacheProbe(masm,
-                                argc,
-                                Code::KEYED_CALL_IC,
-                                Code::kNoExtraICState);
+  CallICBase::GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC,
+                                            Code::kNoExtraICState);
   // Fall through on miss.
 
   __ bind(&slow_call);
@@ -1253,25 +1223,12 @@
   __ JumpIfSmi(ecx, &miss);
   Condition cond = masm->IsObjectStringType(ecx, eax, eax);
   __ j(NegateCondition(cond), &miss);
-  GenerateCallNormal(masm, argc);
+  CallICBase::GenerateNormal(masm, argc);
   __ bind(&miss);
   GenerateMiss(masm, argc);
 }
 
 
-void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
-  // ----------- S t a t e -------------
-  //  -- ecx                 : name
-  //  -- esp[0]              : return address
-  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
-  //  -- ...
-  //  -- esp[(argc + 1) * 4] : receiver
-  // -----------------------------------
-
-  GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState);
-}
-
-
 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax    : receiver
@@ -1580,6 +1537,51 @@
 }
 
 
+void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- ebx    : target map
+  //  -- edx    : receiver
+  //  -- esp[0] : return address
+  // -----------------------------------
+  // Must return the modified receiver in eax.
+  if (!FLAG_trace_elements_transitions) {
+    Label fail;
+    ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+    __ mov(eax, edx);
+    __ Ret();
+    __ bind(&fail);
+  }
+
+  __ pop(ebx);
+  __ push(edx);
+  __ push(ebx);  // return address
+  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
+}
+
+
+void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- ebx    : target map
+  //  -- edx    : receiver
+  //  -- esp[0] : return address
+  // -----------------------------------
+  // Must return the modified receiver in eax.
+  if (!FLAG_trace_elements_transitions) {
+    Label fail;
+    ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
+    __ mov(eax, edx);
+    __ Ret();
+    __ bind(&fail);
+  }
+
+  __ pop(ebx);
+  __ push(edx);
+  __ push(ebx);  // return address
+  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
+}
+
+
 #undef __
 
 
@@ -1591,11 +1593,9 @@
     case Token::LT:
       return less;
     case Token::GT:
-      // Reverse left and right operands to obtain ECMA-262 conversion order.
-      return less;
+      return greater;
     case Token::LTE:
-      // Reverse left and right operands to obtain ECMA-262 conversion order.
-      return greater_equal;
+      return less_equal;
     case Token::GTE:
       return greater_equal;
     default:
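The hunk above drops the operand-swapping for Token::GT and Token::LTE and maps them to the straight conditions greater and less_equal, in line with the ChangeLog note about matching ES5 rather than ES3 evaluation order: the left operand of > and <= must be evaluated before the right one, so the compare IC can no longer reverse them. A minimal standalone sketch (not V8 code; eval_a/eval_b are illustrative stand-ins for side-effecting operands) of why the order matters:

#include <cassert>
#include <vector>

int main() {
  std::vector<int> order;
  auto eval_a = [&]() { order.push_back(1); return 3; };  // left operand
  auto eval_b = [&]() { order.push_back(2); return 5; };  // right operand

  // ES5 requires "a > b" to evaluate a first, so the operands are kept in
  // order and the emitted condition is simply 'greater'.
  int left = eval_a();
  int right = eval_b();
  bool result = left > right;

  assert(!result);
  assert(order.size() == 2 && order[0] == 1 && order[1] == 2);
  return 0;
}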
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 9e1fd34..d4cbbce 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -355,6 +355,12 @@
 }
 
 
+double LCodeGen::ToDouble(LConstantOperand* op) const {
+  Handle<Object> value = chunk_->LookupLiteral(op);
+  return value->Number();
+}
+
+
 Immediate LCodeGen::ToImmediate(LOperand* op) {
   LConstantOperand* const_op = LConstantOperand::cast(op);
   Handle<Object> literal = chunk_->LookupLiteral(const_op);
@@ -1574,32 +1580,40 @@
 }
 
 
-void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
-  if (right->IsConstantOperand()) {
-    __ cmp(ToOperand(left), ToImmediate(right));
-  } else {
-    __ cmp(ToRegister(left), ToOperand(right));
-  }
-}
-
-
 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
   LOperand* left = instr->InputAt(0);
   LOperand* right = instr->InputAt(1);
   int false_block = chunk_->LookupDestination(instr->false_block_id());
   int true_block = chunk_->LookupDestination(instr->true_block_id());
-
-  if (instr->is_double()) {
-    // Don't base result on EFLAGS when a NaN is involved. Instead
-    // jump to the false block.
-    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
-    __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
-  } else {
-    EmitCmpI(left, right);
-  }
-
   Condition cc = TokenToCondition(instr->op(), instr->is_double());
-  EmitBranch(true_block, false_block, cc);
+
+  if (left->IsConstantOperand() && right->IsConstantOperand()) {
+    // We can statically evaluate the comparison.
+    double left_val = ToDouble(LConstantOperand::cast(left));
+    double right_val = ToDouble(LConstantOperand::cast(right));
+    int next_block =
+      EvalComparison(instr->op(), left_val, right_val) ? true_block
+                                                       : false_block;
+    EmitGoto(next_block);
+  } else {
+    if (instr->is_double()) {
+      // Don't base result on EFLAGS when a NaN is involved. Instead
+      // jump to the false block.
+      __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
+      __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
+    } else {
+      if (right->IsConstantOperand()) {
+        __ cmp(ToRegister(left), ToImmediate(right));
+      } else if (left->IsConstantOperand()) {
+        __ cmp(ToOperand(right), ToImmediate(left));
+        // We transposed the operands. Reverse the condition.
+        cc = ReverseCondition(cc);
+      } else {
+        __ cmp(ToRegister(left), ToOperand(right));
+      }
+    }
+    EmitBranch(true_block, false_block, cc);
+  }
 }
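When both inputs of DoCmpIDAndBranch are constants, the comparison is folded at compile time and only an unconditional goto to the selected block is emitted. A compile-and-run sketch of that folding (not V8 code; EvalComparisonSketch is a hypothetical stand-in for the EvalComparison helper used above):

#include <cassert>

enum class Op { LT, GT, LTE, GTE };

// Stand-in for EvalComparison: evaluate the comparison on doubles statically.
static bool EvalComparisonSketch(Op op, double left, double right) {
  switch (op) {
    case Op::LT:  return left < right;
    case Op::GT:  return left > right;
    case Op::LTE: return left <= right;
    case Op::GTE: return left >= right;
  }
  return false;
}

int main() {
  const int true_block = 1;
  const int false_block = 2;
  // Both operands are constants, so the branch collapses to a single goto.
  int next_block =
      EvalComparisonSketch(Op::LT, 3.0, 5.0) ? true_block : false_block;
  assert(next_block == true_block);
  return 0;
}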
 
 
@@ -2029,9 +2043,6 @@
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 
   Condition condition = ComputeCompareCondition(op);
-  if (op == Token::GT || op == Token::LTE) {
-    condition = ReverseCondition(condition);
-  }
   Label true_value, done;
   __ test(eax, Operand(eax));
   __ j(condition, &true_value, Label::kNear);
@@ -2116,12 +2127,18 @@
   __ mov(FieldOperand(object, offset), value);
 
   // Cells are always in the remembered set.
-  __ RecordWriteField(object,
-                      offset,
-                      value,
-                      address,
-                      kSaveFPRegs,
-                      OMIT_REMEMBERED_SET);
+  if (instr->hydrogen()->NeedsWriteBarrier()) {
+    HType type = instr->hydrogen()->value()->type();
+    SmiCheck check_needed =
+        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
+    __ RecordWriteField(object,
+                        offset,
+                        value,
+                        address,
+                        kSaveFPRegs,
+                        OMIT_REMEMBERED_SET,
+                        check_needed);
+  }
 }
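The SmiCheck selection above (repeated in the stores further down) lets the write barrier skip its inline smi check whenever the hydrogen type of the stored value is statically known to be a heap object, since such a value can never be a smi. A tiny sketch of just that selection (not V8 code; HTypeSketch and SelectSmiCheck are hypothetical stand-ins):

#include <cassert>

enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };

// Hypothetical stand-in for the hydrogen HType queried above.
struct HTypeSketch {
  bool is_heap_object;
  bool IsHeapObject() const { return is_heap_object; }
};

static SmiCheck SelectSmiCheck(const HTypeSketch& type) {
  return type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
}

int main() {
  assert(SelectSmiCheck(HTypeSketch{true}) == OMIT_SMI_CHECK);    // cannot be a smi
  assert(SelectSmiCheck(HTypeSketch{false}) == INLINE_SMI_CHECK); // might be a smi
  return 0;
}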
 
 
@@ -2149,10 +2166,19 @@
   Register context = ToRegister(instr->context());
   Register value = ToRegister(instr->value());
   __ mov(ContextOperand(context, instr->slot_index()), value);
-  if (instr->needs_write_barrier()) {
+  if (instr->hydrogen()->NeedsWriteBarrier()) {
+    HType type = instr->hydrogen()->value()->type();
+    SmiCheck check_needed =
+        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
     Register temp = ToRegister(instr->TempAt(0));
     int offset = Context::SlotOffset(instr->slot_index());
-    __ RecordWriteContextSlot(context, offset, value, temp, kSaveFPRegs);
+    __ RecordWriteContextSlot(context,
+                              offset,
+                              value,
+                              temp,
+                              kSaveFPRegs,
+                              EMIT_REMEMBERED_SET,
+                              check_needed);
   }
 }
 
@@ -2173,7 +2199,7 @@
                                                Register object,
                                                Handle<Map> type,
                                                Handle<String> name) {
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   type->LookupInDescriptors(NULL, *name, &lookup);
   ASSERT(lookup.IsProperty() &&
          (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
@@ -2614,7 +2640,7 @@
 
 void LCodeGen::DoThisFunction(LThisFunction* instr) {
   Register result = ToRegister(instr->result());
-  __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+  LoadHeapObject(result, instr->hydrogen()->closure());
 }
 
 
@@ -3146,21 +3172,36 @@
   }
 
   // Do the store.
+  HType type = instr->hydrogen()->value()->type();
+  SmiCheck check_needed =
+      type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
   if (instr->is_in_object()) {
     __ mov(FieldOperand(object, offset), value);
-    if (instr->needs_write_barrier()) {
+    if (instr->hydrogen()->NeedsWriteBarrier()) {
       Register temp = ToRegister(instr->TempAt(0));
       // Update the write barrier for the object for in-object properties.
-      __ RecordWriteField(object, offset, value, temp, kSaveFPRegs);
+      __ RecordWriteField(object,
+                          offset,
+                          value,
+                          temp,
+                          kSaveFPRegs,
+                          EMIT_REMEMBERED_SET,
+                          check_needed);
     }
   } else {
     Register temp = ToRegister(instr->TempAt(0));
     __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
     __ mov(FieldOperand(temp, offset), value);
-    if (instr->needs_write_barrier()) {
+    if (instr->hydrogen()->NeedsWriteBarrier()) {
       // Update the write barrier for the properties array.
       // object is used as a scratch register.
-      __ RecordWriteField(temp, offset, value, object, kSaveFPRegs);
+      __ RecordWriteField(temp,
+                          offset,
+                          value,
+                          object,
+                          kSaveFPRegs,
+                          EMIT_REMEMBERED_SET,
+                          check_needed);
     }
   }
 }
@@ -3259,13 +3300,21 @@
   }
 
   if (instr->hydrogen()->NeedsWriteBarrier()) {
+    HType type = instr->hydrogen()->value()->type();
+    SmiCheck check_needed =
+        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
     // Compute address of modified element and store it into key register.
     __ lea(key,
            FieldOperand(elements,
                         key,
                         times_pointer_size,
                         FixedArray::kHeaderSize));
-    __ RecordWrite(elements, key, value, kSaveFPRegs);
+    __ RecordWrite(elements,
+                   key,
+                   value,
+                   kSaveFPRegs,
+                   EMIT_REMEMBERED_SET,
+                   check_needed);
   }
 }
 
@@ -3303,6 +3352,48 @@
 }
 
 
+void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
+  Register object_reg = ToRegister(instr->object());
+  Register new_map_reg = ToRegister(instr->new_map_reg());
+
+  Handle<Map> from_map = instr->original_map();
+  Handle<Map> to_map = instr->transitioned_map();
+  ElementsKind from_kind = from_map->elements_kind();
+  ElementsKind to_kind = to_map->elements_kind();
+
+  Label not_applicable;
+  __ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
+  __ j(not_equal, &not_applicable);
+  __ mov(new_map_reg, to_map);
+  if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+    Register object_reg = ToRegister(instr->object());
+    __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
+    // Write barrier.
+    ASSERT_NE(instr->temp_reg(), NULL);
+    __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
+                        ToRegister(instr->temp_reg()), kDontSaveFPRegs);
+  } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
+      to_kind == FAST_DOUBLE_ELEMENTS) {
+    Register fixed_object_reg = ToRegister(instr->temp_reg());
+    ASSERT(fixed_object_reg.is(edx));
+    ASSERT(new_map_reg.is(ebx));
+    __ mov(fixed_object_reg, object_reg);
+    CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
+             RelocInfo::CODE_TARGET, instr);
+  } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+    Register fixed_object_reg = ToRegister(instr->temp_reg());
+    ASSERT(fixed_object_reg.is(edx));
+    ASSERT(new_map_reg.is(ebx));
+    __ mov(fixed_object_reg, object_reg);
+    CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
+             RelocInfo::CODE_TARGET, instr);
+  } else {
+    UNREACHABLE();
+  }
+  __ bind(&not_applicable);
+}
+
+
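DoTransitionElementsKind above treats the three supported transitions differently: smi-only to fast elements only needs the map pointer rewritten (plus a write barrier on the map field), while the two transitions that change the element representation call out to the TransitionElements* builtins. A small sketch (not V8 code; SelectTransition is illustrative) of that dispatch:

#include <cassert>
#include <string>

enum ElementsKind {
  FAST_SMI_ONLY_ELEMENTS,
  FAST_ELEMENTS,
  FAST_DOUBLE_ELEMENTS
};

// Illustrative dispatch mirroring the if/else chain above.
static std::string SelectTransition(ElementsKind from, ElementsKind to) {
  if (from == FAST_SMI_ONLY_ELEMENTS && to == FAST_ELEMENTS) {
    return "in-place map change + write barrier";
  } else if (from == FAST_SMI_ONLY_ELEMENTS && to == FAST_DOUBLE_ELEMENTS) {
    return "TransitionElementsSmiToDouble builtin";
  } else if (from == FAST_DOUBLE_ELEMENTS && to == FAST_ELEMENTS) {
    return "TransitionElementsDoubleToObject builtin";
  }
  return "unreachable";
}

int main() {
  assert(SelectTransition(FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS) ==
         "in-place map change + write barrier");
  assert(SelectTransition(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS) ==
         "TransitionElementsDoubleToObject builtin");
  return 0;
}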
 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
   class DeferredStringCharCodeAt: public LDeferredCode {
    public:
@@ -4095,11 +4186,17 @@
 
 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
   ASSERT(ToRegister(instr->context()).is(esi));
+
+  Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements();
+  ASSERT_EQ(2, constant_elements->length());
+  ElementsKind constant_elements_kind =
+      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+
   // Setup the parameters to the stub/runtime call.
   __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
   __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
   __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
-  __ push(Immediate(instr->hydrogen()->constant_elements()));
+  __ push(Immediate(constant_elements));
 
   // Pick the right runtime function or stub to call.
   int length = instr->hydrogen()->length();
@@ -4115,7 +4212,9 @@
     CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
   } else {
     FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ELEMENTS;
+        constant_elements_kind == FAST_DOUBLE_ELEMENTS
+        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, length);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   }
@@ -4214,8 +4313,7 @@
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
   if (!pretenure && shared_info->num_literals() == 0) {
-    FastNewClosureStub stub(
-        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
+    FastNewClosureStub stub(shared_info->strict_mode_flag());
     __ push(Immediate(shared_info));
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   } else {
@@ -4247,12 +4345,11 @@
   Label* true_label = chunk_->GetAssemblyLabel(true_block);
   Label* false_label = chunk_->GetAssemblyLabel(false_block);
 
-  Condition final_branch_condition = EmitTypeofIs(true_label,
-                                                  false_label,
-                                                  input,
-                                                  instr->type_literal());
-
-  EmitBranch(true_block, false_block, final_branch_condition);
+  Condition final_branch_condition =
+      EmitTypeofIs(true_label, false_label, input, instr->type_literal());
+  if (final_branch_condition != no_condition) {
+    EmitBranch(true_block, false_block, final_branch_condition);
+  }
 }
 
 
@@ -4319,11 +4416,8 @@
     final_branch_condition = zero;
 
   } else {
-    final_branch_condition = not_equal;
     __ jmp(false_label);
-    // A dead branch instruction will be generated after this point.
   }
-
   return final_branch_condition;
 }
 
diff --git a/src/ia32/lithium-codegen-ia32.h b/src/ia32/lithium-codegen-ia32.h
index 6037c08..412e418 100644
--- a/src/ia32/lithium-codegen-ia32.h
+++ b/src/ia32/lithium-codegen-ia32.h
@@ -131,8 +131,8 @@
   bool is_done() const { return status_ == DONE; }
   bool is_aborted() const { return status_ == ABORTED; }
 
-  int strict_mode_flag() const {
-    return info()->is_strict_mode() ? kStrictMode : kNonStrictMode;
+  StrictModeFlag strict_mode_flag() const {
+    return info()->strict_mode_flag();
   }
   bool dynamic_frame_alignment() const { return dynamic_frame_alignment_; }
   void set_dynamic_frame_alignment(bool value) {
@@ -227,6 +227,7 @@
   Register ToRegister(int index) const;
   XMMRegister ToDoubleRegister(int index) const;
   int ToInteger32(LConstantOperand* op) const;
+  double ToDouble(LConstantOperand* op) const;
   Operand BuildFastArrayOperand(LOperand* elements_pointer,
                                 LOperand* key,
                                 ElementsKind elements_kind,
@@ -261,7 +262,6 @@
   static Condition TokenToCondition(Token::Value op, bool is_unsigned);
   void EmitGoto(int block);
   void EmitBranch(int left_block, int right_block, Condition cc);
-  void EmitCmpI(LOperand* left, LOperand* right);
   void EmitNumberUntagD(Register input,
                         XMMRegister result,
                         bool deoptimize_on_undefined,
@@ -270,8 +270,10 @@
   // Emits optimized code for typeof x == "y".  Modifies input register.
   // Returns the condition on which a final split to
   // true and false label should be made, to optimize fallthrough.
-  Condition EmitTypeofIs(Label* true_label, Label* false_label,
-                         Register input, Handle<String> type_name);
+  Condition EmitTypeofIs(Label* true_label,
+                         Label* false_label,
+                         Register input,
+                         Handle<String> type_name);
 
   // Emits optimized code for %_IsObject(x).  Preserves input register.
   // Returns the condition on which a final split to
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index 856106c..626f899 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -452,6 +452,12 @@
 }
 
 
+void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
+  object()->PrintTo(stream);
+  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
+}
+
+
 void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
   LInstructionGap* gap = new LInstructionGap(block);
   int index = -1;
@@ -1434,13 +1440,11 @@
 
 
 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
-  Token::Value op = instr->token();
   ASSERT(instr->left()->representation().IsTagged());
   ASSERT(instr->right()->representation().IsTagged());
-  bool reversed = (op == Token::GT || op == Token::LTE);
   LOperand* context = UseFixed(instr->context(), esi);
-  LOperand* left = UseFixed(instr->left(), reversed ? eax : edx);
-  LOperand* right = UseFixed(instr->right(), reversed ? edx : eax);
+  LOperand* left = UseFixed(instr->left(), edx);
+  LOperand* right = UseFixed(instr->right(), eax);
   LCmpT* result = new LCmpT(context, left, right);
   return MarkAsCall(DefineFixed(result, eax), instr);
 }
@@ -1452,15 +1456,22 @@
   if (r.IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    LOperand* left = UseRegisterAtStart(instr->left());
+    LOperand* left = UseRegisterOrConstantAtStart(instr->left());
     LOperand* right = UseOrConstantAtStart(instr->right());
     return new LCmpIDAndBranch(left, right);
   } else {
     ASSERT(r.IsDouble());
     ASSERT(instr->left()->representation().IsDouble());
     ASSERT(instr->right()->representation().IsDouble());
-    LOperand* left = UseRegisterAtStart(instr->left());
-    LOperand* right = UseRegisterAtStart(instr->right());
+    LOperand* left;
+    LOperand* right;
+    if (instr->left()->IsConstant() && instr->right()->IsConstant()) {
+      left = UseRegisterOrConstantAtStart(instr->left());
+      right = UseRegisterOrConstantAtStart(instr->right());
+    } else {
+      left = UseRegisterAtStart(instr->left());
+      right = UseRegisterAtStart(instr->right());
+    }
     return new LCmpIDAndBranch(left, right);
   }
 }
@@ -2033,6 +2044,27 @@
 }
 
 
+LInstruction* LChunkBuilder::DoTransitionElementsKind(
+    HTransitionElementsKind* instr) {
+  if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
+      instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+    LOperand* object = UseRegister(instr->object());
+    LOperand* new_map_reg = TempRegister();
+    LOperand* temp_reg = TempRegister();
+    LTransitionElementsKind* result =
+        new LTransitionElementsKind(object, new_map_reg, temp_reg);
+    return DefineSameAsFirst(result);
+  } else {
+    LOperand* object = UseFixed(instr->object(), eax);
+    LOperand* fixed_object_reg = FixedTemp(edx);
+    LOperand* new_map_reg = FixedTemp(ebx);
+    LTransitionElementsKind* result =
+        new LTransitionElementsKind(object, new_map_reg, fixed_object_reg);
+    return MarkAsCall(DefineFixed(result, eax), instr);
+  }
+}
+
+
 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
   bool needs_write_barrier = instr->NeedsWriteBarrier();
 
diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h
index 3a06ac3..5f23afa 100644
--- a/src/ia32/lithium-ia32.h
+++ b/src/ia32/lithium-ia32.h
@@ -156,6 +156,7 @@
   V(ThisFunction)                               \
   V(Throw)                                      \
   V(ToFastProperties)                           \
+  V(TransitionElementsKind)                     \
   V(Typeof)                                     \
   V(TypeofIsAndBranch)                          \
   V(UnaryMathOperation)                         \
@@ -1295,7 +1296,6 @@
   LOperand* context() { return InputAt(0); }
   LOperand* value() { return InputAt(1); }
   int slot_index() { return hydrogen()->slot_index(); }
-  int needs_write_barrier() { return hydrogen()->NeedsWriteBarrier(); }
 
   virtual void PrintDataTo(StringStream* stream);
 };
@@ -1312,7 +1312,9 @@
 
 
 class LThisFunction: public LTemplateInstruction<1, 0, 0> {
+ public:
   DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function")
+  DECLARE_HYDROGEN_ACCESSOR(ThisFunction)
 };
 
 
@@ -1617,7 +1619,6 @@
   Handle<Object> name() const { return hydrogen()->name(); }
   bool is_in_object() { return hydrogen()->is_in_object(); }
   int offset() { return hydrogen()->offset(); }
-  bool needs_write_barrier() { return hydrogen()->NeedsWriteBarrier(); }
   Handle<Map> transition() const { return hydrogen()->transition(); }
 };
 
@@ -1639,7 +1640,8 @@
   LOperand* object() { return inputs_[1]; }
   LOperand* value() { return inputs_[2]; }
   Handle<Object> name() const { return hydrogen()->name(); }
-  bool strict_mode() { return hydrogen()->strict_mode(); }
+  StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
+  bool strict_mode() { return strict_mode_flag() == kStrictMode; }
 };
 
 
@@ -1733,6 +1735,30 @@
 };
 
 
+class LTransitionElementsKind: public LTemplateInstruction<1, 1, 2> {
+ public:
+  LTransitionElementsKind(LOperand* object,
+                          LOperand* new_map_temp,
+                          LOperand* temp_reg) {
+    inputs_[0] = object;
+    temps_[0] = new_map_temp;
+    temps_[1] = temp_reg;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind,
+                               "transition-elements-kind")
+  DECLARE_HYDROGEN_ACCESSOR(TransitionElementsKind)
+
+  virtual void PrintDataTo(StringStream* stream);
+
+  LOperand* object() { return inputs_[0]; }
+  LOperand* new_map_reg() { return temps_[0]; }
+  LOperand* temp_reg() { return temps_[1]; }
+  Handle<Map> original_map() { return hydrogen()->original_map(); }
+  Handle<Map> transitioned_map() { return hydrogen()->transitioned_map(); }
+};
+
+
 class LStringAdd: public LTemplateInstruction<1, 3, 0> {
  public:
   LStringAdd(LOperand* context, LOperand* left, LOperand* right) {
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 3aaa22a..dd1ace9 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -352,7 +352,7 @@
 
 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
   // see ROOT_ACCESSOR macro in factory.h
-  Handle<Object> value(&isolate()->heap()->roots_address()[index]);
+  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
   cmp(with, value);
 }
 
@@ -1492,6 +1492,19 @@
 }
 
 
+void MacroAssembler::BooleanBitTest(Register object,
+                                    int field_offset,
+                                    int bit_index) {
+  bit_index += kSmiTagSize + kSmiShiftSize;
+  ASSERT(IsPowerOf2(kBitsPerByte));
+  int byte_index = bit_index / kBitsPerByte;
+  int byte_bit_index = bit_index & (kBitsPerByte - 1);
+  test_b(FieldOperand(object, field_offset + byte_index),
+         static_cast<byte>(1 << byte_bit_index));
+}
+
+
+
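BooleanBitTest above locates a single flag inside a smi-encoded bit field by first skipping the smi tag bits and then splitting the bit index into a byte offset and a bit position within that byte, so a one-byte test_b suffices. A standalone sketch of the arithmetic, assuming the ia32 smi layout (kSmiTagSize == 1, kSmiShiftSize == 0); the concrete bit_index value is only an example:

#include <cassert>

int main() {
  // Assumed ia32 smi layout: one tag bit, no shift.
  const int kSmiTagSize = 1;
  const int kSmiShiftSize = 0;
  const int kBitsPerByte = 8;

  int bit_index = 10;                        // e.g. some compiler-hints flag
  bit_index += kSmiTagSize + kSmiShiftSize;  // skip the smi tag bits
  int byte_index = bit_index / kBitsPerByte;            // byte within the field
  int byte_bit_index = bit_index & (kBitsPerByte - 1);  // bit within that byte

  assert(byte_index == 1);      // the flag lives in the field's second byte
  assert(byte_bit_index == 3);  // at bit 3 of that byte
  return 0;
}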
 void MacroAssembler::NegativeZeroTest(Register result,
                                       Register op,
                                       Label* then_label) {
@@ -1522,7 +1535,8 @@
 void MacroAssembler::TryGetFunctionPrototype(Register function,
                                              Register result,
                                              Register scratch,
-                                             Label* miss) {
+                                             Label* miss,
+                                             bool miss_on_bound_function) {
   // Check that the receiver isn't a smi.
   JumpIfSmi(function, miss);
 
@@ -1530,6 +1544,15 @@
   CmpObjectType(function, JS_FUNCTION_TYPE, result);
   j(not_equal, miss);
 
+  if (miss_on_bound_function) {
+    // If the function is a bound function, jump to the miss label.

+    mov(scratch,
+        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
+    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
+                   SharedFunctionInfo::kBoundFunction);
+    j(not_zero, miss);
+  }
+
   // Make sure that the function has an instance prototype.
   Label non_instance;
   movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
@@ -2064,23 +2087,16 @@
   // You can't call a function without a valid frame.
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
-  ASSERT(function->is_compiled());
   // Get the function and setup the context.
   mov(edi, Immediate(Handle<JSFunction>(function)));
   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
 
   ParameterCount expected(function->shared()->formal_parameter_count());
-  if (V8::UseCrankshaft()) {
-    // TODO(kasperl): For now, we always call indirectly through the
-    // code field in the function to allow recompilation to take effect
-    // without changing any of the call sites.
-    InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
-               expected, actual, flag, call_wrapper, call_kind);
-  } else {
-    Handle<Code> code(function->code());
-    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET,
-               flag, call_wrapper, call_kind);
-  }
+  // We call indirectly through the code field in the function to
+  // allow recompilation to take effect without changing any of the
+  // call sites.
+  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
+             expected, actual, flag, call_wrapper, call_kind);
 }
 
 
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index a1b42c2..8528c55 100644
--- a/src/ia32/macro-assembler-ia32.h
+++ b/src/ia32/macro-assembler-ia32.h
@@ -594,6 +594,9 @@
   // ---------------------------------------------------------------------------
   // Support functions.
 
+  // Check a boolean bit within a Smi-encoded bit field.
+  void BooleanBitTest(Register object, int field_offset, int bit_index);
+
   // Check if result is zero and op is negative.
   void NegativeZeroTest(Register result, Register op, Label* then_label);
 
@@ -610,7 +613,8 @@
   void TryGetFunctionPrototype(Register function,
                                Register result,
                                Register scratch,
-                               Label* miss);
+                               Label* miss,
+                               bool miss_on_bound_function = false);
 
   // Generates code for reporting that an illegal operation has
   // occurred.
diff --git a/src/ia32/regexp-macro-assembler-ia32.cc b/src/ia32/regexp-macro-assembler-ia32.cc
index 8b0b9ab..dbf01ab 100644
--- a/src/ia32/regexp-macro-assembler-ia32.cc
+++ b/src/ia32/regexp-macro-assembler-ia32.cc
@@ -1141,6 +1141,11 @@
     frame_entry<const String*>(re_frame, kInputString) = *subject;
     frame_entry<const byte*>(re_frame, kInputStart) = new_address;
     frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
+  } else if (frame_entry<const String*>(re_frame, kInputString) != *subject) {
+    // The subject string may have been a ConsString that underwent
+    // short-circuiting during GC. That does not change start_address, but
+    // it does change the pointer inside the subject handle.
+    frame_entry<const String*>(re_frame, kInputString) = *subject;
   }
 
   return 0;
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 07cb14d..af53acd 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -107,12 +107,60 @@
 // must always call a backup property check that is complete.
 // This function is safe to call if the receiver has fast properties.
 // Name must be a symbol and receiver must be a heap object.
-static MaybeObject* GenerateDictionaryNegativeLookup(MacroAssembler* masm,
-                                                     Label* miss_label,
-                                                     Register receiver,
-                                                     String* name,
-                                                     Register r0,
-                                                     Register r1) {
+static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
+                                             Label* miss_label,
+                                             Register receiver,
+                                             Handle<String> name,
+                                             Register r0,
+                                             Register r1) {
+  ASSERT(name->IsSymbol());
+  Counters* counters = masm->isolate()->counters();
+  __ IncrementCounter(counters->negative_lookups(), 1);
+  __ IncrementCounter(counters->negative_lookups_miss(), 1);
+
+  __ mov(r0, FieldOperand(receiver, HeapObject::kMapOffset));
+
+  const int kInterceptorOrAccessCheckNeededMask =
+      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
+
+  // Bail out if the receiver has a named interceptor or requires access checks.
+  __ test_b(FieldOperand(r0, Map::kBitFieldOffset),
+            kInterceptorOrAccessCheckNeededMask);
+  __ j(not_zero, miss_label);
+
+  // Check that receiver is a JSObject.
+  __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE);
+  __ j(below, miss_label);
+
+  // Load properties array.
+  Register properties = r0;
+  __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
+
+  // Check that the properties array is a dictionary.
+  __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
+         Immediate(masm->isolate()->factory()->hash_table_map()));
+  __ j(not_equal, miss_label);
+
+  Label done;
+  StringDictionaryLookupStub::GenerateNegativeLookup(masm,
+                                                     miss_label,
+                                                     &done,
+                                                     properties,
+                                                     name,
+                                                     r1);
+  __ bind(&done);
+  __ DecrementCounter(counters->negative_lookups_miss(), 1);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+static MaybeObject* TryGenerateDictionaryNegativeLookup(MacroAssembler* masm,
+                                                        Label* miss_label,
+                                                        Register receiver,
+                                                        String* name,
+                                                        Register r0,
+                                                        Register r1) {
   ASSERT(name->IsSymbol());
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->negative_lookups(), 1);
@@ -143,12 +191,12 @@
 
   Label done;
   MaybeObject* result =
-      StringDictionaryLookupStub::GenerateNegativeLookup(masm,
-                                                         miss_label,
-                                                         &done,
-                                                         properties,
-                                                         name,
-                                                         r1);
+      StringDictionaryLookupStub::TryGenerateNegativeLookup(masm,
+                                                            miss_label,
+                                                            &done,
+                                                            properties,
+                                                            name,
+                                                            r1);
   if (result->IsFailure()) return result;
 
   __ bind(&done);
@@ -165,25 +213,23 @@
                               Register scratch,
                               Register extra,
                               Register extra2) {
-  Isolate* isolate = Isolate::Current();
   Label miss;
-  USE(extra2);  // The register extra2 is not used on the ia32 platform.
 
-  // Make sure that code is valid. The shifting code relies on the
-  // entry size being 8.
+  // Assert that code is valid.  The shifting code relies on the entry size
+  // being 8.
   ASSERT(sizeof(Entry) == 8);
 
-  // Make sure the flags does not name a specific type.
+  // Assert the flags do not name a specific type.
   ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
 
-  // Make sure that there are no register conflicts.
+  // Assert that there are no register conflicts.
   ASSERT(!scratch.is(receiver));
   ASSERT(!scratch.is(name));
   ASSERT(!extra.is(receiver));
   ASSERT(!extra.is(name));
   ASSERT(!extra.is(scratch));
 
-  // Check scratch and extra registers are valid, and extra2 is unused.
+  // Assert scratch and extra registers are valid, and extra2 is unused.
   ASSERT(!scratch.is(no_reg));
   ASSERT(extra2.is(no_reg));
 
@@ -197,7 +243,7 @@
   __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
 
   // Probe the primary table.
-  ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra);
+  ProbeTable(isolate(), masm, flags, kPrimary, name, scratch, extra);
 
   // Primary miss: Compute hash for secondary probe.
   __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
@@ -209,7 +255,7 @@
   __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
 
   // Probe the secondary table.
-  ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra);
+  ProbeTable(isolate(), masm, flags, kSecondary, name, scratch, extra);
 
   // Cache miss: Fall-through and let caller handle the miss by
   // entering the runtime system.
@@ -327,8 +373,10 @@
 // are loaded directly otherwise the property is loaded from the properties
 // fixed array.
 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
-                                            Register dst, Register src,
-                                            JSObject* holder, int index) {
+                                            Register dst,
+                                            Register src,
+                                            Handle<JSObject> holder,
+                                            int index) {
   // Adjust for the number of properties stored in the holder.
   index -= holder->map()->inobject_properties();
   if (index < 0) {
@@ -486,11 +534,11 @@
   CallInterceptorCompiler(StubCompiler* stub_compiler,
                           const ParameterCount& arguments,
                           Register name,
-                          Code::ExtraICState extra_ic_state)
+                          Code::ExtraICState extra_state)
       : stub_compiler_(stub_compiler),
         arguments_(arguments),
         name_(name),
-        extra_ic_state_(extra_ic_state) {}
+        extra_state_(extra_state) {}
 
   MaybeObject* Compile(MacroAssembler* masm,
                        JSObject* object,
@@ -614,7 +662,7 @@
           GenerateFastApiCall(masm, optimization, arguments_.immediate());
       if (result->IsFailure()) return result;
     } else {
-      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+      CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
           ? CALL_AS_FUNCTION
           : CALL_AS_METHOD;
       __ InvokeFunction(optimization.constant_function(), arguments_,
@@ -700,21 +748,16 @@
   StubCompiler* stub_compiler_;
   const ParameterCount& arguments_;
   Register name_;
-  Code::ExtraICState extra_ic_state_;
+  Code::ExtraICState extra_state_;
 };
 
 
 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
   ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
-  Code* code = NULL;
-  if (kind == Code::LOAD_IC) {
-    code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
-  } else {
-    code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
-  }
-
-  Handle<Code> ic(code);
-  __ jmp(ic, RelocInfo::CODE_TARGET);
+  Handle<Code> code = (kind == Code::LOAD_IC)
+      ? masm->isolate()->builtins()->LoadIC_Miss()
+      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
+  __ jmp(code, RelocInfo::CODE_TARGET);
 }
 
 
@@ -729,9 +772,9 @@
 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
 // but may be destroyed if store is successful.
 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
-                                      JSObject* object,
+                                      Handle<JSObject> object,
                                       int index,
-                                      Map* transition,
+                                      Handle<Map> transition,
                                       Register receiver_reg,
                                       Register name_reg,
                                       Register scratch,
@@ -754,12 +797,12 @@
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
 
   // Perform map transition for the receiver if necessary.
-  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
+  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
     // The properties must be extended before we can store the value.
     // We jump to a runtime call that extends the properties array.
     __ pop(scratch);  // Return address.
     __ push(receiver_reg);
-    __ push(Immediate(Handle<Map>(transition)));
+    __ push(Immediate(transition));
     __ push(eax);
     __ push(scratch);
     __ TailCallExternalReference(
@@ -770,11 +813,11 @@
     return;
   }
 
-  if (transition != NULL) {
+  if (!transition.is_null()) {
     // Update the map of the object; no write barrier updating is
     // needed because the map is never in new space.
     __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset),
-           Immediate(Handle<Map>(transition)));
+           Immediate(transition));
   }
 
   // Adjust for the number of properties stored in the object. Even in the
@@ -820,7 +863,29 @@
 // Generate code to check that a global property cell is empty. Create
 // the property cell at compilation time if no cell exists for the
 // property.
-MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
+static void GenerateCheckPropertyCell(MacroAssembler* masm,
+                                      Handle<GlobalObject> global,
+                                      Handle<String> name,
+                                      Register scratch,
+                                      Label* miss) {
+  Handle<JSGlobalPropertyCell> cell =
+      GlobalObject::EnsurePropertyCell(global, name);
+  ASSERT(cell->value()->IsTheHole());
+  Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
+  if (Serializer::enabled()) {
+    __ mov(scratch, Immediate(cell));
+    __ cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
+           Immediate(the_hole));
+  } else {
+    __ cmp(Operand::Cell(cell), Immediate(the_hole));
+  }
+  __ j(not_equal, miss);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell(
     MacroAssembler* masm,
     GlobalObject* global,
     String* name,
@@ -847,7 +912,29 @@
 
 // Calls GenerateCheckPropertyCell for each global object in the prototype chain
 // from object to (but not including) holder.
-MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
+static void GenerateCheckPropertyCells(MacroAssembler* masm,
+                                       Handle<JSObject> object,
+                                       Handle<JSObject> holder,
+                                       Handle<String> name,
+                                       Register scratch,
+                                       Label* miss) {
+  Handle<JSObject> current = object;
+  while (!current.is_identical_to(holder)) {
+    if (current->IsGlobalObject()) {
+      GenerateCheckPropertyCell(masm,
+                                Handle<GlobalObject>::cast(current),
+                                name,
+                                scratch,
+                                miss);
+    }
+    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
+  }
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells(
     MacroAssembler* masm,
     JSObject* object,
     JSObject* holder,
@@ -858,7 +945,7 @@
   while (current != holder) {
     if (current->IsGlobalObject()) {
       // Returns a cell or a failure.
-      MaybeObject* result = GenerateCheckPropertyCell(
+      MaybeObject* result = TryGenerateCheckPropertyCell(
           masm,
           GlobalObject::cast(current),
           name,
@@ -877,6 +964,120 @@
 #define __ ACCESS_MASM(masm())
 
 
+Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
+                                       Register object_reg,
+                                       Handle<JSObject> holder,
+                                       Register holder_reg,
+                                       Register scratch1,
+                                       Register scratch2,
+                                       Handle<String> name,
+                                       int save_at_depth,
+                                       Label* miss) {
+  // Make sure there's no overlap between holder and object registers.
+  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
+  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
+         && !scratch2.is(scratch1));
+
+  // Keep track of the current object in register reg.
+  Register reg = object_reg;
+  Handle<JSObject> current = object;
+  int depth = 0;
+
+  if (save_at_depth == depth) {
+    __ mov(Operand(esp, kPointerSize), reg);
+  }
+
+  // Traverse the prototype chain and, for each object, either check its map
+  // (fast and global objects) or do a negative dictionary lookup (normal
+  // objects).
+  while (!current.is_identical_to(holder)) {
+    ++depth;
+
+    // Only global objects and objects that do not require access
+    // checks are allowed in stubs.
+    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
+
+    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
+    if (!current->HasFastProperties() &&
+        !current->IsJSGlobalObject() &&
+        !current->IsJSGlobalProxy()) {
+      if (!name->IsSymbol()) {
+        name = factory()->LookupSymbol(name);
+      }
+      ASSERT(current->property_dictionary()->FindEntry(*name) ==
+             StringDictionary::kNotFound);
+
+      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
+                                       scratch1, scratch2);
+
+      __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
+      reg = holder_reg;  // From now on the object will be in holder_reg.
+      __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
+    } else {
+      bool in_new_space = heap()->InNewSpace(*prototype);
+      Handle<Map> current_map(current->map());
+      if (in_new_space) {
+        // Save the map in scratch1 for later.
+        __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
+        __ cmp(scratch1, Immediate(current_map));
+      } else {
+        __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
+               Immediate(current_map));
+      }
+      // Branch on the result of the map check.
+      __ j(not_equal, miss);
+      // Check access rights to the global object.  This has to happen after
+      // the map check so that we know that the object is actually a global
+      // object.
+      if (current->IsJSGlobalProxy()) {
+        __ CheckAccessGlobalProxy(reg, scratch2, miss);
+      }
+      reg = holder_reg;  // From now on the object will be in holder_reg.
+
+      if (in_new_space) {
+        // The prototype is in new space; we cannot store a reference to it
+        // in the code.  Load it from the map.
+        __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
+      } else {
+        // The prototype is in old space; load it directly.
+        __ mov(reg, prototype);
+      }
+    }
+
+    if (save_at_depth == depth) {
+      __ mov(Operand(esp, kPointerSize), reg);
+    }
+
+    // Go to the next object in the prototype chain.
+    current = prototype;
+  }
+  ASSERT(current.is_identical_to(holder));
+
+  // Log the check depth.
+  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
+
+  // Check the holder map.
+  __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
+         Immediate(Handle<Map>(holder->map())));
+  __ j(not_equal, miss);
+
+  // Perform security check for access to the global object.
+  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
+  if (holder->IsJSGlobalProxy()) {
+    __ CheckAccessGlobalProxy(reg, scratch1, miss);
+  }
+
+  // If we've skipped any global objects, it's not enough to verify that
+  // their maps haven't changed.  We also need to check that the property
+  // cell for the property is still empty.
+  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
+
+  // Return the register containing the holder.
+  return reg;
+}
+
+
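The handlified CheckPrototypes above walks the prototype chain one link at a time: objects with slow (dictionary) properties get a negative lookup for the name, everything else gets a map check, and global proxies additionally get an access check. A compile-and-run sketch (not V8 code; LinkSketch and CheckFor are hypothetical) of choosing between those checks:

#include <cassert>
#include <string>
#include <vector>

// Hypothetical, simplified view of one prototype-chain link.
struct LinkSketch {
  bool has_fast_properties;
  bool is_global_object;
  bool is_global_proxy;
};

static std::string CheckFor(const LinkSketch& link) {
  if (!link.has_fast_properties && !link.is_global_object &&
      !link.is_global_proxy) {
    return "dictionary negative lookup";
  }
  return link.is_global_proxy ? "map check + access check" : "map check";
}

int main() {
  std::vector<LinkSketch> chain = {
      {false, false, false},  // normal dictionary-mode object
      {true, false, false},   // fast-properties object
      {true, false, true},    // global proxy
  };
  assert(CheckFor(chain[0]) == "dictionary negative lookup");
  assert(CheckFor(chain[1]) == "map check");
  assert(CheckFor(chain[2]) == "map check + access check");
  return 0;
}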
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
 Register StubCompiler::CheckPrototypes(JSObject* object,
                                        Register object_reg,
                                        JSObject* holder,
@@ -926,12 +1127,9 @@
       ASSERT(current->property_dictionary()->FindEntry(name) ==
              StringDictionary::kNotFound);
 
-      MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
-                                                                      miss,
-                                                                      reg,
-                                                                      name,
-                                                                      scratch1,
-                                                                      scratch2);
+      MaybeObject* negative_lookup =
+          TryGenerateDictionaryNegativeLookup(masm(), miss, reg, name,
+                                              scratch1, scratch2);
       if (negative_lookup->IsFailure()) {
         set_failure(Failure::cast(negative_lookup));
         return reg;
@@ -998,17 +1196,17 @@
   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
   if (holder->IsJSGlobalProxy()) {
     __ CheckAccessGlobalProxy(reg, scratch1, miss);
-  };
+  }
 
   // If we've skipped any global objects, it's not enough to verify
   // that their maps haven't changed.  We also need to check that the
   // property cell for the property is still empty.
-  MaybeObject* result = GenerateCheckPropertyCells(masm(),
-                                                   object,
-                                                   holder,
-                                                   name,
-                                                   scratch1,
-                                                   miss);
+  MaybeObject* result = TryGenerateCheckPropertyCells(masm(),
+                                                      object,
+                                                      holder,
+                                                      name,
+                                                      scratch1,
+                                                      miss);
   if (result->IsFailure()) set_failure(Failure::cast(result));
 
   // Return the register containing the holder.
@@ -1016,22 +1214,21 @@
 }
 
 
-void StubCompiler::GenerateLoadField(JSObject* object,
-                                     JSObject* holder,
+void StubCompiler::GenerateLoadField(Handle<JSObject> object,
+                                     Handle<JSObject> holder,
                                      Register receiver,
                                      Register scratch1,
                                      Register scratch2,
                                      Register scratch3,
                                      int index,
-                                     String* name,
+                                     Handle<String> name,
                                      Label* miss) {
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, miss);
 
   // Check the prototype chain.
-  Register reg =
-      CheckPrototypes(object, receiver, holder,
-                      scratch1, scratch2, scratch3, name, miss);
+  Register reg = CheckPrototypes(
+      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
 
   // Get the value from the properties.
   GenerateFastPropertyLoad(masm(), eax, reg, holder, index);
@@ -1106,24 +1303,24 @@
 }
 
 
-void StubCompiler::GenerateLoadConstant(JSObject* object,
-                                        JSObject* holder,
+void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
+                                        Handle<JSObject> holder,
                                         Register receiver,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
-                                        Object* value,
-                                        String* name,
+                                        Handle<Object> value,
+                                        Handle<String> name,
                                         Label* miss) {
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, miss);
 
   // Check that the maps haven't changed.
-  CheckPrototypes(object, receiver, holder,
-                  scratch1, scratch2, scratch3, name, miss);
+  CheckPrototypes(
+      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
 
   // Return the constant value.
-  __ mov(eax, Handle<Object>(value));
+  __ mov(eax, value);
   __ ret(0);
 }
 
@@ -1223,7 +1420,8 @@
       // We found FIELD property in prototype chain of interceptor's holder.
       // Retrieve a field from field's holder.
       GenerateFastPropertyLoad(masm(), eax, holder_reg,
-                               lookup->holder(), lookup->GetFieldIndex());
+                               Handle<JSObject>(lookup->holder()),
+                               lookup->GetFieldIndex());
       __ ret(0);
     } else {
       // We found CALLBACKS property in prototype chain of interceptor's
@@ -1270,9 +1468,9 @@
 }
 
 
-void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
+void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
-    __ cmp(ecx, Immediate(Handle<String>(name)));
+    __ cmp(ecx, Immediate(name));
     __ j(not_equal, miss);
   }
 }
@@ -1335,11 +1533,22 @@
 }
 
 
-MaybeObject* CallStubCompiler::GenerateMissBranch() {
-  MaybeObject* maybe_obj =
+void CallStubCompiler::GenerateMissBranch() {
+  Handle<Code> code =
       isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                                kind_,
-                                               extra_ic_state_);
+                                               extra_state_);
+  __ jmp(code, RelocInfo::CODE_TARGET);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* CallStubCompiler::TryGenerateMissBranch() {
+  MaybeObject* maybe_obj =
+      isolate()->stub_cache()->TryComputeCallMiss(arguments().immediate(),
+                                                  kind_,
+                                                  extra_state_);
   Object* obj;
   if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   __ jmp(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
@@ -1347,11 +1556,10 @@
 }
 
 
-MUST_USE_RESULT MaybeObject* CallStubCompiler::CompileCallField(
-    JSObject* object,
-    JSObject* holder,
-    int index,
-    String* name) {
+Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
+                                                Handle<JSObject> holder,
+                                                int index,
+                                                Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- ecx                 : name
   //  -- esp[0]              : return address
@@ -1389,7 +1597,7 @@
   }
 
   // Invoke the function.
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(edi, arguments(), JUMP_FUNCTION,
@@ -1397,8 +1605,7 @@
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
-  if (maybe_result->IsFailure()) return maybe_result;
+  GenerateMissBranch();
 
   // Return the generated code.
   return GetCode(FIELD, name);
@@ -1425,7 +1632,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -1513,8 +1720,8 @@
       // the new element is non-Smi. For now, delegate to the builtin.
       Label no_fast_elements_check;
       __ JumpIfSmi(edi, &no_fast_elements_check);
-      __ mov(esi, FieldOperand(edx, HeapObject::kMapOffset));
-      __ CheckFastObjectElements(esi, &call_builtin, Label::kFar);
+      __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
+      __ CheckFastObjectElements(ecx, &call_builtin, Label::kFar);
       __ bind(&no_fast_elements_check);
 
       // We could be lucky and the elements array could be at the top of
@@ -1582,11 +1789,11 @@
   }
 
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1610,7 +1817,7 @@
 
   Label miss, return_undefined, call_builtin;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -1665,11 +1872,11 @@
       1);
 
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1700,12 +1907,12 @@
   Label* index_out_of_range_label = &index_out_of_range;
 
   if (kind_ == Code::CALL_IC &&
-      (CallICBase::StringStubState::decode(extra_ic_state_) ==
+      (CallICBase::StringStubState::decode(extra_state_) ==
        DEFAULT_STRING_STUB)) {
     index_out_of_range_label = &miss;
   }
 
-  GenerateNameCheck(name, &name_miss);
+  GenerateNameCheck(Handle<String>(name), &name_miss);
 
   // Check that the maps starting from the prototype haven't changed.
   GenerateDirectLoadGlobalFunctionPrototype(masm(),
@@ -1751,11 +1958,11 @@
   // Restore function name in ecx.
   __ Set(ecx, Immediate(Handle<String>(name)));
   __ bind(&name_miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1786,12 +1993,12 @@
   Label* index_out_of_range_label = &index_out_of_range;
 
   if (kind_ == Code::CALL_IC &&
-      (CallICBase::StringStubState::decode(extra_ic_state_) ==
+      (CallICBase::StringStubState::decode(extra_state_) ==
        DEFAULT_STRING_STUB)) {
     index_out_of_range_label = &miss;
   }
 
-  GenerateNameCheck(name, &name_miss);
+  GenerateNameCheck(Handle<String>(name), &name_miss);
 
   // Check that the maps starting from the prototype haven't changed.
   GenerateDirectLoadGlobalFunctionPrototype(masm(),
@@ -1839,11 +2046,11 @@
   // Restore function name in ecx.
   __ Set(ecx, Immediate(Handle<String>(name)));
   __ bind(&name_miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1870,7 +2077,7 @@
   }
 
   Label miss;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ mov(edx, Operand(esp, 2 * kPointerSize));
@@ -1908,7 +2115,7 @@
   // Tail call the full function. We do not have to patch the receiver
   // because the function makes no use of it.
   __ bind(&slow);
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
@@ -1916,11 +2123,11 @@
 
   __ bind(&miss);
   // ecx: function name.
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -1952,7 +2159,7 @@
   }
 
   Label miss;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ mov(edx, Operand(esp, 2 * kPointerSize));
@@ -2045,11 +2252,11 @@
 
   __ bind(&miss);
   // ecx: function name.
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -2075,7 +2282,7 @@
   }
 
   Label miss;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ mov(edx, Operand(esp, 2 * kPointerSize));
@@ -2149,11 +2356,11 @@
 
   __ bind(&miss);
   // ecx: function name.
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -2176,7 +2383,7 @@
 
   Label miss, miss_before_stack_reserved;
 
-  GenerateNameCheck(name, &miss_before_stack_reserved);
+  GenerateNameCheck(Handle<String>(name), &miss_before_stack_reserved);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -2210,11 +2417,11 @@
   __ add(esp, Immediate(kFastApiCallArguments * kPointerSize));
 
   __ bind(&miss_before_stack_reserved);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -2243,7 +2450,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -2339,7 +2546,7 @@
       UNREACHABLE();
   }
 
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
@@ -2347,11 +2554,11 @@
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -2367,18 +2574,18 @@
   // -----------------------------------
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the number of arguments.
   const int argc = arguments().immediate();
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
 
   // Get the receiver from the stack.
   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
 
-  CallInterceptorCompiler compiler(this, arguments(), ecx, extra_ic_state_);
+  CallInterceptorCompiler compiler(this, arguments(), ecx, extra_state_);
   MaybeObject* result = compiler.Compile(masm(),
                                          object,
                                          holder,
@@ -2408,7 +2615,7 @@
 
   // Invoke the function.
   __ mov(edi, eax);
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(edi, arguments(), JUMP_FUNCTION,
@@ -2416,11 +2623,11 @@
 
   // Handle load cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
@@ -2449,7 +2656,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the number of arguments.
   const int argc = arguments().immediate();
@@ -2470,40 +2677,32 @@
   // Jump to the cached code (tail call).
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->call_global_inline(), 1);
-  ASSERT(function->is_compiled());
   ParameterCount expected(function->shared()->formal_parameter_count());
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
-  if (V8::UseCrankshaft()) {
-    // TODO(kasperl): For now, we always call indirectly through the
-    // code field in the function to allow recompilation to take effect
-    // without changing any of the call sites.
-    __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
-                  expected, arguments(), JUMP_FUNCTION,
-                  NullCallWrapper(), call_kind);
-  } else {
-    Handle<Code> code(function->code());
-    __ InvokeCode(code, expected, arguments(),
-                  RelocInfo::CODE_TARGET, JUMP_FUNCTION,
-                  NullCallWrapper(), call_kind);
-  }
+  // We call indirectly through the code field in the function to
+  // allow recompilation to take effect without changing any of the
+  // call sites.
+  __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
+                expected, arguments(), JUMP_FUNCTION,
+                NullCallWrapper(), call_kind);
 
   // Handle call cache miss.
   __ bind(&miss);
   __ IncrementCounter(counters->call_global_inline_miss(), 1);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(NORMAL, name);
+  return TryGetCode(NORMAL, name);
 }
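
The branch deleted just above called the global function's code object directly when Crankshaft was off and only went through JSFunction::kCodeEntryOffset when it was on; the replacement always loads the target from the code field, so recompilation (optimizing or deoptimizing the function) never has to patch generated call sites, as the new comment notes. A toy model of that design choice in plain C++, with made-up names rather than generated ia32 code:

    #include <cstdio>

    struct Fn {
      void (*code_entry)();            // loosely analogous to the code field
    };

    void Unoptimized() { std::puts("unoptimized"); }
    void Optimized()   { std::puts("optimized"); }

    // Every call reloads the target, so swapping code_entry retargets all
    // future calls without touching any caller.
    void CallIndirect(Fn* f) { f->code_entry(); }

    int main() {
      Fn f = { &Unoptimized };
      CallIndirect(&f);                // "unoptimized"
      f.code_entry = &Optimized;       // "recompilation" happens here
      CallIndirect(&f);                // "optimized"
      return 0;
    }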
 
 
-MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
+Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                   int index,
-                                                  Map* transition,
-                                                  String* name) {
+                                                  Handle<Map> transition,
+                                                  Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- eax    : value
   //  -- ecx    : name
@@ -2513,27 +2712,23 @@
   Label miss;
 
   // Generate store field code.  Trashes the name register.
-  GenerateStoreField(masm(),
-                     object,
-                     index,
-                     transition,
-                     edx, ecx, ebx,
-                     &miss);
+  GenerateStoreField(masm(), object, index, transition, edx, ecx, ebx, &miss);
 
   // Handle store cache miss.
   __ bind(&miss);
-  __ mov(ecx, Immediate(Handle<String>(name)));  // restore name
+  __ mov(ecx, Immediate(name));  // restore name
   Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
+  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
-                                                     AccessorInfo* callback,
-                                                     String* name) {
+Handle<Code> StoreStubCompiler::CompileStoreCallback(
+    Handle<JSObject> object,
+    Handle<AccessorInfo> callback,
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- eax    : value
   //  -- ecx    : name
@@ -2561,7 +2756,7 @@
 
   __ pop(ebx);  // remove the return address
   __ push(edx);  // receiver
-  __ push(Immediate(Handle<AccessorInfo>(callback)));  // callback info
+  __ push(Immediate(callback));  // callback info
   __ push(ecx);  // name
   __ push(eax);  // value
   __ push(ebx);  // restore return address
@@ -2581,8 +2776,9 @@
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
-                                                        String* name) {
+Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
+    Handle<JSObject> receiver,
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- eax    : value
   //  -- ecx    : name
@@ -2630,9 +2826,10 @@
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
-                                                   JSGlobalPropertyCell* cell,
-                                                   String* name) {
+Handle<Code> StoreStubCompiler::CompileStoreGlobal(
+    Handle<GlobalObject> object,
+    Handle<JSGlobalPropertyCell> cell,
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- eax    : value
   //  -- ecx    : name
@@ -2647,7 +2844,7 @@
   __ j(not_equal, &miss);
 
   // Compute the cell operand to use.
-  __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
+  __ mov(ebx, Immediate(cell));
   Operand cell_operand = FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset);
 
   // Check that the value in the cell is not the hole. If it is, this
@@ -2691,10 +2888,10 @@
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
+Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                        int index,
-                                                       Map* transition,
-                                                       String* name) {
+                                                       Handle<Map> transition,
+                                                       Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- eax    : value
   //  -- ecx    : key
@@ -2707,16 +2904,11 @@
   __ IncrementCounter(counters->keyed_store_field(), 1);
 
   // Check that the name has not changed.
-  __ cmp(ecx, Immediate(Handle<String>(name)));
+  __ cmp(ecx, Immediate(name));
   __ j(not_equal, &miss);
 
   // Generate store field code.  Trashes the name register.
-  GenerateStoreField(masm(),
-                     object,
-                     index,
-                     transition,
-                     edx, ecx, ebx,
-                     &miss);
+  GenerateStoreField(masm(), object, index, transition, edx, ecx, ebx, &miss);
 
   // Handle store cache miss.
   __ bind(&miss);
@@ -2725,40 +2917,37 @@
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
+  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
+Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
+    Handle<Map> receiver_map) {
   // ----------- S t a t e -------------
   //  -- eax    : value
   //  -- ecx    : key
   //  -- edx    : receiver
   //  -- esp[0] : return address
   // -----------------------------------
-  Code* stub;
   ElementsKind elements_kind = receiver_map->elements_kind();
   bool is_jsarray = receiver_map->instance_type() == JS_ARRAY_TYPE;
-  MaybeObject* maybe_stub =
-      KeyedStoreElementStub(is_jsarray, elements_kind).TryGetCode();
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  __ DispatchMap(edx,
-                 Handle<Map>(receiver_map),
-                 Handle<Code>(stub),
-                 DO_SMI_CHECK);
+  Handle<Code> stub =
+      KeyedStoreElementStub(is_jsarray, elements_kind).GetCode();
+
+  __ DispatchMap(edx, receiver_map, stub, DO_SMI_CHECK);
 
   Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL);
+  return GetCode(NORMAL, factory()->empty_string());
 }
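
Two small conventions in the hunks above are worth spelling out: `transition == NULL` becomes `transition.is_null()` because the argument is now a handle rather than a raw `Map*`, and the old `GetCode(NORMAL, NULL)` calls now pass `factory()->empty_string()`, since the handlified GetCode takes a handle for the name and the patch uses the canonical empty string rather than an empty handle as its "no name" value. A very reduced sketch of the handle idea (not the real `v8::internal::Handle`) is:

    template <typename T>
    class Handle {
     public:
      Handle() : location_(nullptr) {}                 // the null handle
      explicit Handle(T** location) : location_(location) {}
      bool is_null() const { return location_ == nullptr; }
      T* operator*() const { return *location_; }      // requires a live slot
     private:
      T** location_;  // slot the GC keeps pointing at the (movable) object
    };

An empty handle therefore plays the role the NULL pointer used to play in these signatures, while a handle to the empty string is an ordinary, dereferenceable value.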
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStorePolymorphic(
-    MapList* receiver_maps,
-    CodeList* handler_stubs,
-    MapList* transitioned_maps) {
+Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
+    MapHandleList* receiver_maps,
+    CodeHandleList* handler_stubs,
+    MapHandleList* transitioned_maps) {
   // ----------- S t a t e -------------
   //  -- eax    : value
   //  -- ecx    : key
@@ -2770,15 +2959,14 @@
   __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
   // ebx: receiver->map().
   for (int i = 0; i < receiver_maps->length(); ++i) {
-    Handle<Map> map(receiver_maps->at(i));
-    __ cmp(edi, map);
-    if (transitioned_maps->at(i) == NULL) {
-      __ j(equal, Handle<Code>(handler_stubs->at(i)));
+    __ cmp(edi, receiver_maps->at(i));
+    if (transitioned_maps->at(i).is_null()) {
+      __ j(equal, handler_stubs->at(i));
     } else {
       Label next_map;
       __ j(not_equal, &next_map, Label::kNear);
-      __ mov(ebx, Immediate(Handle<Map>(transitioned_maps->at(i))));
-      __ jmp(Handle<Code>(handler_stubs->at(i)), RelocInfo::CODE_TARGET);
+      __ mov(ebx, Immediate(transitioned_maps->at(i)));
+      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
       __ bind(&next_map);
     }
   }
@@ -2787,13 +2975,13 @@
   __ jmp(miss_ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL, MEGAMORPHIC);
+  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
-                                                      JSObject* object,
-                                                      JSObject* last) {
+Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
+                                                      Handle<JSObject> object,
+                                                      Handle<JSObject> last) {
   // ----------- S t a t e -------------
   //  -- eax    : receiver
   //  -- ecx    : name
@@ -2814,15 +3002,8 @@
   // If the last object in the prototype chain is a global object,
   // check that the global property cell is empty.
   if (last->IsGlobalObject()) {
-    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
-                                                  GlobalObject::cast(last),
-                                                  name,
-                                                  edx,
-                                                  &miss);
-    if (cell->IsFailure()) {
-      miss.Unuse();
-      return cell;
-    }
+    GenerateCheckPropertyCell(
+        masm(), Handle<GlobalObject>::cast(last), name, edx, &miss);
   }
 
   // Return undefined if maps of the full prototype chain are still the
@@ -2834,14 +3015,14 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NONEXISTENT, isolate()->heap()->empty_string());
+  return GetCode(NONEXISTENT, factory()->empty_string());
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
-                                                JSObject* holder,
+Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
+                                                Handle<JSObject> holder,
                                                 int index,
-                                                String* name) {
+                                                Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- eax    : receiver
   //  -- ecx    : name
@@ -2880,14 +3061,14 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(CALLBACKS, name);
+  return TryGetCode(CALLBACKS, name);
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
-                                                   JSObject* holder,
-                                                   Object* value,
-                                                   String* name) {
+Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
+                                                   Handle<JSObject> holder,
+                                                   Handle<Object> value,
+                                                   Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- eax    : receiver
   //  -- ecx    : name
@@ -2914,7 +3095,7 @@
   // -----------------------------------
   Label miss;
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
 
   // TODO(368): Compile in the whole chain: all the interceptors in
@@ -2934,15 +3115,16 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
-                                                 GlobalObject* holder,
-                                                 JSGlobalPropertyCell* cell,
-                                                 String* name,
-                                                 bool is_dont_delete) {
+Handle<Code> LoadStubCompiler::CompileLoadGlobal(
+    Handle<JSObject> object,
+    Handle<GlobalObject> holder,
+    Handle<JSGlobalPropertyCell> cell,
+    Handle<String> name,
+    bool is_dont_delete) {
   // ----------- S t a t e -------------
   //  -- eax    : receiver
   //  -- ecx    : name
@@ -2953,7 +3135,7 @@
   // If the object is the holder then we know that it's a global
   // object which can only happen for contextual loads. In this case,
   // the receiver cannot be a smi.
-  if (object != holder) {
+  if (!object.is_identical_to(holder)) {
     __ JumpIfSmi(eax, &miss);
   }
 
@@ -2962,10 +3144,10 @@
 
   // Get the value from the cell.
   if (Serializer::enabled()) {
-    __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
+    __ mov(ebx, Immediate(cell));
     __ mov(ebx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset));
   } else {
-    __ mov(ebx, Operand::Cell(Handle<JSGlobalPropertyCell>(cell)));
+    __ mov(ebx, Operand::Cell(cell));
   }
 
   // Check for deleted property if property can actually be deleted.
@@ -2991,9 +3173,9 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
-                                                     JSObject* receiver,
-                                                     JSObject* holder,
+Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
+                                                     Handle<JSObject> receiver,
+                                                     Handle<JSObject> holder,
                                                      int index) {
   // ----------- S t a t e -------------
   //  -- eax    : key
@@ -3006,7 +3188,7 @@
   __ IncrementCounter(counters->keyed_load_field(), 1);
 
   // Check that the name has not changed.
-  __ cmp(eax, Immediate(Handle<String>(name)));
+  __ cmp(eax, Immediate(name));
   __ j(not_equal, &miss);
 
   GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss);
@@ -3052,14 +3234,15 @@
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
-  return GetCode(CALLBACKS, name);
+  return TryGetCode(CALLBACKS, name);
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
-                                                        JSObject* receiver,
-                                                        JSObject* holder,
-                                                        Object* value) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
+    Handle<String> name,
+    Handle<JSObject> receiver,
+    Handle<JSObject> holder,
+    Handle<Object> value) {
   // ----------- S t a t e -------------
   //  -- eax    : key
   //  -- edx    : receiver
@@ -3071,11 +3254,11 @@
   __ IncrementCounter(counters->keyed_load_constant_function(), 1);
 
   // Check that the name has not changed.
-  __ cmp(eax, Immediate(Handle<String>(name)));
+  __ cmp(eax, Immediate(name));
   __ j(not_equal, &miss);
 
-  GenerateLoadConstant(receiver, holder, edx, ebx, ecx, edi,
-                       value, name, &miss);
+  GenerateLoadConstant(
+      receiver, holder, edx, ebx, ecx, edi, value, name, &miss);
   __ bind(&miss);
   __ DecrementCounter(counters->keyed_load_constant_function(), 1);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
@@ -3102,7 +3285,7 @@
   __ cmp(eax, Immediate(Handle<String>(name)));
   __ j(not_equal, &miss);
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
   GenerateLoadInterceptor(receiver,
                           holder,
@@ -3119,11 +3302,12 @@
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- eax    : key
   //  -- edx    : receiver
@@ -3135,7 +3319,7 @@
   __ IncrementCounter(counters->keyed_load_array_length(), 1);
 
   // Check that the name has not changed.
-  __ cmp(eax, Immediate(Handle<String>(name)));
+  __ cmp(eax, Immediate(name));
   __ j(not_equal, &miss);
 
   GenerateLoadArrayLength(masm(), edx, ecx, &miss);
@@ -3148,7 +3332,8 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- eax    : key
   //  -- edx    : receiver
@@ -3160,7 +3345,7 @@
   __ IncrementCounter(counters->keyed_load_string_length(), 1);
 
   // Check that the name has not changed.
-  __ cmp(eax, Immediate(Handle<String>(name)));
+  __ cmp(eax, Immediate(name));
   __ j(not_equal, &miss);
 
   GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true);
@@ -3173,7 +3358,8 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- eax    : key
   //  -- edx    : receiver
@@ -3185,7 +3371,7 @@
   __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
 
   // Check that the name has not changed.
-  __ cmp(eax, Immediate(Handle<String>(name)));
+  __ cmp(eax, Immediate(name));
   __ j(not_equal, &miss);
 
   GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss);
@@ -3198,31 +3384,29 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
+    Handle<Map> receiver_map) {
   // ----------- S t a t e -------------
   //  -- eax    : key
   //  -- edx    : receiver
   //  -- esp[0] : return address
   // -----------------------------------
-  Code* stub;
+
   ElementsKind elements_kind = receiver_map->elements_kind();
-  MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode();
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  __ DispatchMap(edx,
-                 Handle<Map>(receiver_map),
-                 Handle<Code>(stub),
-                 DO_SMI_CHECK);
+  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
+
+  __ DispatchMap(edx, receiver_map, stub, DO_SMI_CHECK);
 
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL);
+  return GetCode(NORMAL, factory()->empty_string());
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadPolymorphic(
-    MapList* receiver_maps,
-    CodeList* handler_ics) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
+    MapHandleList* receiver_maps,
+    CodeHandleList* handler_ics) {
   // ----------- S t a t e -------------
   //  -- eax    : key
   //  -- edx    : receiver
@@ -3235,16 +3419,15 @@
   __ mov(map_reg, FieldOperand(edx, HeapObject::kMapOffset));
   int receiver_count = receiver_maps->length();
   for (int current = 0; current < receiver_count; ++current) {
-    Handle<Map> map(receiver_maps->at(current));
-    __ cmp(map_reg, map);
-    __ j(equal, Handle<Code>(handler_ics->at(current)));
+    __ cmp(map_reg, receiver_maps->at(current));
+    __ j(equal, handler_ics->at(current));
   }
 
   __ bind(&miss);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL, MEGAMORPHIC);
+  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
 }
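
Stepping back from the individual hunks, this file follows one migration recipe: compilers being handlified switch from returning `MaybeObject*` (whose callers had to check `IsFailure()` or unpack with `To()`) to returning `Handle<Code>`, while the not-yet-converted paths keep the `MaybeObject*` behaviour under a `Try` prefix (`TryGetCode`, `TryGenerateMissBranch`) so both worlds can coexist during the transition. A hypothetical miniature of the two calling conventions, with invented types rather than V8's, is:

    #include <cassert>

    struct Code { int id; };

    // Old convention: a tagged return value that may encode a failure
    // (e.g. "allocation failed, retry after GC") and must be unpacked.
    struct MaybeCode {
      Code* value;                               // nullptr stands for failure
      bool IsFailure() const { return value == nullptr; }
      bool To(Code** out) const {
        if (IsFailure()) return false;
        *out = value;
        return true;
      }
    };

    // New convention: a possibly-empty handle; emptiness simply means
    // "no stub was produced, leave the inline cache alone".
    struct CodeHandle {
      Code* location;
      bool is_null() const { return location == nullptr; }
      Code* operator*() const { assert(!is_null()); return location; }
    };

    void SetTarget(Code*) {}

    void UpdateCachesOldStyle(MaybeCode maybe) {
      Code* code = nullptr;
      if (!maybe.To(&code)) return;              // propagate / bail out
      SetTarget(code);
    }

    void UpdateCachesNewStyle(CodeHandle code) {
      if (code.is_null()) return;                // just skip the update
      SetTarget(*code);
    }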
 
 
diff --git a/src/ic.cc b/src/ic.cc
index d5056a9..fbe77b0 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -100,7 +100,11 @@
     PrintF("]\n");
   }
 }
-#endif
+#endif  // DEBUG
+
+
+#define TRACE_IC(type, name, old_state, new_target)             \
+  ASSERT((TraceIC(type, name, old_state, new_target), true))
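
The TRACE_IC macro introduced here replaces the repeated `#ifdef DEBUG ... TraceIC(...) ... #endif` blocks further down in this file. The trick is the comma expression: `(TraceIC(...), true)` runs the trace call for its side effect and then yields true, so the assertion can never fire, and because ASSERT expands to nothing in release builds, the tracing call disappears there as well. A standalone sketch of the same pattern, using stand-ins for V8's DEBUG/ASSERT machinery:

    #include <cassert>
    #include <cstdio>

    static void TraceSomething(const char* what) {
      std::fprintf(stderr, "[trace: %s]\n", what);
    }

    // Stand-in for V8's ASSERT: evaluates its argument only in debug builds.
    #ifdef DEBUG
    #define MY_ASSERT(cond) assert(cond)
    #else
    #define MY_ASSERT(cond) ((void) 0)
    #endif

    // Comma operator: run the call for its side effect, then yield 'true'
    // so the assertion is always satisfied.
    #define TRACE_SOMETHING(what) MY_ASSERT((TraceSomething(what), true))

    int main() {
      TRACE_SOMETHING("LoadIC");  // prints only with -DDEBUG and assertions on
      return 0;
    }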
 
 
 IC::IC(FrameDepth depth, Isolate* isolate) : isolate_(isolate) {
@@ -368,15 +372,13 @@
 }
 
 
-static void LookupForRead(Object* object,
-                          String* name,
+static void LookupForRead(Handle<Object> object,
+                          Handle<String> name,
                           LookupResult* lookup) {
-  AssertNoAllocation no_gc;  // pointers must stay valid
-
   // Skip all the objects with named interceptors, but
   // without actual getter.
   while (true) {
-    object->Lookup(name, lookup);
+    object->Lookup(*name, lookup);
     // Besides normal conditions (property not found or it's not
     // an interceptor), bail out if lookup is not cacheable: we won't
     // be able to IC it anyway and regular lookup should work fine.
@@ -386,18 +388,18 @@
       return;
     }
 
-    JSObject* holder = lookup->holder();
-    if (HasInterceptorGetter(holder)) {
+    Handle<JSObject> holder(lookup->holder());
+    if (HasInterceptorGetter(*holder)) {
       return;
     }
 
-    holder->LocalLookupRealNamedProperty(name, lookup);
+    holder->LocalLookupRealNamedProperty(*name, lookup);
     if (lookup->IsProperty()) {
       ASSERT(lookup->type() != INTERCEPTOR);
       return;
     }
 
-    Object* proto = holder->GetPrototype();
+    Handle<Object> proto(holder->GetPrototype());
     if (proto->IsNull()) {
       lookup->NotFound();
       return;
@@ -408,28 +410,29 @@
 }
 
 
-Object* CallICBase::TryCallAsFunction(Object* object) {
-  HandleScope scope(isolate());
-  Handle<Object> target(object, isolate());
-  Handle<Object> delegate = Execution::GetFunctionDelegate(target);
+Handle<Object> CallICBase::TryCallAsFunction(Handle<Object> object) {
+  Handle<Object> delegate = Execution::GetFunctionDelegate(object);
 
-  if (delegate->IsJSFunction()) {
+  if (delegate->IsJSFunction() && !object->IsJSFunctionProxy()) {
     // Patch the receiver and use the delegate as the function to
-    // invoke. This is used for invoking objects as if they were
-    // functions.
-    const int argc = this->target()->arguments_count();
+    // invoke. This is used for invoking objects as if they were functions.
+    const int argc = target()->arguments_count();
     StackFrameLocator locator;
     JavaScriptFrame* frame = locator.FindJavaScriptFrame(0);
     int index = frame->ComputeExpressionsCount() - (argc + 1);
-    frame->SetExpression(index, *target);
+    frame->SetExpression(index, *object);
   }
 
-  return *delegate;
+  return delegate;
 }
 
 
 void CallICBase::ReceiverToObjectIfRequired(Handle<Object> callee,
                                             Handle<Object> object) {
+  while (callee->IsJSFunctionProxy()) {
+    callee = Handle<Object>(JSFunctionProxy::cast(*callee)->call_trap());
+  }
+
   if (callee->IsJSFunction()) {
     Handle<JSFunction> function = Handle<JSFunction>::cast(callee);
     if (function->shared()->strict_mode() || function->IsBuiltin()) {
@@ -464,31 +467,27 @@
   // the element if so.
   uint32_t index;
   if (name->AsArrayIndex(&index)) {
-    Object* result;
-    { MaybeObject* maybe_result = object->GetElement(index);
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-
-    if (result->IsJSFunction()) return result;
+    Handle<Object> result = Object::GetElement(object, index);
+    RETURN_IF_EMPTY_HANDLE(isolate(), result);
+    if (result->IsJSFunction()) return *result;
 
     // Try to find a suitable function delegate for the object at hand.
     result = TryCallAsFunction(result);
-    if (result->IsJSFunction()) return result;
+    if (result->IsJSFunction()) return *result;
 
     // Otherwise, it will fail in the lookup step.
   }
 
   // Lookup the property in the object.
-  LookupResult lookup;
-  LookupForRead(*object, *name, &lookup);
+  LookupResult lookup(isolate());
+  LookupForRead(object, name, &lookup);
 
   if (!lookup.IsProperty()) {
     // If the object does not have the requested property, check which
     // exception we need to throw.
-    if (IsContextual(object)) {
-      return ReferenceError("not_defined", name);
-    }
-    return TypeError("undefined_method", object, name);
+    return IsContextual(object)
+        ? ReferenceError("not_defined", name)
+        : TypeError("undefined_method", object, name);
   }
 
   // Lookup is valid: Update inline cache and stub cache.
@@ -498,53 +497,42 @@
 
   // Get the property.
   PropertyAttributes attr;
-  Object* result;
-  { MaybeObject* maybe_result =
-        object->GetProperty(*object, &lookup, *name, &attr);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  Handle<Object> result =
+      Object::GetProperty(object, object, &lookup, name, &attr);
+  RETURN_IF_EMPTY_HANDLE(isolate(), result);
 
-  if (lookup.type() == INTERCEPTOR) {
+  if (lookup.type() == INTERCEPTOR && attr == ABSENT) {
     // If the object does not have the requested property, check which
     // exception we need to throw.
-    if (attr == ABSENT) {
-      if (IsContextual(object)) {
-        return ReferenceError("not_defined", name);
-      }
-      return TypeError("undefined_method", object, name);
-    }
+    return IsContextual(object)
+        ? ReferenceError("not_defined", name)
+        : TypeError("undefined_method", object, name);
   }
 
   ASSERT(!result->IsTheHole());
 
-  HandleScope scope(isolate());
-  // Wrap result in a handle because ReceiverToObjectIfRequired may allocate
-  // new object and cause GC.
-  Handle<Object> result_handle(result);
   // Make receiver an object if the callee requires it. Strict mode or builtin
   // functions do not wrap the receiver, non-strict functions and objects
   // called as functions do.
-  ReceiverToObjectIfRequired(result_handle, object);
+  ReceiverToObjectIfRequired(result, object);
 
-  if (result_handle->IsJSFunction()) {
+  if (result->IsJSFunction()) {
+    Handle<JSFunction> function = Handle<JSFunction>::cast(result);
 #ifdef ENABLE_DEBUGGER_SUPPORT
     // Handle stepping into a function if step into is active.
     Debug* debug = isolate()->debug();
     if (debug->StepInActive()) {
       // Protect the result in a handle as the debugger can allocate and might
       // cause GC.
-      Handle<JSFunction> function(JSFunction::cast(*result_handle), isolate());
       debug->HandleStepIn(function, object, fp(), false);
-      return *function;
     }
 #endif
-
-    return *result_handle;
+    return *function;
   }
 
   // Try to find a suitable function delegate for the object at hand.
-  result_handle = Handle<Object>(TryCallAsFunction(*result_handle));
-  if (result_handle->IsJSFunction()) return *result_handle;
+  result = TryCallAsFunction(result);
+  if (result->IsJSFunction()) return *result;
 
   return TypeError("property_not_function", object, name);
 }
@@ -594,89 +582,57 @@
 }
 
 
-MaybeObject* CallICBase::ComputeMonomorphicStub(
-    LookupResult* lookup,
-    State state,
-    Code::ExtraICState extra_ic_state,
-    Handle<Object> object,
-    Handle<String> name) {
+Handle<Code> CallICBase::ComputeMonomorphicStub(LookupResult* lookup,
+                                                State state,
+                                                Code::ExtraICState extra_state,
+                                                Handle<Object> object,
+                                                Handle<String> name) {
   int argc = target()->arguments_count();
-  MaybeObject* maybe_code = NULL;
+  Handle<JSObject> holder(lookup->holder());
   switch (lookup->type()) {
     case FIELD: {
       int index = lookup->GetFieldIndex();
-      maybe_code = isolate()->stub_cache()->ComputeCallField(argc,
-                                                             kind_,
-                                                             extra_ic_state,
-                                                             *name,
-                                                             *object,
-                                                             lookup->holder(),
-                                                             index);
-      break;
+      return isolate()->stub_cache()->ComputeCallField(
+          argc, kind_, extra_state, name, object, holder, index);
     }
     case CONSTANT_FUNCTION: {
       // Get the constant function and compute the code stub for this
       // call; used for rewriting to monomorphic state and making sure
       // that the code stub is in the stub cache.
-      JSFunction* function = lookup->GetConstantFunction();
-      maybe_code =
-          isolate()->stub_cache()->ComputeCallConstant(argc,
-                                                       kind_,
-                                                       extra_ic_state,
-                                                       *name,
-                                                       *object,
-                                                       lookup->holder(),
-                                                       function);
-      break;
+      Handle<JSFunction> function(lookup->GetConstantFunction());
+      return isolate()->stub_cache()->ComputeCallConstant(
+          argc, kind_, extra_state, name, object, holder, function);
     }
     case NORMAL: {
-      if (!object->IsJSObject()) return NULL;
+      // If we return a null handle, the IC will not be patched.
+      if (!object->IsJSObject()) return Handle<Code>::null();
       Handle<JSObject> receiver = Handle<JSObject>::cast(object);
 
-      if (lookup->holder()->IsGlobalObject()) {
-        GlobalObject* global = GlobalObject::cast(lookup->holder());
-        JSGlobalPropertyCell* cell =
-            JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
-        if (!cell->value()->IsJSFunction()) return NULL;
-        JSFunction* function = JSFunction::cast(cell->value());
-        maybe_code = isolate()->stub_cache()->ComputeCallGlobal(argc,
-                                                                kind_,
-                                                                extra_ic_state,
-                                                                *name,
-                                                                *receiver,
-                                                                global,
-                                                                cell,
-                                                                function);
+      if (holder->IsGlobalObject()) {
+        Handle<GlobalObject> global = Handle<GlobalObject>::cast(holder);
+        Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(lookup));
+        if (!cell->value()->IsJSFunction()) return Handle<Code>::null();
+        Handle<JSFunction> function(JSFunction::cast(cell->value()));
+        return isolate()->stub_cache()->ComputeCallGlobal(
+            argc, kind_, extra_state, name, receiver, global, cell, function);
       } else {
         // There is only one shared stub for calling normalized
         // properties. It does not traverse the prototype chain, so the
         // property must be found in the receiver for the stub to be
         // applicable.
-        if (lookup->holder() != *receiver) return NULL;
-        maybe_code = isolate()->stub_cache()->ComputeCallNormal(argc,
-                                                                kind_,
-                                                                extra_ic_state,
-                                                                *name,
-                                                                *receiver);
+        if (!holder.is_identical_to(receiver)) return Handle<Code>::null();
+        return isolate()->stub_cache()->ComputeCallNormal(
+            argc, kind_, extra_state);
       }
       break;
     }
-    case INTERCEPTOR: {
-      ASSERT(HasInterceptorGetter(lookup->holder()));
-      maybe_code = isolate()->stub_cache()->ComputeCallInterceptor(
-          argc,
-          kind_,
-          extra_ic_state,
-          *name,
-          *object,
-          lookup->holder());
-      break;
-    }
+    case INTERCEPTOR:
+      ASSERT(HasInterceptorGetter(*holder));
+      return isolate()->stub_cache()->ComputeCallInterceptor(
+          argc, kind_, extra_state, name, object, holder);
     default:
-      maybe_code = NULL;
-      break;
+      return Handle<Code>::null();
   }
-  return maybe_code;
 }
 
 
@@ -698,75 +654,57 @@
 
   // Compute the number of arguments.
   int argc = target()->arguments_count();
-  MaybeObject* maybe_code = NULL;
   bool had_proto_failure = false;
+  Handle<Code> code;
   if (state == UNINITIALIZED) {
     // This is the first time we execute this inline cache.
     // Set the target to the pre monomorphic stub to delay
     // setting the monomorphic state.
-    maybe_code =
-        isolate()->stub_cache()->ComputeCallPreMonomorphic(argc,
-                                                           kind_,
-                                                           extra_ic_state);
+    code = isolate()->stub_cache()->ComputeCallPreMonomorphic(
+        argc, kind_, extra_ic_state);
   } else if (state == MONOMORPHIC) {
     if (kind_ == Code::CALL_IC &&
         TryUpdateExtraICState(lookup, object, &extra_ic_state)) {
-      maybe_code = ComputeMonomorphicStub(lookup,
-                                          state,
-                                          extra_ic_state,
-                                          object,
-                                          name);
+      code = ComputeMonomorphicStub(lookup, state, extra_ic_state,
+                                    object, name);
     } else if (kind_ == Code::CALL_IC &&
                TryRemoveInvalidPrototypeDependentStub(target(),
                                                       *object,
                                                       *name)) {
       had_proto_failure = true;
-      maybe_code = ComputeMonomorphicStub(lookup,
-                                          state,
-                                          extra_ic_state,
-                                          object,
-                                          name);
+      code = ComputeMonomorphicStub(lookup, state, extra_ic_state,
+                                    object, name);
     } else {
-      maybe_code =
-          isolate()->stub_cache()->ComputeCallMegamorphic(argc,
-                                                          kind_,
-                                                          extra_ic_state);
+      code = isolate()->stub_cache()->ComputeCallMegamorphic(
+          argc, kind_, extra_ic_state);
     }
   } else {
-    maybe_code = ComputeMonomorphicStub(lookup,
-                                        state,
-                                        extra_ic_state,
-                                        object,
-                                        name);
+    code = ComputeMonomorphicStub(lookup, state, extra_ic_state,
+                                  object, name);
   }
 
-  // If we're unable to compute the stub (not enough memory left), we
-  // simply avoid updating the caches.
-  Object* code;
-  if (maybe_code == NULL || !maybe_code->ToObject(&code)) return;
+  // If there's no appropriate stub we simply avoid updating the caches.
+  if (code.is_null()) return;
 
   // Patch the call site depending on the state of the cache.
   if (state == UNINITIALIZED ||
       state == PREMONOMORPHIC ||
       state == MONOMORPHIC ||
       state == MONOMORPHIC_PROTOTYPE_FAILURE) {
-    set_target(Code::cast(code));
+    set_target(*code);
   } else if (state == MEGAMORPHIC) {
     // Cache code holding map should be consistent with
     // GenerateMonomorphicCacheProbe. It is not the map which holds the stub.
-    Map* map = JSObject::cast(object->IsJSObject() ? *object :
-                              object->GetPrototype())->map();
-
+    Handle<JSObject> cache_object = object->IsJSObject()
+        ? Handle<JSObject>::cast(object)
+        : Handle<JSObject>(JSObject::cast(object->GetPrototype()));
     // Update the stub cache.
-    isolate()->stub_cache()->Set(*name, map, Code::cast(code));
+    isolate()->stub_cache()->Set(*name, cache_object->map(), *code);
   }
 
-  USE(had_proto_failure);
-#ifdef DEBUG
   if (had_proto_failure) state = MONOMORPHIC_PROTOTYPE_FAILURE;
-  TraceIC(kind_ == Code::CALL_IC ? "CallIC" : "KeyedCallIC",
-          name, state, target());
-#endif
+  TRACE_IC(kind_ == Code::CALL_IC ? "CallIC" : "KeyedCallIC",
+           name, state, target());
 }
 
 
@@ -786,34 +724,22 @@
 
   if (FLAG_use_ic && state != MEGAMORPHIC && object->IsHeapObject()) {
     int argc = target()->arguments_count();
-    Heap* heap = Handle<HeapObject>::cast(object)->GetHeap();
-    Map* map = heap->non_strict_arguments_elements_map();
+    Handle<Map> map =
+        isolate()->factory()->non_strict_arguments_elements_map();
     if (object->IsJSObject() &&
-        Handle<JSObject>::cast(object)->elements()->map() == map) {
-      MaybeObject* maybe_code = isolate()->stub_cache()->ComputeCallArguments(
+        Handle<JSObject>::cast(object)->elements()->map() == *map) {
+      Handle<Code> code = isolate()->stub_cache()->ComputeCallArguments(
           argc, Code::KEYED_CALL_IC);
-      Object* code;
-      if (maybe_code->ToObject(&code)) {
-        set_target(Code::cast(code));
-#ifdef DEBUG
-        TraceIC("KeyedCallIC", key, state, target());
-#endif
-      }
-    } else if (FLAG_use_ic && state != MEGAMORPHIC &&
-               !object->IsAccessCheckNeeded()) {
-      MaybeObject* maybe_code = isolate()->stub_cache()->ComputeCallMegamorphic(
+      set_target(*code);
+      TRACE_IC("KeyedCallIC", key, state, target());
+    } else if (!object->IsAccessCheckNeeded()) {
+      Handle<Code> code = isolate()->stub_cache()->ComputeCallMegamorphic(
           argc, Code::KEYED_CALL_IC, Code::kNoExtraICState);
-      Object* code;
-      if (maybe_code->ToObject(&code)) {
-        set_target(Code::cast(code));
-#ifdef DEBUG
-        TraceIC("KeyedCallIC", key, state, target());
-#endif
-      }
+      set_target(*code);
+      TRACE_IC("KeyedCallIC", key, state, target());
     }
   }
 
-  HandleScope scope(isolate());
   Handle<Object> result = GetProperty(object, key);
   RETURN_IF_EMPTY_HANDLE(isolate(), result);
 
@@ -821,9 +747,9 @@
   // functions do not wrap the receiver, non-strict functions and objects
   // called as functions do.
   ReceiverToObjectIfRequired(result, object);
-
   if (result->IsJSFunction()) return *result;
-  result = Handle<Object>(TryCallAsFunction(*result));
+
+  result = TryCallAsFunction(result);
   if (result->IsJSFunction()) return *result;
 
   return TypeError("property_not_function", object, key);
@@ -846,53 +772,44 @@
     // the underlying string value.  See ECMA-262 15.5.5.1.
     if ((object->IsString() || object->IsStringWrapper()) &&
         name->Equals(isolate()->heap()->length_symbol())) {
-      AssertNoAllocation no_allocation;
-      Code* stub = NULL;
+      Handle<Code> stub;
       if (state == UNINITIALIZED) {
         stub = pre_monomorphic_stub();
       } else if (state == PREMONOMORPHIC) {
-        if (object->IsString()) {
-          stub = isolate()->builtins()->builtin(
-              Builtins::kLoadIC_StringLength);
-        } else {
-          stub = isolate()->builtins()->builtin(
-              Builtins::kLoadIC_StringWrapperLength);
-        }
+        stub = object->IsString()
+            ? isolate()->builtins()->LoadIC_StringLength()
+            : isolate()->builtins()->LoadIC_StringWrapperLength();
       } else if (state == MONOMORPHIC && object->IsStringWrapper()) {
-        stub = isolate()->builtins()->builtin(
-            Builtins::kLoadIC_StringWrapperLength);
+        stub = isolate()->builtins()->LoadIC_StringWrapperLength();
       } else if (state != MEGAMORPHIC) {
         stub = megamorphic_stub();
       }
-      if (stub != NULL) {
-        set_target(stub);
+      if (!stub.is_null()) {
+        set_target(*stub);
 #ifdef DEBUG
         if (FLAG_trace_ic) PrintF("[LoadIC : +#length /string]\n");
 #endif
       }
       // Get the string if we have a string wrapper object.
-      if (object->IsJSValue()) {
-        return Smi::FromInt(
-            String::cast(Handle<JSValue>::cast(object)->value())->length());
-      }
-      return Smi::FromInt(String::cast(*object)->length());
+      Handle<Object> string = object->IsJSValue()
+          ? Handle<Object>(Handle<JSValue>::cast(object)->value())
+          : object;
+      return Smi::FromInt(String::cast(*string)->length());
     }
 
     // Use specialized code for getting the length of arrays.
     if (object->IsJSArray() &&
         name->Equals(isolate()->heap()->length_symbol())) {
-      AssertNoAllocation no_allocation;
-      Code* stub = NULL;
+      Handle<Code> stub;
       if (state == UNINITIALIZED) {
         stub = pre_monomorphic_stub();
       } else if (state == PREMONOMORPHIC) {
-        stub = isolate()->builtins()->builtin(
-            Builtins::kLoadIC_ArrayLength);
+        stub = isolate()->builtins()->LoadIC_ArrayLength();
       } else if (state != MEGAMORPHIC) {
         stub = megamorphic_stub();
       }
-      if (stub != NULL) {
-        set_target(stub);
+      if (!stub.is_null()) {
+        set_target(*stub);
 #ifdef DEBUG
         if (FLAG_trace_ic) PrintF("[LoadIC : +#length /array]\n");
 #endif
@@ -903,23 +820,20 @@
     // Use specialized code for getting prototype of functions.
     if (object->IsJSFunction() &&
         name->Equals(isolate()->heap()->prototype_symbol()) &&
-        JSFunction::cast(*object)->should_have_prototype()) {
-      { AssertNoAllocation no_allocation;
-        Code* stub = NULL;
-        if (state == UNINITIALIZED) {
-          stub = pre_monomorphic_stub();
-        } else if (state == PREMONOMORPHIC) {
-          stub = isolate()->builtins()->builtin(
-              Builtins::kLoadIC_FunctionPrototype);
-        } else if (state != MEGAMORPHIC) {
-          stub = megamorphic_stub();
-        }
-        if (stub != NULL) {
-          set_target(stub);
+        Handle<JSFunction>::cast(object)->should_have_prototype()) {
+      Handle<Code> stub;
+      if (state == UNINITIALIZED) {
+        stub = pre_monomorphic_stub();
+      } else if (state == PREMONOMORPHIC) {
+        stub = isolate()->builtins()->LoadIC_FunctionPrototype();
+      } else if (state != MEGAMORPHIC) {
+        stub = megamorphic_stub();
+      }
+      if (!stub.is_null()) {
+        set_target(*stub);
 #ifdef DEBUG
-          if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n");
+        if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n");
 #endif
-        }
       }
       return Accessors::FunctionGetPrototype(*object, 0);
     }
@@ -931,8 +845,8 @@
   if (name->AsArrayIndex(&index)) return object->GetElement(index);
 
   // Named lookup in the object.
-  LookupResult lookup;
-  LookupForRead(*object, *name, &lookup);
+  LookupResult lookup(isolate());
+  LookupForRead(object, name, &lookup);
 
   // If we did not find a property, check if we need to throw an exception.
   if (!lookup.IsProperty()) {
@@ -951,17 +865,15 @@
   if (lookup.IsProperty() &&
       (lookup.type() == INTERCEPTOR || lookup.type() == HANDLER)) {
     // Get the property.
-    Object* result;
-    { MaybeObject* maybe_result =
-          object->GetProperty(*object, &lookup, *name, &attr);
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
+    Handle<Object> result =
+        Object::GetProperty(object, object, &lookup, name, &attr);
+    RETURN_IF_EMPTY_HANDLE(isolate(), result);
     // If the property is not present, check if we need to throw an
     // exception.
     if (attr == ABSENT && IsContextual(object)) {
       return ReferenceError("not_defined", name);
     }
-    return result;
+    return *result;
   }
 
   // Get the property.
@@ -984,128 +896,105 @@
   if (HasNormalObjectsInPrototypeChain(isolate(), lookup, *object)) return;
 
   // Compute the code stub for this load.
-  MaybeObject* maybe_code = NULL;
-  Object* code;
+  Handle<Code> code;
   if (state == UNINITIALIZED) {
     // This is the first time we execute this inline cache.
     // Set the target to the pre monomorphic stub to delay
     // setting the monomorphic state.
-    maybe_code = pre_monomorphic_stub();
+    code = pre_monomorphic_stub();
   } else if (!lookup->IsProperty()) {
     // Nonexistent property. The result is undefined.
-    maybe_code = isolate()->stub_cache()->ComputeLoadNonexistent(*name,
-                                                                 *receiver);
+    code = isolate()->stub_cache()->ComputeLoadNonexistent(name, receiver);
   } else {
     // Compute monomorphic stub.
+    Handle<JSObject> holder(lookup->holder());
     switch (lookup->type()) {
-      case FIELD: {
-        maybe_code = isolate()->stub_cache()->ComputeLoadField(
-            *name,
-            *receiver,
-            lookup->holder(),
-            lookup->GetFieldIndex());
+      case FIELD:
+        code = isolate()->stub_cache()->ComputeLoadField(
+            name, receiver, holder, lookup->GetFieldIndex());
         break;
-      }
       case CONSTANT_FUNCTION: {
-        Object* constant = lookup->GetConstantFunction();
-        maybe_code = isolate()->stub_cache()->ComputeLoadConstant(
-            *name, *receiver, lookup->holder(), constant);
+        Handle<Object> constant(lookup->GetConstantFunction());
+        code = isolate()->stub_cache()->ComputeLoadConstant(
+            name, receiver, holder, constant);
         break;
       }
-      case NORMAL: {
-        if (lookup->holder()->IsGlobalObject()) {
-          GlobalObject* global = GlobalObject::cast(lookup->holder());
-          JSGlobalPropertyCell* cell =
-              JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
-          maybe_code = isolate()->stub_cache()->ComputeLoadGlobal(*name,
-                                                    *receiver,
-                                                    global,
-                                                    cell,
-                                                    lookup->IsDontDelete());
+      case NORMAL:
+        if (holder->IsGlobalObject()) {
+          Handle<GlobalObject> global = Handle<GlobalObject>::cast(holder);
+          Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(lookup));
+          code = isolate()->stub_cache()->ComputeLoadGlobal(
+              name, receiver, global, cell, lookup->IsDontDelete());
         } else {
           // There is only one shared stub for loading normalized
           // properties. It does not traverse the prototype chain, so the
           // property must be found in the receiver for the stub to be
           // applicable.
-          if (lookup->holder() != *receiver) return;
-          maybe_code = isolate()->stub_cache()->ComputeLoadNormal();
+          if (!holder.is_identical_to(receiver)) return;
+          code = isolate()->stub_cache()->ComputeLoadNormal();
         }
         break;
-      }
       case CALLBACKS: {
-        if (!lookup->GetCallbackObject()->IsAccessorInfo()) return;
-        AccessorInfo* callback =
-            AccessorInfo::cast(lookup->GetCallbackObject());
+        Handle<Object> callback_object(lookup->GetCallbackObject());
+        if (!callback_object->IsAccessorInfo()) return;
+        Handle<AccessorInfo> callback =
+            Handle<AccessorInfo>::cast(callback_object);
         if (v8::ToCData<Address>(callback->getter()) == 0) return;
-        maybe_code = isolate()->stub_cache()->ComputeLoadCallback(
-            *name, *receiver, lookup->holder(), callback);
+        code = isolate()->stub_cache()->ComputeLoadCallback(
+            name, receiver, holder, callback);
         break;
       }
-      case INTERCEPTOR: {
-        ASSERT(HasInterceptorGetter(lookup->holder()));
-        maybe_code = isolate()->stub_cache()->ComputeLoadInterceptor(
-            *name, *receiver, lookup->holder());
+      case INTERCEPTOR:
+        ASSERT(HasInterceptorGetter(*holder));
+        code = isolate()->stub_cache()->ComputeLoadInterceptor(
+            name, receiver, holder);
         break;
-      }
       default:
         return;
     }
   }
 
-  // If we're unable to compute the stub (not enough memory left), we
-  // simply avoid updating the caches.
-  if (maybe_code == NULL || !maybe_code->ToObject(&code)) return;
-
   // Patch the call site depending on the state of the cache.
-  if (state == UNINITIALIZED || state == PREMONOMORPHIC ||
+  if (state == UNINITIALIZED ||
+      state == PREMONOMORPHIC ||
       state == MONOMORPHIC_PROTOTYPE_FAILURE) {
-    set_target(Code::cast(code));
+    set_target(*code);
   } else if (state == MONOMORPHIC) {
-    set_target(megamorphic_stub());
+    set_target(*megamorphic_stub());
   } else if (state == MEGAMORPHIC) {
     // Cache code holding map should be consistent with
     // GenerateMonomorphicCacheProbe.
-    Map* map = JSObject::cast(object->IsJSObject() ? *object :
-                              object->GetPrototype())->map();
-
-    isolate()->stub_cache()->Set(*name, map, Code::cast(code));
+    isolate()->stub_cache()->Set(*name, receiver->map(), *code);
   }
 
-#ifdef DEBUG
-  TraceIC("LoadIC", name, state, target());
-#endif
+  TRACE_IC("LoadIC", name, state, target());
 }
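
The rewritten tail of UpdateCaches above makes the patching policy easier to see: with a stub in hand, an uninitialized or premonomorphic site (or one that hit a prototype-chain failure) is patched to that specific stub, a monomorphic site that sees a second shape degrades to the shared megamorphic stub, and a megamorphic site is left alone while the (name, receiver map) to code mapping is fed to the global stub cache instead. A rough restatement of just that policy, with my own enum and names rather than V8's:

    enum class IcState {
      kUninitialized,
      kPremonomorphic,
      kMonomorphicPrototypeFailure,
      kMonomorphic,
      kMegamorphic
    };

    struct Stub { int id; };
    struct StubCache {
      void Set(const char* /*name*/, int /*map_id*/, Stub /*code*/) { /* ... */ }
    };

    void ApplyUpdatePolicy(IcState state, const Stub* code, Stub megamorphic_stub,
                           const char* name, int receiver_map_id,
                           Stub* site_target, StubCache* cache) {
      if (code == nullptr) return;          // no stub computed: do nothing
      switch (state) {
        case IcState::kUninitialized:
        case IcState::kPremonomorphic:
        case IcState::kMonomorphicPrototypeFailure:
          *site_target = *code;             // patch the site to this stub
          break;
        case IcState::kMonomorphic:
          *site_target = megamorphic_stub;  // second shape: go megamorphic
          break;
        case IcState::kMegamorphic:
          cache->Set(name, receiver_map_id, *code);  // stub cache only
          break;
      }
    }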
 
 
-MaybeObject* KeyedLoadIC::GetElementStubWithoutMapCheck(
+Handle<Code> KeyedLoadIC::GetElementStubWithoutMapCheck(
     bool is_js_array,
     ElementsKind elements_kind) {
-  return KeyedLoadElementStub(elements_kind).TryGetCode();
+  return KeyedLoadElementStub(elements_kind).GetCode();
 }
 
 
-MaybeObject* KeyedLoadIC::ComputePolymorphicStub(
-    MapList* receiver_maps,
+Handle<Code> KeyedLoadIC::ComputePolymorphicStub(
+    MapHandleList* receiver_maps,
     StrictModeFlag strict_mode) {
-  CodeList handler_ics(receiver_maps->length());
+  CodeHandleList handler_ics(receiver_maps->length());
   for (int i = 0; i < receiver_maps->length(); ++i) {
-    Map* receiver_map(receiver_maps->at(i));
-    MaybeObject* maybe_cached_stub = ComputeMonomorphicStubWithoutMapCheck(
+    Handle<Map> receiver_map = receiver_maps->at(i);
+    Handle<Code> cached_stub = ComputeMonomorphicStubWithoutMapCheck(
         receiver_map, strict_mode);
-    Code* cached_stub;
-    if (!maybe_cached_stub->To(&cached_stub)) return maybe_cached_stub;
     handler_ics.Add(cached_stub);
   }
-  Object* object;
-  KeyedLoadStubCompiler compiler;
-  MaybeObject* maybe_code = compiler.CompileLoadPolymorphic(receiver_maps,
-                                                            &handler_ics);
-  if (!maybe_code->ToObject(&object)) return maybe_code;
+  KeyedLoadStubCompiler compiler(isolate());
+  Handle<Code> code = compiler.CompileLoadPolymorphic(
+      receiver_maps, &handler_ics);
   isolate()->counters()->keyed_load_polymorphic_stubs()->Increment();
-  PROFILE(isolate(), CodeCreateEvent(
-      Logger::KEYED_LOAD_MEGAMORPHIC_IC_TAG,
-      Code::cast(object), 0));
-  return object;
+  PROFILE(isolate(),
+          CodeCreateEvent(Logger::KEYED_LOAD_MEGAMORPHIC_IC_TAG, *code, 0));
+  return code;
 }
 
 
@@ -1115,9 +1004,8 @@
                                bool force_generic_stub) {
   // Check for values that can be converted into a symbol.
   // TODO(1295): Remove this code.
-  HandleScope scope(isolate());
   if (key->IsHeapNumber() &&
-      isnan(HeapNumber::cast(*key)->value())) {
+      isnan(Handle<HeapNumber>::cast(key)->value())) {
     key = isolate()->factory()->nan_symbol();
   } else if (key->IsUndefined()) {
     key = isolate()->factory()->undefined_symbol();
@@ -1139,16 +1027,11 @@
       if (object->IsString() &&
           name->Equals(isolate()->heap()->length_symbol())) {
         Handle<String> string = Handle<String>::cast(object);
-        Object* code = NULL;
-        { MaybeObject* maybe_code =
-              isolate()->stub_cache()->ComputeKeyedLoadStringLength(*name,
-                                                                    *string);
-          if (!maybe_code->ToObject(&code)) return maybe_code;
-        }
-        set_target(Code::cast(code));
-#ifdef DEBUG
-        TraceIC("KeyedLoadIC", name, state, target());
-#endif  // DEBUG
+        Handle<Code> code =
+            isolate()->stub_cache()->ComputeKeyedLoadStringLength(name, string);
+        ASSERT(!code.is_null());
+        set_target(*code);
+        TRACE_IC("KeyedLoadIC", name, state, target());
         return Smi::FromInt(string->length());
       }
 
@@ -1156,34 +1039,25 @@
       if (object->IsJSArray() &&
           name->Equals(isolate()->heap()->length_symbol())) {
         Handle<JSArray> array = Handle<JSArray>::cast(object);
-        Object* code;
-        { MaybeObject* maybe_code =
-              isolate()->stub_cache()->ComputeKeyedLoadArrayLength(*name,
-                                                                   *array);
-          if (!maybe_code->ToObject(&code)) return maybe_code;
-        }
-        set_target(Code::cast(code));
-#ifdef DEBUG
-        TraceIC("KeyedLoadIC", name, state, target());
-#endif  // DEBUG
-        return JSArray::cast(*object)->length();
+        Handle<Code> code =
+            isolate()->stub_cache()->ComputeKeyedLoadArrayLength(name, array);
+        ASSERT(!code.is_null());
+        set_target(*code);
+        TRACE_IC("KeyedLoadIC", name, state, target());
+        return array->length();
       }
 
       // Use specialized code for getting prototype of functions.
       if (object->IsJSFunction() &&
           name->Equals(isolate()->heap()->prototype_symbol()) &&
-        JSFunction::cast(*object)->should_have_prototype()) {
+          Handle<JSFunction>::cast(object)->should_have_prototype()) {
         Handle<JSFunction> function = Handle<JSFunction>::cast(object);
-        Object* code;
-        { MaybeObject* maybe_code =
-              isolate()->stub_cache()->ComputeKeyedLoadFunctionPrototype(
-                  *name, *function);
-          if (!maybe_code->ToObject(&code)) return maybe_code;
-        }
-        set_target(Code::cast(code));
-#ifdef DEBUG
-        TraceIC("KeyedLoadIC", name, state, target());
-#endif  // DEBUG
+        Handle<Code> code =
+            isolate()->stub_cache()->ComputeKeyedLoadFunctionPrototype(
+                name, function);
+        ASSERT(!code.is_null());
+        set_target(*code);
+        TRACE_IC("KeyedLoadIC", name, state, target());
         return Accessors::FunctionGetPrototype(*object, 0);
       }
     }
@@ -1192,15 +1066,14 @@
     // the element or char if so.
     uint32_t index = 0;
     if (name->AsArrayIndex(&index)) {
-      HandleScope scope(isolate());
       // Rewrite to the generic keyed load stub.
-      if (FLAG_use_ic) set_target(generic_stub());
+      if (FLAG_use_ic) set_target(*generic_stub());
       return Runtime::GetElementOrCharAt(isolate(), object, index);
     }
 
     // Named lookup.
-    LookupResult lookup;
-    LookupForRead(*object, *name, &lookup);
+    LookupResult lookup(isolate());
+    LookupForRead(object, name, &lookup);
 
     // If we did not find a property, check if we need to throw an exception.
     if (!lookup.IsProperty() && IsContextual(object)) {
@@ -1214,17 +1087,15 @@
     PropertyAttributes attr;
     if (lookup.IsProperty() && lookup.type() == INTERCEPTOR) {
       // Get the property.
-      Object* result;
-      { MaybeObject* maybe_result =
-            object->GetProperty(*object, &lookup, *name, &attr);
-        if (!maybe_result->ToObject(&result)) return maybe_result;
-      }
+      Handle<Object> result =
+          Object::GetProperty(object, object, &lookup, name, &attr);
+      RETURN_IF_EMPTY_HANDLE(isolate(), result);
       // If the property is not present, check if we need to throw an
       // exception.
       if (attr == ABSENT && IsContextual(object)) {
         return ReferenceError("not_defined", name);
       }
-      return result;
+      return *result;
     }
 
     return object->GetProperty(*object, &lookup, *name, &attr);
@@ -1235,44 +1106,38 @@
   bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded();
 
   if (use_ic) {
-    Code* stub = generic_stub();
+    Handle<Code> stub = generic_stub();
     if (!force_generic_stub) {
       if (object->IsString() && key->IsNumber()) {
         if (state == UNINITIALIZED) {
           stub = string_stub();
         }
       } else if (object->IsJSObject()) {
-        JSObject* receiver = JSObject::cast(*object);
-        Heap* heap = Handle<JSObject>::cast(object)->GetHeap();
-        Map* elements_map = Handle<JSObject>::cast(object)->elements()->map();
-        if (elements_map == heap->non_strict_arguments_elements_map()) {
+        Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+        if (receiver->elements()->map() ==
+            isolate()->heap()->non_strict_arguments_elements_map()) {
           stub = non_strict_arguments_stub();
         } else if (receiver->HasIndexedInterceptor()) {
           stub = indexed_interceptor_stub();
-        } else if (key->IsSmi() && (target() != non_strict_arguments_stub())) {
-          MaybeObject* maybe_stub = ComputeStub(receiver,
-                                                LOAD,
-                                                kNonStrictMode,
-                                                stub);
-          stub = maybe_stub->IsFailure() ?
-              NULL : Code::cast(maybe_stub->ToObjectUnchecked());
+        } else if (key->IsSmi() && (target() != *non_strict_arguments_stub())) {
+          stub = ComputeStub(receiver, LOAD, kNonStrictMode, stub);
         }
       }
     }
-    if (stub != NULL) set_target(stub);
+    if (!stub.is_null()) set_target(*stub);
   }
 
-#ifdef DEBUG
-  TraceIC("KeyedLoadIC", key, state, target());
-#endif  // DEBUG
+  TRACE_IC("KeyedLoadIC", key, state, target());
 
   // Get the property.
   return Runtime::GetObjectProperty(isolate(), object, key);
 }
 
 
-void KeyedLoadIC::UpdateCaches(LookupResult* lookup, State state,
-                               Handle<Object> object, Handle<String> name) {
+void KeyedLoadIC::UpdateCaches(LookupResult* lookup,
+                               State state,
+                               Handle<Object> object,
+                               Handle<String> name) {
   // Bail out if we didn't find a result.
   if (!lookup->IsProperty() || !lookup->IsCacheable()) return;
 
@@ -1282,68 +1147,60 @@
   if (HasNormalObjectsInPrototypeChain(isolate(), lookup, *object)) return;
 
   // Compute the code stub for this load.
-  MaybeObject* maybe_code = NULL;
-  Object* code;
+  Handle<Code> code;
 
   if (state == UNINITIALIZED) {
     // This is the first time we execute this inline cache.
     // Set the target to the pre monomorphic stub to delay
     // setting the monomorphic state.
-    maybe_code = pre_monomorphic_stub();
+    code = pre_monomorphic_stub();
   } else {
     // Compute a monomorphic stub.
+    Handle<JSObject> holder(lookup->holder());
     switch (lookup->type()) {
-      case FIELD: {
-        maybe_code = isolate()->stub_cache()->ComputeKeyedLoadField(
-            *name, *receiver, lookup->holder(), lookup->GetFieldIndex());
+      case FIELD:
+        code = isolate()->stub_cache()->ComputeKeyedLoadField(
+            name, receiver, holder, lookup->GetFieldIndex());
         break;
-      }
       case CONSTANT_FUNCTION: {
-        Object* constant = lookup->GetConstantFunction();
-        maybe_code = isolate()->stub_cache()->ComputeKeyedLoadConstant(
-            *name, *receiver, lookup->holder(), constant);
+        Handle<Object> constant(lookup->GetConstantFunction());
+        code = isolate()->stub_cache()->ComputeKeyedLoadConstant(
+            name, receiver, holder, constant);
         break;
       }
       case CALLBACKS: {
-        if (!lookup->GetCallbackObject()->IsAccessorInfo()) return;
-        AccessorInfo* callback =
-            AccessorInfo::cast(lookup->GetCallbackObject());
+        Handle<Object> callback_object(lookup->GetCallbackObject());
+        if (!callback_object->IsAccessorInfo()) return;
+        Handle<AccessorInfo> callback =
+            Handle<AccessorInfo>::cast(callback_object);
         if (v8::ToCData<Address>(callback->getter()) == 0) return;
-        maybe_code = isolate()->stub_cache()->ComputeKeyedLoadCallback(
-            *name, *receiver, lookup->holder(), callback);
+        code = isolate()->stub_cache()->ComputeKeyedLoadCallback(
+            name, receiver, holder, callback);
         break;
       }
-      case INTERCEPTOR: {
+      case INTERCEPTOR:
         ASSERT(HasInterceptorGetter(lookup->holder()));
-        maybe_code = isolate()->stub_cache()->ComputeKeyedLoadInterceptor(
-            *name, *receiver, lookup->holder());
+        code = isolate()->stub_cache()->ComputeKeyedLoadInterceptor(
+            name, receiver, holder);
         break;
-      }
-      default: {
+      default:
         // Always rewrite to the generic case so that we do not
         // repeatedly try to rewrite.
-        maybe_code = generic_stub();
+        code = generic_stub();
         break;
-      }
     }
   }
 
-  // If we're unable to compute the stub (not enough memory left), we
-  // simply avoid updating the caches.
-  if (maybe_code == NULL || !maybe_code->ToObject(&code)) return;
-
   // Patch the call site depending on the state of the cache.  Make
   // sure to always rewrite from monomorphic to megamorphic.
   ASSERT(state != MONOMORPHIC_PROTOTYPE_FAILURE);
   if (state == UNINITIALIZED || state == PREMONOMORPHIC) {
-    set_target(Code::cast(code));
+    set_target(*code);
   } else if (state == MONOMORPHIC) {
-    set_target(megamorphic_stub());
+    set_target(*megamorphic_stub());
   }
 
-#ifdef DEBUG
-  TraceIC("KeyedLoadIC", name, state, target());
-#endif
+  TRACE_IC("KeyedLoadIC", name, state, target());
 }
 
 
@@ -1359,17 +1216,17 @@
 }
 
 
-static bool LookupForWrite(JSObject* receiver,
-                           String* name,
+static bool LookupForWrite(Handle<JSObject> receiver,
+                           Handle<String> name,
                            LookupResult* lookup) {
-  receiver->LocalLookup(name, lookup);
+  receiver->LocalLookup(*name, lookup);
   if (!StoreICableLookup(lookup)) {
     return false;
   }
 
   if (lookup->type() == INTERCEPTOR &&
       receiver->GetNamedInterceptor()->setter()->IsUndefined()) {
-    receiver->LocalLookupRealNamedProperty(name, lookup);
+    receiver->LocalLookupRealNamedProperty(*name, lookup);
     return StoreICableLookup(lookup);
   }
 
@@ -1401,6 +1258,7 @@
       return TypeError("strict_read_only_property", object, name);
     }
     // Ignore other stores where the receiver is not a JSObject.
+    // TODO(1475): Must check prototype chains of object wrappers.
     return *value;
   }
 
@@ -1409,31 +1267,30 @@
   // Check if the given name is an array index.
   uint32_t index;
   if (name->AsArrayIndex(&index)) {
-    HandleScope scope(isolate());
     Handle<Object> result = SetElement(receiver, index, value, strict_mode);
-    if (result.is_null()) return Failure::Exception();
+    RETURN_IF_EMPTY_HANDLE(isolate(), result);
     return *value;
   }
 
   // Use specialized code for setting the length of arrays.
   if (receiver->IsJSArray()
       && name->Equals(isolate()->heap()->length_symbol())
-      && JSArray::cast(*receiver)->AllowsSetElementsLength()) {
+      && Handle<JSArray>::cast(receiver)->AllowsSetElementsLength()) {
 #ifdef DEBUG
     if (FLAG_trace_ic) PrintF("[StoreIC : +#length /array]\n");
 #endif
-    Builtins::Name target = (strict_mode == kStrictMode)
-        ? Builtins::kStoreIC_ArrayLength_Strict
-        : Builtins::kStoreIC_ArrayLength;
-    set_target(isolate()->builtins()->builtin(target));
+    Handle<Code> stub = (strict_mode == kStrictMode)
+        ? isolate()->builtins()->StoreIC_ArrayLength_Strict()
+        : isolate()->builtins()->StoreIC_ArrayLength();
+    set_target(*stub);
     return receiver->SetProperty(*name, *value, NONE, strict_mode);
   }
 
   // Lookup the property locally in the receiver.
   if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) {
-    LookupResult lookup;
+    LookupResult lookup(isolate());
 
-    if (LookupForWrite(*receiver, *name, &lookup)) {
+    if (LookupForWrite(receiver, name, &lookup)) {
       // Generate a stub for this store.
       UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
     } else {
@@ -1450,16 +1307,15 @@
   }
 
   if (receiver->IsJSGlobalProxy()) {
+    // TODO(ulan): find out why we patch this site even with --no-use-ic
     // Generate a generic stub that goes to the runtime when we see a global
     // proxy as receiver.
-    Code* stub = (strict_mode == kStrictMode)
+    Handle<Code> stub = (strict_mode == kStrictMode)
         ? global_proxy_stub_strict()
         : global_proxy_stub();
-    if (target() != stub) {
-      set_target(stub);
-#ifdef DEBUG
-      TraceIC("StoreIC", name, state, target());
-#endif
+    if (target() != *stub) {
+      set_target(*stub);
+      TRACE_IC("StoreIC", name, state, target());
     }
   }
 
@@ -1487,89 +1343,82 @@
   // Compute the code stub for this store; used for rewriting to
   // monomorphic state and making sure that the code stub is in the
   // stub cache.
-  MaybeObject* maybe_code = NULL;
-  Object* code = NULL;
+  Handle<Code> code;
   switch (type) {
-    case FIELD: {
-      maybe_code = isolate()->stub_cache()->ComputeStoreField(
-          *name, *receiver, lookup->GetFieldIndex(), NULL, strict_mode);
+    case FIELD:
+      code = isolate()->stub_cache()->ComputeStoreField(name,
+                                                        receiver,
+                                                        lookup->GetFieldIndex(),
+                                                        Handle<Map>::null(),
+                                                        strict_mode);
       break;
-    }
     case MAP_TRANSITION: {
       if (lookup->GetAttributes() != NONE) return;
-      HandleScope scope(isolate());
       ASSERT(type == MAP_TRANSITION);
       Handle<Map> transition(lookup->GetTransitionMap());
       int index = transition->PropertyIndexFor(*name);
-      maybe_code = isolate()->stub_cache()->ComputeStoreField(
-          *name, *receiver, index, *transition, strict_mode);
+      code = isolate()->stub_cache()->ComputeStoreField(
+          name, receiver, index, transition, strict_mode);
       break;
     }
-    case NORMAL: {
+    case NORMAL:
       if (receiver->IsGlobalObject()) {
         // The stub generated for the global object picks the value directly
         // from the property cell. So the property must be directly on the
         // global object.
         Handle<GlobalObject> global = Handle<GlobalObject>::cast(receiver);
-        JSGlobalPropertyCell* cell =
-            JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
-        maybe_code = isolate()->stub_cache()->ComputeStoreGlobal(
-            *name, *global, cell, strict_mode);
+        Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(lookup));
+        code = isolate()->stub_cache()->ComputeStoreGlobal(
+            name, global, cell, strict_mode);
       } else {
         if (lookup->holder() != *receiver) return;
-        maybe_code = isolate()->stub_cache()->ComputeStoreNormal(strict_mode);
+        code = isolate()->stub_cache()->ComputeStoreNormal(strict_mode);
       }
       break;
-    }
     case CALLBACKS: {
-      if (!lookup->GetCallbackObject()->IsAccessorInfo()) return;
-      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+      Handle<Object> callback_object(lookup->GetCallbackObject());
+      if (!callback_object->IsAccessorInfo()) return;
+      Handle<AccessorInfo> callback =
+          Handle<AccessorInfo>::cast(callback_object);
       if (v8::ToCData<Address>(callback->setter()) == 0) return;
-      maybe_code = isolate()->stub_cache()->ComputeStoreCallback(
-          *name, *receiver, callback, strict_mode);
+      code = isolate()->stub_cache()->ComputeStoreCallback(
+          name, receiver, callback, strict_mode);
       break;
     }
-    case INTERCEPTOR: {
+    case INTERCEPTOR:
       ASSERT(!receiver->GetNamedInterceptor()->setter()->IsUndefined());
-      maybe_code = isolate()->stub_cache()->ComputeStoreInterceptor(
-          *name, *receiver, strict_mode);
+      code = isolate()->stub_cache()->ComputeStoreInterceptor(
+          name, receiver, strict_mode);
       break;
-    }
     default:
       return;
   }
 
-  // If we're unable to compute the stub (not enough memory left), we
-  // simply avoid updating the caches.
-  if (maybe_code == NULL || !maybe_code->ToObject(&code)) return;
-
   // Patch the call site depending on the state of the cache.
   if (state == UNINITIALIZED || state == MONOMORPHIC_PROTOTYPE_FAILURE) {
-    set_target(Code::cast(code));
+    set_target(*code);
   } else if (state == MONOMORPHIC) {
     // Only move to megamorphic if the target changes.
-    if (target() != Code::cast(code)) {
+    if (target() != *code) {
       set_target((strict_mode == kStrictMode)
                    ? megamorphic_stub_strict()
                    : megamorphic_stub());
     }
   } else if (state == MEGAMORPHIC) {
     // Update the stub cache.
-    isolate()->stub_cache()->Set(*name,
-                                 receiver->map(),
-                                 Code::cast(code));
+    isolate()->stub_cache()->Set(*name, receiver->map(), *code);
   }
 
-#ifdef DEBUG
-  TraceIC("StoreIC", name, state, target());
-#endif
+  TRACE_IC("StoreIC", name, state, target());
 }
 
 
-static bool AddOneReceiverMapIfMissing(MapList* receiver_maps,
-                                       Map* new_receiver_map) {
+static bool AddOneReceiverMapIfMissing(MapHandleList* receiver_maps,
+                                       Handle<Map> new_receiver_map) {
+  ASSERT(!new_receiver_map.is_null());
   for (int current = 0; current < receiver_maps->length(); ++current) {
-    if (receiver_maps->at(current) == new_receiver_map) {
+    if (!receiver_maps->at(current).is_null() &&
+        receiver_maps->at(current).is_identical_to(new_receiver_map)) {
       return false;
     }
   }
@@ -1578,45 +1427,40 @@
 }
 
 
-void KeyedIC::GetReceiverMapsForStub(Code* stub, MapList* result) {
+void KeyedIC::GetReceiverMapsForStub(Handle<Code> stub,
+                                     MapHandleList* result) {
   ASSERT(stub->is_inline_cache_stub());
-  if (stub == string_stub()) {
-    return result->Add(isolate()->heap()->string_map());
+  if (!string_stub().is_null() && stub.is_identical_to(string_stub())) {
+    return result->Add(isolate()->factory()->string_map());
   } else if (stub->is_keyed_load_stub() || stub->is_keyed_store_stub()) {
     if (stub->ic_state() == MONOMORPHIC) {
-      result->Add(Map::cast(stub->FindFirstMap()));
+      result->Add(Handle<Map>(stub->FindFirstMap()));
     } else {
       ASSERT(stub->ic_state() == MEGAMORPHIC);
       AssertNoAllocation no_allocation;
       int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
-      for (RelocIterator it(stub, mask); !it.done(); it.next()) {
+      for (RelocIterator it(*stub, mask); !it.done(); it.next()) {
         RelocInfo* info = it.rinfo();
-        Object* object = info->target_object();
+        Handle<Object> object(info->target_object());
         ASSERT(object->IsMap());
-        result->Add(Map::cast(object));
+        AddOneReceiverMapIfMissing(result, Handle<Map>::cast(object));
       }
     }
   }
 }
 
 
-MaybeObject* KeyedIC::ComputeStub(JSObject* receiver,
+Handle<Code> KeyedIC::ComputeStub(Handle<JSObject> receiver,
                                   StubKind stub_kind,
                                   StrictModeFlag strict_mode,
-                                  Code* generic_stub) {
+                                  Handle<Code> generic_stub) {
   State ic_state = target()->ic_state();
   if ((ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) &&
       !IsTransitionStubKind(stub_kind)) {
-    Code* monomorphic_stub;
-    MaybeObject* maybe_stub = ComputeMonomorphicStub(receiver,
-                                                     stub_kind,
-                                                     strict_mode,
-                                                     generic_stub);
-    if (!maybe_stub->To(&monomorphic_stub)) return maybe_stub;
-
-    return monomorphic_stub;
+    return ComputeMonomorphicStub(
+        receiver, stub_kind, strict_mode, generic_stub);
   }
-  ASSERT(target() != generic_stub);
+  ASSERT(target() != *generic_stub);
 
   // Don't handle megamorphic property accesses for INTERCEPTORS or CALLBACKS
   // via megamorphic stubs, since they don't have a map in their relocation info
@@ -1627,18 +1471,17 @@
 
   // Determine the list of receiver maps that this call site has seen,
   // adding the map that was just encountered.
-  MapList target_receiver_maps;
+  MapHandleList target_receiver_maps;
+  Handle<Map> receiver_map(receiver->map());
   if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) {
-    target_receiver_maps.Add(receiver->map());
+    target_receiver_maps.Add(receiver_map);
   } else {
-    GetReceiverMapsForStub(target(), &target_receiver_maps);
+    GetReceiverMapsForStub(Handle<Code>(target()), &target_receiver_maps);
   }
   bool map_added =
-      AddOneReceiverMapIfMissing(&target_receiver_maps, receiver->map());
+      AddOneReceiverMapIfMissing(&target_receiver_maps, receiver_map);
   if (IsTransitionStubKind(stub_kind)) {
-    MaybeObject* maybe_map = ComputeTransitionedMap(receiver, stub_kind);
-    Map* new_map = NULL;
-    if (!maybe_map->To(&new_map)) return maybe_map;
+    Handle<Map> new_map = ComputeTransitionedMap(receiver, stub_kind);
     map_added |= AddOneReceiverMapIfMissing(&target_receiver_maps, new_map);
   }
   if (!map_added) {
@@ -1653,31 +1496,24 @@
     return generic_stub;
   }
 
-  PolymorphicCodeCache* cache = isolate()->heap()->polymorphic_code_cache();
-  Code::Flags flags = Code::ComputeFlags(this->kind(),
-                                         MEGAMORPHIC,
-                                         strict_mode);
-  Object* maybe_cached_stub = cache->Lookup(&target_receiver_maps, flags);
-  // If there is a cached stub, use it.
-  if (!maybe_cached_stub->IsUndefined()) {
-    ASSERT(maybe_cached_stub->IsCode());
-    return Code::cast(maybe_cached_stub);
-  }
-  MaybeObject* maybe_stub =
+  Handle<PolymorphicCodeCache> cache =
+      isolate()->factory()->polymorphic_code_cache();
+  Code::Flags flags = Code::ComputeFlags(kind(), MEGAMORPHIC, strict_mode);
+  Handle<Object> probe = cache->Lookup(&target_receiver_maps, flags);
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  Handle<Code> stub =
       ComputePolymorphicStub(&target_receiver_maps, strict_mode);
-  Code* stub;
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  MaybeObject* maybe_update = cache->Update(&target_receiver_maps, flags, stub);
-  if (maybe_update->IsFailure()) return maybe_update;
+  PolymorphicCodeCache::Update(cache, &target_receiver_maps, flags, stub);
   return stub;
 }
 
 
-MaybeObject* KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
-    Map* receiver_map,
+Handle<Code> KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
+    Handle<Map> receiver_map,
     StrictModeFlag strict_mode) {
   if ((receiver_map->instance_type() & kNotStringTag) == 0) {
-    ASSERT(string_stub() != NULL);
+    ASSERT(!string_stub().is_null());
     return string_stub();
   } else {
     ASSERT(receiver_map->has_dictionary_elements() ||
@@ -1692,137 +1528,78 @@
 }
 
 
-MaybeObject* KeyedIC::ComputeMonomorphicStub(JSObject* receiver,
+Handle<Code> KeyedIC::ComputeMonomorphicStub(Handle<JSObject> receiver,
                                              StubKind stub_kind,
                                              StrictModeFlag strict_mode,
-                                             Code* generic_stub) {
-  Code* result = NULL;
+                                             Handle<Code> generic_stub) {
   if (receiver->HasFastElements() ||
       receiver->HasFastSmiOnlyElements() ||
       receiver->HasExternalArrayElements() ||
       receiver->HasFastDoubleElements() ||
       receiver->HasDictionaryElements()) {
-    MaybeObject* maybe_stub =
-        isolate()->stub_cache()->ComputeKeyedLoadOrStoreElement(
-            receiver, stub_kind, strict_mode);
-    if (!maybe_stub->To(&result)) return maybe_stub;
+    return isolate()->stub_cache()->ComputeKeyedLoadOrStoreElement(
+        receiver, stub_kind, strict_mode);
   } else {
-    result = generic_stub;
+    return generic_stub;
   }
-  return result;
 }
 
 
-MaybeObject* KeyedIC::ComputeTransitionedMap(JSObject* receiver,
-                                             StubKind stub_kind) {
+Handle<Map> KeyedIC::ComputeTransitionedMap(Handle<JSObject> receiver,
+                                            StubKind stub_kind) {
   switch (stub_kind) {
     case KeyedIC::STORE_TRANSITION_SMI_TO_OBJECT:
     case KeyedIC::STORE_TRANSITION_DOUBLE_TO_OBJECT:
-      return receiver->GetElementsTransitionMap(FAST_ELEMENTS);
+      return JSObject::GetElementsTransitionMap(receiver, FAST_ELEMENTS);
+      break;
     case KeyedIC::STORE_TRANSITION_SMI_TO_DOUBLE:
-      return receiver->GetElementsTransitionMap(FAST_DOUBLE_ELEMENTS);
+      return JSObject::GetElementsTransitionMap(receiver, FAST_DOUBLE_ELEMENTS);
+      break;
     default:
       UNREACHABLE();
-      return NULL;
+      return Handle<Map>::null();
   }
 }
 
 
-MaybeObject* KeyedStoreIC::GetElementStubWithoutMapCheck(
+Handle<Code> KeyedStoreIC::GetElementStubWithoutMapCheck(
     bool is_js_array,
     ElementsKind elements_kind) {
-  return KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode();
+  return KeyedStoreElementStub(is_js_array, elements_kind).GetCode();
 }
 
 
-// If |map| is contained in |maps_list|, returns |map|; otherwise returns NULL.
-Map* GetMapIfPresent(Map* map, MapList* maps_list) {
-  for (int i = 0; i < maps_list->length(); ++i) {
-    if (maps_list->at(i) == map) return map;
-  }
-  return NULL;
-}
-
-
-// Returns the most generic transitioned map for |map| that's found in
-// |maps_list|, or NULL if no transitioned map for |map| is found at all.
-Map* GetTransitionedMap(Map* map, MapList* maps_list) {
-  ElementsKind elements_kind = map->elements_kind();
-  if (elements_kind == FAST_ELEMENTS) {
-    return NULL;
-  }
-  if (elements_kind == FAST_DOUBLE_ELEMENTS) {
-    bool dummy = true;
-    Map* fast_map = map->LookupElementsTransitionMap(FAST_ELEMENTS, &dummy);
-    if (fast_map == NULL) return NULL;
-    return GetMapIfPresent(fast_map, maps_list);
-  }
-  if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
-    bool dummy = true;
-    Map* double_map = map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS,
-                                                       &dummy);
-    // In the current implementation, if the DOUBLE map doesn't exist, the
-    // FAST map can't exist either.
-    if (double_map == NULL) return NULL;
-    Map* fast_map = map->LookupElementsTransitionMap(FAST_ELEMENTS, &dummy);
-    if (fast_map == NULL) {
-      return GetMapIfPresent(double_map, maps_list);
-    }
-    // Both double_map and fast_map are non-NULL. Return fast_map if it's in
-    // maps_list, double_map otherwise.
-    Map* fast_map_present = GetMapIfPresent(fast_map, maps_list);
-    if (fast_map_present != NULL) return fast_map_present;
-    return GetMapIfPresent(double_map, maps_list);
-  }
-  return NULL;
-}
-
-
-MaybeObject* KeyedStoreIC::ComputePolymorphicStub(
-    MapList* receiver_maps,
-    StrictModeFlag strict_mode) {
-  // TODO(yangguo): <remove>
-  Code* generic_stub = (strict_mode == kStrictMode)
-      ? isolate()->builtins()->builtin(Builtins::kKeyedStoreIC_Generic_Strict)
-      : isolate()->builtins()->builtin(Builtins::kKeyedStoreIC_Generic);
-  // </remove>
-
+Handle<Code> KeyedStoreIC::ComputePolymorphicStub(MapHandleList* receiver_maps,
+                                                  StrictModeFlag strict_mode) {
   // Collect MONOMORPHIC stubs for all target_receiver_maps.
-  CodeList handler_ics(receiver_maps->length());
-  MapList transitioned_maps(receiver_maps->length());
+  CodeHandleList handler_ics(receiver_maps->length());
+  MapHandleList transitioned_maps(receiver_maps->length());
   for (int i = 0; i < receiver_maps->length(); ++i) {
-    Map* receiver_map(receiver_maps->at(i));
-    MaybeObject* maybe_cached_stub = NULL;
-    Map* transitioned_map = GetTransitionedMap(receiver_map, receiver_maps);
-    if (transitioned_map != NULL) {
-      // TODO(yangguo): Enable this code!
-      // maybe_cached_stub = FastElementsConversionStub(
-      //     receiver_map->elements_kind(),  // original elements_kind
-      //     transitioned_map->elements_kind(),
-      //     receiver_map->instance_type() == JS_ARRAY_TYPE,  // is_js_array
-      //     strict_mode_).TryGetCode();
-      // TODO(yangguo): <remove>
-      maybe_cached_stub = generic_stub;
-      // </remove>
+    Handle<Map> receiver_map(receiver_maps->at(i));
+    Handle<Code> cached_stub;
+    Handle<Map> transitioned_map =
+        receiver_map->FindTransitionedMap(receiver_maps);
+    if (!transitioned_map.is_null()) {
+      cached_stub = ElementsTransitionAndStoreStub(
+          receiver_map->elements_kind(),  // original elements_kind
+          transitioned_map->elements_kind(),
+          receiver_map->instance_type() == JS_ARRAY_TYPE,  // is_js_array
+          strict_mode).GetCode();
     } else {
-      maybe_cached_stub = ComputeMonomorphicStubWithoutMapCheck(
-          receiver_map, strict_mode);
+      cached_stub = ComputeMonomorphicStubWithoutMapCheck(receiver_map,
+                                                          strict_mode);
     }
-    Code* cached_stub;
-    if (!maybe_cached_stub->To(&cached_stub)) return maybe_cached_stub;
+    ASSERT(!cached_stub.is_null());
     handler_ics.Add(cached_stub);
     transitioned_maps.Add(transitioned_map);
   }
-  Object* object;
-  KeyedStoreStubCompiler compiler(strict_mode);
-  MaybeObject* maybe_code = compiler.CompileStorePolymorphic(
+  KeyedStoreStubCompiler compiler(isolate(), strict_mode);
+  Handle<Code> code = compiler.CompileStorePolymorphic(
       receiver_maps, &handler_ics, &transitioned_maps);
-  if (!maybe_code->ToObject(&object)) return maybe_code;
   isolate()->counters()->keyed_store_polymorphic_stubs()->Increment();
-  PROFILE(isolate(), CodeCreateEvent(
-      Logger::KEYED_STORE_MEGAMORPHIC_IC_TAG,
-      Code::cast(object), 0));
-  return object;
+  PROFILE(isolate(),
+          CodeCreateEvent(Logger::KEYED_STORE_MEGAMORPHIC_IC_TAG, *code, 0));
+  return code;
 }
 
 
@@ -1835,6 +1612,12 @@
   if (key->IsSymbol()) {
     Handle<String> name = Handle<String>::cast(key);
 
+    // Handle proxies.
+    if (object->IsJSProxy()) {
+      return JSProxy::cast(*object)->SetProperty(
+          *name, *value, NONE, strict_mode);
+    }
+
     // If the object is undefined or null it's illegal to try to set any
     // properties on it; throw a TypeError in that case.
     if (object->IsUndefined() || object->IsNull()) {
@@ -1848,14 +1631,13 @@
     // Check if the given name is an array index.
     uint32_t index;
     if (name->AsArrayIndex(&index)) {
-      HandleScope scope(isolate());
       Handle<Object> result = SetElement(receiver, index, value, strict_mode);
-      if (result.is_null()) return Failure::Exception();
+      RETURN_IF_EMPTY_HANDLE(isolate(), result);
       return *value;
     }
 
     // Lookup the property locally in the receiver.
-    LookupResult lookup;
+    LookupResult lookup(isolate());
     receiver->LocalLookup(*name, &lookup);
 
     // Update inline cache and stub cache.
@@ -1873,17 +1655,16 @@
   ASSERT(!(use_ic && object->IsJSGlobalProxy()));
 
   if (use_ic) {
-    Code* stub = (strict_mode == kStrictMode)
+    Handle<Code> stub = (strict_mode == kStrictMode)
         ? generic_stub_strict()
         : generic_stub();
     if (object->IsJSObject()) {
-      JSObject* receiver = JSObject::cast(*object);
-      Heap* heap = Handle<JSObject>::cast(object)->GetHeap();
-      Map* elements_map = Handle<JSObject>::cast(object)->elements()->map();
-      if (elements_map == heap->non_strict_arguments_elements_map()) {
+      Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+      if (receiver->elements()->map() ==
+          isolate()->heap()->non_strict_arguments_elements_map()) {
         stub = non_strict_arguments_stub();
       } else if (!force_generic) {
-        if (key->IsSmi() && (target() != non_strict_arguments_stub())) {
+        if (key->IsSmi() && (target() != *non_strict_arguments_stub())) {
           StubKind stub_kind = STORE_NO_TRANSITION;
           if (receiver->GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
             if (value->IsHeapNumber()) {
@@ -1896,22 +1677,14 @@
               stub_kind = STORE_TRANSITION_DOUBLE_TO_OBJECT;
             }
           }
-          HandleScope scope(isolate());
-          MaybeObject* maybe_stub = ComputeStub(receiver,
-                                                stub_kind,
-                                                strict_mode,
-                                                stub);
-          stub = maybe_stub->IsFailure() ?
-              NULL : Code::cast(maybe_stub->ToObjectUnchecked());
+          stub = ComputeStub(receiver, stub_kind, strict_mode, stub);
         }
       }
     }
-    if (stub != NULL) set_target(stub);
+    if (!stub.is_null()) set_target(*stub);
   }
 
-#ifdef DEBUG
-  TraceIC("KeyedStoreIC", key, state, target());
-#endif
+  TRACE_IC("KeyedStoreIC", key, state, target());
 
   // Set the property.
   return Runtime::SetObjectProperty(
@@ -1943,75 +1716,60 @@
   // Compute the code stub for this store; used for rewriting to
   // monomorphic state and making sure that the code stub is in the
   // stub cache.
-  MaybeObject* maybe_code = NULL;
-  Object* code = NULL;
+  Handle<Code> code;
 
   switch (type) {
-    case FIELD: {
-      maybe_code = isolate()->stub_cache()->ComputeKeyedStoreField(
-          *name, *receiver, lookup->GetFieldIndex(), NULL, strict_mode);
+    case FIELD:
+      code = isolate()->stub_cache()->ComputeKeyedStoreField(
+          name, receiver, lookup->GetFieldIndex(),
+          Handle<Map>::null(), strict_mode);
       break;
-    }
-    case MAP_TRANSITION: {
+    case MAP_TRANSITION:
       if (lookup->GetAttributes() == NONE) {
-        HandleScope scope(isolate());
         ASSERT(type == MAP_TRANSITION);
         Handle<Map> transition(lookup->GetTransitionMap());
         int index = transition->PropertyIndexFor(*name);
-        maybe_code = isolate()->stub_cache()->ComputeKeyedStoreField(
-            *name, *receiver, index, *transition, strict_mode);
+        code = isolate()->stub_cache()->ComputeKeyedStoreField(
+            name, receiver, index, transition, strict_mode);
         break;
       }
       // fall through.
-    }
-    default: {
+    default:
       // Always rewrite to the generic case so that we do not
       // repeatedly try to rewrite.
-      maybe_code = (strict_mode == kStrictMode)
+      code = (strict_mode == kStrictMode)
           ? generic_stub_strict()
           : generic_stub();
       break;
-    }
   }
 
-  // If we're unable to compute the stub (not enough memory left), we
-  // simply avoid updating the caches.
-  if (maybe_code == NULL || !maybe_code->ToObject(&code)) return;
+  ASSERT(!code.is_null());
 
   // Patch the call site depending on the state of the cache.  Make
   // sure to always rewrite from monomorphic to megamorphic.
   ASSERT(state != MONOMORPHIC_PROTOTYPE_FAILURE);
   if (state == UNINITIALIZED || state == PREMONOMORPHIC) {
-    set_target(Code::cast(code));
+    set_target(*code);
   } else if (state == MONOMORPHIC) {
     set_target((strict_mode == kStrictMode)
-                 ? megamorphic_stub_strict()
-                 : megamorphic_stub());
+                 ? *megamorphic_stub_strict()
+                 : *megamorphic_stub());
   }
 
-#ifdef DEBUG
-  TraceIC("KeyedStoreIC", name, state, target());
-#endif
+  TRACE_IC("KeyedStoreIC", name, state, target());
 }
 
 
+#undef TRACE_IC
+
+
 // ----------------------------------------------------------------------------
 // Static IC stub generators.
 //
 
-static JSFunction* CompileFunction(Isolate* isolate,
-                                   JSFunction* function) {
-  // Compile now with optimization.
-  HandleScope scope(isolate);
-  Handle<JSFunction> function_handle(function, isolate);
-  CompileLazy(function_handle, CLEAR_EXCEPTION);
-  return *function_handle;
-}
-
-
 // Used from ic-<arch>.cc.
 RUNTIME_FUNCTION(MaybeObject*, CallIC_Miss) {
-  NoHandleAllocation na;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   CallIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
@@ -2020,45 +1778,46 @@
                                               extra_ic_state,
                                               args.at<Object>(0),
                                               args.at<String>(1));
-  Object* result;
-  if (!maybe_result->ToObject(&result)) return maybe_result;
+  // Result could be a function or a failure.
+  JSFunction* raw_function = NULL;
+  if (!maybe_result->To(&raw_function)) return maybe_result;
 
   // The first time the inline cache is updated may be the first time the
-  // function it references gets called.  If the function was lazily compiled
+  // function it references gets called.  If the function is lazily compiled
   // then the first call will trigger a compilation.  We check for this case
   // and we do the compilation immediately, instead of waiting for the stub
-  // currently attached to the JSFunction object to trigger compilation.  We
-  // do this in the case where we know that the inline cache is inside a loop,
-  // because then we know that we want to optimize the function.
-  if (!result->IsJSFunction() || JSFunction::cast(result)->is_compiled()) {
-    return result;
-  }
-  return CompileFunction(isolate, JSFunction::cast(result));
+  // currently attached to the JSFunction object to trigger compilation.
+  if (raw_function->is_compiled()) return raw_function;
+
+  Handle<JSFunction> function(raw_function);
+  JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
+  return *function;
 }
 
 
 // Used from ic-<arch>.cc.
 RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_Miss) {
-  NoHandleAllocation na;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   KeyedCallIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
-  Object* result;
-  { MaybeObject* maybe_result =
+  MaybeObject* maybe_result =
       ic.LoadFunction(state, args.at<Object>(0), args.at<Object>(1));
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  // Result could be a function or a failure.
+  JSFunction* raw_function = NULL;
+  if (!maybe_result->To(&raw_function)) return maybe_result;
 
-  if (!result->IsJSFunction() || JSFunction::cast(result)->is_compiled()) {
-    return result;
-  }
-  return CompileFunction(isolate, JSFunction::cast(result));
+  if (raw_function->is_compiled()) return raw_function;
+
+  Handle<JSFunction> function(raw_function);
+  JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
+  return *function;
 }
 
 
 // Used from ic-<arch>.cc.
 RUNTIME_FUNCTION(MaybeObject*, LoadIC_Miss) {
-  NoHandleAllocation na;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   LoadIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
@@ -2068,7 +1827,7 @@
 
 // Used from ic-<arch>.cc
 RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_Miss) {
-  NoHandleAllocation na;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   KeyedLoadIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
@@ -2077,7 +1836,7 @@
 
 
 RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissForceGeneric) {
-  NoHandleAllocation na;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
   KeyedLoadIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
@@ -2087,7 +1846,7 @@
 
 // Used from ic-<arch>.cc.
 RUNTIME_FUNCTION(MaybeObject*, StoreIC_Miss) {
-  NoHandleAllocation na;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   StoreIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
@@ -2156,7 +1915,7 @@
 
 // Used from ic-<arch>.cc.
 RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Miss) {
-  NoHandleAllocation na;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   KeyedStoreIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
@@ -2190,7 +1949,7 @@
 
 
 RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissForceGeneric) {
-  NoHandleAllocation na;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   KeyedStoreIC ic(isolate);
   IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
diff --git a/src/ic.h b/src/ic.h
index ca8447e..81aa6b7 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -198,47 +198,60 @@
   class Contextual: public BitField<bool, 0, 1> {};
   class StringStubState: public BitField<StringStubFeedback, 1, 1> {};
 
- protected:
-  CallICBase(Code::Kind kind, Isolate* isolate)
-      : IC(EXTRA_CALL_FRAME, isolate), kind_(kind) {}
-
- public:
+  // Returns a JSFunction or a Failure.
   MUST_USE_RESULT MaybeObject* LoadFunction(State state,
                                             Code::ExtraICState extra_ic_state,
                                             Handle<Object> object,
                                             Handle<String> name);
 
  protected:
-  Code::Kind kind_;
+  CallICBase(Code::Kind kind, Isolate* isolate)
+      : IC(EXTRA_CALL_FRAME, isolate), kind_(kind) {}
 
   bool TryUpdateExtraICState(LookupResult* lookup,
                              Handle<Object> object,
                              Code::ExtraICState* extra_ic_state);
 
-  MUST_USE_RESULT MaybeObject* ComputeMonomorphicStub(
-      LookupResult* lookup,
-      State state,
-      Code::ExtraICState extra_ic_state,
-      Handle<Object> object,
-      Handle<String> name);
+  // Compute a monomorphic stub if possible, otherwise return a null handle.
+  Handle<Code> ComputeMonomorphicStub(LookupResult* lookup,
+                                      State state,
+                                      Code::ExtraICState extra_state,
+                                      Handle<Object> object,
+                                      Handle<String> name);
 
-  // Update the inline cache and the global stub cache based on the
-  // lookup result.
+  // Update the inline cache and the global stub cache based on the lookup
+  // result.
   void UpdateCaches(LookupResult* lookup,
                     State state,
                     Code::ExtraICState extra_ic_state,
                     Handle<Object> object,
                     Handle<String> name);
 
-  // Returns a JSFunction if the object can be called as a function,
-  // and patches the stack to be ready for the call.
-  // Otherwise, it returns the undefined value.
-  Object* TryCallAsFunction(Object* object);
+  // Returns a JSFunction if the object can be called as a function, and
+  // patches the stack to be ready for the call.  Otherwise, it returns the
+  // undefined value.
+  Handle<Object> TryCallAsFunction(Handle<Object> object);
 
   void ReceiverToObjectIfRequired(Handle<Object> callee, Handle<Object> object);
 
   static void Clear(Address address, Code* target);
 
+  // Platform-specific code generation functions used by both call and
+  // keyed call.
+  static void GenerateMiss(MacroAssembler* masm,
+                           int argc,
+                           IC::UtilityId id,
+                           Code::ExtraICState extra_state);
+
+  static void GenerateNormal(MacroAssembler* masm, int argc);
+
+  static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
+                                            int argc,
+                                            Code::Kind kind,
+                                            Code::ExtraICState extra_state);
+
+  Code::Kind kind_;
+
   friend class IC;
 };
 
@@ -252,16 +265,24 @@
   // Code generator routines.
   static void GenerateInitialize(MacroAssembler* masm,
                                  int argc,
-                                 Code::ExtraICState extra_ic_state) {
-    GenerateMiss(masm, argc, extra_ic_state);
+                                 Code::ExtraICState extra_state) {
+    GenerateMiss(masm, argc, extra_state);
   }
+
   static void GenerateMiss(MacroAssembler* masm,
                            int argc,
-                           Code::ExtraICState extra_ic_state);
+                           Code::ExtraICState extra_state) {
+    CallICBase::GenerateMiss(masm, argc, IC::kCallIC_Miss, extra_state);
+  }
+
   static void GenerateMegamorphic(MacroAssembler* masm,
                                   int argc,
                                   Code::ExtraICState extra_ic_state);
-  static void GenerateNormal(MacroAssembler* masm, int argc);
+
+  static void GenerateNormal(MacroAssembler* masm, int argc) {
+    CallICBase::GenerateNormal(masm, argc);
+    GenerateMiss(masm, argc, Code::kNoExtraICState);
+  }
 };
 
 
@@ -280,7 +301,12 @@
   static void GenerateInitialize(MacroAssembler* masm, int argc) {
     GenerateMiss(masm, argc);
   }
-  static void GenerateMiss(MacroAssembler* masm, int argc);
+
+  static void GenerateMiss(MacroAssembler* masm, int argc) {
+    CallICBase::GenerateMiss(masm, argc, IC::kKeyedCallIC_Miss,
+                             Code::kNoExtraICState);
+  }
+
   static void GenerateMegamorphic(MacroAssembler* masm, int argc);
   static void GenerateNormal(MacroAssembler* masm, int argc);
   static void GenerateNonStrictArguments(MacroAssembler* masm, int argc);
@@ -321,17 +347,15 @@
                     Handle<String> name);
 
   // Stub accessors.
-  Code* megamorphic_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kLoadIC_Megamorphic);
+  Handle<Code> megamorphic_stub() {
+    return isolate()->builtins()->LoadIC_Megamorphic();
   }
   static Code* initialize_stub() {
     return Isolate::Current()->builtins()->builtin(
         Builtins::kLoadIC_Initialize);
   }
-  Code* pre_monomorphic_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kLoadIC_PreMonomorphic);
+  Handle<Code> pre_monomorphic_stub() {
+    return isolate()->builtins()->LoadIC_PreMonomorphic();
   }
 
   static void Clear(Address address, Code* target);
@@ -352,38 +376,39 @@
   explicit KeyedIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) {}
   virtual ~KeyedIC() {}
 
-  virtual MaybeObject* GetElementStubWithoutMapCheck(
+  virtual Handle<Code> GetElementStubWithoutMapCheck(
       bool is_js_array,
       ElementsKind elements_kind) = 0;
 
  protected:
-  virtual Code* string_stub() {
-    return NULL;
+  virtual Handle<Code> string_stub() {
+    return Handle<Code>::null();
   }
 
   virtual Code::Kind kind() const = 0;
 
-  MaybeObject* ComputeStub(JSObject* receiver,
+  Handle<Code> ComputeStub(Handle<JSObject> receiver,
                            StubKind stub_kind,
                            StrictModeFlag strict_mode,
-                           Code* default_stub);
+                           Handle<Code> default_stub);
 
-  virtual MaybeObject* ComputePolymorphicStub(MapList* receiver_maps,
+  virtual Handle<Code> ComputePolymorphicStub(MapHandleList* receiver_maps,
                                               StrictModeFlag strict_mode) = 0;
 
-  MaybeObject* ComputeMonomorphicStubWithoutMapCheck(
-      Map* receiver_map,
+  Handle<Code> ComputeMonomorphicStubWithoutMapCheck(
+      Handle<Map> receiver_map,
       StrictModeFlag strict_mode);
 
  private:
-  void GetReceiverMapsForStub(Code* stub, MapList* result);
+  void GetReceiverMapsForStub(Handle<Code> stub, MapHandleList* result);
 
-  MaybeObject* ComputeMonomorphicStub(JSObject* receiver,
+  Handle<Code> ComputeMonomorphicStub(Handle<JSObject> receiver,
                                       StubKind stub_kind,
                                       StrictModeFlag strict_mode,
-                                      Code* default_stub);
+                                      Handle<Code> default_stub);
 
-  MaybeObject* ComputeTransitionedMap(JSObject* receiver, StubKind stub_kind);
+  Handle<Map> ComputeTransitionedMap(Handle<JSObject> receiver,
+                                     StubKind stub_kind);
 
   static bool IsTransitionStubKind(StubKind stub_kind) {
     return stub_kind > STORE_NO_TRANSITION;
@@ -423,20 +448,18 @@
   static const int kSlowCaseBitFieldMask =
       (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor);
 
-  virtual MaybeObject* GetElementStubWithoutMapCheck(
+  virtual Handle<Code> GetElementStubWithoutMapCheck(
       bool is_js_array,
       ElementsKind elements_kind);
 
  protected:
   virtual Code::Kind kind() const { return Code::KEYED_LOAD_IC; }
 
-  virtual MaybeObject* ComputePolymorphicStub(
-      MapList* receiver_maps,
-      StrictModeFlag strict_mode);
+  virtual Handle<Code> ComputePolymorphicStub(MapHandleList* receiver_maps,
+                                              StrictModeFlag strict_mode);
 
-  virtual Code* string_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedLoadIC_String);
+  virtual Handle<Code> string_stub() {
+    return isolate()->builtins()->KeyedLoadIC_String();
   }
 
  private:
@@ -451,25 +474,20 @@
     return Isolate::Current()->builtins()->builtin(
         Builtins::kKeyedLoadIC_Initialize);
   }
-  Code* megamorphic_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedLoadIC_Generic);
+  Handle<Code> megamorphic_stub() {
+    return isolate()->builtins()->KeyedLoadIC_Generic();
   }
-  Code* generic_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedLoadIC_Generic);
+  Handle<Code> generic_stub() {
+    return isolate()->builtins()->KeyedLoadIC_Generic();
   }
-  Code* pre_monomorphic_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedLoadIC_PreMonomorphic);
+  Handle<Code> pre_monomorphic_stub() {
+    return isolate()->builtins()->KeyedLoadIC_PreMonomorphic();
   }
-  Code* indexed_interceptor_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedLoadIC_IndexedInterceptor);
+  Handle<Code> indexed_interceptor_stub() {
+    return isolate()->builtins()->KeyedLoadIC_IndexedInterceptor();
   }
-  Code* non_strict_arguments_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedLoadIC_NonStrictArguments);
+  Handle<Code> non_strict_arguments_stub() {
+    return isolate()->builtins()->KeyedLoadIC_NonStrictArguments();
   }
 
   static void Clear(Address address, Code* target);
@@ -534,13 +552,11 @@
     return Isolate::Current()->builtins()->builtin(
         Builtins::kStoreIC_Initialize_Strict);
   }
-  Code* global_proxy_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kStoreIC_GlobalProxy);
+  Handle<Code> global_proxy_stub() {
+    return isolate()->builtins()->StoreIC_GlobalProxy();
   }
-  Code* global_proxy_stub_strict() {
-    return isolate()->builtins()->builtin(
-        Builtins::kStoreIC_GlobalProxy_Strict);
+  Handle<Code> global_proxy_stub_strict() {
+    return isolate()->builtins()->StoreIC_GlobalProxy_Strict();
   }
 
   static void Clear(Address address, Code* target);
@@ -572,17 +588,18 @@
                                          StrictModeFlag strict_mode);
   static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode);
   static void GenerateNonStrictArguments(MacroAssembler* masm);
+  static void GenerateTransitionElementsSmiToDouble(MacroAssembler* masm);
+  static void GenerateTransitionElementsDoubleToObject(MacroAssembler* masm);
 
-  virtual MaybeObject* GetElementStubWithoutMapCheck(
+  virtual Handle<Code> GetElementStubWithoutMapCheck(
       bool is_js_array,
       ElementsKind elements_kind);
 
  protected:
   virtual Code::Kind kind() const { return Code::KEYED_STORE_IC; }
 
-  virtual MaybeObject* ComputePolymorphicStub(
-      MapList* receiver_maps,
-      StrictModeFlag strict_mode);
+  virtual Handle<Code> ComputePolymorphicStub(MapHandleList* receiver_maps,
+                                              StrictModeFlag strict_mode);
 
   private:
   // Update the inline cache.
@@ -605,29 +622,24 @@
     return Isolate::Current()->builtins()->builtin(
         Builtins::kKeyedStoreIC_Initialize);
   }
-  Code* megamorphic_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedStoreIC_Generic);
-  }
   static Code* initialize_stub_strict() {
     return Isolate::Current()->builtins()->builtin(
         Builtins::kKeyedStoreIC_Initialize_Strict);
   }
-  Code* megamorphic_stub_strict() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedStoreIC_Generic_Strict);
+  Handle<Code> megamorphic_stub() {
+    return isolate()->builtins()->KeyedStoreIC_Generic();
   }
-  Code* generic_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedStoreIC_Generic);
+  Handle<Code> megamorphic_stub_strict() {
+    return isolate()->builtins()->KeyedStoreIC_Generic_Strict();
   }
-  Code* generic_stub_strict() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedStoreIC_Generic_Strict);
+  Handle<Code> generic_stub() {
+    return isolate()->builtins()->KeyedStoreIC_Generic();
   }
-  Code* non_strict_arguments_stub() {
-    return isolate()->builtins()->builtin(
-        Builtins::kKeyedStoreIC_NonStrictArguments);
+  Handle<Code> generic_stub_strict() {
+    return isolate()->builtins()->KeyedStoreIC_Generic_Strict();
+  }
+  Handle<Code> non_strict_arguments_stub() {
+    return isolate()->builtins()->KeyedStoreIC_NonStrictArguments();
   }
 
   static void Clear(Address address, Code* target);
diff --git a/src/incremental-marking-inl.h b/src/incremental-marking-inl.h
index 43fe0f5..2a7fba7 100644
--- a/src/incremental-marking-inl.h
+++ b/src/incremental-marking-inl.h
@@ -143,9 +143,6 @@
 
 
 void IncrementalMarking::WhiteToGrey(HeapObject* obj, MarkBit mark_bit) {
-  ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
-  ASSERT(obj->Size() >= 2*kPointerSize);
-  ASSERT(IsMarking());
   Marking::WhiteToGrey(mark_bit);
 }
 
diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc
index 88ebd78..68b830a 100644
--- a/src/incremental-marking.cc
+++ b/src/incremental-marking.cc
@@ -50,7 +50,8 @@
       steps_took_since_last_gc_(0),
       should_hurry_(false),
       allocation_marking_factor_(0),
-      allocated_(0) {
+      allocated_(0),
+      no_marking_scope_depth_(0) {
 }
 
 
@@ -87,6 +88,16 @@
 }
 
 
+void IncrementalMarking::RecordCodeTargetPatch(Code* host,
+                                               Address pc,
+                                               HeapObject* value) {
+  if (IsMarking()) {
+    RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
+    RecordWriteIntoCode(host, &rinfo, value);
+  }
+}
+
+
 void IncrementalMarking::RecordCodeTargetPatch(Address pc, HeapObject* value) {
   if (IsMarking()) {
     Code* host = heap_->isolate()->inner_pointer_to_code_cache()->
@@ -343,7 +354,8 @@
   static const intptr_t kActivationThreshold = 0;
 #endif
 
-  return FLAG_incremental_marking &&
+  return !FLAG_expose_gc &&
+      FLAG_incremental_marking &&
       !Serializer::enabled() &&
       heap_->PromotedSpaceSize() > kActivationThreshold;
 }
@@ -461,7 +473,9 @@
 
 #ifdef DEBUG
   // Marking bits are cleared by the sweeper.
-  heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
+  if (FLAG_verify_heap) {
+    heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
+  }
 #endif
 
   heap_->CompletelyClearInstanceofCache();
@@ -692,6 +706,8 @@
 
   if (allocated_ < kAllocatedThreshold) return;
 
+  if (state_ == MARKING && no_marking_scope_depth_ > 0) return;
+
   intptr_t bytes_to_process = allocated_ * allocation_marking_factor_;
 
   double start = 0;
@@ -739,8 +755,8 @@
       }
 
       MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
-      ASSERT(Marking::IsGrey(obj_mark_bit) ||
-             (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
+      SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
+                  (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
       Marking::MarkBlack(obj_mark_bit);
       MemoryChunk::IncrementLiveBytes(obj->address(), size);
     }
diff --git a/src/incremental-marking.h b/src/incremental-marking.h
index d1627bc..fa7337b 100644
--- a/src/incremental-marking.h
+++ b/src/incremental-marking.h
@@ -127,6 +127,7 @@
   inline void RecordWriteIntoCode(HeapObject* obj,
                                   RelocInfo* rinfo,
                                   Object* value);
+  void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value);
   void RecordCodeTargetPatch(Address pc, HeapObject* value);
   void RecordWriteOfCodeEntry(JSFunction* host, Object** slot, Code* value);
 
@@ -197,6 +198,14 @@
     }
   }
 
+  void EnterNoMarkingScope() {
+    no_marking_scope_depth_++;
+  }
+
+  void LeaveNoMarkingScope() {
+    no_marking_scope_depth_--;
+  }
+
  private:
   void set_should_hurry(bool val) {
     should_hurry_ = val;
@@ -248,6 +257,8 @@
   int allocation_marking_factor_;
   intptr_t allocated_;
 
+  int no_marking_scope_depth_;
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking);
 };
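
The new no_marking_scope_depth_ counter lets callers temporarily suppress incremental marking: Step() returns early while the depth is non-zero. A minimal standalone sketch of that pattern, with hypothetical class names (the RAII guard is an illustration, not V8's API):

    #include <cassert>

    // Hypothetical stand-in for the marker that owns a scope depth counter.
    class Marker {
     public:
      void EnterNoMarkingScope() { no_marking_scope_depth_++; }
      void LeaveNoMarkingScope() { no_marking_scope_depth_--; }
      // A marking step is skipped while any no-marking scope is active.
      bool CanStep() const { return no_marking_scope_depth_ == 0; }
     private:
      int no_marking_scope_depth_ = 0;
    };

    // RAII guard so Enter/Leave always stay balanced, even on early returns.
    class NoMarkingScope {
     public:
      explicit NoMarkingScope(Marker* m) : marker_(m) {
        marker_->EnterNoMarkingScope();
      }
      ~NoMarkingScope() { marker_->LeaveNoMarkingScope(); }
     private:
      Marker* marker_;
    };

    int main() {
      Marker m;
      assert(m.CanStep());
      {
        NoMarkingScope scope(&m);
        assert(!m.CanStep());  // Step() would return early here.
      }
      assert(m.CanStep());
    }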
 
diff --git a/src/interpreter-irregexp.cc b/src/interpreter-irregexp.cc
index 796a447..b337e88 100644
--- a/src/interpreter-irregexp.cc
+++ b/src/interpreter-irregexp.cc
@@ -33,9 +33,9 @@
 #include "utils.h"
 #include "ast.h"
 #include "bytecodes-irregexp.h"
+#include "jsregexp.h"
 #include "interpreter-irregexp.h"
 
-
 namespace v8 {
 namespace internal {
 
@@ -187,12 +187,12 @@
 
 
 template <typename Char>
-static bool RawMatch(Isolate* isolate,
-                     const byte* code_base,
-                     Vector<const Char> subject,
-                     int* registers,
-                     int current,
-                     uint32_t current_char) {
+static RegExpImpl::IrregexpResult RawMatch(Isolate* isolate,
+                                           const byte* code_base,
+                                           Vector<const Char> subject,
+                                           int* registers,
+                                           int current,
+                                           uint32_t current_char) {
   const byte* pc = code_base;
   // BacktrackStack ensures that the memory allocated for the backtracking stack
   // is returned to the system or cached if there is no stack being cached at
@@ -211,24 +211,24 @@
     switch (insn & BYTECODE_MASK) {
       BYTECODE(BREAK)
         UNREACHABLE();
-        return false;
+        return RegExpImpl::RE_FAILURE;
       BYTECODE(PUSH_CP)
         if (--backtrack_stack_space < 0) {
-          return false;  // No match on backtrack stack overflow.
+          return RegExpImpl::RE_EXCEPTION;
         }
         *backtrack_sp++ = current;
         pc += BC_PUSH_CP_LENGTH;
         break;
       BYTECODE(PUSH_BT)
         if (--backtrack_stack_space < 0) {
-          return false;  // No match on backtrack stack overflow.
+          return RegExpImpl::RE_EXCEPTION;
         }
         *backtrack_sp++ = Load32Aligned(pc + 4);
         pc += BC_PUSH_BT_LENGTH;
         break;
       BYTECODE(PUSH_REGISTER)
         if (--backtrack_stack_space < 0) {
-          return false;  // No match on backtrack stack overflow.
+          return RegExpImpl::RE_EXCEPTION;
         }
         *backtrack_sp++ = registers[insn >> BYTECODE_SHIFT];
         pc += BC_PUSH_REGISTER_LENGTH;
@@ -278,9 +278,9 @@
         pc += BC_POP_REGISTER_LENGTH;
         break;
       BYTECODE(FAIL)
-        return false;
+        return RegExpImpl::RE_FAILURE;
       BYTECODE(SUCCEED)
-        return true;
+        return RegExpImpl::RE_SUCCESS;
       BYTECODE(ADVANCE_CP)
         current += insn >> BYTECODE_SHIFT;
         pc += BC_ADVANCE_CP_LENGTH;
@@ -625,11 +625,12 @@
 }
 
 
-bool IrregexpInterpreter::Match(Isolate* isolate,
-                                Handle<ByteArray> code_array,
-                                Handle<String> subject,
-                                int* registers,
-                                int start_position) {
+RegExpImpl::IrregexpResult IrregexpInterpreter::Match(
+    Isolate* isolate,
+    Handle<ByteArray> code_array,
+    Handle<String> subject,
+    int* registers,
+    int start_position) {
   ASSERT(subject->IsFlat());
 
   AssertNoAllocation a;
diff --git a/src/interpreter-irregexp.h b/src/interpreter-irregexp.h
index 076f0c5..0f45d98 100644
--- a/src/interpreter-irregexp.h
+++ b/src/interpreter-irregexp.h
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -36,11 +36,11 @@
 
 class IrregexpInterpreter {
  public:
-  static bool Match(Isolate* isolate,
-                    Handle<ByteArray> code,
-                    Handle<String> subject,
-                    int* captures,
-                    int start_position);
+  static RegExpImpl::IrregexpResult Match(Isolate* isolate,
+                                          Handle<ByteArray> code,
+                                          Handle<String> subject,
+                                          int* captures,
+                                          int start_position);
 };
 
 
diff --git a/src/isolate.cc b/src/isolate.cc
index 492694e..a073af9 100644
--- a/src/isolate.cc
+++ b/src/isolate.cc
@@ -98,6 +98,7 @@
   failed_access_check_callback_ = NULL;
   save_context_ = NULL;
   catcher_ = NULL;
+  top_lookup_result_ = NULL;
 
   // These members are re-initialized later after deserialization
   // is complete.
@@ -480,6 +481,9 @@
   for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
     it.frame()->Iterate(v);
   }
+
+  // Iterate pointers in live lookup results.
+  thread->top_lookup_result_->Iterate(v);
 }
 
 
@@ -1068,6 +1072,16 @@
       message_obj = MessageHandler::MakeMessageObject("uncaught_exception",
           location, HandleVector<Object>(&exception_handle, 1), stack_trace,
           stack_trace_object);
+    } else if (location != NULL && !location->script().is_null()) {
+      // We are bootstrapping and caught an error where the location is set
+      // and we have a script for the location.
+      // In this case we could have an extension (or an internal error
+      // somewhere) and we print the line number at which the error occurred
+      // to the console for easier debugging.
+      int line_number = GetScriptLineNumberSafe(location->script(),
+                                                location->start_pos());
+      OS::PrintError("Extension or internal compilation error at line %d.\n",
+                     line_number);
     }
   }
 
diff --git a/src/isolate.h b/src/isolate.h
index 01ab04e..5453bf2 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -255,6 +255,9 @@
   // Call back function to report unsafe JS accesses.
   v8::FailedAccessCheckCallback failed_access_check_callback_;
 
+  // Head of the list of live LookupResults.
+  LookupResult* top_lookup_result_;
+
   // Whether out of memory exceptions should be ignored.
   bool ignore_out_of_memory_;
 
@@ -311,7 +314,6 @@
   V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
   V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
   V(int, suffix_table, (kBMMaxShift + 1))                                      \
-  V(uint32_t, random_seed, 2)                                                  \
   V(uint32_t, private_random_seed, 2)                                          \
   ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
 
@@ -995,6 +997,13 @@
   void SetData(void* data) { embedder_data_ = data; }
   void* GetData() { return embedder_data_; }
 
+  LookupResult* top_lookup_result() {
+    return thread_local_top_.top_lookup_result_;
+  }
+  void SetTopLookupResult(LookupResult* top) {
+    thread_local_top_.top_lookup_result_ = top;
+  }
+
  private:
   Isolate();
 
diff --git a/src/jsregexp.cc b/src/jsregexp.cc
index c1a9e06..18ff257 100644
--- a/src/jsregexp.cc
+++ b/src/jsregexp.cc
@@ -509,14 +509,16 @@
   }
   Handle<ByteArray> byte_codes(IrregexpByteCode(*irregexp, is_ascii), isolate);
 
-  if (IrregexpInterpreter::Match(isolate,
-                                 byte_codes,
-                                 subject,
-                                 register_vector,
-                                 index)) {
-    return RE_SUCCESS;
+  IrregexpResult result = IrregexpInterpreter::Match(isolate,
+                                                     byte_codes,
+                                                     subject,
+                                                     register_vector,
+                                                     index);
+  if (result == RE_EXCEPTION) {
+    ASSERT(!isolate->has_pending_exception());
+    isolate->StackOverflow();
   }
-  return RE_FAILURE;
+  return result;
 #endif  // V8_INTERPRETED_REGEXP
 }
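
With the interpreter returning a three-valued result, a backtrack-stack overflow (RE_EXCEPTION) is no longer reported as an ordinary failure; the caller in jsregexp.cc turns it into a stack-overflow exception instead. A rough sketch of that control flow, using hypothetical names:

    #include <cstdio>

    // Hypothetical three-valued result mirroring RE_FAILURE/RE_SUCCESS/RE_EXCEPTION.
    enum class MatchResult { kFailure, kSuccess, kException };

    // Stand-in for the bytecode interpreter: overflowing the backtrack stack
    // is now distinguishable from simply not matching.
    MatchResult RunInterpreter(int backtrack_stack_space, bool pattern_matches) {
      if (backtrack_stack_space < 0) return MatchResult::kException;
      return pattern_matches ? MatchResult::kSuccess : MatchResult::kFailure;
    }

    const char* Describe(MatchResult r) {
      switch (r) {
        case MatchResult::kSuccess:   return "match";
        case MatchResult::kFailure:   return "no match";
        case MatchResult::kException: return "raise stack-overflow exception";
      }
      return "";
    }

    int main() {
      std::printf("%s\n", Describe(RunInterpreter(16, true)));
      std::printf("%s\n", Describe(RunInterpreter(16, false)));
      // Previously this case was silently reported as "no match".
      std::printf("%s\n", Describe(RunInterpreter(-1, false)));
    }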
 
diff --git a/src/list-inl.h b/src/list-inl.h
index 80bccc9..e2c358c 100644
--- a/src/list-inl.h
+++ b/src/list-inl.h
@@ -216,11 +216,11 @@
     int mid = (low + high) / 2;
     T mid_elem = list[mid];
 
-    if (mid_elem > elem) {
+    if (cmp(&mid_elem, &elem) > 0) {
       high = mid - 1;
       continue;
     }
-    if (mid_elem < elem) {
+    if (cmp(&mid_elem, &elem) < 0) {
       low = mid + 1;
       continue;
     }
@@ -236,6 +236,7 @@
   return SortedListBSearch<T>(list, elem, PointerValueCompare<T>);
 }
 
+
 } }  // namespace v8::internal
 
 #endif  // V8_LIST_INL_H_
diff --git a/src/list.h b/src/list.h
index 0558709..57504e0 100644
--- a/src/list.h
+++ b/src/list.h
@@ -165,8 +165,11 @@
 
 class Map;
 class Code;
+template<typename T> class Handle;
 typedef List<Map*> MapList;
 typedef List<Code*> CodeList;
+typedef List<Handle<Map> > MapHandleList;
+typedef List<Handle<Code> > CodeHandleList;
 
 // Perform binary search for an element in an already sorted
 // list. Returns the index of the element or -1 if it was not found.
@@ -176,6 +179,7 @@
 template <typename T>
 int SortedListBSearch(const List<T>& list, T elem);
 
+
 } }  // namespace v8::internal
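
SortedListBSearch now routes all comparisons through the supplied comparison function rather than relying on operator< and operator> of the element type, presumably so the new handle list typedefs can be searched as well. A self-contained sketch of the comparator-based search, using std::vector in place of V8's List:

    #include <vector>

    // Comparator-based binary search; returns the index of elem or -1 if absent.
    // cmp follows the usual <0 / 0 / >0 convention, as in the patched list-inl.h.
    template <typename T>
    int SortedBSearch(const std::vector<T>& list, const T& elem,
                      int (*cmp)(const T* a, const T* b)) {
      int low = 0;
      int high = static_cast<int>(list.size()) - 1;
      while (low <= high) {
        int mid = (low + high) / 2;
        int c = cmp(&list[mid], &elem);
        if (c > 0) { high = mid - 1; continue; }
        if (c < 0) { low = mid + 1; continue; }
        return mid;
      }
      return -1;
    }

    static int IntCompare(const int* a, const int* b) {
      return (*a > *b) - (*a < *b);
    }

    int main() {
      std::vector<int> v = {1, 3, 5, 8, 13};
      return SortedBSearch(v, 8, IntCompare) == 3 ? 0 : 1;  // index 3 found
    }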
 
 
diff --git a/src/liveobjectlist.cc b/src/liveobjectlist.cc
index d62c4d1..408e2a3 100644
--- a/src/liveobjectlist.cc
+++ b/src/liveobjectlist.cc
@@ -1085,7 +1085,7 @@
 static int CountHeapObjects() {
   int count = 0;
   // Iterate over all the heap spaces and count the number of objects.
-  HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
+  HeapIterator iterator;
   HeapObject* heap_obj = NULL;
   while ((heap_obj = iterator.next()) != NULL) {
     count++;
@@ -1122,7 +1122,7 @@
   // allocation, and we need to allocate below.
   {
     // Iterate over all the heap spaces and add the objects.
-    HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
+    HeapIterator iterator;
     HeapObject* heap_obj = NULL;
     bool failed = false;
     while (!failed && (heap_obj = iterator.next()) != NULL) {
@@ -2513,7 +2513,7 @@
   OS::Print("  Start verify ...\n");
   OS::Print("  Verifying ...");
   Flush();
-  HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
+  HeapIterator iterator;
   HeapObject* heap_obj = NULL;
   while ((heap_obj = iterator.next()) != NULL) {
     number_of_heap_objects++;
diff --git a/src/macros.py b/src/macros.py
index 7a493ca..a42e83c 100644
--- a/src/macros.py
+++ b/src/macros.py
@@ -128,6 +128,11 @@
 # we cannot handle those anyway.
 macro IS_SPEC_FUNCTION(arg) = (%_ClassOf(arg) === 'Function');
 
+# Indices in bound function info retrieved by %BoundFunctionGetBindings(...).
+const kBoundFunctionIndex = 0;
+const kBoundThisIndex = 1;
+const kBoundArgumentsStartIndex = 2;
+
 # Inline macros. Use %IS_VAR to make sure arg is evaluated only once.
 macro NUMBER_IS_NAN(arg) = (!%_IsSmi(%IS_VAR(arg)) && !(arg == arg));
 macro NUMBER_IS_FINITE(arg) = (%_IsSmi(%IS_VAR(arg)) || ((arg == arg) && (arg != 1/0) && (arg != -1/0)));
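
The three constants describe the layout of the bindings array exposed by %BoundFunctionGetBindings: the target function at index 0, the bound receiver at index 1, and any bound arguments from index 2 onward. A hedged illustration of consuming such a layout, with plain C++ containers standing in for the real objects:

    #include <cstdio>
    #include <string>
    #include <vector>

    // Indices mirroring kBoundFunctionIndex / kBoundThisIndex /
    // kBoundArgumentsStartIndex from macros.py.
    const int kBoundFunctionIndex = 0;
    const int kBoundThisIndex = 1;
    const int kBoundArgumentsStartIndex = 2;

    int main() {
      // Hypothetical bindings for f.bind(receiver, "a", "b").
      std::vector<std::string> bindings = {"f", "receiver", "a", "b"};
      std::printf("target: %s\n", bindings[kBoundFunctionIndex].c_str());
      std::printf("this:   %s\n", bindings[kBoundThisIndex].c_str());
      for (size_t i = kBoundArgumentsStartIndex; i < bindings.size(); ++i) {
        std::printf("bound arg: %s\n", bindings[i].c_str());
      }
    }
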
diff --git a/src/mark-compact-inl.h b/src/mark-compact-inl.h
index 20f11a7..573715e 100644
--- a/src/mark-compact-inl.h
+++ b/src/mark-compact-inl.h
@@ -38,7 +38,7 @@
 
 
 MarkBit Marking::MarkBitFrom(Address addr) {
-  MemoryChunk *p = MemoryChunk::FromAddress(addr);
+  MemoryChunk* p = MemoryChunk::FromAddress(addr);
   return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr),
                                          p->ContainsOnlyData());
 }
@@ -54,9 +54,6 @@
   if (!mark_bit.Get()) {
     mark_bit.Set();
     MemoryChunk::IncrementLiveBytes(obj->address(), obj->Size());
-#ifdef DEBUG
-    UpdateLiveObjectCount(obj);
-#endif
     ProcessNewlyMarkedObject(obj);
   }
 }
@@ -67,9 +64,6 @@
   ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
   mark_bit.Set();
   MemoryChunk::IncrementLiveBytes(obj->address(), obj->Size());
-#ifdef DEBUG
-  UpdateLiveObjectCount(obj);
-#endif
 }
 
 
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 9fa79ca..b41b033 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -65,16 +65,6 @@
       collect_maps_(FLAG_collect_maps),
       tracer_(NULL),
       migration_slots_buffer_(NULL),
-#ifdef DEBUG
-      live_young_objects_size_(0),
-      live_old_pointer_objects_size_(0),
-      live_old_data_objects_size_(0),
-      live_code_objects_size_(0),
-      live_map_objects_size_(0),
-      live_cell_objects_size_(0),
-      live_lo_objects_size_(0),
-      live_bytes_(0),
-#endif
       heap_(NULL),
       code_flusher_(NULL),
       encountered_weak_maps_(NULL) { }
@@ -330,7 +320,7 @@
 #endif
 
 
-static void ClearMarkbits(PagedSpace* space) {
+static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
   PageIterator it(space);
 
   while (it.has_next()) {
@@ -339,7 +329,7 @@
 }
 
 
-static void ClearMarkbits(NewSpace* space) {
+static void ClearMarkbitsInNewSpace(NewSpace* space) {
   NewSpacePageIterator it(space->ToSpaceStart(), space->ToSpaceEnd());
 
   while (it.has_next()) {
@@ -348,15 +338,15 @@
 }
 
 
-static void ClearMarkbits(Heap* heap) {
-  ClearMarkbits(heap->code_space());
-  ClearMarkbits(heap->map_space());
-  ClearMarkbits(heap->old_pointer_space());
-  ClearMarkbits(heap->old_data_space());
-  ClearMarkbits(heap->cell_space());
-  ClearMarkbits(heap->new_space());
+void MarkCompactCollector::ClearMarkbits() {
+  ClearMarkbitsInPagedSpace(heap_->code_space());
+  ClearMarkbitsInPagedSpace(heap_->map_space());
+  ClearMarkbitsInPagedSpace(heap_->old_pointer_space());
+  ClearMarkbitsInPagedSpace(heap_->old_data_space());
+  ClearMarkbitsInPagedSpace(heap_->cell_space());
+  ClearMarkbitsInNewSpace(heap_->new_space());
 
-  LargeObjectIterator it(heap->lo_space());
+  LargeObjectIterator it(heap_->lo_space());
   for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
     MarkBit mark_bit = Marking::MarkBitFrom(obj);
     mark_bit.Clear();
@@ -504,7 +494,7 @@
   // Clear marking bits for precise sweeping to collect all garbage.
   if (was_marked_incrementally_ && PreciseSweepingRequired()) {
     heap()->incremental_marking()->Abort();
-    ClearMarkbits(heap_);
+    ClearMarkbits();
     AbortCompaction();
     was_marked_incrementally_ = false;
   }
@@ -523,21 +513,10 @@
   }
 
 #ifdef DEBUG
-  if (!was_marked_incrementally_) {
+  if (!was_marked_incrementally_ && FLAG_verify_heap) {
     VerifyMarkbitsAreClean();
   }
 #endif
-
-#ifdef DEBUG
-  live_bytes_ = 0;
-  live_young_objects_size_ = 0;
-  live_old_pointer_objects_size_ = 0;
-  live_old_data_objects_size_ = 0;
-  live_code_objects_size_ = 0;
-  live_map_objects_size_ = 0;
-  live_cell_objects_size_ = 0;
-  live_lo_objects_size_ = 0;
-#endif
 }
 
 
@@ -2176,32 +2155,6 @@
 }
 
 
-#ifdef DEBUG
-void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
-  live_bytes_ += obj->Size();
-  if (heap()->new_space()->Contains(obj)) {
-    live_young_objects_size_ += obj->Size();
-  } else if (heap()->map_space()->Contains(obj)) {
-    ASSERT(obj->IsMap());
-    live_map_objects_size_ += obj->Size();
-  } else if (heap()->cell_space()->Contains(obj)) {
-    ASSERT(obj->IsJSGlobalPropertyCell());
-    live_cell_objects_size_ += obj->Size();
-  } else if (heap()->old_pointer_space()->Contains(obj)) {
-    live_old_pointer_objects_size_ += obj->Size();
-  } else if (heap()->old_data_space()->Contains(obj)) {
-    live_old_data_objects_size_ += obj->Size();
-  } else if (heap()->code_space()->Contains(obj)) {
-    live_code_objects_size_ += obj->Size();
-  } else if (heap()->lo_space()->Contains(obj)) {
-    live_lo_objects_size_ += obj->Size();
-  } else {
-    UNREACHABLE();
-  }
-}
-#endif  // DEBUG
-
-
 void MarkCompactCollector::ReattachInitialMaps() {
   HeapObjectIterator map_iterator(heap()->map_space());
   for (HeapObject* obj = map_iterator.Next();
@@ -3649,8 +3602,6 @@
   // of the previous ones.
   SweepSpace(heap()->map_space(), PRECISE);
 
-  ASSERT(live_map_objects_size_ <= heap()->map_space()->Size());
-
   // Deallocate unmarked objects and clear marked bits for marked objects.
   heap_->lo_space()->FreeUnmarkedObjects();
 }
diff --git a/src/mark-compact.h b/src/mark-compact.h
index d54d822..254f175 100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -61,68 +61,52 @@
   // Impossible markbits: 01
   static const char* kImpossibleBitPattern;
   static inline bool IsImpossible(MarkBit mark_bit) {
-    ASSERT(strcmp(kImpossibleBitPattern, "01") == 0);
     return !mark_bit.Get() && mark_bit.Next().Get();
   }
 
   // Black markbits: 10 - this is required by the sweeper.
   static const char* kBlackBitPattern;
   static inline bool IsBlack(MarkBit mark_bit) {
-    ASSERT(strcmp(kBlackBitPattern, "10") == 0);
-    ASSERT(!IsImpossible(mark_bit));
     return mark_bit.Get() && !mark_bit.Next().Get();
   }
 
   // White markbits: 00 - this is required by the mark bit clearer.
   static const char* kWhiteBitPattern;
   static inline bool IsWhite(MarkBit mark_bit) {
-    ASSERT(strcmp(kWhiteBitPattern, "00") == 0);
-    ASSERT(!IsImpossible(mark_bit));
     return !mark_bit.Get();
   }
 
   // Grey markbits: 11
   static const char* kGreyBitPattern;
   static inline bool IsGrey(MarkBit mark_bit) {
-    ASSERT(strcmp(kGreyBitPattern, "11") == 0);
-    ASSERT(!IsImpossible(mark_bit));
     return mark_bit.Get() && mark_bit.Next().Get();
   }
 
   static inline void MarkBlack(MarkBit mark_bit) {
     mark_bit.Set();
     mark_bit.Next().Clear();
-    ASSERT(Marking::IsBlack(mark_bit));
   }
 
   static inline void BlackToGrey(MarkBit markbit) {
-    ASSERT(IsBlack(markbit));
     markbit.Next().Set();
-    ASSERT(IsGrey(markbit));
   }
 
   static inline void WhiteToGrey(MarkBit markbit) {
-    ASSERT(IsWhite(markbit));
     markbit.Set();
     markbit.Next().Set();
-    ASSERT(IsGrey(markbit));
   }
 
   static inline void GreyToBlack(MarkBit markbit) {
-    ASSERT(IsGrey(markbit));
     markbit.Next().Clear();
-    ASSERT(IsBlack(markbit));
   }
 
   static inline void BlackToGrey(HeapObject* obj) {
-    ASSERT(obj->Size() >= 2 * kPointerSize);
     BlackToGrey(MarkBitFrom(obj));
   }
 
   static inline void AnyToGrey(MarkBit markbit) {
     markbit.Set();
     markbit.Next().Set();
-    ASSERT(IsGrey(markbit));
   }
 
   // Returns true if the object whose mark is transferred is marked black.
@@ -173,8 +157,6 @@
       to_mark_bit.Next().Set();
       is_black = false;  // Was actually gray.
     }
-    ASSERT(Color(from) == Color(to));
-    ASSERT(is_black == (Color(to) == BLACK_OBJECT));
     return is_black;
   }
 
@@ -227,7 +209,6 @@
   inline void PushGrey(HeapObject* object) {
     ASSERT(object->IsHeapObject());
     if (IsFull()) {
-      ASSERT(Marking::IsGrey(Marking::MarkBitFrom(object)));
       SetOverflowed();
     } else {
       array_[top_] = object;
@@ -246,7 +227,6 @@
   inline void UnshiftGrey(HeapObject* object) {
     ASSERT(object->IsHeapObject());
     if (IsFull()) {
-      ASSERT(Marking::IsGrey(Marking::MarkBitFrom(object)));
       SetOverflowed();
     } else {
       bottom_ = ((bottom_ - 1) & mask_);
@@ -558,6 +538,8 @@
 
   void InvalidateCode(Code* code);
 
+  void ClearMarkbits();
+
  private:
   MarkCompactCollector();
   ~MarkCompactCollector();
@@ -687,10 +669,6 @@
   // heap object.
   static bool IsUnmarkedHeapObject(Object** p);
 
-#ifdef DEBUG
-  void UpdateLiveObjectCount(HeapObject* obj);
-#endif
-
   // Map transitions from a live map to a dead map must be killed.
   // We replace them with a null descriptor, with the same key.
   void ClearNonLiveTransitions();
@@ -737,37 +715,7 @@
 
   void SweepSpace(PagedSpace* space, SweeperType sweeper);
 
-
 #ifdef DEBUG
-  // -----------------------------------------------------------------------
-  // Debugging variables, functions and classes
-  // Counters used for debugging the marking phase of mark-compact or
-  // mark-sweep collection.
-
-  // Size of live objects in Heap::to_space_.
-  int live_young_objects_size_;
-
-  // Size of live objects in Heap::old_pointer_space_.
-  int live_old_pointer_objects_size_;
-
-  // Size of live objects in Heap::old_data_space_.
-  int live_old_data_objects_size_;
-
-  // Size of live objects in Heap::code_space_.
-  int live_code_objects_size_;
-
-  // Size of live objects in Heap::map_space_.
-  int live_map_objects_size_;
-
-  // Size of live objects in Heap::cell_space_.
-  int live_cell_objects_size_;
-
-  // Size of live objects in Heap::lo_space_.
-  int live_lo_objects_size_;
-
-  // Number of live bytes in this collection.
-  int live_bytes_;
-
   friend class MarkObjectVisitor;
   static void VisitObject(HeapObject* obj);
 
diff --git a/src/messages.js b/src/messages.js
index a9993af..e4607ab 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -83,7 +83,7 @@
 // objects between script tags in a browser setting.
 function ToStringCheckErrorObject(obj) {
   if (IsNativeErrorObject(obj)) {
-    return %_CallFunction(obj, errorToString);
+    return %_CallFunction(obj, ErrorToString);
   } else {
     return ToString(obj);
   }
@@ -185,14 +185,15 @@
       "define_disallowed",            ["Cannot define property:", "%0", ", object is not extensible."],
       "non_extensible_proto",         ["%0", " is not extensible"],
       "handler_non_object",           ["Proxy.", "%0", " called with non-object as handler"],
-      "trap_function_expected",       ["Proxy.", "%0", " called with non-function for ", "%1", " trap"],
+      "proto_non_object",             ["Proxy.", "%0", " called with non-object as prototype"],
+      "trap_function_expected",       ["Proxy.", "%0", " called with non-function for '", "%1", "' trap"],
       "handler_trap_missing",         ["Proxy handler ", "%0", " has no '", "%1", "' trap"],
       "handler_trap_must_be_callable", ["Proxy handler ", "%0", " has non-callable '", "%1", "' trap"],
-      "handler_returned_false",       ["Proxy handler ", "%0", " returned false for '", "%1", "' trap"],
-      "handler_returned_undefined",   ["Proxy handler ", "%0", " returned undefined for '", "%1", "' trap"],
-      "proxy_prop_not_configurable",  ["Trap ", "%1", " of proxy handler ", "%0", " returned non-configurable descriptor for property ", "%2"],
-      "proxy_non_object_prop_names",  ["Trap ", "%1", " returned non-object ", "%0"],
-      "proxy_repeated_prop_name",     ["Trap ", "%1", " returned repeated property name ", "%2"],
+      "handler_returned_false",       ["Proxy handler ", "%0", " returned false from '", "%1", "' trap"],
+      "handler_returned_undefined",   ["Proxy handler ", "%0", " returned undefined from '", "%1", "' trap"],
+      "proxy_prop_not_configurable",  ["Proxy handler ", "%0", " returned non-configurable descriptor for property '", "%2", "' from '", "%1", "' trap"],
+      "proxy_non_object_prop_names",  ["Trap '", "%1", "' returned non-object ", "%0"],
+      "proxy_repeated_prop_name",     ["Trap '", "%1", "' returned repeated property name '", "%2", "'"],
       "invalid_weakmap_key",          ["Invalid value used as weak map key"],
       // RangeError
       "invalid_array_length",         ["Invalid array length"],
@@ -240,6 +241,7 @@
       "strict_poison_pill",           ["'caller', 'callee', and 'arguments' properties may not be accessed on strict mode functions or the arguments objects for calls to them"],
       "strict_caller",                ["Illegal access to a strict mode caller function."],
       "unprotected_let",              ["Illegal let declaration in unprotected statement context."],
+      "unprotected_const",            ["Illegal const declaration in unprotected statement context."],
       "cant_prevent_ext_external_array_elements", ["Cannot prevent extension of an object with external array elements"],
       "redef_external_array_element", ["Cannot redefine a property of an object with external array elements"],
     ];
@@ -1126,6 +1128,7 @@
         return new f(m);
       }
     });
+    %SetNativeFlag(f);
   }
 
   DefineError(function Error() { });
@@ -1143,42 +1146,43 @@
 
 %SetProperty($Error.prototype, 'message', '', DONT_ENUM);
 
-// Global list of error objects visited during errorToString. This is
+// Global list of error objects visited during ErrorToString. This is
 // used to detect cycles in error toString formatting.
 const visited_errors = new InternalArray();
 const cyclic_error_marker = new $Object();
 
-function errorToStringDetectCycle(error) {
+function ErrorToStringDetectCycle(error) {
   if (!%PushIfAbsent(visited_errors, error)) throw cyclic_error_marker;
   try {
     var type = error.type;
+    var name = error.name;
+    name = IS_UNDEFINED(name) ? "Error" : TO_STRING_INLINE(name);
+    var message = error.message;
     var hasMessage = %_CallFunction(error, "message", ObjectHasOwnProperty);
     if (type && !hasMessage) {
-      var formatted = FormatMessage(%NewMessageObject(type, error.arguments));
-      return error.name + ": " + formatted;
+      message = FormatMessage(%NewMessageObject(type, error.arguments));
     }
-    var message = hasMessage ? (": " + error.message) : "";
-    return error.name + message;
+    message = IS_UNDEFINED(message) ? "" : TO_STRING_INLINE(message);
+    if (name === "") return message;
+    if (message === "") return name;
+    return name + ": " + message;
   } finally {
     visited_errors.length = visited_errors.length - 1;
   }
 }
 
-function errorToString() {
+function ErrorToString() {
   if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
     throw MakeTypeError("called_on_null_or_undefined",
                         ["Error.prototype.toString"]);
   }
-  // This helper function is needed because access to properties on
-  // the builtins object do not work inside of a catch clause.
-  function isCyclicErrorMarker(o) { return o === cyclic_error_marker; }
 
   try {
-    return errorToStringDetectCycle(this);
+    return ErrorToStringDetectCycle(this);
   } catch(e) {
     // If this error message was encountered already return the empty
     // string for it instead of recursively formatting it.
-    if (isCyclicErrorMarker(e)) {
+    if (e === cyclic_error_marker) {
       return '';
     }
     throw e;
@@ -1186,7 +1190,7 @@
 }
 
 
-InstallFunctions($Error.prototype, DONT_ENUM, ['toString', errorToString]);
+InstallFunctions($Error.prototype, DONT_ENUM, ['toString', ErrorToString]);
 
 // Boilerplate for exceptions for stack overflows. Used from
 // Isolate::StackOverflow().
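
The rewritten ErrorToString follows the ES5 Error.prototype.toString shape: the name defaults to "Error", the message defaults to the empty string, and ": " is inserted only when both parts are non-empty, while the visited_errors list still breaks formatting cycles. A sketch of just the joining rule, with std::string standing in for the JS values:

    #include <cassert>
    #include <string>

    // ES5 joining rule, with name and message already defaulted
    // ("Error" when name is undefined, "" when message is undefined).
    std::string JoinErrorParts(const std::string& name,
                               const std::string& message) {
      if (name.empty()) return message;
      if (message.empty()) return name;
      return name + ": " + message;
    }

    int main() {
      assert(JoinErrorParts("TypeError", "oops") == "TypeError: oops");
      assert(JoinErrorParts("TypeError", "") == "TypeError");
      assert(JoinErrorParts("", "oops") == "oops");
    }
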
diff --git a/src/mips/assembler-mips-inl.h b/src/mips/assembler-mips-inl.h
index 553c511..2ba9760 100644
--- a/src/mips/assembler-mips-inl.h
+++ b/src/mips/assembler-mips-inl.h
@@ -116,10 +116,10 @@
 }
 
 
-void RelocInfo::set_target_address(Address target) {
+void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
   Assembler::set_target_address_at(pc_, target);
-  if (host() != NULL && IsCodeTarget(rmode_)) {
+  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
     Object* target_code = Code::GetCodeFromTargetAddress(target);
     host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
         host(), this, HeapObject::cast(target_code));
@@ -150,10 +150,12 @@
 }
 
 
-void RelocInfo::set_target_object(Object* target) {
+void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
-  if (host() != NULL && target->IsHeapObject()) {
+  if (mode == UPDATE_WRITE_BARRIER &&
+      host() != NULL &&
+      target->IsHeapObject()) {
     host()->GetHeap()->incremental_marking()->RecordWrite(
         host(), &Memory::Object_at(pc_), HeapObject::cast(target));
   }
@@ -184,11 +186,12 @@
 }
 
 
-void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell) {
+void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
+                                WriteBarrierMode mode) {
   ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
   Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
   Memory::Address_at(pc_) = address;
-  if (host() != NULL) {
+  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
     // TODO(1550) We are passing NULL as a slot because cell can never be on
     // evacuation candidate.
     host()->GetHeap()->incremental_marking()->RecordWrite(
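
The RelocInfo setters now take a WriteBarrierMode, so callers that have already emitted a write barrier (or provably do not need one) can skip the incremental-marking notification. A minimal sketch of that gate, with hypothetical types in place of RelocInfo and the heap:

    #include <cstdio>

    enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };

    struct Heap {
      void RecordWriteForIncrementalMarking() { ++recorded_writes; }
      int recorded_writes = 0;
    };

    // Stand-in for a setter such as RelocInfo::set_target_object: the store
    // always happens, the marking notification only under UPDATE_WRITE_BARRIER
    // (and only for a heap object, modeled here as a non-null target).
    void SetTarget(Heap* heap, void** slot, void* target, WriteBarrierMode mode) {
      *slot = target;
      if (mode == UPDATE_WRITE_BARRIER && target != nullptr) {
        heap->RecordWriteForIncrementalMarking();
      }
    }

    int main() {
      Heap heap;
      void* slot = nullptr;
      int dummy = 0;
      SetTarget(&heap, &slot, &dummy, UPDATE_WRITE_BARRIER);
      SetTarget(&heap, &slot, &dummy, SKIP_WRITE_BARRIER);
      std::printf("barriers recorded: %d\n", heap.recorded_writes);  // prints 1
    }
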
diff --git a/src/mips/assembler-mips.h b/src/mips/assembler-mips.h
index 38e9537..b66ea0d 100644
--- a/src/mips/assembler-mips.h
+++ b/src/mips/assembler-mips.h
@@ -302,7 +302,7 @@
 const FPURegister f30 = { 30 };
 const FPURegister f31 = { 31 };
 
-const FPURegister kDoubleRegZero = f28;
+static const FPURegister& kDoubleRegZero = f28;
 
 // FPU (coprocessor 1) control registers.
 // Currently only FCSR (#31) is implemented.
diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc
index 5609d5e..1687abe 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -88,12 +88,6 @@
 }
 
 
-// This constant has the same value as JSArray::kPreallocatedArrayElements and
-// if JSArray::kPreallocatedArrayElements is changed handling of loop unfolding
-// below should be reconsidered.
-static const int kLoopUnfoldLimit = 4;
-
-
 // Allocate an empty JSArray. The allocated array is put into the result
 // register. An elements backing store is allocated with size initial_capacity
 // and filled with the hole values.
@@ -103,9 +97,9 @@
                                  Register scratch1,
                                  Register scratch2,
                                  Register scratch3,
-                                 int initial_capacity,
                                  Label* gc_required) {
-  ASSERT(initial_capacity > 0);
+  const int initial_capacity = JSArray::kPreallocatedArrayElements;
+  STATIC_ASSERT(initial_capacity >= 0);
   // Load the initial map from the array function.
   __ lw(scratch1, FieldMemOperand(array_function,
                                   JSFunction::kPrototypeOrInitialMapOffset));
@@ -155,13 +149,24 @@
   __ sw(scratch3, MemOperand(scratch1));
   __ Addu(scratch1, scratch1, kPointerSize);
 
-  // Fill the FixedArray with the hole value.
+  // Fill the FixedArray with the hole value. Inline the code if short.
+  if (initial_capacity == 0) return;
   ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
-  ASSERT(initial_capacity <= kLoopUnfoldLimit);
   __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
-  for (int i = 0; i < initial_capacity; i++) {
+  static const int kLoopUnfoldLimit = 4;
+  if (initial_capacity <= kLoopUnfoldLimit) {
+    for (int i = 0; i < initial_capacity; i++) {
+      __ sw(scratch3, MemOperand(scratch1, i * kPointerSize));
+    }
+  } else {
+    Label loop, entry;
+    __ Addu(scratch2, scratch1, Operand(initial_capacity * kPointerSize));
+    __ Branch(&entry);
+    __ bind(&loop);
     __ sw(scratch3, MemOperand(scratch1));
     __ Addu(scratch1, scratch1, kPointerSize);
+    __ bind(&entry);
+    __ Branch(&loop, lt, scratch1, Operand(scratch2));
   }
 }
 
@@ -177,7 +182,7 @@
 // register elements_array_storage is scratched.
 static void AllocateJSArray(MacroAssembler* masm,
                             Register array_function,  // Array function.
-                            Register array_size,  // As a smi.
+                            Register array_size,  // As a smi, cannot be 0.
                             Register result,
                             Register elements_array_storage,
                             Register elements_array_end,
@@ -185,31 +190,18 @@
                             Register scratch2,
                             bool fill_with_hole,
                             Label* gc_required) {
-  Label not_empty, allocated;
-
   // Load the initial map from the array function.
   __ lw(elements_array_storage,
          FieldMemOperand(array_function,
                          JSFunction::kPrototypeOrInitialMapOffset));
 
-  // Check whether an empty sized array is requested.
-  __ Branch(&not_empty, ne, array_size, Operand(zero_reg));
-
-  // If an empty array is requested allocate a small elements array anyway. This
-  // keeps the code below free of special casing for the empty array.
-  int size = JSArray::kSize +
-             FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
-  __ AllocateInNewSpace(size,
-                        result,
-                        elements_array_end,
-                        scratch1,
-                        gc_required,
-                        TAG_OBJECT);
-  __ Branch(&allocated);
+  if (FLAG_debug_code) {  // Assert that array size is not zero.
+    __ Assert(
+        ne, "array size is unexpectedly 0", array_size, Operand(zero_reg));
+  }
 
   // Allocate the JSArray object together with space for a FixedArray with the
   // requested number of elements.
-  __ bind(&not_empty);
   STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
   __ li(elements_array_end,
         (JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize);
@@ -228,7 +220,6 @@
   // result: JSObject
   // elements_array_storage: initial map
   // array_size: size of array (smi)
-  __ bind(&allocated);
   __ sw(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
   __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
   __ sw(elements_array_storage,
@@ -262,8 +253,6 @@
   // the actual JSArray has length 0 and the size of the JSArray for non-empty
   // JSArrays. The length of a FixedArray is stored as a smi.
   STATIC_ASSERT(kSmiTag == 0);
-  __ li(at, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
-  __ movz(array_size, at, array_size);
 
   ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
   __ sw(array_size, MemOperand(elements_array_storage));
@@ -312,18 +301,18 @@
 static void ArrayNativeCode(MacroAssembler* masm,
                             Label* call_generic_code) {
   Counters* counters = masm->isolate()->counters();
-  Label argc_one_or_more, argc_two_or_more;
+  Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array;
 
   // Check for array construction with zero arguments or one.
   __ Branch(&argc_one_or_more, ne, a0, Operand(zero_reg));
   // Handle construction of an empty array.
+  __ bind(&empty_array);
   AllocateEmptyJSArray(masm,
                        a1,
                        a2,
                        a3,
                        t0,
                        t1,
-                       JSArray::kPreallocatedArrayElements,
                        call_generic_code);
   __ IncrementCounter(counters->array_function_native(), 1, a3, t0);
   // Setup return value, remove receiver from stack and return.
@@ -338,6 +327,12 @@
 
   STATIC_ASSERT(kSmiTag == 0);
   __ lw(a2, MemOperand(sp));  // Get the argument from the stack.
+  __ Branch(&not_empty_array, ne, a2, Operand(zero_reg));
+  __ Drop(1);  // Adjust stack.
+  __ mov(a0, zero_reg);  // Treat this as a call with argc of zero.
+  __ Branch(&empty_array);
+
+  __ bind(&not_empty_array);
   __ And(a3, a2, Operand(kIntptrSignBit | kSmiTagMask));
   __ Branch(call_generic_code, eq, a3, Operand(zero_reg));
 
@@ -1053,9 +1048,9 @@
     __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
 
     // Set up the roots register.
-    ExternalReference roots_address =
-        ExternalReference::roots_address(masm->isolate());
-    __ li(s6, Operand(roots_address));
+    ExternalReference roots_array_start =
+        ExternalReference::roots_array_start(masm->isolate());
+    __ li(s6, Operand(roots_array_start));
 
     // Push the function and the receiver onto the stack.
     __ Push(a1, a2);
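
AllocateEmptyJSArray now takes its capacity from JSArray::kPreallocatedArrayElements and only unrolls the hole-filling stores when the capacity is at most kLoopUnfoldLimit (4); larger capacities use a loop. The same decision expressed in plain C++, with an int array standing in for the elements store and a placeholder hole value:

    #include <cstdio>

    const int kTheHole = -1;                 // stand-in for the hole sentinel
    static const int kLoopUnfoldLimit = 4;   // same threshold as the stub

    void FillWithHoles(int* elements, int initial_capacity) {
      if (initial_capacity == 0) return;
      if (initial_capacity <= kLoopUnfoldLimit) {
        // Small capacity: the stub emits one store per slot (unrolled).
        for (int i = 0; i < initial_capacity; i++) elements[i] = kTheHole;
      } else {
        // Larger capacity: the stub emits a store/advance/branch loop instead.
        int* p = elements;
        int* end = elements + initial_capacity;
        while (p < end) *p++ = kTheHole;
      }
    }

    int main() {
      int elements[8];
      FillWithHoles(elements, 8);
      std::printf("%d %d\n", elements[0], elements[7]);  // -1 -1
    }
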
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index fe251b9..85e929d 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -262,7 +262,12 @@
   // [sp + (2 * kPointerSize)]: literals array.
 
   // All sizes here are multiples of kPointerSize.
-  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
+  int elements_size = 0;
+  if (length_ > 0) {
+    elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
+        ? FixedDoubleArray::SizeFor(length_)
+        : FixedArray::SizeFor(length_);
+  }
   int size = JSArray::kSize + elements_size;
 
   // Load boilerplate object into r3 and check if we need to create a
@@ -283,6 +288,9 @@
     if (mode_ == CLONE_ELEMENTS) {
       message = "Expected (writable) fixed array";
       expected_map_index = Heap::kFixedArrayMapRootIndex;
+    } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
+      message = "Expected (writable) fixed double array";
+      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
     } else {
       ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
       message = "Expected copy-on-write fixed array";
@@ -322,6 +330,7 @@
     __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset));
 
     // Copy the elements array.
+    ASSERT((elements_size % kPointerSize) == 0);
     __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize);
   }
 
@@ -4071,7 +4080,7 @@
   }
 
   // Get the prototype of the function.
-  __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
+  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
 
   // Check that the function prototype is a JS object.
   __ JumpIfSmi(prototype, &slow);
@@ -6889,7 +6898,7 @@
   // The saved ra is after the reserved stack space for the 4 args.
   __ lw(t9, MemOperand(sp, kCArgsSlotsSize));
 
-  if (FLAG_debug_code && EnableSlowAsserts()) {
+  if (FLAG_debug_code && FLAG_enable_slow_asserts) {
     // In case of an error the return address may point to a memory area
     // filled with kZapValue by the GC.
     // Dereference the address and check for this.
@@ -6939,7 +6948,82 @@
 }
 
 
-MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
+void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
+                                                        Label* miss,
+                                                        Label* done,
+                                                        Register receiver,
+                                                        Register properties,
+                                                        Handle<String> name,
+                                                        Register scratch0) {
+  // If the names of the slots in the range from 1 to kProbes - 1 for the
+  // hash value are not equal to the name, and the kProbes-th slot is unused
+  // (its name is the undefined value), the hash table is guaranteed not to
+  // contain the property. This holds even if some slots represent deleted
+  // properties (their names are the null value).
+  for (int i = 0; i < kInlinedProbes; i++) {
+    // scratch0 points to properties hash.
+    // Compute the masked index: (hash + i + i * i) & mask.
+    Register index = scratch0;
+    // Capacity is smi 2^n.
+    __ lw(index, FieldMemOperand(properties, kCapacityOffset));
+    __ Subu(index, index, Operand(1));
+    __ And(index, index, Operand(
+        Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));
+
+    // Scale the index by multiplying by the entry size.
+    ASSERT(StringDictionary::kEntrySize == 3);
+    __ sll(at, index, 1);
+    __ Addu(index, index, at);
+
+    Register entity_name = scratch0;
+    // Having undefined at this place means the name is not contained.
+    ASSERT_EQ(kSmiTagSize, 1);
+    Register tmp = properties;
+    __ sll(tmp, index, 1);
+    __ Addu(tmp, properties, tmp);
+    __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
+
+    ASSERT(!tmp.is(entity_name));
+    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
+    __ Branch(done, eq, entity_name, Operand(tmp));
+
+    if (i != kInlinedProbes - 1) {
+      // Stop if found the property.
+      __ Branch(miss, eq, entity_name, Operand(Handle<String>(name)));
+
+      // Check if the entry name is not a symbol.
+      __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
+      __ lbu(entity_name,
+             FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
+      __ And(tmp, entity_name, Operand(kIsSymbolMask));
+      __ Branch(miss, eq, tmp, Operand(zero_reg));
+
+      // Restore the properties.
+      __ lw(properties,
+            FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+    }
+  }
+
+  const int spill_mask =
+      (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
+       a2.bit() | a1.bit() | a0.bit() | v0.bit());
+
+  __ MultiPush(spill_mask);
+  __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+  __ li(a1, Operand(Handle<String>(name)));
+  StringDictionaryLookupStub stub(NEGATIVE_LOOKUP);
+  __ CallStub(&stub);
+  __ mov(at, v0);
+  __ MultiPop(spill_mask);
+
+  __ Branch(done, eq, at, Operand(zero_reg));
+  __ Branch(miss, ne, at, Operand(zero_reg));
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup(
     MacroAssembler* masm,
     Label* miss,
     Label* done,
@@ -6965,8 +7049,7 @@
     // Scale the index by multiplying by the entry size.
     ASSERT(StringDictionary::kEntrySize == 3);
     // index *= 3.
-    __ mov(at, index);
-    __ sll(index, index, 1);
+    __ sll(at, index, 1);
     __ Addu(index, index, at);
 
     Register entity_name = scratch0;
@@ -7001,7 +7084,7 @@
 
   const int spill_mask =
       (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
-       a2.bit() | a1.bit() | a0.bit());
+       a2.bit() | a1.bit() | a0.bit() | v0.bit());
 
   __ MultiPush(spill_mask);
   __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
@@ -7009,10 +7092,11 @@
   StringDictionaryLookupStub stub(NEGATIVE_LOOKUP);
   MaybeObject* result = masm->TryCallStub(&stub);
   if (result->IsFailure()) return result;
+  __ mov(at, v0);
   __ MultiPop(spill_mask);
 
-  __ Branch(done, eq, v0, Operand(zero_reg));
-  __ Branch(miss, ne, v0, Operand(zero_reg));
+  __ Branch(done, eq, at, Operand(zero_reg));
+  __ Branch(miss, ne, at, Operand(zero_reg));
   return result;
 }
 
@@ -7058,8 +7142,7 @@
     ASSERT(StringDictionary::kEntrySize == 3);
     // scratch2 = scratch2 * 3.
 
-    __ mov(at, scratch2);
-    __ sll(scratch2, scratch2, 1);
+    __ sll(at, scratch2, 1);
     __ Addu(scratch2, scratch2, at);
 
     // Check if the key is identical to the name.
@@ -7071,19 +7154,26 @@
 
   const int spill_mask =
       (ra.bit() | t2.bit() | t1.bit() | t0.bit() |
-       a3.bit() | a2.bit() | a1.bit() | a0.bit()) &
+       a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
       ~(scratch1.bit() | scratch2.bit());
 
   __ MultiPush(spill_mask);
-  __ Move(a0, elements);
-  __ Move(a1, name);
+  if (name.is(a0)) {
+    ASSERT(!elements.is(a1));
+    __ Move(a1, name);
+    __ Move(a0, elements);
+  } else {
+    __ Move(a0, elements);
+    __ Move(a1, name);
+  }
   StringDictionaryLookupStub stub(POSITIVE_LOOKUP);
   __ CallStub(&stub);
   __ mov(scratch2, a2);
+  __ mov(at, v0);
   __ MultiPop(spill_mask);
 
-  __ Branch(done, ne, v0, Operand(zero_reg));
-  __ Branch(miss, eq, v0, Operand(zero_reg));
+  __ Branch(done, ne, at, Operand(zero_reg));
+  __ Branch(miss, eq, at, Operand(zero_reg));
 }
 
 
@@ -7207,6 +7297,13 @@
   { a3, a1, a2, EMIT_REMEMBERED_SET },
   // KeyedStoreStubCompiler::GenerateStoreFastElement.
   { t0, a2, a3, EMIT_REMEMBERED_SET },
+  // ElementsTransitionGenerator::GenerateSmiOnlyToObject
+  // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+  // and ElementsTransitionGenerator::GenerateDoubleToObject
+  { a2, a3, t5, EMIT_REMEMBERED_SET },
+  // ElementsTransitionGenerator::GenerateDoubleToObject
+  { t2, a2, a0, EMIT_REMEMBERED_SET },
+  { a2, t2, t5, EMIT_REMEMBERED_SET },
   // Null termination.
   { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
 };
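
The handlified negative-lookup stub probes the string dictionary with the usual open-addressing sequence, masking name->Hash() plus a quadratic probe offset into a power-of-two capacity and scaling the slot by the entry size of three words. A host-side sketch of the index computation; the triangular form (i + i*i)/2 of the probe offset is an assumption about GetProbeOffset, not something shown in this diff:

    #include <cstdint>
    #include <cstdio>

    // Assumed quadratic probe offsets: 0, 1, 3, 6, 10, ...
    static uint32_t ProbeOffset(uint32_t i) { return (i + i * i) >> 1; }

    // Each dictionary entry occupies three words, as asserted in the stub.
    static const uint32_t kEntrySize = 3;

    // Word offset of the i-th probed entry for a hash in a table whose
    // capacity is a power of two.
    uint32_t ProbeIndex(uint32_t hash, uint32_t i, uint32_t capacity) {
      uint32_t slot = (hash + ProbeOffset(i)) & (capacity - 1);
      return slot * kEntrySize;
    }

    int main() {
      for (uint32_t i = 0; i < 4; i++) {
        std::printf("probe %u -> word offset %u\n", i, ProbeIndex(0x51u, i, 64));
      }
    }
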
diff --git a/src/mips/code-stubs-mips.h b/src/mips/code-stubs-mips.h
index ef6b889..beb20aa 100644
--- a/src/mips/code-stubs-mips.h
+++ b/src/mips/code-stubs-mips.h
@@ -799,7 +799,17 @@
 
   void Generate(MacroAssembler* masm);
 
-  MUST_USE_RESULT static MaybeObject* GenerateNegativeLookup(
+  static void GenerateNegativeLookup(MacroAssembler* masm,
+                                     Label* miss,
+                                     Label* done,
+                                     Register receiver,
+                                     Register properties,
+                                     Handle<String> name,
+                                     Register scratch0);
+
+  // TODO(kmillikin): Eliminate this function when the stub cache is fully
+  // handlified.
+  MUST_USE_RESULT static MaybeObject* TryGenerateNegativeLookup(
       MacroAssembler* masm,
       Label* miss,
       Label* done,
diff --git a/src/mips/codegen-mips.cc b/src/mips/codegen-mips.cc
index ff146dd..e9fe232 100644
--- a/src/mips/codegen-mips.cc
+++ b/src/mips/codegen-mips.cc
@@ -30,10 +30,13 @@
 #if defined(V8_TARGET_ARCH_MIPS)
 
 #include "codegen.h"
+#include "macro-assembler.h"
 
 namespace v8 {
 namespace internal {
 
+#define __ ACCESS_MASM(masm)
+
 // -------------------------------------------------------------------------
 // Platform-specific RuntimeCallHelper functions.
 
@@ -50,6 +53,260 @@
   masm->set_has_frame(false);
 }
 
+// -------------------------------------------------------------------------
+// Code generators
+
+void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- a0    : value
+  //  -- a1    : key
+  //  -- a2    : receiver
+  //  -- ra    : return address
+  //  -- a3    : target map, scratch for subsequent call
+  //  -- t0    : scratch (elements)
+  // -----------------------------------
+  // Set transitioned map.
+  __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
+  __ RecordWriteField(a2,
+                      HeapObject::kMapOffset,
+                      a3,
+                      t5,
+                      kRAHasNotBeenSaved,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+}
+
+
+void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+    MacroAssembler* masm, Label* fail) {
+  // ----------- S t a t e -------------
+  //  -- a0    : value
+  //  -- a1    : key
+  //  -- a2    : receiver
+  //  -- ra    : return address
+  //  -- a3    : target map, scratch for subsequent call
+  //  -- t0    : scratch (elements)
+  // -----------------------------------
+  Label loop, entry, convert_hole, gc_required;
+  bool fpu_supported = CpuFeatures::IsSupported(FPU);
+  __ push(ra);
+
+  Register scratch = t6;
+
+  __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset));
+  __ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
+  // t0: source FixedArray
+  // t1: number of elements (smi-tagged)
+
+  // Allocate new FixedDoubleArray.
+  __ sll(scratch, t1, 2);
+  __ Addu(scratch, scratch, FixedDoubleArray::kHeaderSize);
+  __ AllocateInNewSpace(scratch, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS);
+  // t2: destination FixedDoubleArray, not tagged as heap object
+  __ LoadRoot(t5, Heap::kFixedDoubleArrayMapRootIndex);
+  __ sw(t5, MemOperand(t2, HeapObject::kMapOffset));
+  // Set destination FixedDoubleArray's length.
+  __ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset));
+  // Update receiver's map.
+
+  __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
+  __ RecordWriteField(a2,
+                      HeapObject::kMapOffset,
+                      a3,
+                      t5,
+                      kRAHasBeenSaved,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  // Replace receiver's backing store with newly created FixedDoubleArray.
+  __ Addu(a3, t2, Operand(kHeapObjectTag));
+  __ sw(a3, FieldMemOperand(a2, JSObject::kElementsOffset));
+  __ RecordWriteField(a2,
+                      JSObject::kElementsOffset,
+                      a3,
+                      t5,
+                      kRAHasBeenSaved,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+
+
+  // Prepare for conversion loop.
+  __ Addu(a3, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+  __ Addu(t3, t2, Operand(FixedDoubleArray::kHeaderSize));
+  __ sll(t2, t1, 2);
+  __ Addu(t2, t2, t3);
+  __ li(t0, Operand(kHoleNanLower32));
+  __ li(t1, Operand(kHoleNanUpper32));
+  // t0: kHoleNanLower32
+  // t1: kHoleNanUpper32
+  // t2: end of destination FixedDoubleArray, not tagged
+  // t3: begin of FixedDoubleArray element fields, not tagged
+
+  if (!fpu_supported) __ Push(a1, a0);
+
+  __ Branch(&entry);
+
+  // Call into runtime if GC is required.
+  __ bind(&gc_required);
+  __ pop(ra);
+  __ Branch(fail);
+
+  // Convert and copy elements.
+  __ bind(&loop);
+  __ lw(t5, MemOperand(a3));
+  __ Addu(a3, a3, kIntSize);
+  // t5: current element
+  __ JumpIfNotSmi(t5, &convert_hole);
+
+  // Normal smi, convert to double and store.
+  __ SmiUntag(t5);
+  if (fpu_supported) {
+    CpuFeatures::Scope scope(FPU);
+    __ mtc1(t5, f0);
+    __ cvt_d_w(f0, f0);
+    __ sdc1(f0, MemOperand(t3));
+    __ Addu(t3, t3, kDoubleSize);
+  } else {
+    FloatingPointHelper::ConvertIntToDouble(masm,
+                                            t5,
+                                            FloatingPointHelper::kCoreRegisters,
+                                            f0,
+                                            a0,
+                                            a1,
+                                            t7,
+                                            f0);
+    __ sw(a0, MemOperand(t3));  // mantissa
+    __ sw(a1, MemOperand(t3, kIntSize));  // exponent
+    __ Addu(t3, t3, kDoubleSize);
+  }
+  __ Branch(&entry);
+
+  // Hole found, store the-hole NaN.
+  __ bind(&convert_hole);
+  __ sw(t0, MemOperand(t3));  // mantissa
+  __ sw(t1, MemOperand(t3, kIntSize));  // exponent
+  __ Addu(t3, t3, kDoubleSize);
+
+  __ bind(&entry);
+  __ Branch(&loop, lt, t3, Operand(t2));
+
+  if (!fpu_supported) __ Pop(a1, a0);
+  __ pop(ra);
+}
+
+
+void ElementsTransitionGenerator::GenerateDoubleToObject(
+    MacroAssembler* masm, Label* fail) {
+  // ----------- S t a t e -------------
+  //  -- a0    : value
+  //  -- a1    : key
+  //  -- a2    : receiver
+  //  -- ra    : return address
+  //  -- a3    : target map, scratch for subsequent call
+  //  -- t0    : scratch (elements)
+  // -----------------------------------
+  Label entry, loop, convert_hole, gc_required;
+  __ MultiPush(a0.bit() | a1.bit() | a2.bit() | a3.bit() | ra.bit());
+
+  __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset));
+  __ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
+  // t0: source FixedArray
+  // t1: number of elements (smi-tagged)
+
+  // Allocate new FixedArray.
+  __ sll(a0, t1, 1);
+  __ Addu(a0, a0, FixedDoubleArray::kHeaderSize);
+  __ AllocateInNewSpace(a0, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS);
+  // t2: destination FixedArray, not tagged as heap object
+  __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex);
+  __ sw(t5, MemOperand(t2, HeapObject::kMapOffset));
+  // Set destination FixedArray's length.
+  __ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset));
+
+  // Prepare for conversion loop.
+  __ Addu(t0, t0, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4));
+  __ Addu(a3, t2, Operand(FixedArray::kHeaderSize));
+  __ Addu(t2, t2, Operand(kHeapObjectTag));
+  __ sll(t1, t1, 1);
+  __ Addu(t1, a3, t1);
+  __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
+  __ LoadRoot(t5, Heap::kHeapNumberMapRootIndex);
+  // Using offset addresses.
+  // a3: begin of destination FixedArray element fields, not tagged
+  // t0: begin of source FixedDoubleArray element fields, not tagged, +4
+  // t1: end of destination FixedArray, not tagged
+  // t2: destination FixedArray
+  // t3: the-hole pointer
+  // t5: heap number map
+  __ Branch(&entry);
+
+  // Call into runtime if GC is required.
+  __ bind(&gc_required);
+  __ MultiPop(a0.bit() | a1.bit() | a2.bit() | a3.bit() | ra.bit());
+
+  __ Branch(fail);
+
+  __ bind(&loop);
+  __ lw(a1, MemOperand(t0));
+  __ Addu(t0, t0, kDoubleSize);
+  // a1: current element's upper 32 bit
+  // t0: address of next element's upper 32 bit
+  __ Branch(&convert_hole, eq, a1, Operand(kHoleNanUpper32));
+
+  // Non-hole double, copy value into a heap number.
+  __ AllocateHeapNumber(a2, a0, t6, t5, &gc_required);
+  // a2: new heap number
+  __ lw(a0, MemOperand(t0, -12));
+  __ sw(a0, FieldMemOperand(a2, HeapNumber::kMantissaOffset));
+  __ sw(a1, FieldMemOperand(a2, HeapNumber::kExponentOffset));
+  __ mov(a0, a3);
+  __ sw(a2, MemOperand(a3));
+  __ Addu(a3, a3, kIntSize);
+  __ RecordWrite(t2,
+                 a0,
+                 a2,
+                 kRAHasBeenSaved,
+                 kDontSaveFPRegs,
+                 EMIT_REMEMBERED_SET,
+                 OMIT_SMI_CHECK);
+  __ Branch(&entry);
+
+  // Replace the-hole NaN with the-hole pointer.
+  __ bind(&convert_hole);
+  __ sw(t3, MemOperand(a3));
+  __ Addu(a3, a3, kIntSize);
+
+  __ bind(&entry);
+  __ Branch(&loop, lt, a3, Operand(t1));
+
+  __ MultiPop(a2.bit() | a3.bit() | a0.bit() | a1.bit());
+  // Update receiver's map.
+  __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
+  __ RecordWriteField(a2,
+                      HeapObject::kMapOffset,
+                      a3,
+                      t5,
+                      kRAHasBeenSaved,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  // Replace receiver's backing store with newly created and filled FixedArray.
+  __ sw(t2, FieldMemOperand(a2, JSObject::kElementsOffset));
+  __ RecordWriteField(a2,
+                      JSObject::kElementsOffset,
+                      t2,
+                      t5,
+                      kRAHasBeenSaved,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  __ pop(ra);
+}
+
+#undef __
 
 } }  // namespace v8::internal
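
GenerateSmiOnlyToDouble walks the old FixedArray, untagging each smi and storing it as a double, and writes the hole NaN bit pattern (kHoleNanLower32/kHoleNanUpper32) wherever it finds the hole. A C-level sketch of the per-element conversion; the tagging scheme and the concrete hole bit pattern below are simplified placeholders, not V8's actual values:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    // Simplified 32-bit smi encoding: value << 1, tag bit 0.
    bool IsSmi(int32_t tagged) { return (tagged & 1) == 0; }
    int32_t SmiUntag(int32_t tagged) { return tagged >> 1; }

    // Stand-in for the hole sentinel (odd, so it fails the smi check).
    const int32_t kHoleSentinel = 0x7FFFFFFF;

    // Placeholder hole-NaN bit pattern (a NaN, but not V8's exact value).
    const uint64_t kHoleNanBits = 0x7FF7FFFFFFFFFFFFull;

    std::vector<double> TransitionToDouble(const std::vector<int32_t>& elements) {
      std::vector<double> out(elements.size());
      for (size_t i = 0; i < elements.size(); ++i) {
        if (IsSmi(elements[i])) {
          out[i] = static_cast<double>(SmiUntag(elements[i]));  // untag, convert
        } else {
          std::memcpy(&out[i], &kHoleNanBits, sizeof(double));  // store hole NaN
        }
      }
      return out;
    }

    int main() {
      std::vector<int32_t> smis = {2 << 1, 5 << 1, kHoleSentinel};
      std::vector<double> doubles = TransitionToDouble(smis);
      // Prints "2 5 1": the hole slot holds a NaN (x != x).
      std::printf("%g %g %d\n", doubles[0], doubles[1],
                  doubles[2] != doubles[2]);
    }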
 
diff --git a/src/mips/codegen-mips.h b/src/mips/codegen-mips.h
index b020d80..4549509 100644
--- a/src/mips/codegen-mips.h
+++ b/src/mips/codegen-mips.h
@@ -31,7 +31,6 @@
 
 
 #include "ast.h"
-#include "code-stubs-mips.h"
 #include "ic-inl.h"
 
 namespace v8 {
diff --git a/src/mips/deoptimizer-mips.cc b/src/mips/deoptimizer-mips.cc
index 280b8cb..92d7edd 100644
--- a/src/mips/deoptimizer-mips.cc
+++ b/src/mips/deoptimizer-mips.cc
@@ -61,7 +61,8 @@
 }
 
 
-void Deoptimizer::RevertStackCheckCodeAt(Address pc_after,
+void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
+                                         Address pc_after,
                                          Code* check_code,
                                          Code* replacement_code) {
   UNIMPLEMENTED();
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index b3f0540..2f989bc 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -278,7 +278,10 @@
       // constant.
       if (scope()->is_function_scope() && scope()->function() != NULL) {
         int ignored = 0;
-        EmitDeclaration(scope()->function(), CONST, NULL, &ignored);
+        VariableProxy* proxy = scope()->function();
+        ASSERT(proxy->var()->mode() == CONST ||
+               proxy->var()->mode() == CONST_HARMONY);
+        EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
       }
       VisitDeclarations(scope()->declarations());
     }
@@ -728,6 +731,8 @@
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
   Variable* variable = proxy->var();
+  bool binding_needs_init =
+      mode == CONST || mode == CONST_HARMONY || mode == LET;
   switch (variable->location()) {
     case Variable::UNALLOCATED:
       ++(*global_count);
@@ -739,7 +744,7 @@
         Comment cmnt(masm_, "[ Declaration");
         VisitForAccumulatorValue(function);
         __ sw(result_register(), StackOperand(variable));
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
           Comment cmnt(masm_, "[ Declaration");
           __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
           __ sw(t0, StackOperand(variable));
@@ -775,7 +780,7 @@
                                   EMIT_REMEMBERED_SET,
                                   OMIT_SMI_CHECK);
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
           Comment cmnt(masm_, "[ Declaration");
           __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
           __ sw(at, ContextOperand(cp, variable->index()));
@@ -787,9 +792,13 @@
     case Variable::LOOKUP: {
       Comment cmnt(masm_, "[ Declaration");
       __ li(a2, Operand(variable->name()));
-      // Declaration nodes are always introduced in one of three modes.
-      ASSERT(mode == VAR || mode == CONST || mode == LET);
-      PropertyAttributes attr = (mode == CONST) ? READ_ONLY : NONE;
+      // Declaration nodes are always introduced in one of four modes.
+      ASSERT(mode == VAR ||
+             mode == CONST ||
+             mode == CONST_HARMONY ||
+             mode == LET);
+      PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
+        ? READ_ONLY : NONE;
       __ li(a1, Operand(Smi::FromInt(attr)));
       // Push initial value, if any.
       // Note: For variables we must not push an initial value (such as
@@ -799,7 +808,7 @@
         __ Push(cp, a2, a1);
         // Push initial value for function declaration.
         VisitForStackValue(function);
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
           __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
           __ Push(cp, a2, a1, a0);
       } else {
@@ -942,11 +951,17 @@
   __ bind(&done_convert);
   __ push(a0);
 
+  // Check for proxies.
+  Label call_runtime;
+  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+  __ GetObjectType(a0, a1, a1);
+  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
+
   // Check cache validity in generated code. This is a fast case for
   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
   // guarantee cache validity, call the runtime system to check cache
   // validity or get the property names in a fixed array.
-  Label next, call_runtime;
+  Label next;
   // Preload a couple of values used in the loop.
   Register  empty_fixed_array_value = t2;
   __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
@@ -1020,9 +1035,16 @@
   __ jmp(&loop);
 
   // We got a fixed array in register v0. Iterate through that.
+  Label non_proxy;
   __ bind(&fixed_array);
-  __ li(a1, Operand(Smi::FromInt(0)));  // Map (0) - force slow check.
-  __ Push(a1, v0);
+  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
+  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
+  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+  __ GetObjectType(a2, a3, a3);
+  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
+  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
+  __ bind(&non_proxy);
+  __ Push(a1, v0);  // Smi and array
   __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
   __ li(a0, Operand(Smi::FromInt(0)));
   __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
@@ -1041,17 +1063,22 @@
   __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
   __ lw(a3, MemOperand(t0));  // Current entry.
 
-  // Get the expected map from the stack or a zero map in the
+  // Get the expected map from the stack or a smi in the
   // permanent slow case into register a2.
   __ lw(a2, MemOperand(sp, 3 * kPointerSize));
 
   // Check if the expected map still matches that of the enumerable.
-  // If not, we have to filter the key.
+  // If not, we may have to filter the key.
   Label update_each;
   __ lw(a1, MemOperand(sp, 4 * kPointerSize));
   __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
   __ Branch(&update_each, eq, t0, Operand(a2));
 
+  // For proxies, no filtering is done.
+  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
+  ASSERT_EQ(Smi::FromInt(0), 0);
+  __ Branch(&update_each, eq, a2, Operand(zero_reg));
+
   // Convert the entry to a string or (smi) 0 if it isn't a property
   // any more. If the property has been removed while iterating, we
   // just skip it.
@@ -1106,7 +1133,7 @@
       !pretenure &&
       scope()->is_function_scope() &&
       info->num_literals() == 0) {
-    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
+    FastNewClosureStub stub(info->strict_mode_flag());
     __ li(a0, Operand(info));
     __ push(a0);
     __ CallStub(&stub);
@@ -1137,7 +1164,7 @@
   Scope* s = scope();
   while (s != NULL) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
+      if (s->calls_non_strict_eval()) {
         // Check that extension is NULL.
         __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
         __ Branch(slow, ne, temp, Operand(zero_reg));
@@ -1149,7 +1176,7 @@
     }
     // If no outer scope calls eval, we do not need to check more
     // context extensions.
-    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
     s = s->outer_scope();
   }
 
@@ -1191,7 +1218,7 @@
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
+      if (s->calls_non_strict_eval()) {
         // Check that extension is NULL.
         __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
         __ Branch(slow, ne, temp, Operand(zero_reg));
@@ -1228,13 +1255,14 @@
     Variable* local = var->local_if_not_shadowed();
     __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
     if (local->mode() == CONST ||
+        local->mode() == CONST_HARMONY ||
         local->mode() == LET) {
       __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
       __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
       if (local->mode() == CONST) {
         __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
         __ movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
-      } else {  // LET
+      } else {  // LET || CONST_HARMONY
         __ Branch(done, ne, at, Operand(zero_reg));
         __ li(a0, Operand(var->name()));
         __ push(a0);
@@ -1272,14 +1300,16 @@
       Comment cmnt(masm_, var->IsContextSlot()
                               ? "Context variable"
                               : "Stack variable");
-      if (var->mode() != LET && var->mode() != CONST) {
+      if (!var->binding_needs_init()) {
         context()->Plug(var);
       } else {
         // Let and const need a read barrier.
         GetVar(v0, var);
         __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
         __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
-        if (var->mode() == LET) {
+        if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+          // Throw a reference error when using an uninitialized let/const
+          // binding in harmony mode.
           Label done;
           __ Branch(&done, ne, at, Operand(zero_reg));
           __ li(a0, Operand(var->name()));
@@ -1287,6 +1317,8 @@
           __ CallRuntime(Runtime::kThrowReferenceError, 1);
           __ bind(&done);
         } else {
+          // Uninitialized const bindings outside of harmony mode are unholed.
+          ASSERT(var->mode() == CONST);
           __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
           __ movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
         }
@@ -1476,13 +1508,21 @@
 
   ZoneList<Expression*>* subexprs = expr->values();
   int length = subexprs->length();
+
+  Handle<FixedArray> constant_elements = expr->constant_elements();
+  ASSERT_EQ(2, constant_elements->length());
+  ElementsKind constant_elements_kind =
+      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+  Handle<FixedArrayBase> constant_elements_values(
+      FixedArrayBase::cast(constant_elements->get(1)));
+
   __ mov(a0, result_register());
   __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
   __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
-  __ li(a1, Operand(expr->constant_elements()));
+  __ li(a1, Operand(constant_elements));
   __ Push(a3, a2, a1);
-  if (expr->constant_elements()->map() ==
+  if (constant_elements_values->map() ==
       isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
@@ -1494,8 +1534,14 @@
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
+    ASSERT(constant_elements_kind == FAST_ELEMENTS ||
+           constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+           FLAG_smi_only_arrays);
+    FastCloneShallowArrayStub::Mode mode =
+        constant_elements_kind == FAST_DOUBLE_ELEMENTS
+        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, length);
     __ CallStub(&stub);
   }
 
@@ -1518,24 +1564,57 @@
     }
     VisitForAccumulatorValue(subexpr);
 
-    // Store the subexpression value in the array's elements.
     __ lw(t6, MemOperand(sp));  // Copy of array literal.
     __ lw(a1, FieldMemOperand(t6, JSObject::kElementsOffset));
+    __ lw(a2, FieldMemOperand(t6, JSObject::kMapOffset));
     int offset = FixedArray::kHeaderSize + (i * kPointerSize);
-    __ sw(result_register(), FieldMemOperand(a1, offset));
 
-    Label no_map_change;
-    __ JumpIfSmi(result_register(), &no_map_change);
-    // Update the write barrier for the array store with v0 as the scratch
-    // register.
+    Label element_done;
+    Label double_elements;
+    Label smi_element;
+    Label slow_elements;
+    Label fast_elements;
+    __ CheckFastElements(a2, a3, &double_elements);
+
+    // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+    __ JumpIfSmi(result_register(), &smi_element);
+    __ CheckFastSmiOnlyElements(a2, a3, &fast_elements);
+
+    // Storing into the array literal requires an elements transition. Call
+    // into the runtime.
+    __ bind(&slow_elements);
+    __ push(t6);  // Copy of array literal.
+    __ li(a1, Operand(Smi::FromInt(i)));
+    __ li(a2, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
+    __ li(a3, Operand(Smi::FromInt(strict_mode_flag())));  // Strict mode.
+    __ Push(a1, result_register(), a2, a3);
+    __ CallRuntime(Runtime::kSetProperty, 5);
+    __ Branch(&element_done);
+
+    // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+    __ bind(&double_elements);
+    __ li(a3, Operand(Smi::FromInt(i)));
+    __ StoreNumberToDoubleElements(result_register(), a3, t6, a1, t0, t1, t5,
+                                   t3, &slow_elements);
+    __ Branch(&element_done);
+
+    // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+    __ bind(&fast_elements);
+    __ sw(result_register(), FieldMemOperand(a1, offset));
+    // Update the write barrier for the array store.
+
     __ RecordWriteField(
         a1, offset, result_register(), a2, kRAHasBeenSaved, kDontSaveFPRegs,
         EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-    __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
-    __ CheckFastSmiOnlyElements(a3, a2, &no_map_change);
-    __ push(t6);  // Copy of array literal.
-    __ CallRuntime(Runtime::kNonSmiElementStored, 1);
-    __ bind(&no_map_change);
+    __ Branch(&element_done);
+
+    // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
+    // FAST_ELEMENTS, and value is Smi.
+    __ bind(&smi_element);
+    __ sw(result_register(), FieldMemOperand(a1, offset));
+    // Fall through
+
+    __ bind(&element_done);
 
     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }
@@ -1917,8 +1996,9 @@
       }
     }
 
-  } else if (var->mode() != CONST) {
-    // Assignment to var or initializing assignment to let.
+  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
+    // Assignment to var or initializing assignment to let/const
+    // in harmony mode.
     if (var->IsStackAllocated() || var->IsContextSlot()) {
       MemOperand location = VarOperand(var, a1);
       if (FLAG_debug_code && op == Token::INIT_LET) {
@@ -2803,10 +2883,10 @@
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
   if (CpuFeatures::IsSupported(FPU)) {
     __ PrepareCallCFunction(1, a0);
-    __ li(a0, Operand(ExternalReference::isolate_address()));
+    __ lw(a0, ContextOperand(cp, Context::GLOBAL_INDEX));
+    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
     __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
 
-
     CpuFeatures::Scope scope(FPU);
     // 0x41300000 is the top half of 1.0 x 2^20 as a double.
     __ li(a1, Operand(0x41300000));
@@ -2821,7 +2901,8 @@
   } else {
     __ PrepareCallCFunction(2, a0);
     __ mov(a0, s0);
-    __ li(a1, Operand(ExternalReference::isolate_address()));
+    __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX));
+    __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalContextOffset));
     __ CallCFunction(
         ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
   }
@@ -4100,36 +4181,26 @@
         case Token::EQ_STRICT:
         case Token::EQ:
           cc = eq;
-          __ mov(a0, result_register());
-          __ pop(a1);
           break;
         case Token::LT:
           cc = lt;
-          __ mov(a0, result_register());
-          __ pop(a1);
           break;
         case Token::GT:
-          // Reverse left and right sides to obtain ECMA-262 conversion order.
-          cc = lt;
-          __ mov(a1, result_register());
-          __ pop(a0);
+          cc = gt;
          break;
         case Token::LTE:
-          // Reverse left and right sides to obtain ECMA-262 conversion order.
-          cc = ge;
-          __ mov(a1, result_register());
-          __ pop(a0);
+          cc = le;
           break;
         case Token::GTE:
           cc = ge;
-          __ mov(a0, result_register());
-          __ pop(a1);
           break;
         case Token::IN:
         case Token::INSTANCEOF:
         default:
           UNREACHABLE();
       }
+      __ mov(a0, result_register());
+      __ pop(a1);
 
       bool inline_smi_code = ShouldInlineSmiCase(op);
       JumpPatchSite patch_site(masm_);
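
The declaration and variable-load hunks above hole-initialize let and harmony-mode const slots and guard reads with a read barrier: if a load still sees the hole, a LET/CONST_HARMONY binding throws a ReferenceError, while a legacy CONST binding simply yields undefined. A toy C++ model of that barrier, with std::optional standing in for V8's tagged slots:

    #include <iostream>
    #include <optional>
    #include <stdexcept>
    #include <string>

    // Toy slot: std::nullopt plays the role of the-hole sentinel.
    struct Slot {
      std::optional<int> value;
    };

    enum class Mode { kLetOrHarmonyConst, kLegacyConst };

    // Read barrier: branch on whether the slot still holds the hole.
    int Load(const Slot& slot, Mode mode, const std::string& name) {
      if (slot.value) return *slot.value;            // initialized: fast path
      if (mode == Mode::kLetOrHarmonyConst)          // hole -> ReferenceError
        throw std::runtime_error(name + " is not initialized");
      return 0;                                      // legacy const -> "undefined"
    }

    int main() {
      Slot uninitialized;                            // still holds the hole
      std::cout << Load(uninitialized, Mode::kLegacyConst, "x") << '\n';  // 0
      try {
        Load(uninitialized, Mode::kLetOrHarmonyConst, "y");
      } catch (const std::exception& e) {
        std::cout << e.what() << '\n';               // y is not initialized
      }
    }
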
diff --git a/src/mips/ic-mips.cc b/src/mips/ic-mips.cc
index fb33eb6..ca6383c 100644
--- a/src/mips/ic-mips.cc
+++ b/src/mips/ic-mips.cc
@@ -384,10 +384,10 @@
 
 // The generated code does not accept smi keys.
 // The generated code falls through if both probes miss.
-static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
-                                          int argc,
-                                          Code::Kind kind,
-                                          Code::ExtraICState extra_ic_state) {
+void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
+                                               int argc,
+                                               Code::Kind kind,
+                                               Code::ExtraICState extra_state) {
   // ----------- S t a t e -------------
   //  -- a1    : receiver
   //  -- a2    : name
@@ -397,7 +397,7 @@
   // Probe the stub cache.
   Code::Flags flags = Code::ComputeFlags(kind,
                                          MONOMORPHIC,
-                                         extra_ic_state,
+                                         extra_state,
                                          NORMAL,
                                          argc);
   Isolate::Current()->stub_cache()->GenerateProbe(
@@ -463,7 +463,7 @@
 }
 
 
-static void GenerateCallNormal(MacroAssembler* masm, int argc) {
+void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
   // ----------- S t a t e -------------
   //  -- a2    : name
   //  -- ra    : return address
@@ -486,10 +486,10 @@
 }
 
 
-static void GenerateCallMiss(MacroAssembler* masm,
-                             int argc,
-                             IC::UtilityId id,
-                             Code::ExtraICState extra_ic_state) {
+void CallICBase::GenerateMiss(MacroAssembler* masm,
+                              int argc,
+                              IC::UtilityId id,
+                              Code::ExtraICState extra_state) {
   // ----------- S t a t e -------------
   //  -- a2    : name
   //  -- ra    : return address
@@ -540,7 +540,7 @@
     __ bind(&invoke);
   }
   // Invoke the function.
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   ParameterCount actual(argc);
@@ -552,18 +552,6 @@
 }
 
 
-void CallIC::GenerateMiss(MacroAssembler* masm,
-                          int argc,
-                          Code::ExtraICState extra_ic_state) {
-  // ----------- S t a t e -------------
-  //  -- a2    : name
-  //  -- ra    : return address
-  // -----------------------------------
-
-  GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state);
-}
-
-
 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                  int argc,
                                  Code::ExtraICState extra_ic_state) {
@@ -579,27 +567,6 @@
 }
 
 
-void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
-  // ----------- S t a t e -------------
-  //  -- a2    : name
-  //  -- ra    : return address
-  // -----------------------------------
-
-  GenerateCallNormal(masm, argc);
-  GenerateMiss(masm, argc, Code::kNoExtraICState);
-}
-
-
-void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
-  // ----------- S t a t e -------------
-  //  -- a2    : name
-  //  -- ra    : return address
-  // -----------------------------------
-
-  GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState);
-}
-
-
 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   // ----------- S t a t e -------------
   //  -- a2    : name
@@ -716,7 +683,7 @@
   __ JumpIfSmi(a2, &miss);
   __ IsObjectJSStringType(a2, a0, &miss);
 
-  GenerateCallNormal(masm, argc);
+  CallICBase::GenerateNormal(masm, argc);
   __ bind(&miss);
   GenerateMiss(masm, argc);
 }
@@ -1421,6 +1388,47 @@
 }
 
 
+void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
+  // ---------- S t a t e --------------
+  //  -- a2     : receiver
+  //  -- a3     : target map
+  //  -- ra     : return address
+  // -----------------------------------
+  // Must return the modified receiver in v0.
+  if (!FLAG_trace_elements_transitions) {
+    Label fail;
+    ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+    __ Ret(USE_DELAY_SLOT);
+    __ mov(v0, a2);
+    __ bind(&fail);
+  }
+
+  __ push(a2);
+  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
+}
+
+
+void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
+    MacroAssembler* masm) {
+  // ---------- S t a t e --------------
+  //  -- a2     : receiver
+  //  -- a3     : target map
+  //  -- ra     : return address
+  // -----------------------------------
+  // Must return the modified receiver in v0.
+  if (!FLAG_trace_elements_transitions) {
+    Label fail;
+    ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
+    __ Ret(USE_DELAY_SLOT);
+    __ mov(v0, a2);
+    __ bind(&fail);
+  }
+
+  __ push(a2);
+  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
+}
+
+
 void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
   // ----------- S t a t e -------------
@@ -1560,11 +1568,9 @@
     case Token::LT:
       return lt;
     case Token::GT:
-      // Reverse left and right operands to obtain ECMA-262 conversion order.
-      return lt;
+      return gt;
     case Token::LTE:
-      // Reverse left and right operands to obtain ECMA-262 conversion order.
-      return ge;
+      return le;
     case Token::GTE:
       return ge;
     default:
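
Both this file's token-to-condition switch and the full-codegen comparison hunk earlier used to lower > and <= by swapping the operand registers and reusing the lt/ge conditions; the patched code keeps the operands in place and emits gt/le directly. The two lowerings can differ in which operand is converted to a number first, which is observable when the conversion has side effects. A toy C++ sketch (assumed names, not V8 API) contrasting the two strategies:

    #include <iostream>

    // Toy operand whose conversion to a number has an observable side effect,
    // mimicking a JS object with a valueOf hook. Names are illustrative only.
    struct Operand {
      const char* name;
      double value;
      double ToNumber() const {
        std::cout << "ToNumber(" << name << ") ";
        return value;
      }
    };

    // Old-style lowering: implement a > b by swapping operands and using <,
    // which in this toy converts the right operand first.
    bool GreaterSwapped(const Operand& a, const Operand& b) {
      double rb = b.ToNumber();
      double ra = a.ToNumber();
      return rb < ra;
    }

    // New-style lowering: keep the operands in place and use a real
    // greater-than condition, converting the left operand first.
    bool GreaterInOrder(const Operand& a, const Operand& b) {
      double ra = a.ToNumber();
      double rb = b.ToNumber();
      return ra > rb;
    }

    int main() {
      Operand a{"a", 2}, b{"b", 1};
      GreaterSwapped(a, b);  std::cout << '\n';  // ToNumber(b) ToNumber(a)
      GreaterInOrder(a, b);  std::cout << '\n';  // ToNumber(a) ToNumber(b)
    }
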
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 2964fbc..32dce66 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -2873,6 +2873,7 @@
   ASSERT(!result.is(scratch1));
   ASSERT(!result.is(scratch2));
   ASSERT(!scratch1.is(scratch2));
+  ASSERT(!object_size.is(t9));
   ASSERT(!scratch1.is(t9) && !scratch2.is(t9) && !result.is(t9));
 
   // Check relative positions of allocation top and limit addresses.
@@ -3616,24 +3617,16 @@
   // You can't call a function without a valid frame.
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
-  ASSERT(function->is_compiled());
-
   // Get the function and setup the context.
   li(a1, Operand(Handle<JSFunction>(function)));
   lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
 
-  // Invoke the cached code.
-  Handle<Code> code(function->code());
   ParameterCount expected(function->shared()->formal_parameter_count());
-  if (V8::UseCrankshaft()) {
-    // TODO(kasperl): For now, we always call indirectly through the
-    // code field in the function to allow recompilation to take effect
-    // without changing any of the call sites.
-    lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
-    InvokeCode(a3, expected, actual, flag, NullCallWrapper(), call_kind);
-  } else {
-    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag, call_kind);
-  }
+  // We call indirectly through the code field in the function to
+  // allow recompilation to take effect without changing any of the
+  // call sites.
+  lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
+  InvokeCode(a3, expected, actual, flag, NullCallWrapper(), call_kind);
 }
 
 
@@ -3674,7 +3667,8 @@
 void MacroAssembler::TryGetFunctionPrototype(Register function,
                                              Register result,
                                              Register scratch,
-                                             Label* miss) {
+                                             Label* miss,
+                                             bool miss_on_bound_function) {
   // Check that the receiver isn't a smi.
   JumpIfSmi(function, miss);
 
@@ -3682,6 +3676,16 @@
   GetObjectType(function, result, scratch);
   Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));
 
+  if (miss_on_bound_function) {
+    lw(scratch,
+       FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
+    lw(scratch,
+       FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
+    And(scratch, scratch,
+        Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));
+    Branch(miss, ne, scratch, Operand(zero_reg));
+  }
+
   // Make sure that the function has an instance prototype.
   Label non_instance;
   lbu(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
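
The new miss_on_bound_function path above loads the shared function info's compiler hints and masks them with Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)), branching to the miss label when the bit is set. Since both the loaded hints word and the mask are smi-encoded, the flag can be tested without untagging. A minimal sketch with placeholder constants (the real tag size and bit index are defined in V8's headers):

    #include <cstdint>
    #include <iostream>

    // Placeholder layout: a smi is the integer value shifted left by one tag bit.
    constexpr int kSmiTagSize = 1;          // assumed
    constexpr int kBoundFunctionBit = 4;    // placeholder bit index

    // Test a flag bit in a smi-encoded hints word without untagging it:
    // mask the smi with the smi-encoding of (1 << bit).
    bool IsBoundFunction(intptr_t compiler_hints_smi) {
      intptr_t mask = static_cast<intptr_t>(1 << kBoundFunctionBit) << kSmiTagSize;
      return (compiler_hints_smi & mask) != 0;   // nonzero -> branch to miss
    }

    int main() {
      intptr_t bound = static_cast<intptr_t>(1 << kBoundFunctionBit) << kSmiTagSize;
      std::cout << IsBoundFunction(bound) << ' ' << IsBoundFunction(0) << '\n';  // 1 0
    }
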
diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h
index 6f81a4b..84c55f7 100644
--- a/src/mips/macro-assembler-mips.h
+++ b/src/mips/macro-assembler-mips.h
@@ -887,7 +887,8 @@
   void TryGetFunctionPrototype(Register function,
                                Register result,
                                Register scratch,
-                               Label* miss);
+                               Label* miss,
+                               bool miss_on_bound_function = false);
 
   void GetObjectType(Register function,
                      Register map,
diff --git a/src/mips/regexp-macro-assembler-mips.cc b/src/mips/regexp-macro-assembler-mips.cc
index 9db5c5b..cb210fe 100644
--- a/src/mips/regexp-macro-assembler-mips.cc
+++ b/src/mips/regexp-macro-assembler-mips.cc
@@ -1112,6 +1112,11 @@
     frame_entry<const String*>(re_frame, kInputString) = *subject;
     frame_entry<const byte*>(re_frame, kInputStart) = new_address;
     frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
+  } else if (frame_entry<const String*>(re_frame, kInputString) != *subject) {
+    // Subject string might have been a ConsString that underwent
+    // short-circuiting during GC. That will not change start_address but
+    // will change the pointer inside the subject handle.
+    frame_entry<const String*>(re_frame, kInputString) = *subject;
   }
 
   return 0;
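
The added else-if above covers a GC that rewrites the subject string object without moving its character data: kInputStart stays valid, but the raw String* cached in the regexp frame can go stale, so it is refreshed from the handle. A small host-C++ model of that refresh, using a plain pointer-to-pointer in place of a V8 handle:

    #include <cstdio>

    // Toy stand-ins: a "string" object and the frame that caches raw pointers.
    struct Str { const char* chars; };
    struct Frame { const Str* input_string; const char* input_start; };

    // handle: a location that "GC" may update to point at a different Str object.
    void UpdateAfterGC(Frame* frame, Str* const* handle) {
      const Str* current = *handle;
      if (frame->input_start != current->chars) {
        // Character data moved: refresh the object pointer and the bounds.
        frame->input_string = current;
        frame->input_start = current->chars;
      } else if (frame->input_string != current) {
        // Same character data, different object identity (e.g. a cons wrapper
        // was short-circuited away): refresh only the cached object pointer.
        frame->input_string = current;
      }
    }

    int main() {
      static const char kChars[] = "abc";
      Str a{kChars}, b{kChars};        // two objects sharing character data
      Str* location = &a;
      Frame frame{&a, kChars};
      location = &b;                   // "GC" replaces the object behind the handle
      UpdateAfterGC(&frame, &location);
      std::printf("%d\n", frame.input_string == &b);  // 1
    }
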
diff --git a/src/mips/simulator-mips.cc b/src/mips/simulator-mips.cc
index 17c1897..0ec3e28 100644
--- a/src/mips/simulator-mips.cc
+++ b/src/mips/simulator-mips.cc
@@ -1359,9 +1359,9 @@
 
 // Returns the limit of the stack area to enable checking for stack overflows.
 uintptr_t Simulator::StackLimit() const {
-  // Leave a safety margin of 256 bytes to prevent overrunning the stack when
+  // Leave a safety margin of 512 bytes to prevent overrunning the stack when
   // pushing values.
-  return reinterpret_cast<uintptr_t>(stack_) + 256;
+  return reinterpret_cast<uintptr_t>(stack_) + 512;
 }
 
 
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index 4bad0a2..9f94b1d 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -99,7 +99,61 @@
 // must always call a backup property check that is complete.
 // This function is safe to call if the receiver has fast properties.
 // Name must be a symbol and receiver must be a heap object.
-MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup(
+static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
+                                             Label* miss_label,
+                                             Register receiver,
+                                             Handle<String> name,
+                                             Register scratch0,
+                                             Register scratch1) {
+  ASSERT(name->IsSymbol());
+  Counters* counters = masm->isolate()->counters();
+  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
+  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
+
+  Label done;
+
+  const int kInterceptorOrAccessCheckNeededMask =
+      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
+
+  // Bail out if the receiver has a named interceptor or requires access checks.
+  Register map = scratch1;
+  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
+  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
+  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
+  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));
+
+  // Check that receiver is a JSObject.
+  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
+  __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
+
+  // Load properties array.
+  Register properties = scratch0;
+  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+  // Check that the properties array is a dictionary.
+  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
+  Register tmp = properties;
+  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
+  __ Branch(miss_label, ne, map, Operand(tmp));
+
+  // Restore the temporarily used register.
+  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+
+
+  StringDictionaryLookupStub::GenerateNegativeLookup(masm,
+                                                     miss_label,
+                                                     &done,
+                                                     receiver,
+                                                     properties,
+                                                     name,
+                                                     scratch1);
+  __ bind(&done);
+  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateDictionaryNegativeLookup(
     MacroAssembler* masm,
     Label* miss_label,
     Register receiver,
@@ -140,7 +194,7 @@
   // Restore the temporarily used register.
   __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
 
-  MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup(
+  MaybeObject* result = StringDictionaryLookupStub::TryGenerateNegativeLookup(
       masm,
       miss_label,
       &done,
@@ -261,8 +315,10 @@
 // are loaded directly otherwise the property is loaded from the properties
 // fixed array.
 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
-                                            Register dst, Register src,
-                                            JSObject* holder, int index) {
+                                            Register dst,
+                                            Register src,
+                                            Handle<JSObject> holder,
+                                            int index) {
   // Adjust for the number of properties stored in the holder.
   index -= holder->map()->inobject_properties();
   if (index < 0) {
@@ -469,20 +525,15 @@
 
 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
   ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
-  Code* code = NULL;
-  if (kind == Code::LOAD_IC) {
-    code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
-  } else {
-    code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
-  }
-
-  Handle<Code> ic(code);
-  __ Jump(ic, RelocInfo::CODE_TARGET);
+  Handle<Code> code = (kind == Code::LOAD_IC)
+      ? masm->isolate()->builtins()->LoadIC_Miss()
+      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
+  __ Jump(code, RelocInfo::CODE_TARGET);
 }
 
 
 static void GenerateCallFunction(MacroAssembler* masm,
-                                 Object* object,
+                                 Handle<Object> object,
                                  const ParameterCount& arguments,
                                  Label* miss,
                                  Code::ExtraICState extra_ic_state) {
@@ -878,7 +929,25 @@
 // Generate code to check that a global property cell is empty. Create
 // the property cell at compilation time if no cell exists for the
 // property.
-MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
+static void GenerateCheckPropertyCell(MacroAssembler* masm,
+                                      Handle<GlobalObject> global,
+                                      Handle<String> name,
+                                      Register scratch,
+                                      Label* miss) {
+  Handle<JSGlobalPropertyCell> cell =
+      GlobalObject::EnsurePropertyCell(global, name);
+  ASSERT(cell->value()->IsTheHole());
+  __ li(scratch, Operand(cell));
+  __ lw(scratch,
+        FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
+  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+  __ Branch(miss, ne, scratch, Operand(at));
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell(
     MacroAssembler* masm,
     GlobalObject* global,
     String* name,
@@ -901,7 +970,29 @@
 
 // Calls GenerateCheckPropertyCell for each global object in the prototype chain
 // from object to (but not including) holder.
-MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
+static void GenerateCheckPropertyCells(MacroAssembler* masm,
+                                       Handle<JSObject> object,
+                                       Handle<JSObject> holder,
+                                       Handle<String> name,
+                                       Register scratch,
+                                       Label* miss) {
+  Handle<JSObject> current = object;
+  while (!current.is_identical_to(holder)) {
+    if (current->IsGlobalObject()) {
+      GenerateCheckPropertyCell(masm,
+                                Handle<GlobalObject>::cast(current),
+                                name,
+                                scratch,
+                                miss);
+    }
+    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
+  }
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells(
     MacroAssembler* masm,
     JSObject* object,
     JSObject* holder,
@@ -912,7 +1003,7 @@
   while (current != holder) {
     if (current->IsGlobalObject()) {
       // Returns a cell or a failure.
-      MaybeObject* result = GenerateCheckPropertyCell(
+      MaybeObject* result = TryGenerateCheckPropertyCell(
           masm,
           GlobalObject::cast(current),
           name,
@@ -1047,6 +1138,108 @@
 #define __ ACCESS_MASM(masm())
 
 
+Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
+                                       Register object_reg,
+                                       Handle<JSObject> holder,
+                                       Register holder_reg,
+                                       Register scratch1,
+                                       Register scratch2,
+                                       Handle<String> name,
+                                       int save_at_depth,
+                                       Label* miss) {
+  // Make sure there's no overlap between holder and object registers.
+  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
+  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
+         && !scratch2.is(scratch1));
+
+  // Keep track of the current object in register reg.
+  Register reg = object_reg;
+  int depth = 0;
+
+  if (save_at_depth == depth) {
+    __ sw(reg, MemOperand(sp));
+  }
+
+  // Check the maps in the prototype chain.
+  // Traverse the prototype chain from the object and do map checks.
+  Handle<JSObject> current = object;
+  while (!current.is_identical_to(holder)) {
+    ++depth;
+
+    // Only global objects and objects that do not require access
+    // checks are allowed in stubs.
+    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
+
+    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
+    if (!current->HasFastProperties() &&
+        !current->IsJSGlobalObject() &&
+        !current->IsJSGlobalProxy()) {
+      if (!name->IsSymbol()) {
+        name = factory()->LookupSymbol(name);
+      }
+      ASSERT(current->property_dictionary()->FindEntry(*name) ==
+             StringDictionary::kNotFound);
+
+      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
+                                       scratch1, scratch2);
+
+      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+      reg = holder_reg;  // From now on the object will be in holder_reg.
+      __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
+    } else {
+      Handle<Map> current_map(current->map());
+      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+      // Branch on the result of the map check.
+      __ Branch(miss, ne, scratch1, Operand(current_map));
+      // Check access rights to the global object.  This has to happen after
+      // the map check so that we know that the object is actually a global
+      // object.
+      if (current->IsJSGlobalProxy()) {
+        __ CheckAccessGlobalProxy(reg, scratch2, miss);
+      }
+      reg = holder_reg;  // From now on the object will be in holder_reg.
+
+      if (heap()->InNewSpace(*prototype)) {
+        // The prototype is in new space; we cannot store a reference to it
+        // in the code.  Load it from the map.
+        __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
+      } else {
+        // The prototype is in old space; load it directly.
+        __ li(reg, Operand(prototype));
+      }
+    }
+
+    if (save_at_depth == depth) {
+      __ sw(reg, MemOperand(sp));
+    }
+
+    // Go to the next object in the prototype chain.
+    current = prototype;
+  }
+
+  // Log the check depth.
+  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
+
+  // Check the holder map.
+  __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+  __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));
+
+  // Perform security check for access to the global object.
+  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
+  if (holder->IsJSGlobalProxy()) {
+    __ CheckAccessGlobalProxy(reg, scratch1, miss);
+  }
+
+  // If we've skipped any global objects, it's not enough to verify that
+  // their maps haven't changed.  We also need to check that the property
+  // cell for the property is still empty.
+  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
+
+  // Return the register containing the holder.
+  return reg;
+}
+
+
 Register StubCompiler::CheckPrototypes(JSObject* object,
                                        Register object_reg,
                                        JSObject* holder,
@@ -1096,12 +1289,14 @@
       ASSERT(current->property_dictionary()->FindEntry(name) ==
              StringDictionary::kNotFound);
 
-      MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
-                                                                      miss,
-                                                                      reg,
-                                                                      name,
-                                                                      scratch1,
-                                                                      scratch2);
+      MaybeObject* negative_lookup =
+          TryGenerateDictionaryNegativeLookup(masm(),
+                                              miss,
+                                              reg,
+                                              name,
+                                              scratch1,
+                                              scratch2);
+
       if (negative_lookup->IsFailure()) {
         set_failure(Failure::cast(negative_lookup));
         return reg;
@@ -1166,18 +1361,18 @@
   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
   if (holder->IsJSGlobalProxy()) {
     __ CheckAccessGlobalProxy(reg, scratch1, miss);
-  };
+  }
 
   // If we've skipped any global objects, it's not enough to verify
   // that their maps haven't changed.  We also need to check that the
   // property cell for the property is still empty.
 
-  MaybeObject* result = GenerateCheckPropertyCells(masm(),
-                                                   object,
-                                                   holder,
-                                                   name,
-                                                   scratch1,
-                                                   miss);
+  MaybeObject* result = TryGenerateCheckPropertyCells(masm(),
+                                                      object,
+                                                      holder,
+                                                      name,
+                                                      scratch1,
+                                                      miss);
   if (result->IsFailure()) set_failure(Failure::cast(result));
 
   // Return the register containing the holder.
@@ -1185,36 +1380,35 @@
 }
 
 
-void StubCompiler::GenerateLoadField(JSObject* object,
-                                     JSObject* holder,
+void StubCompiler::GenerateLoadField(Handle<JSObject> object,
+                                     Handle<JSObject> holder,
                                      Register receiver,
                                      Register scratch1,
                                      Register scratch2,
                                      Register scratch3,
                                      int index,
-                                     String* name,
+                                     Handle<String> name,
                                      Label* miss) {
   // Check that the receiver isn't a smi.
   __ And(scratch1, receiver, Operand(kSmiTagMask));
   __ Branch(miss, eq, scratch1, Operand(zero_reg));
 
   // Check that the maps haven't changed.
-  Register reg =
-      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
-                      name, miss);
+  Register reg = CheckPrototypes(
+      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
   GenerateFastPropertyLoad(masm(), v0, reg, holder, index);
   __ Ret();
 }
 
 
-void StubCompiler::GenerateLoadConstant(JSObject* object,
-                                        JSObject* holder,
+void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
+                                        Handle<JSObject> holder,
                                         Register receiver,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
-                                        Object* value,
-                                        String* name,
+                                        Handle<Object> value,
+                                        Handle<String> name,
                                         Label* miss) {
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, miss, scratch1);
@@ -1225,7 +1419,7 @@
                       scratch1, scratch2, scratch3, name, miss);
 
   // Return the constant value.
-  __ li(v0, Operand(Handle<Object>(value)));
+  __ li(v0, Operand(value));
   __ Ret();
 }
 
@@ -1390,7 +1584,8 @@
       // We found FIELD property in prototype chain of interceptor's holder.
       // Retrieve a field from field's holder.
       GenerateFastPropertyLoad(masm(), v0, holder_reg,
-                               lookup->holder(), lookup->GetFieldIndex());
+                               Handle<JSObject>(lookup->holder()),
+                               lookup->GetFieldIndex());
       __ Ret();
     } else {
       // We found CALLBACKS property in prototype chain of interceptor's
@@ -1440,9 +1635,9 @@
 }
 
 
-void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
+void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
-    __ Branch(miss, ne, a2, Operand(Handle<String>(name)));
+    __ Branch(miss, ne, a2, Operand(name));
   }
 }
 
@@ -1499,11 +1694,22 @@
 }
 
 
-MaybeObject* CallStubCompiler::GenerateMissBranch() {
-  MaybeObject* maybe_obj =
+void CallStubCompiler::GenerateMissBranch() {
+  Handle<Code> code =
       isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                                kind_,
-                                               extra_ic_state_);
+                                               extra_state_);
+  __ Jump(code, RelocInfo::CODE_TARGET);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* CallStubCompiler::TryGenerateMissBranch() {
+  MaybeObject* maybe_obj =
+      isolate()->stub_cache()->TryComputeCallMiss(arguments().immediate(),
+                                                  kind_,
+                                                  extra_state_);
   Object* obj;
   if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
@@ -1511,10 +1717,10 @@
 }
 
 
-MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
-                                                JSObject* holder,
+Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
+                                                Handle<JSObject> holder,
                                                 int index,
-                                                String* name) {
+                                                Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- a2    : name
   //  -- ra    : return address
@@ -1534,12 +1740,11 @@
   Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
   GenerateFastPropertyLoad(masm(), a1, reg, holder, index);
 
-  GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_);
+  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
-  if (maybe_result->IsFailure()) return maybe_result;
+  GenerateMissBranch();
 
   // Return the generated code.
   return GetCode(FIELD, name);
@@ -1564,7 +1769,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   Register receiver = a1;
 
@@ -1640,7 +1845,7 @@
       __ bind(&with_write_barrier);
 
       __ lw(t2, FieldMemOperand(receiver, HeapObject::kMapOffset));
-      __ CheckFastSmiOnlyElements(t2, t2, &call_builtin);
+      __ CheckFastObjectElements(t2, t2, &call_builtin);
 
       // Save new length.
       __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
@@ -1730,11 +1935,11 @@
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1759,7 +1964,7 @@
   Register receiver = a1;
   Register elements = a3;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -1819,11 +2024,11 @@
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1853,12 +2058,12 @@
   Label* index_out_of_range_label = &index_out_of_range;
 
   if (kind_ == Code::CALL_IC &&
-      (CallICBase::StringStubState::decode(extra_ic_state_) ==
+      (CallICBase::StringStubState::decode(extra_state_) ==
        DEFAULT_STRING_STUB)) {
     index_out_of_range_label = &miss;
   }
 
-  GenerateNameCheck(name, &name_miss);
+  GenerateNameCheck(Handle<String>(name), &name_miss);
 
   // Check that the maps starting from the prototype haven't changed.
   GenerateDirectLoadGlobalFunctionPrototype(masm(),
@@ -1906,11 +2111,11 @@
   // Restore function name in a2.
   __ li(a2, Handle<String>(name));
   __ bind(&name_miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1939,12 +2144,12 @@
   Label* index_out_of_range_label = &index_out_of_range;
 
   if (kind_ == Code::CALL_IC &&
-      (CallICBase::StringStubState::decode(extra_ic_state_) ==
+      (CallICBase::StringStubState::decode(extra_state_) ==
        DEFAULT_STRING_STUB)) {
     index_out_of_range_label = &miss;
   }
 
-  GenerateNameCheck(name, &name_miss);
+  GenerateNameCheck(Handle<String>(name), &name_miss);
 
   // Check that the maps starting from the prototype haven't changed.
   GenerateDirectLoadGlobalFunctionPrototype(masm(),
@@ -1994,11 +2199,11 @@
   // Restore function name in a2.
   __ li(a2, Handle<String>(name));
   __ bind(&name_miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -2023,7 +2228,7 @@
   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
 
   Label miss;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ lw(a1, MemOperand(sp, 1 * kPointerSize));
@@ -2066,11 +2271,11 @@
 
   __ bind(&miss);
   // a2: function name.
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -2098,7 +2303,7 @@
   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
 
   Label miss, slow;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ lw(a1, MemOperand(sp, 1 * kPointerSize));
@@ -2200,11 +2405,11 @@
 
   __ bind(&miss);
   // a2: function name.
-  MaybeObject* obj = GenerateMissBranch();
+  MaybeObject* obj = TryGenerateMissBranch();
   if (obj->IsFailure()) return obj;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -2228,7 +2433,7 @@
   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
 
   Label miss;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ lw(a1, MemOperand(sp, 1 * kPointerSize));
@@ -2302,11 +2507,11 @@
 
   __ bind(&miss);
   // a2: function name.
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -2332,7 +2537,7 @@
 
   Label miss, miss_before_stack_reserved;
 
-  GenerateNameCheck(name, &miss_before_stack_reserved);
+  GenerateNameCheck(Handle<String>(name), &miss_before_stack_reserved);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -2357,11 +2562,11 @@
   FreeSpaceForFastApiCall(masm());
 
   __ bind(&miss_before_stack_reserved);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -2385,7 +2590,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -2484,7 +2689,7 @@
       UNREACHABLE();
   }
 
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(function, arguments(), JUMP_FUNCTION, call_kind);
@@ -2492,11 +2697,11 @@
   // Handle call cache miss.
   __ bind(&miss);
 
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -2510,18 +2715,18 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the number of arguments.
   const int argc = arguments().immediate();
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
 
   // Get the receiver from the stack.
   __ lw(a1, MemOperand(sp, argc * kPointerSize));
 
-  CallInterceptorCompiler compiler(this, arguments(), a2, extra_ic_state_);
+  CallInterceptorCompiler compiler(this, arguments(), a2, extra_state_);
   MaybeObject* result = compiler.Compile(masm(),
                                          object,
                                          holder,
@@ -2541,15 +2746,16 @@
   // Restore receiver.
   __ lw(a0, MemOperand(sp, argc * kPointerSize));
 
-  GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_);
+  GenerateCallFunction(masm(), Handle<Object>(object), arguments(), &miss,
+                       extra_state_);
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
@@ -2574,7 +2780,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the number of arguments.
   const int argc = arguments().immediate();
@@ -2595,32 +2801,26 @@
   // Jump to the cached code (tail call).
   Counters* counters = masm()->isolate()->counters();
   __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
-  ASSERT(function->is_compiled());
   Handle<Code> code(function->code());
   ParameterCount expected(function->shared()->formal_parameter_count());
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
-  if (V8::UseCrankshaft()) {
-    // TODO(kasperl): For now, we always call indirectly through the
-    // code field in the function to allow recompilation to take effect
-    // without changing any of the call sites.
-    __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
-    __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION,
-                  NullCallWrapper(), call_kind);
-  } else {
-    __ InvokeCode(code, expected, arguments(), RelocInfo::CODE_TARGET,
-                  JUMP_FUNCTION, call_kind);
-  }
+  // We call indirectly through the code field in the function to
+  // allow recompilation to take effect without changing any of the
+  // call sites.
+  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
+  __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION,
+                NullCallWrapper(), call_kind);
 
   // Handle call cache miss.
   __ bind(&miss);
   __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(NORMAL, name);
+  return TryGetCode(NORMAL, name);
 }
 
 
@@ -2799,9 +2999,9 @@
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
-                                                      JSObject* object,
-                                                      JSObject* last) {
+Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
+                                                      Handle<JSObject> object,
+                                                      Handle<JSObject> last) {
   // ----------- S t a t e -------------
   //  -- a0    : receiver
   //  -- ra    : return address
@@ -2817,15 +3017,8 @@
   // If the last object in the prototype chain is a global object,
   // check that the global property cell is empty.
   if (last->IsGlobalObject()) {
-    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
-                                                  GlobalObject::cast(last),
-                                                  name,
-                                                  a1,
-                                                  &miss);
-    if (cell->IsFailure()) {
-      miss.Unuse();
-      return cell;
-    }
+    GenerateCheckPropertyCell(
+        masm(), Handle<GlobalObject>::cast(last), name, a1, &miss);
   }
 
   // Return undefined if the maps of the full prototype chain are still the same.
@@ -2836,14 +3029,14 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NONEXISTENT, heap()->empty_string());
+  return GetCode(NONEXISTENT, factory()->empty_string());
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
-                                                JSObject* holder,
+Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
+                                                Handle<JSObject> holder,
                                                 int index,
-                                                String* name) {
+                                                Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- a0    : receiver
   //  -- a2    : name
@@ -2884,14 +3077,14 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(CALLBACKS, name);
+  return TryGetCode(CALLBACKS, name);
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
-                                                   JSObject* holder,
-                                                   Object* value,
-                                                   String* name) {
+Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
+                                                   Handle<JSObject> holder,
+                                                   Handle<Object> value,
+                                                   Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- a0    : receiver
   //  -- a2    : name
@@ -2919,7 +3112,7 @@
   // -----------------------------------
   Label miss;
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
   GenerateLoadInterceptor(object,
                           holder,
@@ -2935,7 +3128,7 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
@@ -2982,13 +3175,13 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NORMAL, name);
+  return TryGetCode(NORMAL, name);
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
-                                                     JSObject* receiver,
-                                                     JSObject* holder,
+Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
+                                                     Handle<JSObject> receiver,
+                                                     Handle<JSObject> holder,
                                                      int index) {
   // ----------- S t a t e -------------
   //  -- ra    : return address
@@ -2998,7 +3191,7 @@
   Label miss;
 
   // Check the key is the cached one.
-  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
+  __ Branch(&miss, ne, a0, Operand(name));
 
   GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss);
   __ bind(&miss);
@@ -3033,14 +3226,15 @@
   __ bind(&miss);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
-  return GetCode(CALLBACKS, name);
+  return TryGetCode(CALLBACKS, name);
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
-                                                        JSObject* receiver,
-                                                        JSObject* holder,
-                                                        Object* value) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
+    Handle<String> name,
+    Handle<JSObject> receiver,
+    Handle<JSObject> holder,
+    Handle<Object> value) {
   // ----------- S t a t e -------------
   //  -- ra    : return address
   //  -- a0    : key
@@ -3049,7 +3243,7 @@
   Label miss;
 
   // Check the key is the cached one.
-  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
+  __ Branch(&miss, ne, a0, Operand(name));
 
   GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss);
   __ bind(&miss);
@@ -3073,7 +3267,7 @@
   // Check the key is the cached one.
   __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
   GenerateLoadInterceptor(receiver,
                           holder,
@@ -3088,11 +3282,12 @@
   __ bind(&miss);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- ra    : return address
   //  -- a0    : key
@@ -3101,7 +3296,7 @@
   Label miss;
 
   // Check the key is the cached one.
-  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
+  __ Branch(&miss, ne, a0, Operand(name));
 
   GenerateLoadArrayLength(masm(), a1, a2, &miss);
   __ bind(&miss);
@@ -3111,7 +3306,8 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- ra    : return address
   //  -- a0    : key
@@ -3123,7 +3319,7 @@
   __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
 
   // Check the key is the cached one.
-  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
+  __ Branch(&miss, ne, a0, Operand(name));
 
   GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true);
   __ bind(&miss);
@@ -3135,7 +3331,8 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- ra    : return address
   //  -- a0    : key
@@ -3147,7 +3344,7 @@
   __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
 
   // Check the name hasn't changed.
-  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
+  __ Branch(&miss, ne, a0, Operand(name));
 
   GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss);
   __ bind(&miss);
@@ -3178,7 +3375,7 @@
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL);
+  return TryGetCode(NORMAL, NULL);
 }
 
 
@@ -3206,7 +3403,7 @@
   __ Jump(miss_ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL, MEGAMORPHIC);
+  return TryGetCode(NORMAL, NULL, MEGAMORPHIC);
 }
 
 
@@ -3299,8 +3496,8 @@
       __ Jump(code, RelocInfo::CODE_TARGET, eq, a3, Operand(map));
     } else {
       Label next_map;
-      __ Branch(&next_map, eq, a3, Operand(map));
-      __ li(t0, Operand(Handle<Map>(transitioned_maps->at(i))));
+      __ Branch(&next_map, ne, a3, Operand(map));
+      __ li(a3, Operand(Handle<Map>(transitioned_maps->at(i))));
       __ Jump(code, RelocInfo::CODE_TARGET);
       __ bind(&next_map);
     }
diff --git a/src/mirror-debugger.js b/src/mirror-debugger.js
index e3f3c48..999252d 100644
--- a/src/mirror-debugger.js
+++ b/src/mirror-debugger.js
@@ -1087,7 +1087,7 @@
   // Use the same text representation as in messages.js.
   var text;
   try {
-    str = %_CallFunction(this.value_, builtins.errorToString);
+    str = %_CallFunction(this.value_, builtins.ErrorToString);
   } catch (e) {
     str = '#<Error>';
   }
diff --git a/src/mksnapshot.cc b/src/mksnapshot.cc
index 7a3fd09..bc0c2fc 100644
--- a/src/mksnapshot.cc
+++ b/src/mksnapshot.cc
@@ -312,7 +312,6 @@
   }
   // If we don't do this then we end up with a stray root pointing at the
   // context even after we have disposed of the context.
-  // TODO(gc): request full compaction?
   HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
   i::Object* raw_context = *(v8::Utils::OpenHandle(*context));
   context.Dispose();
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 6d2cf5f..64bda94 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -156,6 +156,12 @@
     case JS_ARRAY_TYPE:
       JSArray::cast(this)->JSArrayVerify();
       break;
+    case JS_SET_TYPE:
+      JSSet::cast(this)->JSSetVerify();
+      break;
+    case JS_MAP_TYPE:
+      JSMap::cast(this)->JSMapVerify();
+      break;
     case JS_WEAK_MAP_TYPE:
       JSWeakMap::cast(this)->JSWeakMapVerify();
       break;
@@ -263,6 +269,12 @@
 void JSObject::JSObjectVerify() {
   VerifyHeapPointer(properties());
   VerifyHeapPointer(elements());
+
+  if (GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS) {
+    ASSERT(this->elements()->IsFixedArray());
+    ASSERT(this->elements()->length() >= 2);
+  }
+
   if (HasFastProperties()) {
     CHECK_EQ(map()->unused_property_fields(),
              (map()->inobject_properties() + properties()->length() -
@@ -494,6 +506,22 @@
 }
 
 
+void JSSet::JSSetVerify() {
+  CHECK(IsJSSet());
+  JSObjectVerify();
+  VerifyHeapPointer(table());
+  ASSERT(table()->IsHashTable() || table()->IsUndefined());
+}
+
+
+void JSMap::JSMapVerify() {
+  CHECK(IsJSMap());
+  JSObjectVerify();
+  VerifyHeapPointer(table());
+  ASSERT(table()->IsHashTable() || table()->IsUndefined());
+}
+
+
 void JSWeakMap::JSWeakMapVerify() {
   CHECK(IsJSWeakMap());
   JSObjectVerify();
diff --git a/src/objects-inl.h b/src/objects-inl.h
index cebf9be..dc3aa46 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -67,6 +67,13 @@
 }
 
 
+#define TYPE_CHECKER(type, instancetype)                                \
+  bool Object::Is##type() {                                             \
+  return Object::IsHeapObject() &&                                      \
+      HeapObject::cast(this)->map()->instance_type() == instancetype;   \
+  }
+
+
 #define CAST_ACCESSOR(type)                     \
   type* type::cast(Object* object) {            \
     ASSERT(object->Is##type());                 \
@@ -112,6 +119,11 @@
   }
 
 
+bool Object::IsFixedArrayBase() {
+  return IsFixedArray() || IsFixedDoubleArray();
+}
+
+
 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
   // There is a constraint on the object; check.
   if (!this->IsJSObject()) return false;
@@ -147,10 +159,7 @@
 }
 
 
-bool Object::IsHeapNumber() {
-  return Object::IsHeapObject()
-    && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
-}
+TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
 
 
 bool Object::IsString() {
@@ -403,16 +412,8 @@
 }
 
 
-bool Object::IsByteArray() {
-  return Object::IsHeapObject()
-    && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE;
-}
-
-
-bool Object::IsFreeSpace() {
-  return Object::IsHeapObject()
-    && HeapObject::cast(this)->map()->instance_type() == FREE_SPACE_TYPE;
-}
+TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
+TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
 
 
 bool Object::IsFiller() {
@@ -422,11 +423,7 @@
 }
 
 
-bool Object::IsExternalPixelArray() {
-  return Object::IsHeapObject() &&
-      HeapObject::cast(this)->map()->instance_type() ==
-          EXTERNAL_PIXEL_ARRAY_TYPE;
-}
+TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)
 
 
 bool Object::IsExternalArray() {
@@ -439,60 +436,14 @@
 }
 
 
-bool Object::IsExternalByteArray() {
-  return Object::IsHeapObject() &&
-      HeapObject::cast(this)->map()->instance_type() ==
-      EXTERNAL_BYTE_ARRAY_TYPE;
-}
-
-
-bool Object::IsExternalUnsignedByteArray() {
-  return Object::IsHeapObject() &&
-      HeapObject::cast(this)->map()->instance_type() ==
-      EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE;
-}
-
-
-bool Object::IsExternalShortArray() {
-  return Object::IsHeapObject() &&
-      HeapObject::cast(this)->map()->instance_type() ==
-      EXTERNAL_SHORT_ARRAY_TYPE;
-}
-
-
-bool Object::IsExternalUnsignedShortArray() {
-  return Object::IsHeapObject() &&
-      HeapObject::cast(this)->map()->instance_type() ==
-      EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE;
-}
-
-
-bool Object::IsExternalIntArray() {
-  return Object::IsHeapObject() &&
-      HeapObject::cast(this)->map()->instance_type() ==
-      EXTERNAL_INT_ARRAY_TYPE;
-}
-
-
-bool Object::IsExternalUnsignedIntArray() {
-  return Object::IsHeapObject() &&
-      HeapObject::cast(this)->map()->instance_type() ==
-      EXTERNAL_UNSIGNED_INT_ARRAY_TYPE;
-}
-
-
-bool Object::IsExternalFloatArray() {
-  return Object::IsHeapObject() &&
-      HeapObject::cast(this)->map()->instance_type() ==
-      EXTERNAL_FLOAT_ARRAY_TYPE;
-}
-
-
-bool Object::IsExternalDoubleArray() {
-  return Object::IsHeapObject() &&
-      HeapObject::cast(this)->map()->instance_type() ==
-      EXTERNAL_DOUBLE_ARRAY_TYPE;
-}
+TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
+TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
+TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
+TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
+TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
+TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
+TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
+TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
 
 
 bool MaybeObject::IsFailure() {
@@ -549,42 +500,14 @@
 }
 
 
-bool Object::IsJSFunctionProxy() {
-  return Object::IsHeapObject() &&
-      HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_PROXY_TYPE;
-}
-
-
-bool Object::IsJSWeakMap() {
-  return Object::IsJSObject() &&
-      HeapObject::cast(this)->map()->instance_type() == JS_WEAK_MAP_TYPE;
-}
-
-
-bool Object::IsJSContextExtensionObject() {
-  return IsHeapObject()
-      && (HeapObject::cast(this)->map()->instance_type() ==
-          JS_CONTEXT_EXTENSION_OBJECT_TYPE);
-}
-
-
-bool Object::IsMap() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
-}
-
-
-bool Object::IsFixedArray() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map()->instance_type() == FIXED_ARRAY_TYPE;
-}
-
-
-bool Object::IsFixedDoubleArray() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map()->instance_type() ==
-          FIXED_DOUBLE_ARRAY_TYPE;
-}
+TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
+TYPE_CHECKER(JSSet, JS_SET_TYPE)
+TYPE_CHECKER(JSMap, JS_MAP_TYPE)
+TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
+TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
+TYPE_CHECKER(Map, MAP_TYPE)
+TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
+TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
 
 
 bool Object::IsDescriptorArray() {
@@ -647,10 +570,7 @@
 }
 
 
-bool Object::IsJSFunction() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
-}
+TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
 
 
 template <> inline bool Is<JSFunction>(Object* obj) {
@@ -658,43 +578,12 @@
 }
 
 
-bool Object::IsCode() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
-}
-
-
-bool Object::IsOddball() {
-  return Object::IsHeapObject()
-    && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
-}
-
-
-bool Object::IsJSGlobalPropertyCell() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map()->instance_type()
-      == JS_GLOBAL_PROPERTY_CELL_TYPE;
-}
-
-
-bool Object::IsSharedFunctionInfo() {
-  return Object::IsHeapObject() &&
-      (HeapObject::cast(this)->map()->instance_type() ==
-       SHARED_FUNCTION_INFO_TYPE);
-}
-
-
-bool Object::IsJSValue() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map()->instance_type() == JS_VALUE_TYPE;
-}
-
-
-bool Object::IsJSMessageObject() {
-  return Object::IsHeapObject()
-      && (HeapObject::cast(this)->map()->instance_type() ==
-          JS_MESSAGE_OBJECT_TYPE);
-}
+TYPE_CHECKER(Code, CODE_TYPE)
+TYPE_CHECKER(Oddball, ODDBALL_TYPE)
+TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
+TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
+TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
+TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
 
 
 bool Object::IsStringWrapper() {
@@ -702,10 +591,7 @@
 }
 
 
-bool Object::IsForeign() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map()->instance_type() == FOREIGN_TYPE;
-}
+TYPE_CHECKER(Foreign, FOREIGN_TYPE)
 
 
 bool Object::IsBoolean() {
@@ -714,16 +600,8 @@
 }
 
 
-bool Object::IsJSArray() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE;
-}
-
-
-bool Object::IsJSRegExp() {
-  return Object::IsHeapObject()
-      && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE;
-}
+TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
+TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
 
 
 template <> inline bool Is<JSArray>(Object* obj) {
@@ -760,7 +638,10 @@
     return false;
   }
 #ifdef DEBUG
-  reinterpret_cast<JSFunctionResultCache*>(this)->JSFunctionResultCacheVerify();
+  if (FLAG_verify_heap) {
+    reinterpret_cast<JSFunctionResultCache*>(this)->
+        JSFunctionResultCacheVerify();
+  }
 #endif
   return true;
 }
@@ -772,7 +653,9 @@
     return false;
   }
 #ifdef DEBUG
-  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
+  if (FLAG_verify_heap) {
+    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
+  }
 #endif
   return true;
 }
@@ -821,18 +704,8 @@
 }
 
 
-bool Object::IsJSGlobalObject() {
-  return IsHeapObject() &&
-      (HeapObject::cast(this)->map()->instance_type() ==
-       JS_GLOBAL_OBJECT_TYPE);
-}
-
-
-bool Object::IsJSBuiltinsObject() {
-  return IsHeapObject() &&
-      (HeapObject::cast(this)->map()->instance_type() ==
-       JS_BUILTINS_OBJECT_TYPE);
-}
+TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
+TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
 
 
 bool Object::IsUndetectableObject() {
@@ -1300,7 +1173,6 @@
 
 FixedArrayBase* JSObject::elements() {
   Object* array = READ_FIELD(this, kElementsOffset);
-  ASSERT(array->HasValidElements());
   return static_cast<FixedArrayBase*>(array);
 }
 
@@ -1740,7 +1612,11 @@
                 old_length * kDoubleSize);
   } else {
     for (int i = 0; i < old_length; ++i) {
-      set(i, from->get_scalar(i));
+      if (from->is_the_hole(i)) {
+        set_the_hole(i);
+      } else {
+        set(i, from->get_scalar(i));
+      }
     }
   }
   int offset = kHeaderSize + old_length * kDoubleSize;
@@ -1805,15 +1681,13 @@
 }
 
 
-void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
+void FixedArray::NoWriteBarrierSet(FixedArray* array,
+                                   int index,
+                                   Object* value) {
   ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
   ASSERT(index >= 0 && index < array->length());
   ASSERT(!HEAP->InNewSpace(value));
   WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
-  array->GetHeap()->incremental_marking()->RecordWrite(
-      array,
-      HeapObject::RawField(array, kHeaderSize + index * kPointerSize),
-      value);
 }
 
 
@@ -1901,10 +1775,12 @@
 }
 
 
-void DescriptorArray::fast_swap(FixedArray* array, int first, int second) {
+void DescriptorArray::NoWriteBarrierSwap(FixedArray* array,
+                                         int first,
+                                         int second) {
   Object* tmp = array->get(first);
-  fast_set(array, first, array->get(second));
-  fast_set(array, second, tmp);
+  NoWriteBarrierSet(array, first, array->get(second));
+  NoWriteBarrierSet(array, second, tmp);
 }
 
 
@@ -2012,7 +1888,9 @@
 }
 
 
-void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
+void DescriptorArray::Set(int descriptor_number,
+                          Descriptor* desc,
+                          const WhitenessWitness&) {
   // Range check.
   ASSERT(descriptor_number < number_of_descriptors());
 
@@ -2020,26 +1898,53 @@
   ASSERT(!HEAP->InNewSpace(desc->GetKey()));
   ASSERT(!HEAP->InNewSpace(desc->GetValue()));
 
-  fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
+  NoWriteBarrierSet(this,
+                    ToKeyIndex(descriptor_number),
+                    desc->GetKey());
   FixedArray* content_array = GetContentArray();
-  fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue());
-  fast_set(content_array, ToDetailsIndex(descriptor_number),
-           desc->GetDetails().AsSmi());
+  NoWriteBarrierSet(content_array,
+                    ToValueIndex(descriptor_number),
+                    desc->GetValue());
+  NoWriteBarrierSet(content_array,
+                    ToDetailsIndex(descriptor_number),
+                    desc->GetDetails().AsSmi());
 }
 
 
-void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) {
+void DescriptorArray::CopyFrom(int index,
+                               DescriptorArray* src,
+                               int src_index,
+                               const WhitenessWitness& witness) {
   Descriptor desc;
   src->Get(src_index, &desc);
-  Set(index, &desc);
+  Set(index, &desc, witness);
 }
 
 
-void DescriptorArray::Swap(int first, int second) {
-  fast_swap(this, ToKeyIndex(first), ToKeyIndex(second));
+void DescriptorArray::NoWriteBarrierSwapDescriptors(int first, int second) {
+  NoWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
   FixedArray* content_array = GetContentArray();
-  fast_swap(content_array, ToValueIndex(first), ToValueIndex(second));
-  fast_swap(content_array, ToDetailsIndex(first),  ToDetailsIndex(second));
+  NoWriteBarrierSwap(content_array,
+                     ToValueIndex(first),
+                     ToValueIndex(second));
+  NoWriteBarrierSwap(content_array,
+                     ToDetailsIndex(first),
+                     ToDetailsIndex(second));
+}
+
+
+DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
+    : marking_(array->GetHeap()->incremental_marking()) {
+  marking_->EnterNoMarkingScope();
+  if (array->number_of_descriptors() > 0) {
+    ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
+    ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
+  }
+}
+
+
+DescriptorArray::WhitenessWitness::~WhitenessWitness() {
+  marking_->LeaveNoMarkingScope();
 }
 
 
@@ -2142,6 +2047,8 @@
 CAST_ACCESSOR(JSRegExp)
 CAST_ACCESSOR(JSProxy)
 CAST_ACCESSOR(JSFunctionProxy)
+CAST_ACCESSOR(JSSet)
+CAST_ACCESSOR(JSMap)
 CAST_ACCESSOR(JSWeakMap)
 CAST_ACCESSOR(Foreign)
 CAST_ACCESSOR(ByteArray)
@@ -2979,6 +2886,21 @@
 }
 
 
+bool Code::is_compiled_optimizable() {
+  ASSERT(kind() == FUNCTION);
+  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
+  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
+}
+
+
+void Code::set_compiled_optimizable(bool value) {
+  ASSERT(kind() == FUNCTION);
+  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
+  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
+  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
+}
+
+
 int Code::allow_osr_at_loop_nesting_level() {
   ASSERT(kind() == FUNCTION);
   return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
@@ -3304,7 +3226,7 @@
 ACCESSORS(Map, constructor, Object, kConstructorOffset)
 
 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
-ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
+ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
 ACCESSORS(JSFunction,
           next_function_link,
           Object,
@@ -3547,8 +3469,23 @@
 }
 
 
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, strict_mode,
-               kStrictModeFunction)
+StrictModeFlag SharedFunctionInfo::strict_mode_flag() {
+  return BooleanBit::get(compiler_hints(), kStrictModeFunction)
+      ? kStrictMode : kNonStrictMode;
+}
+
+
+void SharedFunctionInfo::set_strict_mode_flag(StrictModeFlag strict_mode_flag) {
+  ASSERT(strict_mode_flag == kStrictMode ||
+         strict_mode_flag == kNonStrictMode);
+  bool value = strict_mode_flag == kStrictMode;
+  set_compiler_hints(
+      BooleanBit::set(compiler_hints(), kStrictModeFunction, value));
+}
+
+
+BOOL_GETTER(SharedFunctionInfo, compiler_hints, strict_mode,
+            kStrictModeFunction)
 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
                name_should_print_as_anonymous,
@@ -3823,7 +3760,36 @@
 }
 
 
+FixedArray* JSFunction::literals() {
+  ASSERT(!shared()->bound());
+  return literals_or_bindings();
+}
+
+
+void JSFunction::set_literals(FixedArray* literals) {
+  ASSERT(!shared()->bound());
+  set_literals_or_bindings(literals);
+}
+
+
+FixedArray* JSFunction::function_bindings() {
+  ASSERT(shared()->bound());
+  return literals_or_bindings();
+}
+
+
+void JSFunction::set_function_bindings(FixedArray* bindings) {
+  ASSERT(shared()->bound());
+  // Bound function literal may be initialized to the empty fixed array
+  // before the bindings are set.
+  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
+         bindings->map() == GetHeap()->fixed_cow_array_map());
+  set_literals_or_bindings(bindings);
+}
+
+
 int JSFunction::NumberOfLiterals() {
+  ASSERT(!shared()->bound());
   return literals()->length();
 }
 
@@ -3870,6 +3836,8 @@
 }
 
 
+ACCESSORS(JSSet, table, Object, kTableOffset)
+ACCESSORS(JSMap, table, Object, kTableOffset)
 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
 
@@ -4056,14 +4024,16 @@
       reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
   Map* map = fixed_array->map();
     ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
-          (map == GetHeap()->fixed_array_map() ||
-           map == GetHeap()->fixed_cow_array_map())) ||
-         (kind == FAST_DOUBLE_ELEMENTS &&
-          fixed_array->IsFixedDoubleArray()) ||
-         (kind == DICTIONARY_ELEMENTS &&
-          fixed_array->IsFixedArray() &&
-          fixed_array->IsDictionary()) ||
-         (kind > DICTIONARY_ELEMENTS));
+            (map == GetHeap()->fixed_array_map() ||
+             map == GetHeap()->fixed_cow_array_map())) ||
+           (kind == FAST_DOUBLE_ELEMENTS &&
+            fixed_array->IsFixedDoubleArray()) ||
+           (kind == DICTIONARY_ELEMENTS &&
+            fixed_array->IsFixedArray() &&
+            fixed_array->IsDictionary()) ||
+           (kind > DICTIONARY_ELEMENTS));
+    ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
+           (elements()->IsFixedArray() && elements()->length() >= 2));
 #endif
   return kind;
 }
@@ -4407,7 +4377,7 @@
   WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
   FixedArray::set(index, key, mode);
   FixedArray::set(index+1, value, mode);
-  FixedArray::fast_set(this, index+2, details.AsSmi());
+  FixedArray::set(index+2, details.AsSmi());
 }
 
 
@@ -4456,27 +4426,31 @@
 }
 
 
-bool ObjectHashTableShape::IsMatch(JSReceiver* key, Object* other) {
-  return key == JSReceiver::cast(other);
+template <int entrysize>
+bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
+  return key->SameValue(other);
 }
 
 
-uint32_t ObjectHashTableShape::Hash(JSReceiver* key) {
-  MaybeObject* maybe_hash = key->GetIdentityHash(OMIT_CREATION);
-  ASSERT(!maybe_hash->IsFailure());
-  return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
+template <int entrysize>
+uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
+  ASSERT(!key->IsUndefined() && !key->IsNull());
+  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
+  return Smi::cast(maybe_hash->ToObjectChecked())->value();
 }
 
 
-uint32_t ObjectHashTableShape::HashForObject(JSReceiver* key, Object* other) {
-  MaybeObject* maybe_hash =
-      JSReceiver::cast(other)->GetIdentityHash(OMIT_CREATION);
-  ASSERT(!maybe_hash->IsFailure());
-  return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
+template <int entrysize>
+uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
+                                                        Object* other) {
+  ASSERT(!other->IsUndefined() && !other->IsNull());
+  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
+  return Smi::cast(maybe_hash->ToObjectChecked())->value();
 }
 
 
-MaybeObject* ObjectHashTableShape::AsObject(JSReceiver* key) {
+template <int entrysize>
+MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
   return key;
 }
 
@@ -4534,6 +4508,12 @@
 }
 
 
+MaybeObject* FixedDoubleArray::Copy() {
+  if (length() == 0) return this;
+  return GetHeap()->CopyFixedDoubleArray(this);
+}
+
+
 Relocatable::Relocatable(Isolate* isolate) {
   ASSERT(isolate == Isolate::Current());
   isolate_ = isolate;
diff --git a/src/objects-printer.cc b/src/objects-printer.cc
index fc75732..b788504 100644
--- a/src/objects-printer.cc
+++ b/src/objects-printer.cc
@@ -245,54 +245,6 @@
 }
 
 
-static void PrintElementsKind(FILE* out, ElementsKind kind) {
-  switch (kind) {
-    case FAST_SMI_ONLY_ELEMENTS:
-      PrintF(out, "FAST_SMI_ONLY_ELEMENTS");
-      break;
-    case FAST_ELEMENTS:
-      PrintF(out, "FAST_ELEMENTS");
-      break;
-    case FAST_DOUBLE_ELEMENTS:
-      PrintF(out, "FAST_DOUBLE_ELEMENTS");
-      break;
-    case DICTIONARY_ELEMENTS:
-      PrintF(out, "DICTIONARY_ELEMENTS");
-      break;
-    case NON_STRICT_ARGUMENTS_ELEMENTS:
-      PrintF(out, "NON_STRICT_ARGUMENTS_ELEMENTS");
-      break;
-    case EXTERNAL_BYTE_ELEMENTS:
-      PrintF(out, "EXTERNAL_BYTE_ELEMENTS");
-      break;
-    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
-      PrintF(out, "EXTERNAL_UNSIGNED_BYTE_ELEMENTS");
-      break;
-    case EXTERNAL_SHORT_ELEMENTS:
-      PrintF(out, "EXTERNAL_SHORT_ELEMENTS");
-      break;
-    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
-      PrintF(out, "EXTERNAL_UNSIGNED_SHORT_ELEMENTS");
-      break;
-    case EXTERNAL_INT_ELEMENTS:
-      PrintF(out, "EXTERNAL_INT_ELEMENTS");
-      break;
-    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
-      PrintF(out, "EXTERNAL_UNSIGNED_INT_ELEMENTS");
-      break;
-    case EXTERNAL_FLOAT_ELEMENTS:
-      PrintF(out, "EXTERNAL_FLOAT_ELEMENTS");
-      break;
-    case EXTERNAL_DOUBLE_ELEMENTS:
-      PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS");
-      break;
-    case EXTERNAL_PIXEL_ELEMENTS:
-      PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS");
-      break;
-  }
-}
-
-
 void JSObject::PrintProperties(FILE* out) {
   if (HasFastProperties()) {
     DescriptorArray* descs = map()->instance_descriptors();
diff --git a/src/objects-visiting.cc b/src/objects-visiting.cc
index 20a7b31..a796283 100644
--- a/src/objects-visiting.cc
+++ b/src/objects-visiting.cc
@@ -94,6 +94,16 @@
     case JS_GLOBAL_PROPERTY_CELL_TYPE:
       return kVisitPropertyCell;
 
+    case JS_SET_TYPE:
+      return GetVisitorIdForSize(kVisitStruct,
+                                 kVisitStructGeneric,
+                                 JSSet::kSize);
+
+    case JS_MAP_TYPE:
+      return GetVisitorIdForSize(kVisitStruct,
+                                 kVisitStructGeneric,
+                                 JSMap::kSize);
+
     case JS_WEAK_MAP_TYPE:
       return kVisitJSWeakMap;
 
diff --git a/src/objects.cc b/src/objects.cc
index 5612732..9a87ac5 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -55,6 +55,54 @@
 namespace v8 {
 namespace internal {
 
+void PrintElementsKind(FILE* out, ElementsKind kind) {
+  switch (kind) {
+    case FAST_SMI_ONLY_ELEMENTS:
+      PrintF(out, "FAST_SMI_ONLY_ELEMENTS");
+      break;
+    case FAST_ELEMENTS:
+      PrintF(out, "FAST_ELEMENTS");
+      break;
+    case FAST_DOUBLE_ELEMENTS:
+      PrintF(out, "FAST_DOUBLE_ELEMENTS");
+      break;
+    case DICTIONARY_ELEMENTS:
+      PrintF(out, "DICTIONARY_ELEMENTS");
+      break;
+    case NON_STRICT_ARGUMENTS_ELEMENTS:
+      PrintF(out, "NON_STRICT_ARGUMENTS_ELEMENTS");
+      break;
+    case EXTERNAL_BYTE_ELEMENTS:
+      PrintF(out, "EXTERNAL_BYTE_ELEMENTS");
+      break;
+    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+      PrintF(out, "EXTERNAL_UNSIGNED_BYTE_ELEMENTS");
+      break;
+    case EXTERNAL_SHORT_ELEMENTS:
+      PrintF(out, "EXTERNAL_SHORT_ELEMENTS");
+      break;
+    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+      PrintF(out, "EXTERNAL_UNSIGNED_SHORT_ELEMENTS");
+      break;
+    case EXTERNAL_INT_ELEMENTS:
+      PrintF(out, "EXTERNAL_INT_ELEMENTS");
+      break;
+    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
+      PrintF(out, "EXTERNAL_UNSIGNED_INT_ELEMENTS");
+      break;
+    case EXTERNAL_FLOAT_ELEMENTS:
+      PrintF(out, "EXTERNAL_FLOAT_ELEMENTS");
+      break;
+    case EXTERNAL_DOUBLE_ELEMENTS:
+      PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS");
+      break;
+    case EXTERNAL_PIXEL_ELEMENTS:
+      PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS");
+      break;
+  }
+}
+
+
 // Getters and setters are stored in a fixed array property.  These are
 // constants for their indices.
 const int kGetterIndex = 0;
@@ -154,7 +202,7 @@
 MaybeObject* Object::GetPropertyWithReceiver(Object* receiver,
                                              String* name,
                                              PropertyAttributes* attributes) {
-  LookupResult result;
+  LookupResult result(name->GetIsolate());
   Lookup(name, &result);
   MaybeObject* value = GetProperty(receiver, &result, name, attributes);
   ASSERT(*attributes <= ABSENT);
@@ -234,6 +282,14 @@
 }
 
 
+Handle<Object> Object::GetElement(Handle<Object> object, uint32_t index) {
+  Isolate* isolate = object->IsHeapObject()
+      ? Handle<HeapObject>::cast(object)->GetIsolate()
+      : Isolate::Current();
+  CALL_HEAP_FUNCTION(isolate, object->GetElement(index), Object);
+}
+
+
 MaybeObject* JSProxy::GetElementWithHandler(Object* receiver,
                                             uint32_t index) {
   String* name;
@@ -310,7 +366,7 @@
       case FIELD:
       case CONSTANT_FUNCTION: {
         // Search ALL_CAN_READ accessors in prototype chain.
-        LookupResult r;
+        LookupResult r(GetIsolate());
         result->holder()->LookupRealNamedPropertyInPrototypes(name, &r);
         if (r.IsProperty()) {
           return GetPropertyWithFailedAccessCheck(receiver,
@@ -323,7 +379,7 @@
       case INTERCEPTOR: {
         // If the object has an interceptor, try real named properties.
         // No access check in GetPropertyAttributeWithInterceptor.
-        LookupResult r;
+        LookupResult r(GetIsolate());
         result->holder()->LookupRealNamedProperty(name, &r);
         if (r.IsProperty()) {
           return GetPropertyWithFailedAccessCheck(receiver,
@@ -370,7 +426,7 @@
       case CONSTANT_FUNCTION: {
         if (!continue_search) break;
         // Search ALL_CAN_READ accessors in prototype chain.
-        LookupResult r;
+        LookupResult r(GetIsolate());
         result->holder()->LookupRealNamedPropertyInPrototypes(name, &r);
         if (r.IsProperty()) {
           return GetPropertyAttributeWithFailedAccessCheck(receiver,
@@ -384,7 +440,7 @@
       case INTERCEPTOR: {
         // If the object has an interceptor, try real named properties.
         // No access check in GetPropertyAttributeWithInterceptor.
-        LookupResult r;
+        LookupResult r(GetIsolate());
         if (continue_search) {
           result->holder()->LookupRealNamedProperty(name, &r);
         } else {
@@ -404,7 +460,7 @@
     }
   }
 
-  GetHeap()->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+  GetIsolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
   return ABSENT;
 }
 
@@ -528,6 +584,21 @@
 }
 
 
+Handle<Object> Object::GetProperty(Handle<Object> object,
+                                   Handle<Object> receiver,
+                                   LookupResult* result,
+                                   Handle<String> key,
+                                   PropertyAttributes* attributes) {
+  Isolate* isolate = object->IsHeapObject()
+      ? Handle<HeapObject>::cast(object)->GetIsolate()
+      : Isolate::Current();
+  CALL_HEAP_FUNCTION(
+      isolate,
+      object->GetProperty(*receiver, result, *key, attributes),
+      Object);
+}
+
+
 MaybeObject* Object::GetProperty(Object* receiver,
                                  LookupResult* result,
                                  String* name,
@@ -700,6 +771,49 @@
 }
 
 
+MaybeObject* Object::GetHash(CreationFlag flag) {
+  // The object is either a number, a string, an odd-ball,
+  // a real JS object, or a Harmony proxy.
+  if (IsNumber()) {
+    uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
+    return Smi::FromInt(hash & Smi::kMaxValue);
+  }
+  if (IsString()) {
+    uint32_t hash = String::cast(this)->Hash();
+    return Smi::FromInt(hash);
+  }
+  if (IsOddball()) {
+    uint32_t hash = Oddball::cast(this)->to_string()->Hash();
+    return Smi::FromInt(hash);
+  }
+  if (IsJSReceiver()) {
+    return JSReceiver::cast(this)->GetIdentityHash(flag);
+  }
+
+  UNREACHABLE();
+  return Smi::FromInt(0);
+}
+
+
+bool Object::SameValue(Object* other) {
+  if (other == this) return true;
+  if (!IsHeapObject() || !other->IsHeapObject()) return false;
+
+  // The object is either a number, a string, an odd-ball,
+  // a real JS object, or a Harmony proxy.
+  if (IsNumber() && other->IsNumber()) {
+    double this_value = Number();
+    double other_value = other->Number();
+    return (this_value == other_value) ||
+        (isnan(this_value) && isnan(other_value));
+  }
+  if (IsString() && other->IsString()) {
+    return String::cast(this)->Equals(String::cast(other));
+  }
+  return false;
+}
+
+
 void Object::ShortPrint(FILE* out) {
   HeapStringAllocator allocator;
   StringStream accumulator(&allocator);
@@ -1074,6 +1188,27 @@
 }
 
 
+void JSObject::PrintElementsTransition(
+    FILE* file, ElementsKind from_kind, FixedArrayBase* from_elements,
+    ElementsKind to_kind, FixedArrayBase* to_elements) {
+  if (from_kind != to_kind) {
+    PrintF(file, "elements transition [");
+    PrintElementsKind(file, from_kind);
+    PrintF(file, " -> ");
+    PrintElementsKind(file, to_kind);
+    PrintF(file, "] in ");
+    JavaScriptFrame::PrintTop(file, false, true);
+    PrintF(file, " for ");
+    ShortPrint(file);
+    PrintF(file, " from ");
+    from_elements->ShortPrint(file);
+    PrintF(file, " to ");
+    to_elements->ShortPrint(file);
+    PrintF(file, "\n");
+  }
+}
+
+
 void HeapObject::HeapObjectShortPrint(StringStream* accumulator) {
   Heap* heap = GetHeap();
   if (!heap->Contains(this)) {
@@ -1102,6 +1237,10 @@
     case FIXED_ARRAY_TYPE:
       accumulator->Add("<FixedArray[%u]>", FixedArray::cast(this)->length());
       break;
+    case FIXED_DOUBLE_ARRAY_TYPE:
+      accumulator->Add("<FixedDoubleArray[%u]>",
+                       FixedDoubleArray::cast(this)->length());
+      break;
     case BYTE_ARRAY_TYPE:
       accumulator->Add("<ByteArray[%u]>", ByteArray::cast(this)->length());
       break;
@@ -1247,6 +1386,8 @@
     case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
     case JS_VALUE_TYPE:
     case JS_ARRAY_TYPE:
+    case JS_SET_TYPE:
+    case JS_MAP_TYPE:
     case JS_WEAK_MAP_TYPE:
     case JS_REGEXP_TYPE:
     case JS_GLOBAL_PROXY_TYPE:
@@ -1658,7 +1799,7 @@
     PropertyAttributes attributes,
     StrictModeFlag strict_mode) {
   // Check local property, ignore interceptor.
-  LookupResult result;
+  LookupResult result(GetIsolate());
   LocalLookupRealNamedProperty(name, &result);
   if (result.IsFound()) {
     // An existing property, a map transition or a null descriptor was
@@ -1840,7 +1981,7 @@
                                      Object* value,
                                      PropertyAttributes attributes,
                                      StrictModeFlag strict_mode) {
-  LookupResult result;
+  LookupResult result(GetIsolate());
   LocalLookup(name, &result);
   return SetProperty(&result, name, value, attributes, strict_mode);
 }
@@ -2006,9 +2147,9 @@
     PropertyAttributes attributes,
     bool* found,
     StrictModeFlag strict_mode) {
-  LookupResult result;
-  LookupCallbackSetterInPrototypes(name, &result);
   Heap* heap = GetHeap();
+  LookupResult result(heap->isolate());
+  LookupCallbackSetterInPrototypes(name, &result);
   if (result.IsFound()) {
     *found = true;
     if (result.type() == CALLBACKS) {
@@ -2020,7 +2161,7 @@
     } else if (result.type() == HANDLER) {
       // We could not find a local property so let's check whether there is an
       // accessor that wants to handle the property.
-      LookupResult accessor_result;
+      LookupResult accessor_result(heap->isolate());
       LookupCallbackSetterInPrototypes(name, &accessor_result);
       if (accessor_result.IsFound()) {
         if (accessor_result.type() == CALLBACKS) {
@@ -2085,6 +2226,51 @@
 }
 
 
+static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
+  ASSERT(!map.is_null());
+  for (int i = 0; i < maps->length(); ++i) {
+    if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
+  }
+  return false;
+}
+
+
+template <class T>
+static Handle<T> MaybeNull(T* p) {
+  if (p == NULL) return Handle<T>::null();
+  return Handle<T>(p);
+}
+
+
+Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
+  ElementsKind elms_kind = elements_kind();
+  if (elms_kind == FAST_DOUBLE_ELEMENTS) {
+    bool dummy = true;
+    Handle<Map> fast_map =
+        MaybeNull(LookupElementsTransitionMap(FAST_ELEMENTS, &dummy));
+    if (!fast_map.is_null() && ContainsMap(candidates, fast_map)) {
+      return fast_map;
+    }
+    return Handle<Map>::null();
+  }
+  if (elms_kind == FAST_SMI_ONLY_ELEMENTS) {
+    bool dummy = true;
+    Handle<Map> double_map =
+        MaybeNull(LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, &dummy));
+    // In the current implementation, if the DOUBLE map doesn't exist, the
+    // FAST map can't exist either.
+    if (double_map.is_null()) return Handle<Map>::null();
+    Handle<Map> fast_map =
+        MaybeNull(double_map->LookupElementsTransitionMap(FAST_ELEMENTS,
+                                                          &dummy));
+    if (!fast_map.is_null() && ContainsMap(candidates, fast_map)) {
+      return fast_map;
+    }
+    if (ContainsMap(candidates, double_map)) return double_map;
+  }
+  return Handle<Map>::null();
+}
+
 static Map* GetElementsTransitionMapFromDescriptor(Object* descriptor_contents,
                                                    ElementsKind elements_kind) {
   if (descriptor_contents->IsMap()) {
@@ -2268,6 +2454,15 @@
 }
 
 
+Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
+                                               ElementsKind to_kind) {
+  Isolate* isolate = object->GetIsolate();
+  CALL_HEAP_FUNCTION(isolate,
+                     object->GetElementsTransitionMap(to_kind),
+                     Map);
+}
+
+
 MaybeObject* JSObject::GetElementsTransitionMap(ElementsKind to_kind) {
   Map* current_map = map();
   ElementsKind from_kind = current_map->elements_kind();
@@ -2423,7 +2618,7 @@
         case INTERCEPTOR: {
           // Try lookup real named properties. Note that only property can be
           // set is callbacks marked as ALL_CAN_WRITE on the prototype chain.
-          LookupResult r;
+          LookupResult r(GetIsolate());
           LookupRealNamedProperty(name, &r);
           if (r.IsProperty()) {
             return SetPropertyWithFailedAccessCheck(&r,
@@ -2441,10 +2636,10 @@
     }
   }
 
-  Heap* heap = GetHeap();
-  HandleScope scope(heap->isolate());
+  Isolate* isolate = GetIsolate();
+  HandleScope scope(isolate);
   Handle<Object> value_handle(value);
-  heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_SET);
+  isolate->ReportFailedAccessCheck(this, v8::ACCESS_SET);
   return *value_handle;
 }
 
@@ -2507,6 +2702,7 @@
   *found = true;  // except where defined otherwise...
   Isolate* isolate = GetHeap()->isolate();
   Handle<JSProxy> proxy(this);
+  Handle<Object> handler(this->handler());  // Trap might morph proxy.
   Handle<String> name(name_raw);
   Handle<Object> value(value_raw);
   Handle<Object> args[] = { name };
@@ -2530,7 +2726,9 @@
     Handle<Object> configurable(v8::internal::GetProperty(desc, conf_name));
     ASSERT(!isolate->has_pending_exception());
     if (configurable->IsFalse()) {
-      Handle<Object> args[] = { Handle<Object>(proxy->handler()), proxy, name };
+      Handle<String> trap =
+          isolate->factory()->LookupAsciiSymbol("getPropertyDescriptor");
+      Handle<Object> args[] = { handler, trap, name };
       Handle<Object> error = isolate->factory()->NewTypeError(
           "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
       return isolate->Throw(*error);
@@ -2610,6 +2808,7 @@
   Isolate* isolate = GetIsolate();
   HandleScope scope(isolate);
   Handle<JSProxy> proxy(this);
+  Handle<Object> handler(this->handler());  // Trap might morph proxy.
   Handle<JSReceiver> receiver(receiver_raw);
   Handle<Object> name(name_raw);
 
@@ -2639,7 +2838,9 @@
   if (isolate->has_pending_exception()) return NONE;
 
   if (configurable->IsFalse()) {
-    Handle<Object> args[] = { Handle<Object>(proxy->handler()), proxy, name };
+    Handle<String> trap =
+        isolate->factory()->LookupAsciiSymbol("getPropertyDescriptor");
+    Handle<Object> args[] = { handler, trap, name };
     Handle<Object> error = isolate->factory()->NewTypeError(
         "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
     isolate->Throw(*error);
@@ -2859,12 +3060,12 @@
   // Make sure that the top context does not change when doing callbacks or
   // interceptor calls.
   AssertNoContextChange ncc;
-  LookupResult result;
+  Isolate* isolate = GetIsolate();
+  LookupResult result(isolate);
   LocalLookup(name, &result);
   // Check access rights if needed.
   if (IsAccessCheckNeeded()) {
-    Heap* heap = GetHeap();
-    if (!heap->isolate()->MayNamedAccess(this, name, v8::ACCESS_SET)) {
+    if (!isolate->MayNamedAccess(this, name, v8::ACCESS_SET)) {
       return SetPropertyWithFailedAccessCheck(&result,
                                               name,
                                               value,
@@ -2935,7 +3136,7 @@
       String* name,
       bool continue_search) {
   // Check local property, ignore interceptor.
-  LookupResult result;
+  LookupResult result(GetIsolate());
   LocalLookupRealNamedProperty(name, &result);
   if (result.IsProperty()) return result.GetAttributes();
 
@@ -3011,7 +3212,7 @@
         ? NONE : ABSENT;
   }
   // Named property.
-  LookupResult result;
+  LookupResult result(GetIsolate());
   Lookup(key, &result);
   return GetPropertyAttribute(receiver, &result, key, true);
 }
@@ -3060,7 +3261,7 @@
     return ABSENT;
   }
   // Named property.
-  LookupResult result;
+  LookupResult result(GetIsolate());
   LocalLookup(name, &result);
   return GetPropertyAttribute(this, &result, name, false);
 }
@@ -3075,7 +3276,9 @@
   if (result->IsMap() &&
       Map::cast(result)->EquivalentToForNormalization(fast, mode)) {
 #ifdef DEBUG
-    Map::cast(result)->SharedMapVerify();
+    if (FLAG_verify_heap) {
+      Map::cast(result)->SharedMapVerify();
+    }
     if (FLAG_enable_slow_asserts) {
       // The cached map should match newly created normalized map bit-by-bit.
       Object* fresh;
@@ -3111,6 +3314,15 @@
 }
 
 
+void JSObject::UpdateMapCodeCache(Handle<JSObject> object,
+                                  Handle<String> name,
+                                  Handle<Code> code) {
+  Isolate* isolate = object->GetIsolate();
+  CALL_HEAP_FUNCTION_VOID(isolate,
+                          object->UpdateMapCodeCache(*name, *code));
+}
+
+
 MaybeObject* JSObject::UpdateMapCodeCache(String* name, Code* code) {
   if (map()->is_shared()) {
     // Fast case maps are never marked as shared.
@@ -3356,7 +3568,7 @@
   do {
     // Generate a random 32-bit hash value but limit range to fit
     // within a smi.
-    hash_value = V8::Random(isolate) & Smi::kMaxValue;
+    hash_value = V8::RandomPrivate(isolate) & Smi::kMaxValue;
     attempts++;
   } while (hash_value == 0 && attempts < 30);
   hash_value = hash_value != 0 ? hash_value : 1;  // never return 0
@@ -3377,6 +3589,9 @@
   Object* stored_value = GetHiddenProperty(GetHeap()->identity_hash_symbol());
   if (stored_value->IsSmi()) return stored_value;
 
+  // Do not generate permanent identity hash code if not requested.
+  if (flag == OMIT_CREATION) return GetHeap()->undefined_value();
+
   Smi* hash = GenerateIdentityHash();
   MaybeObject* result = SetHiddenProperty(GetHeap()->identity_hash_symbol(),
                                           hash);
@@ -3567,7 +3782,7 @@
 MaybeObject* JSObject::DeletePropertyPostInterceptor(String* name,
                                                      DeleteMode mode) {
   // Check local property, ignore interceptor.
-  LookupResult result;
+  LookupResult result(GetIsolate());
   LocalLookupRealNamedProperty(name, &result);
   if (!result.IsProperty()) return GetHeap()->true_value();
 
@@ -3716,7 +3931,7 @@
   if (name->AsArrayIndex(&index)) {
     return DeleteElement(index, mode);
   } else {
-    LookupResult result;
+    LookupResult result(isolate);
     LocalLookup(name, &result);
     if (!result.IsProperty()) return isolate->heap()->true_value();
     // Ignore attributes if forcing a deletion.
@@ -3927,15 +4142,16 @@
 
 
 // Tests for the fast common case for property enumeration:
-// - This object and all prototypes has an enum cache (which means that it has
-//   no interceptors and needs no access checks).
+// - This object and all prototypes have an enum cache (which means that
+//   it is not a proxy, has no interceptors and needs no access checks).
 // - This object has no elements.
 // - No prototype has enumerable properties/elements.
-bool JSObject::IsSimpleEnum() {
+bool JSReceiver::IsSimpleEnum() {
   Heap* heap = GetHeap();
   for (Object* o = this;
        o != heap->null_value();
        o = JSObject::cast(o)->GetPrototype()) {
+    if (!o->IsJSObject()) return false;
     JSObject* curr = JSObject::cast(o);
     if (!curr->map()->instance_descriptors()->HasEnumCache()) return false;
     ASSERT(!curr->HasNamedInterceptor());
@@ -4065,19 +4281,27 @@
 }
 
 
-// Search for a getter or setter in an elements dictionary.  Returns either
-// undefined if the element is read-only, or the getter/setter pair (fixed
-// array) if there is an existing one, or the hole value if the element does
-// not exist or is a normal non-getter/setter data element.
-static Object* FindGetterSetterInDictionary(NumberDictionary* dictionary,
-                                            uint32_t index,
-                                            Heap* heap) {
+// Search for a getter or setter in an elements dictionary and update its
+// attributes.  Returns either undefined if the element is read-only, or the
+// getter/setter pair (fixed array) if there is an existing one, or the hole
+// value if the element does not exist or is a normal non-getter/setter data
+// element.
+static Object* UpdateGetterSetterInDictionary(NumberDictionary* dictionary,
+                                              uint32_t index,
+                                              PropertyAttributes attributes,
+                                              Heap* heap) {
   int entry = dictionary->FindEntry(index);
   if (entry != NumberDictionary::kNotFound) {
     Object* result = dictionary->ValueAt(entry);
     PropertyDetails details = dictionary->DetailsAt(entry);
     if (details.IsReadOnly()) return heap->undefined_value();
-    if (details.type() == CALLBACKS && result->IsFixedArray()) return result;
+    if (details.type() == CALLBACKS && result->IsFixedArray()) {
+      if (details.attributes() != attributes) {
+        dictionary->DetailsAtPut(entry,
+                                 PropertyDetails(attributes, CALLBACKS, index));
+      }
+      return result;
+    }
   }
   return heap->the_hole_value();
 }
@@ -4119,8 +4343,10 @@
         // elements.
         return heap->undefined_value();
       case DICTIONARY_ELEMENTS: {
-        Object* probe =
-            FindGetterSetterInDictionary(element_dictionary(), index, heap);
+        Object* probe = UpdateGetterSetterInDictionary(element_dictionary(),
+                                                       index,
+                                                       attributes,
+                                                       heap);
         if (!probe->IsTheHole()) return probe;
         // Otherwise allow to override it.
         break;
@@ -4137,7 +4363,10 @@
           FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
           if (arguments->IsDictionary()) {
             NumberDictionary* dictionary = NumberDictionary::cast(arguments);
-            probe = FindGetterSetterInDictionary(dictionary, index, heap);
+            probe = UpdateGetterSetterInDictionary(dictionary,
+                                                   index,
+                                                   attributes,
+                                                   heap);
             if (!probe->IsTheHole()) return probe;
           }
         }
@@ -4146,7 +4375,7 @@
     }
   } else {
     // Lookup the name.
-    LookupResult result;
+    LookupResult result(heap->isolate());
     LocalLookup(name, &result);
     if (result.IsProperty()) {
       if (result.IsReadOnly()) return heap->undefined_value();
@@ -4176,8 +4405,8 @@
 
 
 bool JSObject::CanSetCallback(String* name) {
-  ASSERT(!IsAccessCheckNeeded()
-         || Isolate::Current()->MayNamedAccess(this, name, v8::ACCESS_SET));
+  ASSERT(!IsAccessCheckNeeded() ||
+         GetIsolate()->MayNamedAccess(this, name, v8::ACCESS_SET));
 
   // Check if there is an API defined callback object which prohibits
   // callback overwriting in this object or it's prototype chain.
@@ -4185,7 +4414,7 @@
   // certain accessors such as window.location should not be allowed
   // to be overwritten because allowing overwriting could potentially
   // cause security problems.
-  LookupResult callback_result;
+  LookupResult callback_result(GetIsolate());
   LookupCallback(name, &callback_result);
   if (callback_result.IsProperty()) {
     Object* obj = callback_result.GetCallbackObject();
@@ -4382,7 +4611,7 @@
     }
   } else {
     // Lookup the name.
-    LookupResult result;
+    LookupResult result(isolate);
     LocalLookup(name, &result);
     // ES5 forbids turning a property into an accessor if it's not
     // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5).
@@ -4440,7 +4669,7 @@
     for (Object* obj = this;
          obj != heap->null_value();
          obj = JSObject::cast(obj)->GetPrototype()) {
-      LookupResult result;
+      LookupResult result(heap->isolate());
       JSObject::cast(obj)->LocalLookup(name, &result);
       if (result.IsProperty()) {
         if (result.IsReadOnly()) return heap->undefined_value();
@@ -4548,7 +4777,7 @@
   Map::cast(result)->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
 
 #ifdef DEBUG
-  if (Map::cast(result)->is_shared()) {
+  if (FLAG_verify_heap && Map::cast(result)->is_shared()) {
     Map::cast(result)->SharedMapVerify();
   }
 #endif
@@ -4571,6 +4800,13 @@
   return new_map;
 }
 
+void Map::UpdateCodeCache(Handle<Map> map,
+                          Handle<String> name,
+                          Handle<Code> code) {
+  Isolate* isolate = map->GetIsolate();
+  CALL_HEAP_FUNCTION_VOID(isolate,
+                          map->UpdateCodeCache(*name, *code));
+}
 
 MaybeObject* Map::UpdateCodeCache(String* name, Code* code) {
   // Allocate the code cache if not present.
@@ -4960,7 +5196,16 @@
 }
 
 
-MaybeObject* PolymorphicCodeCache::Update(MapList* maps,
+void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> cache,
+                                  MapHandleList* maps,
+                                  Code::Flags flags,
+                                  Handle<Code> code) {
+  Isolate* isolate = cache->GetIsolate();
+  CALL_HEAP_FUNCTION_VOID(isolate, cache->Update(maps, flags, *code));
+}
+
+
+MaybeObject* PolymorphicCodeCache::Update(MapHandleList* maps,
                                           Code::Flags flags,
                                           Code* code) {
   // Initialize cache if necessary.
@@ -4988,13 +5233,14 @@
 }
 
 
-Object* PolymorphicCodeCache::Lookup(MapList* maps, Code::Flags flags) {
+Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
+                                            Code::Flags flags) {
   if (!cache()->IsUndefined()) {
     PolymorphicCodeCacheHashTable* hash_table =
         PolymorphicCodeCacheHashTable::cast(cache());
-    return hash_table->Lookup(maps, flags);
+    return Handle<Object>(hash_table->Lookup(maps, flags));
   } else {
-    return GetHeap()->undefined_value();
+    return GetIsolate()->factory()->undefined_value();
   }
 }
 
@@ -5005,12 +5251,12 @@
 class PolymorphicCodeCacheHashTableKey : public HashTableKey {
  public:
   // Callers must ensure that |maps| outlives the newly constructed object.
-  PolymorphicCodeCacheHashTableKey(MapList* maps, int code_flags)
+  PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
       : maps_(maps),
         code_flags_(code_flags) {}
 
   bool IsMatch(Object* other) {
-    MapList other_maps(kDefaultListAllocationSize);
+    MapHandleList other_maps(kDefaultListAllocationSize);
     int other_flags;
     FromObject(other, &other_flags, &other_maps);
     if (code_flags_ != other_flags) return false;
@@ -5026,7 +5272,7 @@
     for (int i = 0; i < maps_->length(); ++i) {
       bool match_found = false;
       for (int j = 0; j < other_maps.length(); ++j) {
-        if (maps_->at(i)->EquivalentTo(other_maps.at(j))) {
+        if (maps_->at(i)->EquivalentTo(*other_maps.at(j))) {
           match_found = true;
           break;
         }
@@ -5036,7 +5282,7 @@
     return true;
   }
 
-  static uint32_t MapsHashHelper(MapList* maps, int code_flags) {
+  static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
     uint32_t hash = code_flags;
     for (int i = 0; i < maps->length(); ++i) {
       hash ^= maps->at(i)->Hash();
@@ -5049,7 +5295,7 @@
   }
 
   uint32_t HashForObject(Object* obj) {
-    MapList other_maps(kDefaultListAllocationSize);
+    MapHandleList other_maps(kDefaultListAllocationSize);
     int other_flags;
     FromObject(obj, &other_flags, &other_maps);
     return MapsHashHelper(&other_maps, other_flags);
@@ -5067,29 +5313,32 @@
     FixedArray* list = FixedArray::cast(obj);
     list->set(0, Smi::FromInt(code_flags_));
     for (int i = 0; i < maps_->length(); ++i) {
-      list->set(i + 1, maps_->at(i));
+      list->set(i + 1, *maps_->at(i));
     }
     return list;
   }
 
  private:
-  static MapList* FromObject(Object* obj, int* code_flags, MapList* maps) {
+  static MapHandleList* FromObject(Object* obj,
+                                   int* code_flags,
+                                   MapHandleList* maps) {
     FixedArray* list = FixedArray::cast(obj);
     maps->Rewind(0);
     *code_flags = Smi::cast(list->get(0))->value();
     for (int i = 1; i < list->length(); ++i) {
-      maps->Add(Map::cast(list->get(i)));
+      maps->Add(Handle<Map>(Map::cast(list->get(i))));
     }
     return maps;
   }
 
-  MapList* maps_;  // weak.
+  MapHandleList* maps_;  // weak.
   int code_flags_;
   static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
 };
 
 
-Object* PolymorphicCodeCacheHashTable::Lookup(MapList* maps, int code_flags) {
+Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
+                                              int code_flags) {
   PolymorphicCodeCacheHashTableKey key(maps, code_flags);
   int entry = FindEntry(&key);
   if (entry == kNotFound) return GetHeap()->undefined_value();
@@ -5097,7 +5346,7 @@
 }
 
 
-MaybeObject* PolymorphicCodeCacheHashTable::Put(MapList* maps,
+MaybeObject* PolymorphicCodeCacheHashTable::Put(MapHandleList* maps,
                                                 int code_flags,
                                                 Code* code) {
   PolymorphicCodeCacheHashTableKey key(maps, code_flags);
@@ -5232,9 +5481,9 @@
     if (IsEmpty()) return;  // Do nothing for empty descriptor array.
     FixedArray::cast(bridge_storage)->
       set(kEnumCacheBridgeCacheIndex, new_cache);
-    fast_set(FixedArray::cast(bridge_storage),
-             kEnumCacheBridgeEnumIndex,
-             get(kEnumerationIndexIndex));
+    NoWriteBarrierSet(FixedArray::cast(bridge_storage),
+                      kEnumCacheBridgeEnumIndex,
+                      get(kEnumerationIndexIndex));
     set(kEnumerationIndexIndex, bridge_storage);
   }
 }
@@ -5295,10 +5544,16 @@
       ++new_size;
     }
   }
+
+  DescriptorArray* new_descriptors;
   { MaybeObject* maybe_result = Allocate(new_size);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
+    if (!maybe_result->To<DescriptorArray>(&new_descriptors)) {
+      return maybe_result;
+    }
   }
-  DescriptorArray* new_descriptors = DescriptorArray::cast(result);
+
+  DescriptorArray::WhitenessWitness witness(new_descriptors);
+
   // Set the enumeration index in the descriptors and set the enumeration index
   // in the result.
   int enumeration_index = NextEnumerationIndex();
@@ -5326,16 +5581,16 @@
     }
     if (IsNullDescriptor(from_index)) continue;
     if (remove_transitions && IsTransition(from_index)) continue;
-    new_descriptors->CopyFrom(to_index++, this, from_index);
+    new_descriptors->CopyFrom(to_index++, this, from_index, witness);
   }
 
-  new_descriptors->Set(to_index++, descriptor);
+  new_descriptors->Set(to_index++, descriptor, witness);
   if (replacing) from_index++;
 
   for (; from_index < number_of_descriptors(); from_index++) {
     if (IsNullDescriptor(from_index)) continue;
     if (remove_transitions && IsTransition(from_index)) continue;
-    new_descriptors->CopyFrom(to_index++, this, from_index);
+    new_descriptors->CopyFrom(to_index++, this, from_index, witness);
   }
 
   ASSERT(to_index == new_descriptors->number_of_descriptors());
@@ -5357,16 +5612,21 @@
   }
 
   // Allocate the new descriptor array.
-  Object* result;
+  DescriptorArray* new_descriptors;
   { MaybeObject* maybe_result = Allocate(number_of_descriptors() - num_removed);
-    if (!maybe_result->ToObject(&result)) return maybe_result;
+    if (!maybe_result->To<DescriptorArray>(&new_descriptors)) {
+      return maybe_result;
+    }
   }
-  DescriptorArray* new_descriptors = DescriptorArray::cast(result);
+
+  DescriptorArray::WhitenessWitness witness(new_descriptors);
 
   // Copy the content.
   int next_descriptor = 0;
   for (int i = 0; i < number_of_descriptors(); i++) {
-    if (IsProperty(i)) new_descriptors->CopyFrom(next_descriptor++, this, i);
+    if (IsProperty(i)) {
+      new_descriptors->CopyFrom(next_descriptor++, this, i, witness);
+    }
   }
   ASSERT(next_descriptor == new_descriptors->number_of_descriptors());
 
@@ -5374,7 +5634,7 @@
 }
 
 
-void DescriptorArray::SortUnchecked() {
+void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) {
   // In-place heap sort.
   int len = number_of_descriptors();
 
@@ -5395,7 +5655,7 @@
         }
       }
       if (child_hash <= parent_hash) break;
-      Swap(parent_index, child_index);
+      NoWriteBarrierSwapDescriptors(parent_index, child_index);
       // Now element at child_index could be < its children.
       parent_index = child_index;  // parent_hash remains correct.
     }
@@ -5404,8 +5664,8 @@
   // Extract elements and create sorted array.
   for (int i = len - 1; i > 0; --i) {
     // Put max element at the back of the array.
-    Swap(0, i);
-    // Sift down the new top element.
+    NoWriteBarrierSwapDescriptors(0, i);
+    // Shift down the new top element.
     int parent_index = 0;
     const uint32_t parent_hash = GetKey(parent_index)->Hash();
     const int max_parent_index = (i / 2) - 1;
@@ -5420,15 +5680,15 @@
         }
       }
       if (child_hash <= parent_hash) break;
-      Swap(parent_index, child_index);
+      NoWriteBarrierSwapDescriptors(parent_index, child_index);
       parent_index = child_index;
     }
   }
 }
 
 
-void DescriptorArray::Sort() {
-  SortUnchecked();
+void DescriptorArray::Sort(const WhitenessWitness& witness) {
+  SortUnchecked(witness);
   SLOW_ASSERT(IsSortedNoDuplicates());
 }
 
@@ -5513,24 +5773,6 @@
 }
 
 
-int String::Utf8Length() {
-  if (IsAsciiRepresentation()) return length();
-  // Attempt to flatten before accessing the string.  It probably
-  // doesn't make Utf8Length faster, but it is very likely that
-  // the string will be accessed later (for example by WriteUtf8)
-  // so it's still a good idea.
-  Heap* heap = GetHeap();
-  TryFlatten();
-  Access<StringInputBuffer> buffer(
-      heap->isolate()->objects_string_input_buffer());
-  buffer->Reset(0, this);
-  int result = 0;
-  while (buffer->has_more())
-    result += unibrow::Utf8::Length(buffer->GetNext());
-  return result;
-}
-
-
 String::FlatContent String::GetFlatContent() {
   int length = this->length();
   StringShape shape(this);
@@ -5954,6 +6196,73 @@
 }
 
 
+// This method determines the type of string involved and then gets the UTF8
+// length of the string.  It doesn't flatten the string and has log(n) recursion
+// for a string of length n.
+int String::Utf8Length(String* input, int from, int to) {
+  if (from == to) return 0;
+  int total = 0;
+  while (true) {
+    if (input->IsAsciiRepresentation()) return total + to - from;
+    switch (StringShape(input).representation_tag()) {
+      case kConsStringTag: {
+        ConsString* str = ConsString::cast(input);
+        String* first = str->first();
+        String* second = str->second();
+        int first_length = first->length();
+        if (first_length - from < to - first_length) {
+          if (first_length > from) {
+            // Left hand side is shorter.
+            total += Utf8Length(first, from, first_length);
+            input = second;
+            from = 0;
+            to -= first_length;
+          } else {
+            // We only need the right hand side.
+            input = second;
+            from -= first_length;
+            to -= first_length;
+          }
+        } else {
+          if (first_length <= to) {
+            // Right hand side is shorter.
+            total += Utf8Length(second, 0, to - first_length);
+            input = first;
+            to = first_length;
+          } else {
+            // We only need the left hand side.
+            input = first;
+          }
+        }
+        continue;
+      }
+      case kExternalStringTag:
+      case kSeqStringTag: {
+        Vector<const uc16> vector = input->GetFlatContent().ToUC16Vector();
+        const uc16* p = vector.start();
+        for (int i = from; i < to; i++) {
+          total += unibrow::Utf8::Length(p[i]);
+        }
+        return total;
+      }
+      case kSlicedStringTag: {
+        SlicedString* str = SlicedString::cast(input);
+        int offset = str->offset();
+        input = str->parent();
+        from += offset;
+        to += offset;
+        continue;
+      }
+      default:
+        break;
+    }
+    UNREACHABLE();
+    return 0;
+  }
+  return 0;
+}
+
+
 void Relocatable::PostGarbageCollectionProcessing() {
   Isolate* isolate = Isolate::Current();
   Relocatable* current = isolate->relocatable_top();
@@ -6851,6 +7160,57 @@
 }
 
 
+bool SharedFunctionInfo::EnsureCompiled(Handle<SharedFunctionInfo> shared,
+                                        ClearExceptionFlag flag) {
+  return shared->is_compiled() || CompileLazy(shared, flag);
+}
+
+
+static bool CompileLazyHelper(CompilationInfo* info,
+                              ClearExceptionFlag flag) {
+  // Compile the source information to a code object.
+  ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled());
+  ASSERT(!info->isolate()->has_pending_exception());
+  bool result = Compiler::CompileLazy(info);
+  ASSERT(result != Isolate::Current()->has_pending_exception());
+  if (!result && flag == CLEAR_EXCEPTION) {
+    info->isolate()->clear_pending_exception();
+  }
+  return result;
+}
+
+
+bool SharedFunctionInfo::CompileLazy(Handle<SharedFunctionInfo> shared,
+                                     ClearExceptionFlag flag) {
+  CompilationInfo info(shared);
+  return CompileLazyHelper(&info, flag);
+}
+
+
+bool JSFunction::CompileLazy(Handle<JSFunction> function,
+                             ClearExceptionFlag flag) {
+  bool result = true;
+  if (function->shared()->is_compiled()) {
+    function->ReplaceCode(function->shared()->code());
+    function->shared()->set_code_age(0);
+  } else {
+    CompilationInfo info(function);
+    result = CompileLazyHelper(&info, flag);
+    ASSERT(!result || function->is_compiled());
+  }
+  return result;
+}
+
+
+bool JSFunction::CompileOptimized(Handle<JSFunction> function,
+                                  int osr_ast_id,
+                                  ClearExceptionFlag flag) {
+  CompilationInfo info(function);
+  info.SetOptimizing(osr_ast_id);
+  return CompileLazyHelper(&info, flag);
+}
+
+
 bool JSFunction::IsInlineable() {
   if (IsBuiltin()) return false;
   SharedFunctionInfo* shared_info = shared();
@@ -7033,7 +7393,7 @@
        obj = obj->GetPrototype()) {
     JSObject* js_object = JSObject::cast(obj);
     for (int i = 0; i < this_property_assignments_count(); i++) {
-      LookupResult result;
+      LookupResult result(heap->isolate());
       String* name = GetThisPropertyAssignmentName(i);
       js_object->LocalLookupRealNamedProperty(name, &result);
       if (result.IsProperty() && result.type() == CALLBACKS) {
@@ -7391,6 +7751,8 @@
 
 
 void Code::CopyFrom(const CodeDesc& desc) {
+  ASSERT(Marking::Color(this) == Marking::WHITE_OBJECT);
+
   // copy code
   memmove(instruction_start(), desc.buffer, desc.instr_size);
 
@@ -7410,16 +7772,17 @@
     RelocInfo::Mode mode = it.rinfo()->rmode();
     if (mode == RelocInfo::EMBEDDED_OBJECT) {
       Handle<Object> p = it.rinfo()->target_object_handle(origin);
-      it.rinfo()->set_target_object(*p);
+      it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER);
     } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
       Handle<JSGlobalPropertyCell> cell  = it.rinfo()->target_cell_handle();
-      it.rinfo()->set_target_cell(*cell);
+      it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER);
     } else if (RelocInfo::IsCodeTarget(mode)) {
       // rewrite code handles in inline cache targets to direct
       // pointers to the first instruction in the code object
       Handle<Object> p = it.rinfo()->target_object_handle(origin);
       Code* code = Code::cast(*p);
-      it.rinfo()->set_target_address(code->instruction_start());
+      it.rinfo()->set_target_address(code->instruction_start(),
+                                     SKIP_WRITE_BARRIER);
     } else {
       it.rinfo()->apply(delta);
     }
@@ -7847,13 +8210,15 @@
     new_map = Map::cast(object);
   }
 
+  FixedArrayBase* old_elements_raw = elements();
   ElementsKind elements_kind = GetElementsKind();
   switch (elements_kind) {
     case FAST_SMI_ONLY_ELEMENTS:
     case FAST_ELEMENTS: {
       AssertNoAllocation no_gc;
       WriteBarrierMode mode(new_elements->GetWriteBarrierMode(no_gc));
-      CopyFastElementsToFast(FixedArray::cast(elements()), new_elements, mode);
+      CopyFastElementsToFast(FixedArray::cast(old_elements_raw),
+                             new_elements, mode);
       set_map(new_map);
       set_elements(new_elements);
       break;
@@ -7861,7 +8226,7 @@
     case DICTIONARY_ELEMENTS: {
       AssertNoAllocation no_gc;
       WriteBarrierMode mode = new_elements->GetWriteBarrierMode(no_gc);
-      CopySlowElementsToFast(NumberDictionary::cast(elements()),
+      CopySlowElementsToFast(NumberDictionary::cast(old_elements_raw),
                              new_elements,
                              mode);
       set_map(new_map);
@@ -7873,7 +8238,7 @@
       WriteBarrierMode mode = new_elements->GetWriteBarrierMode(no_gc);
       // The object's map and the parameter map are unchanged, the unaliased
       // arguments are copied to the new backing store.
-      FixedArray* parameter_map = FixedArray::cast(elements());
+      FixedArray* parameter_map = FixedArray::cast(old_elements_raw);
       FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
       if (arguments->IsDictionary()) {
         CopySlowElementsToFast(NumberDictionary::cast(arguments),
@@ -7886,7 +8251,7 @@
       break;
     }
     case FAST_DOUBLE_ELEMENTS: {
-      FixedDoubleArray* old_elements = FixedDoubleArray::cast(elements());
+      FixedDoubleArray* old_elements = FixedDoubleArray::cast(old_elements_raw);
       uint32_t old_length = static_cast<uint32_t>(old_elements->length());
       // Fill out the new array with this content and array holes.
       for (uint32_t i = 0; i < old_length; i++) {
@@ -7924,6 +8289,11 @@
       break;
   }
 
+  if (FLAG_trace_elements_transitions) {
+    PrintElementsTransition(stdout, elements_kind, old_elements_raw,
+                            FAST_ELEMENTS, new_elements);
+  }
+
   // Update the length if necessary.
   if (IsJSArray()) {
     JSArray::cast(this)->set_length(Smi::FromInt(length));
@@ -7953,19 +8323,21 @@
   }
   Map* new_map = Map::cast(obj);
 
+  FixedArrayBase* old_elements = elements();
+  ElementsKind elements_kind(GetElementsKind());
   AssertNoAllocation no_gc;
-  switch (GetElementsKind()) {
+  switch (elements_kind) {
     case FAST_SMI_ONLY_ELEMENTS:
     case FAST_ELEMENTS: {
-      elems->Initialize(FixedArray::cast(elements()));
+      elems->Initialize(FixedArray::cast(old_elements));
       break;
     }
     case FAST_DOUBLE_ELEMENTS: {
-      elems->Initialize(FixedDoubleArray::cast(elements()));
+      elems->Initialize(FixedDoubleArray::cast(old_elements));
       break;
     }
     case DICTIONARY_ELEMENTS: {
-      elems->Initialize(NumberDictionary::cast(elements()));
+      elems->Initialize(NumberDictionary::cast(old_elements));
       break;
     }
     default:
@@ -7973,6 +8345,11 @@
       break;
   }
 
+  if (FLAG_trace_elements_transitions) {
+    PrintElementsTransition(stdout, elements_kind, old_elements,
+                            FAST_DOUBLE_ELEMENTS, elems);
+  }
+
   ASSERT(new_map->has_fast_double_elements());
   set_map(new_map);
   ASSERT(elems->IsFixedDoubleArray());
@@ -7992,13 +8369,14 @@
 
   uint32_t new_length = static_cast<uint32_t>(len->Number());
 
-  switch (GetElementsKind()) {
+  FixedArrayBase* old_elements = elements();
+  ElementsKind elements_kind = GetElementsKind();
+  switch (elements_kind) {
     case FAST_SMI_ONLY_ELEMENTS:
     case FAST_ELEMENTS:
     case FAST_DOUBLE_ELEMENTS: {
       // Make sure we never try to shrink dense arrays into sparse arrays.
-      ASSERT(static_cast<uint32_t>(
-          FixedArrayBase::cast(elements())->length()) <= new_length);
+      ASSERT(static_cast<uint32_t>(old_elements->length()) <= new_length);
       MaybeObject* result = NormalizeElements();
       if (result->IsFailure()) return result;
 
@@ -8030,6 +8408,12 @@
       UNREACHABLE();
       break;
   }
+
+  if (FLAG_trace_elements_transitions) {
+    PrintElementsTransition(stdout, elements_kind, old_elements,
+                            DICTIONARY_ELEMENTS, elements());
+  }
+
   return this;
 }
 
@@ -8957,6 +9341,10 @@
     Map* new_map;
     if (!maybe_new_map->To<Map>(&new_map)) return maybe_new_map;
     set_map(new_map);
+    if (FLAG_trace_elements_transitions) {
+      PrintElementsTransition(stdout, FAST_SMI_ONLY_ELEMENTS, elements(),
+                              FAST_ELEMENTS, elements());
+    }
   }
   // Increase backing store capacity if that's been decided previously.
   if (new_capacity != capacity) {
@@ -9313,6 +9701,51 @@
 }
 
 
+MUST_USE_RESULT MaybeObject* JSObject::TransitionElementsKind(
+    ElementsKind to_kind) {
+  ElementsKind from_kind = map()->elements_kind();
+  FixedArrayBase* elms = FixedArrayBase::cast(elements());
+  uint32_t capacity = static_cast<uint32_t>(elms->length());
+  uint32_t length = capacity;
+  if (IsJSArray()) {
+    CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
+  }
+  if (from_kind == FAST_SMI_ONLY_ELEMENTS) {
+    if (to_kind == FAST_DOUBLE_ELEMENTS) {
+      MaybeObject* maybe_result =
+          SetFastDoubleElementsCapacityAndLength(capacity, length);
+      if (maybe_result->IsFailure()) return maybe_result;
+      return this;
+    } else if (to_kind == FAST_ELEMENTS) {
+      MaybeObject* maybe_new_map = GetElementsTransitionMap(FAST_ELEMENTS);
+      Map* new_map;
+      if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+      set_map(new_map);
+      return this;
+    }
+  } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+    MaybeObject* maybe_result = SetFastElementsCapacityAndLength(
+        capacity, length, kDontAllowSmiOnlyElements);
+    if (maybe_result->IsFailure()) return maybe_result;
+    return this;
+  }
+  // This method should never be called for any other case than the ones
+  // handled above.
+  UNREACHABLE();
+  return GetIsolate()->heap()->null_value();
+}
+
+
+// static
+bool Map::IsValidElementsTransition(ElementsKind from_kind,
+                                    ElementsKind to_kind) {
+  return
+      (from_kind == FAST_SMI_ONLY_ELEMENTS &&
+          (to_kind == FAST_DOUBLE_ELEMENTS || to_kind == FAST_ELEMENTS)) ||
+      (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS);
+}
+
+
 MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index,
                                                    Object* value) {
   uint32_t old_len = 0;
@@ -9579,7 +10012,7 @@
     String* name,
     PropertyAttributes* attributes) {
   // Check local property in holder, ignore interceptor.
-  LookupResult result;
+  LookupResult result(GetIsolate());
   LocalLookupRealNamedProperty(name, &result);
   if (result.IsProperty()) {
     return GetProperty(receiver, &result, name, attributes);
@@ -9597,7 +10030,7 @@
     String* name,
     PropertyAttributes* attributes) {
   // Check local property in holder, ignore interceptor.
-  LookupResult result;
+  LookupResult result(GetIsolate());
   LocalLookupRealNamedProperty(name, &result);
   if (result.IsProperty()) {
     return GetProperty(receiver, &result, name, attributes);
@@ -9648,15 +10081,15 @@
 
 bool JSObject::HasRealNamedProperty(String* key) {
   // Check access rights if needed.
+  Isolate* isolate = GetIsolate();
   if (IsAccessCheckNeeded()) {
-    Heap* heap = GetHeap();
-    if (!heap->isolate()->MayNamedAccess(this, key, v8::ACCESS_HAS)) {
-      heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+    if (!isolate->MayNamedAccess(this, key, v8::ACCESS_HAS)) {
+      isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
       return false;
     }
   }
 
-  LookupResult result;
+  LookupResult result(isolate);
   LocalLookupRealNamedProperty(key, &result);
   return result.IsProperty() && (result.type() != INTERCEPTOR);
 }
@@ -9725,15 +10158,15 @@
 
 bool JSObject::HasRealNamedCallbackProperty(String* key) {
   // Check access rights if needed.
+  Isolate* isolate = GetIsolate();
   if (IsAccessCheckNeeded()) {
-    Heap* heap = GetHeap();
-    if (!heap->isolate()->MayNamedAccess(this, key, v8::ACCESS_HAS)) {
-      heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+    if (!isolate->MayNamedAccess(this, key, v8::ACCESS_HAS)) {
+      isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
       return false;
     }
   }
 
-  LookupResult result;
+  LookupResult result(isolate);
   LocalLookupRealNamedProperty(key, &result);
   return result.IsProperty() && (result.type() == CALLBACKS);
 }
@@ -10598,7 +11031,9 @@
 
 template class HashTable<MapCacheShape, HashTableKey*>;
 
-template class HashTable<ObjectHashTableShape, JSReceiver*>;
+template class HashTable<ObjectHashTableShape<1>, Object*>;
+
+template class HashTable<ObjectHashTableShape<2>, Object*>;
 
 template class Dictionary<StringDictionaryShape, String*>;
 
@@ -11089,6 +11524,16 @@
 }
 
 
+Handle<JSGlobalPropertyCell> GlobalObject::EnsurePropertyCell(
+    Handle<GlobalObject> global,
+    Handle<String> name) {
+  Isolate* isolate = global->GetIsolate();
+  CALL_HEAP_FUNCTION(isolate,
+                     global->EnsurePropertyCell(*name),
+                     JSGlobalPropertyCell);
+}
+
+
 MaybeObject* GlobalObject::EnsurePropertyCell(String* name) {
   ASSERT(!HasFastProperties());
   int entry = property_dictionary()->FindEntry(name);
@@ -11326,7 +11771,7 @@
                                             SharedFunctionInfo* value) {
   StringSharedKey key(src,
                       context->closure()->shared(),
-                      value->strict_mode() ? kStrictMode : kNonStrictMode);
+                      value->strict_mode_flag());
   Object* obj;
   { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
@@ -11375,8 +11820,8 @@
     int entry_index = EntryToIndex(entry);
     int value_index = entry_index + 1;
     if (get(value_index) == value) {
-      fast_set(this, entry_index, null_value);
-      fast_set(this, value_index, null_value);
+      NoWriteBarrierSet(this, entry_index, null_value);
+      NoWriteBarrierSet(this, value_index, null_value);
       ElementRemoved();
     }
   }
@@ -11848,14 +12293,15 @@
   }
 
   // Allocate the instance descriptor.
-  Object* descriptors_unchecked;
-  { MaybeObject* maybe_descriptors_unchecked =
+  DescriptorArray* descriptors;
+  { MaybeObject* maybe_descriptors =
         DescriptorArray::Allocate(instance_descriptor_length);
-    if (!maybe_descriptors_unchecked->ToObject(&descriptors_unchecked)) {
-      return maybe_descriptors_unchecked;
+    if (!maybe_descriptors->To<DescriptorArray>(&descriptors)) {
+      return maybe_descriptors;
     }
   }
-  DescriptorArray* descriptors = DescriptorArray::cast(descriptors_unchecked);
+
+  DescriptorArray::WhitenessWitness witness(descriptors);
 
   int inobject_props = obj->map()->inobject_properties();
   int number_of_allocated_fields =
@@ -11893,7 +12339,7 @@
                                      JSFunction::cast(value),
                                      details.attributes(),
                                      details.index());
-        descriptors->Set(next_descriptor++, &d);
+        descriptors->Set(next_descriptor++, &d, witness);
       } else if (type == NORMAL) {
         if (current_offset < inobject_props) {
           obj->InObjectPropertyAtPut(current_offset,
@@ -11907,13 +12353,13 @@
                           current_offset++,
                           details.attributes(),
                           details.index());
-        descriptors->Set(next_descriptor++, &d);
+        descriptors->Set(next_descriptor++, &d, witness);
       } else if (type == CALLBACKS) {
         CallbacksDescriptor d(String::cast(key),
                               value,
                               details.attributes(),
                               details.index());
-        descriptors->Set(next_descriptor++, &d);
+        descriptors->Set(next_descriptor++, &d, witness);
       } else {
         UNREACHABLE();
       }
@@ -11921,7 +12367,7 @@
   }
   ASSERT(current_offset == number_of_fields);
 
-  descriptors->Sort();
+  descriptors->Sort(witness);
   // Allocate new map.
   Object* new_map;
   { MaybeObject* maybe_new_map = obj->map()->CopyDropDescriptors();
@@ -11944,20 +12390,74 @@
 }
 
 
-Object* ObjectHashTable::Lookup(JSReceiver* key) {
+bool ObjectHashSet::Contains(Object* key) {
   // If the object does not have an identity hash, it was never used as a key.
-  MaybeObject* maybe_hash = key->GetIdentityHash(OMIT_CREATION);
-  if (maybe_hash->IsFailure()) return GetHeap()->undefined_value();
+  { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
+    if (maybe_hash->ToObjectUnchecked()->IsUndefined()) return false;
+  }
+  return (FindEntry(key) != kNotFound);
+}
+
+
+MaybeObject* ObjectHashSet::Add(Object* key) {
+  // Make sure the key object has an identity hash code.
+  int hash;
+  { MaybeObject* maybe_hash = key->GetHash(ALLOW_CREATION);
+    if (maybe_hash->IsFailure()) return maybe_hash;
+    hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value();
+  }
+  int entry = FindEntry(key);
+
+  // Check whether key is already present.
+  if (entry != kNotFound) return this;
+
+  // Check whether the hash set should be extended and add entry.
+  Object* obj;
+  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
+    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+  }
+  ObjectHashSet* table = ObjectHashSet::cast(obj);
+  entry = table->FindInsertionEntry(hash);
+  table->set(EntryToIndex(entry), key);
+  table->ElementAdded();
+  return table;
+}
+
+
+MaybeObject* ObjectHashSet::Remove(Object* key) {
+  // If the object does not have an identity hash, it was never used as a key.
+  { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
+    if (maybe_hash->ToObjectUnchecked()->IsUndefined()) return this;
+  }
+  int entry = FindEntry(key);
+
+  // Check whether key is actually present.
+  if (entry == kNotFound) return this;
+
+  // Remove entry and try to shrink this hash set.
+  set_null(EntryToIndex(entry));
+  ElementRemoved();
+  return Shrink(key);
+}
+
+
+Object* ObjectHashTable::Lookup(Object* key) {
+  // If the object does not have an identity hash, it was never used as a key.
+  { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
+    if (maybe_hash->ToObjectUnchecked()->IsUndefined()) {
+      return GetHeap()->undefined_value();
+    }
+  }
   int entry = FindEntry(key);
   if (entry == kNotFound) return GetHeap()->undefined_value();
   return get(EntryToIndex(entry) + 1);
 }
 
 
-MaybeObject* ObjectHashTable::Put(JSReceiver* key, Object* value) {
+MaybeObject* ObjectHashTable::Put(Object* key, Object* value) {
   // Make sure the key object has an identity hash code.
   int hash;
-  { MaybeObject* maybe_hash = key->GetIdentityHash(ALLOW_CREATION);
+  { MaybeObject* maybe_hash = key->GetHash(ALLOW_CREATION);
     if (maybe_hash->IsFailure()) return maybe_hash;
     hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value();
   }
@@ -11987,7 +12487,7 @@
 }
 
 
-void ObjectHashTable::AddEntry(int entry, JSReceiver* key, Object* value) {
+void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
   set(EntryToIndex(entry), key);
   set(EntryToIndex(entry) + 1, value);
   ElementAdded();
diff --git a/src/objects.h b/src/objects.h
index b95fa57..f7d2180 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -40,6 +40,7 @@
 #endif
 #include "v8checks.h"
 
+
 //
 // Most object types in the V8 JavaScript are described in this file.
 //
@@ -52,6 +53,8 @@
 //       - JSReceiver  (suitable for property access)
 //         - JSObject
 //           - JSArray
+//           - JSSet
+//           - JSMap
 //           - JSWeakMap
 //           - JSRegExp
 //           - JSFunction
@@ -173,6 +176,8 @@
 static const int kElementsKindCount =
     LAST_ELEMENTS_KIND - FIRST_ELEMENTS_KIND + 1;
 
+void PrintElementsKind(FILE* out, ElementsKind kind);
+
 // PropertyDetails captures type and attributes for a property.
 // They are used both in property dictionaries and instance descriptors.
 class PropertyDetails BASE_EMBEDDED {
@@ -628,6 +633,8 @@
   JS_BUILTINS_OBJECT_TYPE,
   JS_GLOBAL_PROXY_TYPE,
   JS_ARRAY_TYPE,
+  JS_SET_TYPE,
+  JS_MAP_TYPE,
   JS_WEAK_MAP_TYPE,
 
   JS_REGEXP_TYPE,
@@ -820,6 +827,8 @@
   V(JSArray)                                   \
   V(JSProxy)                                   \
   V(JSFunctionProxy)                           \
+  V(JSSet)                                     \
+  V(JSMap)                                     \
   V(JSWeakMap)                                 \
   V(JSRegExp)                                  \
   V(HashTable)                                 \
@@ -857,6 +866,8 @@
   HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
 #undef IS_TYPE_FUNCTION_DECL
 
+  inline bool IsFixedArrayBase();
+
   // Returns true if this object is an instance of the specified
   // function template.
   inline bool IsInstanceOf(FunctionTemplateInfo* type);
@@ -912,13 +923,22 @@
       Object* receiver,
       String* key,
       PropertyAttributes* attributes);
+
+  static Handle<Object> GetProperty(Handle<Object> object,
+                                    Handle<Object> receiver,
+                                    LookupResult* result,
+                                    Handle<String> key,
+                                    PropertyAttributes* attributes);
+
   MUST_USE_RESULT MaybeObject* GetProperty(Object* receiver,
                                            LookupResult* result,
                                            String* key,
                                            PropertyAttributes* attributes);
+
   MUST_USE_RESULT MaybeObject* GetPropertyWithDefinedGetter(Object* receiver,
                                                             JSReceiver* getter);
 
+  static Handle<Object> GetElement(Handle<Object> object, uint32_t index);
   inline MaybeObject* GetElement(uint32_t index);
   // For use when we know that no exception can be thrown.
   inline Object* GetElementNoExceptionThrown(uint32_t index);
@@ -927,6 +947,16 @@
   // Return the object's prototype (might be Heap::null_value()).
   Object* GetPrototype();
 
+  // Returns the permanent hash code associated with this object depending on
+  // the actual object type.  Might return a failure in case no hash was
+  // created yet or GC was caused by creation.
+  MUST_USE_RESULT MaybeObject* GetHash(CreationFlag flag);
+
+  // Checks whether this object has the same value as the given one.  This
+  // function is implemented according to ES5, section 9.12 and can be used
+  // to implement the Harmony "egal" function.
+  bool SameValue(Object* other);
+
   // Tries to convert an object to an array index.  Returns true and sets
   // the output parameter if it succeeds.
   inline bool ToArrayIndex(uint32_t* index);
@@ -1351,6 +1381,9 @@
                                           StrictModeFlag strict_mode,
                                           bool check_prototype);
 
+  // Tests for the fast common case for property enumeration.
+  bool IsSimpleEnum();
+
   // Returns the class name ([[Class]] property in the specification).
   String* class_name();
 
@@ -1376,7 +1409,7 @@
                                             bool skip_hidden_prototypes);
 
   // Retrieves a permanent object identity hash code. The undefined value might
-  // be returned in case no has been created yet and OMIT_CREATION was used.
+  // be returned in case no hash was created yet and OMIT_CREATION was used.
   inline MUST_USE_RESULT MaybeObject* GetIdentityHash(CreationFlag flag);
 
   // Lookup a property.  If found, the result is valid and has
@@ -1603,9 +1636,6 @@
   MUST_USE_RESULT MaybeObject* DeleteProperty(String* name, DeleteMode mode);
   MUST_USE_RESULT MaybeObject* DeleteElement(uint32_t index, DeleteMode mode);
 
-  // Tests for the fast common case for property enumeration.
-  bool IsSimpleEnum();
-
   inline void ValidateSmiOnlyElements();
 
   // Makes sure that this object can contain non-smi Object as elements.
@@ -1786,9 +1816,13 @@
 
   // Returns a new map with all transitions dropped from the object's current
   // map and the ElementsKind set.
+  static Handle<Map> GetElementsTransitionMap(Handle<JSObject> object,
+                                              ElementsKind to_kind);
   MUST_USE_RESULT MaybeObject* GetElementsTransitionMap(
       ElementsKind elements_kind);
 
+  MUST_USE_RESULT MaybeObject* TransitionElementsKind(ElementsKind to_kind);
+
   // Converts a descriptor of any other type to a real field,
   // backed by the properties array.  Descriptors of visible
   // types, such as CONSTANT_FUNCTION, keep their enumeration order.
@@ -1835,6 +1869,10 @@
   // dictionary.  Returns the backing after conversion.
   MUST_USE_RESULT MaybeObject* NormalizeElements();
 
+  static void UpdateMapCodeCache(Handle<JSObject> object,
+                                 Handle<String> name,
+                                 Handle<Code> code);
+
   MUST_USE_RESULT MaybeObject* UpdateMapCodeCache(String* name, Code* code);
 
   // Transform slow named properties to fast variants.
@@ -1896,6 +1934,10 @@
   void PrintElements(FILE* out);
 #endif
 
+  void PrintElementsTransition(
+      FILE* file, ElementsKind from_kind, FixedArrayBase* from_elements,
+      ElementsKind to_kind, FixedArrayBase* to_elements);
+
 #ifdef DEBUG
   // Structure for collecting spill information about JSObjects.
   class SpillInformation {
@@ -2162,7 +2204,9 @@
  protected:
   // Set operation on FixedArray without using write barriers. Can
   // only be used for storing old space objects or smis.
-  static inline void fast_set(FixedArray* array, int index, Object* value);
+  static inline void NoWriteBarrierSet(FixedArray* array,
+                                       int index,
+                                       Object* value);
 
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(FixedArray);
@@ -2185,6 +2229,9 @@
   // Checking for the hole.
   inline bool is_the_hole(int index);
 
+  // Copy operations
+  MUST_USE_RESULT inline MaybeObject* Copy();
+
   // Garbage collection support.
   inline static int SizeFor(int length) {
     return kHeaderSize + length * kDoubleSize;
@@ -2224,6 +2271,9 @@
 };
 
 
+class IncrementalMarking;
+
+
 // DescriptorArrays are fixed arrays used to hold instance descriptors.
 // The format of the these objects is:
 // TODO(1399): It should be possible to make room for bit_field3 in the map
@@ -2265,7 +2315,7 @@
   // Set next enumeration index and flush any enum cache.
   void SetNextEnumerationIndex(int value) {
     if (!IsEmpty()) {
-      fast_set(this, kEnumerationIndexIndex, Smi::FromInt(value));
+      set(kEnumerationIndexIndex, Smi::FromInt(value));
     }
   }
   bool HasEnumCache() {
@@ -2302,13 +2352,27 @@
   inline bool IsNullDescriptor(int descriptor_number);
   inline bool IsDontEnum(int descriptor_number);
 
+  class WhitenessWitness {
+   public:
+    inline explicit WhitenessWitness(DescriptorArray* array);
+    inline ~WhitenessWitness();
+
+   private:
+    IncrementalMarking* marking_;
+  };
+
   // Accessor for complete descriptor.
   inline void Get(int descriptor_number, Descriptor* desc);
-  inline void Set(int descriptor_number, Descriptor* desc);
+  inline void Set(int descriptor_number,
+                  Descriptor* desc,
+                  const WhitenessWitness&);
 
   // Transfer complete descriptor from another descriptor array to
   // this one.
-  inline void CopyFrom(int index, DescriptorArray* src, int src_index);
+  inline void CopyFrom(int index,
+                       DescriptorArray* src,
+                       int src_index,
+                       const WhitenessWitness&);
 
   // Copy the descriptor array, insert a new descriptor and optionally
   // remove map transitions.  If the descriptor is already present, it is
@@ -2325,11 +2389,11 @@
 
   // Sort the instance descriptors by the hash codes of their keys.
   // Does not check for duplicates.
-  void SortUnchecked();
+  void SortUnchecked(const WhitenessWitness&);
 
   // Sort the instance descriptors by the hash codes of their keys.
   // Checks the result for duplicates.
-  void Sort();
+  void Sort(const WhitenessWitness&);
 
   // Search the instance descriptors for given name.
   inline int Search(String* name);
@@ -2422,10 +2486,12 @@
         NULL_DESCRIPTOR;
   }
   // Swap operation on FixedArray without using write barriers.
-  static inline void fast_swap(FixedArray* array, int first, int second);
+  static inline void NoWriteBarrierSwap(FixedArray* array,
+                                        int first,
+                                        int second);
 
   // Swap descriptor first and second.
-  inline void Swap(int first, int second);
+  inline void NoWriteBarrierSwapDescriptors(int first, int second);
 
   FixedArray* GetContentArray() {
     return FixedArray::cast(get(kContentArrayIndex));
@@ -2567,12 +2633,12 @@
 
   // Update the number of elements in the hash table.
   void SetNumberOfElements(int nof) {
-    fast_set(this, kNumberOfElementsIndex, Smi::FromInt(nof));
+    set(kNumberOfElementsIndex, Smi::FromInt(nof));
   }
 
   // Update the number of deleted elements in the hash table.
   void SetNumberOfDeletedElements(int nod) {
-    fast_set(this, kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
+    set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
   }
 
   // Sets the capacity of the hash table.
@@ -2582,7 +2648,7 @@
     // and non-zero.
     ASSERT(capacity > 0);
     ASSERT(capacity <= kMaxCapacity);
-    fast_set(this, kCapacityIndex, Smi::FromInt(capacity));
+    set(kCapacityIndex, Smi::FromInt(capacity));
   }
 
 
@@ -2790,7 +2856,7 @@
 
   // Accessors for next enumeration index.
   void SetNextEnumerationIndex(int index) {
-    this->fast_set(this, kNextEnumerationIndexIndex, Smi::FromInt(index));
+    this->set(kNextEnumerationIndexIndex, Smi::FromInt(index));
   }
 
   int NextEnumerationIndex() {
@@ -2931,20 +2997,41 @@
 };
 
 
+template <int entrysize>
 class ObjectHashTableShape {
  public:
-  static inline bool IsMatch(JSReceiver* key, Object* other);
-  static inline uint32_t Hash(JSReceiver* key);
-  static inline uint32_t HashForObject(JSReceiver* key, Object* object);
-  MUST_USE_RESULT static inline MaybeObject* AsObject(JSReceiver* key);
+  static inline bool IsMatch(Object* key, Object* other);
+  static inline uint32_t Hash(Object* key);
+  static inline uint32_t HashForObject(Object* key, Object* object);
+  MUST_USE_RESULT static inline MaybeObject* AsObject(Object* key);
   static const int kPrefixSize = 0;
-  static const int kEntrySize = 2;
+  static const int kEntrySize = entrysize;
 };
 
 
-// ObjectHashTable maps keys that are JavaScript objects to object values by
+// ObjectHashSet holds keys that are arbitrary objects by using the identity
+// hash of the key for hashing purposes.
+class ObjectHashSet: public HashTable<ObjectHashTableShape<1>, Object*> {
+ public:
+  static inline ObjectHashSet* cast(Object* obj) {
+    ASSERT(obj->IsHashTable());
+    return reinterpret_cast<ObjectHashSet*>(obj);
+  }
+
+  // Looks up whether the given key is part of this hash set.
+  bool Contains(Object* key);
+
+  // Adds the given key to this hash set.
+  MUST_USE_RESULT MaybeObject* Add(Object* key);
+
+  // Removes the given key from this hash set.
+  MUST_USE_RESULT MaybeObject* Remove(Object* key);
+};
+
+
+// ObjectHashTable maps keys that are arbitrary objects to object values by
 // using the identity hash of the key for hashing purposes.
-class ObjectHashTable: public HashTable<ObjectHashTableShape, JSReceiver*> {
+class ObjectHashTable: public HashTable<ObjectHashTableShape<2>, Object*> {
  public:
   static inline ObjectHashTable* cast(Object* obj) {
     ASSERT(obj->IsHashTable());
@@ -2953,16 +3040,16 @@
 
   // Looks up the value associated with the given key. The undefined value is
   // returned in case the key is not present.
-  Object* Lookup(JSReceiver* key);
+  Object* Lookup(Object* key);
 
   // Adds (or overwrites) the value associated with the given key. Mapping a
   // key to the undefined value causes removal of the whole entry.
-  MUST_USE_RESULT MaybeObject* Put(JSReceiver* key, Object* value);
+  MUST_USE_RESULT MaybeObject* Put(Object* key, Object* value);
 
  private:
   friend class MarkCompactCollector;
 
-  void AddEntry(int entry, JSReceiver* key, Object* value);
+  void AddEntry(int entry, Object* key, Object* value);
   void RemoveEntry(int entry, Heap* heap);
   inline void RemoveEntry(int entry);
 
@@ -3020,6 +3107,9 @@
     return reinterpret_cast<SerializedScopeInfo*>(object);
   }
 
+  // Return the type of this scope.
+  ScopeType Type();
+
   // Does this scope call eval?
   bool CallsEval();
 
@@ -3035,6 +3125,9 @@
   // Return if this has context slots besides MIN_CONTEXT_SLOTS;
   bool HasHeapAllocatedLocals();
 
+  // Return if contexts are allocated for this scope.
+  bool HasContext();
+
   // Lookup support for serialized scope info. Returns the
   // the stack slot index for a given slot name if the slot is
   // present; otherwise returns a value < 0. The name must be a symbol
@@ -3057,7 +3150,7 @@
   // function context slot index if the function name is present (named
   // function expressions, only), otherwise returns a value < 0. The name
   // must be a symbol (canonicalized).
-  int FunctionContextSlotIndex(String* name);
+  int FunctionContextSlotIndex(String* name, VariableMode* mode);
 
   static Handle<SerializedScopeInfo> Create(Scope* scope);
 
@@ -3746,6 +3839,11 @@
   inline bool has_debug_break_slots();
   inline void set_has_debug_break_slots(bool value);
 
+  // [compiled_with_optimizing]: For FUNCTION kind, tells if it has
+  // been compiled with IsOptimizing set to true.
+  inline bool is_compiled_optimizable();
+  inline void set_compiled_optimizable(bool value);
+
   // [allow_osr_at_loop_nesting_level]: For FUNCTION kind, tells for
   // how long the function has been marked for OSR and therefore which
   // level of loop nesting we are willing to do on-stack replacement
@@ -3941,6 +4039,7 @@
   class FullCodeFlagsHasDeoptimizationSupportField:
       public BitField<bool, 0, 1> {};  // NOLINT
   class FullCodeFlagsHasDebugBreakSlotsField: public BitField<bool, 1, 1> {};
+  class FullCodeFlagsIsCompiledOptimizable: public BitField<bool, 2, 1> {};
 
   static const int kBinaryOpReturnTypeOffset = kBinaryOpTypeOffset + 1;
 
@@ -4122,6 +4221,9 @@
     return elements_kind() == DICTIONARY_ELEMENTS;
   }
 
+  static bool IsValidElementsTransition(ElementsKind from_kind,
+                                        ElementsKind to_kind);
+
   // Tells whether the map is attached to SharedFunctionInfo
   // (for inobject slack tracking).
   inline void set_attached_to_shared_function_info(bool value);
@@ -4230,6 +4332,9 @@
   inline void ClearCodeCache(Heap* heap);
 
   // Update code cache.
+  static void UpdateCodeCache(Handle<Map> map,
+                              Handle<String> name,
+                              Handle<Code> code);
   MUST_USE_RESULT MaybeObject* UpdateCodeCache(String* name, Code* code);
 
   // Returns the found code or undefined if absent.
@@ -4288,6 +4393,13 @@
   MaybeObject* AddElementsTransition(ElementsKind elements_kind,
                                      Map* transitioned_map);
 
+  // Returns the transitioned map for this map with the most generic
+  // elements_kind that's found in |candidates|, or null handle if no match is
+  // found at all.
+  Handle<Map> FindTransitionedMap(MapHandleList* candidates);
+  Map* FindTransitionedMap(MapList* candidates);
+
+
   // Dispatched behavior.
 #ifdef OBJECT_PRINT
   inline void MapPrint() {
@@ -4796,7 +4908,11 @@
   DECL_BOOLEAN_ACCESSORS(optimization_disabled)
 
   // Indicates whether the function is a strict mode function.
-  DECL_BOOLEAN_ACCESSORS(strict_mode)
+  inline bool strict_mode();
+
+  // Indicates the mode of the function.
+  inline StrictModeFlag strict_mode_flag();
+  inline void set_strict_mode_flag(StrictModeFlag strict_mode_flag);
 
   // False if the function definitely does not allocate an arguments object.
   DECL_BOOLEAN_ACCESSORS(uses_arguments)
@@ -4888,6 +5004,13 @@
   void SharedFunctionInfoVerify();
 #endif
 
+  // Helpers to compile the shared code.  Return true on success, false on
+  // failure (e.g., stack overflow during compilation).
+  static bool EnsureCompiled(Handle<SharedFunctionInfo> shared,
+                             ClearExceptionFlag flag);
+  static bool CompileLazy(Handle<SharedFunctionInfo> shared,
+                          ClearExceptionFlag flag);
+
   // Casting.
   static inline SharedFunctionInfo* cast(Object* obj);
 
@@ -5038,7 +5161,7 @@
  public:
   // Constants for optimizing codegen for strict mode function and
   // native tests.
-  // Allows to use byte-widgh instructions.
+  // Allows the use of byte-width instructions.
   static const int kStrictModeBitWithinByte =
       (kStrictModeFunction + kCompilerHintsSmiTagSize) % kBitsPerByte;
 
@@ -5109,6 +5232,14 @@
   // recompiled the next time it is executed.
   void MarkForLazyRecompilation();
 
+  // Helpers to compile this function.  Return true on success, false on
+  // failure (e.g., stack overflow during compilation).
+  static bool CompileLazy(Handle<JSFunction> function,
+                          ClearExceptionFlag flag);
+  static bool CompileOptimized(Handle<JSFunction> function,
+                               int osr_ast_id,
+                               ClearExceptionFlag flag);
+
   // Tells whether or not the function is already marked for lazy
   // recompilation.
   inline bool IsMarkedForLazyRecompilation();
@@ -5116,7 +5247,8 @@
   // Check whether or not this function is inlineable.
   bool IsInlineable();
 
-  // [literals]: Fixed array holding the materialized literals.
+  // [literals_or_bindings]: Fixed array holding either
+  // the materialized literals or the bindings of a bound function.
   //
   // If the function contains object, regexp or array literals, the
   // literals array prefix contains the object, regexp, and array
@@ -5125,7 +5257,17 @@
   // or array functions.  Performing a dynamic lookup, we might end up
   // using the functions from a new context that we should not have
   // access to.
-  DECL_ACCESSORS(literals, FixedArray)
+  //
+  // On bound functions, the array is a (copy-on-write) fixed-array containing
+  // the function that was bound, bound this-value and any bound
+  // arguments. Bound functions never contain literals.
+  DECL_ACCESSORS(literals_or_bindings, FixedArray)
+
+  inline FixedArray* literals();
+  inline void set_literals(FixedArray* literals);
+
+  inline FixedArray* function_bindings();
+  inline void set_function_bindings(FixedArray* bindings);
 
   // The initial map for an object created by this constructor.
   inline Map* initial_map();
@@ -5213,6 +5355,11 @@
   static const int kLiteralsPrefixSize = 1;
   static const int kLiteralGlobalContextIndex = 0;
 
+  // Layout of the bound-function binding array.
+  static const int kBoundFunctionIndex = 0;
+  static const int kBoundThisIndex = 1;
+  static const int kBoundArgumentsStartIndex = 2;
+
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSFunction);
 };
@@ -5285,6 +5432,11 @@
   }
 
   // Ensure that the global object has a cell for the given property name.
+  static Handle<JSGlobalPropertyCell> EnsurePropertyCell(
+      Handle<GlobalObject> global,
+      Handle<String> name);
+  // TODO(kmillikin): This function can be eliminated once the stub cache is
+  // fully handlified (and the static helper can be written directly).
   MUST_USE_RESULT MaybeObject* EnsurePropertyCell(String* name);
 
   // Casting.
@@ -5757,10 +5909,17 @@
  public:
   DECL_ACCESSORS(cache, Object)
 
-  MUST_USE_RESULT MaybeObject* Update(MapList* maps,
+  static void Update(Handle<PolymorphicCodeCache> cache,
+                     MapHandleList* maps,
+                     Code::Flags flags,
+                     Handle<Code> code);
+
+  MUST_USE_RESULT MaybeObject* Update(MapHandleList* maps,
                                       Code::Flags flags,
                                       Code* code);
-  Object* Lookup(MapList* maps, Code::Flags flags);
+
+  // Returns an undefined value if the entry is not found.
+  Handle<Object> Lookup(MapHandleList* maps, Code::Flags flags);
 
   static inline PolymorphicCodeCache* cast(Object* obj);
 
@@ -5785,8 +5944,11 @@
 class PolymorphicCodeCacheHashTable
     : public HashTable<CodeCacheHashTableShape, HashTableKey*> {
  public:
-  Object* Lookup(MapList* maps, int code_kind);
-  MUST_USE_RESULT MaybeObject* Put(MapList* maps, int code_kind, Code* code);
+  Object* Lookup(MapHandleList* maps, int code_kind);
+
+  MUST_USE_RESULT MaybeObject* Put(MapHandleList* maps,
+                                   int code_kind,
+                                   Code* code);
 
   static inline PolymorphicCodeCacheHashTable* cast(Object* obj);
 
@@ -6057,7 +6219,8 @@
       RobustnessFlag robustness_flag = FAST_STRING_TRAVERSAL,
       int* length_output = 0);
 
-  int Utf8Length();
+  inline int Utf8Length() { return Utf8Length(this, 0, length()); }
+  static int Utf8Length(String* input, int from, int to);
 
   // Return a 16 bit Unicode representation of the string.
   // The string should be nearly flat, otherwise the performance of
@@ -6917,6 +7080,60 @@
 };
 
 
+// The JSSet describes EcmaScript Harmony sets
+class JSSet: public JSObject {
+ public:
+  // [table]: the backing hash set containing keys.
+  DECL_ACCESSORS(table, Object)
+
+  // Casting.
+  static inline JSSet* cast(Object* obj);
+
+#ifdef OBJECT_PRINT
+  inline void JSSetPrint() {
+    JSSetPrint(stdout);
+  }
+  void JSSetPrint(FILE* out);
+#endif
+#ifdef DEBUG
+  void JSSetVerify();
+#endif
+
+  static const int kTableOffset = JSObject::kHeaderSize;
+  static const int kSize = kTableOffset + kPointerSize;
+
+ private:
+  DISALLOW_IMPLICIT_CONSTRUCTORS(JSSet);
+};
+
+
+// The JSMap describes EcmaScript Harmony maps
+class JSMap: public JSObject {
+ public:
+  // [table]: the backing hash table mapping keys to values.
+  DECL_ACCESSORS(table, Object)
+
+  // Casting.
+  static inline JSMap* cast(Object* obj);
+
+#ifdef OBJECT_PRINT
+  inline void JSMapPrint() {
+    JSMapPrint(stdout);
+  }
+  void JSMapPrint(FILE* out);
+#endif
+#ifdef DEBUG
+  void JSMapVerify();
+#endif
+
+  static const int kTableOffset = JSObject::kHeaderSize;
+  static const int kSize = kTableOffset + kPointerSize;
+
+ private:
+  DISALLOW_IMPLICIT_CONSTRUCTORS(JSMap);
+};
+
+
 // The JSWeakMap describes EcmaScript Harmony weak maps
 class JSWeakMap: public JSObject {
  public:
diff --git a/src/parser.cc b/src/parser.cc
index 3bf2cf9..3c6c4ba 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -407,9 +407,9 @@
 }
 
 
-Scope* Parser::NewScope(Scope* parent, Scope::Type type, bool inside_with) {
+Scope* Parser::NewScope(Scope* parent, ScopeType type) {
   Scope* result = new(zone()) Scope(parent, type);
-  result->Initialize(inside_with);
+  result->Initialize();
   return result;
 }
 
@@ -459,13 +459,31 @@
 
 
 // ----------------------------------------------------------------------------
-// LexicalScope is a support class to facilitate manipulation of the
-// Parser's scope stack. The constructor sets the parser's top scope
-// to the incoming scope, and the destructor resets it.
-//
-// Additionally, it stores transient information used during parsing.
-// These scopes are not kept around after parsing or referenced by syntax
-// trees so they can be stack-allocated and hence used by the pre-parser.
+// LexicalScope and SaveScope are stack allocated support classes to facilitate
+// manipulation of the Parser's scope stack. The constructor sets the parser's
+// top scope to the incoming scope, and the destructor resets it. Additionally,
+// LexicalScope stores transient information used during parsing.
+
+
+class SaveScope BASE_EMBEDDED {
+ public:
+  SaveScope(Parser* parser, Scope* scope)
+      : parser_(parser),
+        previous_top_scope_(parser->top_scope_) {
+    parser->top_scope_ = scope;
+  }
+
+  ~SaveScope() {
+    parser_->top_scope_ = previous_top_scope_;
+  }
+
+ private:
+  // Bookkeeping
+  Parser* parser_;
+  // Previous values
+  Scope* previous_top_scope_;
+};
+
 
 class LexicalScope BASE_EMBEDDED {
  public:
@@ -516,7 +534,6 @@
   // Previous values
   LexicalScope* lexical_scope_parent_;
   Scope* previous_scope_;
-  int previous_with_nesting_level_;
   unsigned previous_ast_node_id_;
 };
 
@@ -529,11 +546,9 @@
     parser_(parser),
     lexical_scope_parent_(parser->lexical_scope_),
     previous_scope_(parser->top_scope_),
-    previous_with_nesting_level_(parser->with_nesting_level_),
     previous_ast_node_id_(isolate->ast_node_id()) {
   parser->top_scope_ = scope;
   parser->lexical_scope_ = this;
-  parser->with_nesting_level_ = 0;
   isolate->set_ast_node_id(AstNode::kDeclarationsId + 1);
 }
 
@@ -541,7 +556,6 @@
 LexicalScope::~LexicalScope() {
   parser_->top_scope_ = previous_scope_;
   parser_->lexical_scope_ = lexical_scope_parent_;
-  parser_->with_nesting_level_ = previous_with_nesting_level_;
   parser_->isolate()->set_ast_node_id(previous_ast_node_id_);
 }
 
@@ -578,7 +592,6 @@
       script_(script),
       scanner_(isolate_->unicode_cache()),
       top_scope_(NULL),
-      with_nesting_level_(0),
       lexical_scope_(NULL),
       target_stack_(NULL),
       allow_natives_syntax_(allow_natives_syntax),
@@ -623,6 +636,7 @@
                                         bool in_global_context,
                                         StrictModeFlag strict_mode,
                                         ZoneScope* zone_scope) {
+  ASSERT(top_scope_ == NULL);
   ASSERT(target_stack_ == NULL);
   if (pre_data_ != NULL) pre_data_->Initialize();
 
@@ -630,18 +644,16 @@
   mode_ = FLAG_lazy ? PARSE_LAZILY : PARSE_EAGERLY;
   if (allow_natives_syntax_ || extension_ != NULL) mode_ = PARSE_EAGERLY;
 
-  Scope::Type type =
-    in_global_context
-      ? Scope::GLOBAL_SCOPE
-      : Scope::EVAL_SCOPE;
+  ScopeType type = in_global_context ? GLOBAL_SCOPE : EVAL_SCOPE;
   Handle<String> no_name = isolate()->factory()->empty_symbol();
 
   FunctionLiteral* result = NULL;
-  { Scope* scope = NewScope(top_scope_, type, inside_with());
+  { Scope* scope = NewScope(top_scope_, type);
+    scope->set_start_position(0);
+    scope->set_end_position(source->length());
     LexicalScope lexical_scope(this, scope, isolate());
-    if (strict_mode == kStrictMode) {
-      top_scope_->EnableStrictMode();
-    }
+    ASSERT(top_scope_->strict_mode_flag() == kNonStrictMode);
+    top_scope_->SetStrictModeFlag(strict_mode);
     ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(16);
     bool ok = true;
     int beg_loc = scanner().location().beg_pos;
@@ -665,8 +677,6 @@
           lexical_scope.only_simple_this_property_assignments(),
           lexical_scope.this_property_assignments(),
           0,
-          0,
-          source->length(),
           FunctionLiteral::ANONYMOUS_EXPRESSION,
           false);  // Does not have duplicate parameters.
     } else if (stack_overflow_) {
@@ -714,6 +724,7 @@
                                    ZoneScope* zone_scope) {
   Handle<SharedFunctionInfo> shared_info = info->shared_info();
   scanner_.Initialize(source);
+  ASSERT(top_scope_ == NULL);
   ASSERT(target_stack_ == NULL);
 
   Handle<String> name(String::cast(shared_info->name()));
@@ -727,16 +738,15 @@
 
   {
     // Parse the function literal.
-    Scope* scope = NewScope(top_scope_, Scope::GLOBAL_SCOPE, inside_with());
+    Scope* scope = NewScope(top_scope_, GLOBAL_SCOPE);
     if (!info->closure().is_null()) {
       scope = Scope::DeserializeScopeChain(info, scope);
     }
     LexicalScope lexical_scope(this, scope, isolate());
-
-    if (shared_info->strict_mode()) {
-      top_scope_->EnableStrictMode();
-    }
-
+    ASSERT(scope->strict_mode_flag() == kNonStrictMode ||
+           scope->strict_mode_flag() == info->strict_mode_flag());
+    ASSERT(info->strict_mode_flag() == shared_info->strict_mode_flag());
+    scope->SetStrictModeFlag(shared_info->strict_mode_flag());
     FunctionLiteral::Type type = shared_info->is_expression()
         ? (shared_info->is_anonymous()
               ? FunctionLiteral::ANONYMOUS_EXPRESSION
@@ -1128,14 +1138,14 @@
   // In harmony mode we allow additionally the following productions
   // SourceElement:
   //    LetDeclaration
+  //    ConstDeclaration
 
   if (peek() == Token::FUNCTION) {
     return ParseFunctionDeclaration(ok);
-  } else if (peek() == Token::LET) {
+  } else if (peek() == Token::LET || peek() == Token::CONST) {
     return ParseVariableStatement(kSourceElement, ok);
-  } else {
-    return ParseStatement(labels, ok);
   }
+  return ParseStatement(labels, ok);
 }
 
 
@@ -1183,7 +1193,7 @@
             directive->Equals(isolate()->heap()->use_strict()) &&
             token_loc.end_pos - token_loc.beg_pos ==
               isolate()->heap()->use_strict()->length() + 2) {
-          top_scope_->EnableStrictMode();
+          top_scope_->SetStrictModeFlag(kStrictMode);
           // "use strict" is the only directive for now.
           directive_prologue = false;
         }
@@ -1321,7 +1331,7 @@
       //    FunctionDeclaration
       // Common language extension is to allow function declaration in place
       // of any statement. This language extension is disabled in strict mode.
-      if (top_scope_->is_strict_mode()) {
+      if (top_scope_->is_strict_mode() || harmony_scoping_) {
         ReportMessageAt(scanner().peek_location(), "strict_function",
                         Vector<const char*>::empty());
         *ok = false;
@@ -1353,6 +1363,10 @@
   // If we are inside a function, a declaration of a var/const variable is a
   // truly local variable, and the scope of the variable is always the function
   // scope.
+  // Let/const variables in harmony mode are always added to the immediately
+  // enclosing scope.
+  Scope* declaration_scope = (mode == LET || mode == CONST_HARMONY)
+      ? top_scope_ : top_scope_->DeclarationScope();
 
   // If a function scope exists, then we can statically declare this
   // variable and also set its mode. In any case, a Declaration node
@@ -1362,9 +1376,8 @@
   // to the calling function context.
   // Similarly, strict mode eval scope does not leak variable declarations to
   // the caller's scope so we declare all locals, too.
-
-  Scope* declaration_scope = mode == LET ? top_scope_
-      : top_scope_->DeclarationScope();
+  // Also for block scoped let/const bindings the variable can be
+  // statically declared.
   if (declaration_scope->is_function_scope() ||
       declaration_scope->is_strict_mode_eval_scope() ||
       declaration_scope->is_block_scope()) {
@@ -1389,6 +1402,7 @@
         // We only have vars, consts and lets in declarations.
         ASSERT(var->mode() == VAR ||
                var->mode() == CONST ||
+               var->mode() == CONST_HARMONY ||
                var->mode() == LET);
         if (harmony_scoping_) {
           // In harmony mode we treat re-declarations as early errors. See
@@ -1400,8 +1414,8 @@
           *ok = false;
           return NULL;
         }
-        const char* type = (var->mode() == VAR) ? "var" :
-                           (var->mode() == CONST) ? "const" : "let";
+        const char* type = (var->mode() == VAR)
+            ? "var" : var->is_const_mode() ? "const" : "let";
         Handle<String> type_string =
             isolate()->factory()->NewStringFromUtf8(CStrVector(type), TENURED);
         Expression* expression =
@@ -1429,12 +1443,13 @@
   // a performance issue since it may lead to repeated
   // Runtime::DeclareContextSlot() calls.
   VariableProxy* proxy = declaration_scope->NewUnresolved(
-      name, false, scanner().location().beg_pos);
+      name, scanner().location().beg_pos);
   declaration_scope->AddDeclaration(
       new(zone()) Declaration(proxy, mode, fun, top_scope_));
 
   // For global const variables we bind the proxy to a variable.
-  if (mode == CONST && declaration_scope->is_global_scope()) {
+  if ((mode == CONST || mode == CONST_HARMONY) &&
+      declaration_scope->is_global_scope()) {
     ASSERT(resolve);  // should be set by all callers
     Variable::Kind kind = Variable::NORMAL;
     var = new(zone()) Variable(declaration_scope, name, CONST, true, kind);
@@ -1582,20 +1597,14 @@
 
   // Construct block expecting 16 statements.
   Block* body = new(zone()) Block(isolate(), labels, 16, false);
-  Scope* saved_scope = top_scope_;
-  Scope* block_scope = NewScope(top_scope_,
-                                Scope::BLOCK_SCOPE,
-                                inside_with());
-  if (top_scope_->is_strict_mode()) {
-    block_scope->EnableStrictMode();
-  }
-  top_scope_ = block_scope;
+  Scope* block_scope = NewScope(top_scope_, BLOCK_SCOPE);
 
   // Parse the statements and collect escaping labels.
-  TargetCollector collector;
-  Target target(&this->target_stack_, &collector);
   Expect(Token::LBRACE, CHECK_OK);
-  {
+  block_scope->set_start_position(scanner().location().beg_pos);
+  { SaveScope save_scope(this, block_scope);
+    TargetCollector collector;
+    Target target(&this->target_stack_, &collector);
     Target target_body(&this->target_stack_, body);
     InitializationBlockFinder block_finder(top_scope_, target_stack_);
 
@@ -1608,8 +1617,7 @@
     }
   }
   Expect(Token::RBRACE, CHECK_OK);
-  top_scope_ = saved_scope;
-
+  block_scope->set_end_position(scanner().location().end_pos);
   block_scope = block_scope->FinalizeBlockScope();
   body->set_block_scope(block_scope);
   return body;
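
With ParseScopedBlock now opening a real BLOCK_SCOPE (and recording the block's source positions), a let binding introduced behind --harmony should stay confined to its block.  A minimal sketch:

    // d8 --harmony
    {
      let x = 1;
      print(x);            // 1
    }
    print(typeof x);       // "undefined": x does not escape the block
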
@@ -1623,6 +1631,7 @@
 
   Handle<String> ignore;
   Block* result = ParseVariableDeclarations(var_context,
+                                            NULL,
                                             &ignore,
                                             CHECK_OK);
   ExpectSemicolon(CHECK_OK);
@@ -1641,12 +1650,24 @@
 // *var is untouched; in particular, it is the caller's responsibility
 // to initialize it properly. This mechanism is used for the parsing
 // of 'for-in' loops.
-Block* Parser::ParseVariableDeclarations(VariableDeclarationContext var_context,
-                                         Handle<String>* out,
-                                         bool* ok) {
+Block* Parser::ParseVariableDeclarations(
+    VariableDeclarationContext var_context,
+    VariableDeclarationProperties* decl_props,
+    Handle<String>* out,
+    bool* ok) {
   // VariableDeclarations ::
-  //   ('var' | 'const') (Identifier ('=' AssignmentExpression)?)+[',']
-
+  //   ('var' | 'const' | 'let') (Identifier ('=' AssignmentExpression)?)+[',']
+  //
+  // The ES6 Draft Rev3 specifies the following grammar for const declarations
+  //
+  // ConstDeclaration ::
+  //   const ConstBinding (',' ConstBinding)* ';'
+  // ConstBinding ::
+  //   Identifier '=' AssignmentExpression
+  //
+  // TODO(ES6):
+  // ConstBinding ::
+  //   BindingPattern '=' AssignmentExpression
   VariableMode mode = VAR;
   // True if the binding needs initialization. 'let' and 'const' declared
   // bindings are created uninitialized by their declaration nodes and
@@ -1659,19 +1680,32 @@
     Consume(Token::VAR);
   } else if (peek() == Token::CONST) {
     Consume(Token::CONST);
-    if (top_scope_->is_strict_mode()) {
+    if (harmony_scoping_) {
+      if (var_context != kSourceElement &&
+          var_context != kForStatement) {
+        // In harmony mode 'const' declarations are only allowed in source
+        // element positions.
+        ReportMessage("unprotected_const", Vector<const char*>::empty());
+        *ok = false;
+        return NULL;
+      }
+      mode = CONST_HARMONY;
+      init_op = Token::INIT_CONST_HARMONY;
+    } else if (top_scope_->is_strict_mode()) {
       ReportMessage("strict_const", Vector<const char*>::empty());
       *ok = false;
       return NULL;
+    } else {
+      mode = CONST;
+      init_op = Token::INIT_CONST;
     }
-    mode = CONST;
     is_const = true;
     needs_init = true;
-    init_op = Token::INIT_CONST;
   } else if (peek() == Token::LET) {
     Consume(Token::LET);
     if (var_context != kSourceElement &&
         var_context != kForStatement) {
+      // Let declarations are only allowed in source element positions.
       ASSERT(var_context == kStatement);
       ReportMessage("unprotected_let", Vector<const char*>::empty());
       *ok = false;
@@ -1684,7 +1718,7 @@
     UNREACHABLE();  // by current callers
   }
 
-  Scope* declaration_scope = (mode == LET)
+  Scope* declaration_scope = (mode == LET || mode == CONST_HARMONY)
       ? top_scope_ : top_scope_->DeclarationScope();
   // The scope of a var/const declared variable anywhere inside a function
   // is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). Thus we can
@@ -1729,8 +1763,10 @@
     // If we have a const declaration, in an inner scope, the proxy is always
     // bound to the declared variable (independent of possibly surrounding with
     // statements).
-    Declare(name, mode, NULL, is_const /* always bound for CONST! */,
-            CHECK_OK);
+    // For let/const declarations in harmony mode, we can also immediately
+    // pre-resolve the proxy because it resides in the same scope as the
+    // declaration.
+    Declare(name, mode, NULL, mode != VAR, CHECK_OK);
     nvars++;
     if (declaration_scope->num_var_or_const() > kMaxNumFunctionLocals) {
       ReportMessageAt(scanner().location(), "too_many_variables",
@@ -1769,7 +1805,8 @@
     Scope* initialization_scope = is_const ? declaration_scope : top_scope_;
     Expression* value = NULL;
     int position = -1;
-    if (peek() == Token::ASSIGN) {
+    // Harmony consts have non-optional initializers.
+    if (peek() == Token::ASSIGN || mode == CONST_HARMONY) {
       Expect(Token::ASSIGN, CHECK_OK);
       position = scanner().location().beg_pos;
       value = ParseAssignmentExpression(var_context != kForStatement, CHECK_OK);
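
Taken together, the const changes above make harmony-mode const declarations require an initializer and restrict them to source element positions (the "unprotected_const" case).  A sketch of what should now be accepted or rejected under --harmony:

    // d8 --harmony
    const a = 1;           // ok: a ConstBinding always carries an initializer
    const b;               // SyntaxError: missing '=' (initializer is mandatory)
    if (a) const c = 2;    // SyntaxError: 'const' is not a source element here
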
@@ -1781,6 +1818,7 @@
       } else {
         fni_->RemoveLastFunction();
       }
+      if (decl_props != NULL) *decl_props = kHasInitializers;
     }
 
     // Make sure that 'const x' and 'let x' initialize 'x' to undefined.
@@ -1807,7 +1845,6 @@
     // declaration statement has been executed. This is important in
     // browsers where the global object (window) has lots of
     // properties defined in prototype objects.
-
     if (initialization_scope->is_global_scope()) {
       // Compute the arguments for the runtime call.
       ZoneList<Expression*>* arguments = new(zone()) ZoneList<Expression*>(3);
@@ -1832,9 +1869,7 @@
       } else {
         // Add strict mode.
         // We may want to pass singleton to avoid Literal allocations.
-        StrictModeFlag flag = initialization_scope->is_strict_mode()
-            ? kStrictMode
-            : kNonStrictMode;
+        StrictModeFlag flag = initialization_scope->strict_mode_flag();
         arguments->Add(NewNumberLiteral(flag));
 
         // Be careful not to assign a value to the global variable if
@@ -1871,18 +1906,14 @@
     // dynamically looked-up variables and constants (the start context
     // for constant lookups is always the function context, while it is
     // the top context for var declared variables). Sigh...
-    // For 'let' declared variables the initialization is in the same scope
-    // as the declaration. Thus dynamic lookups are unnecessary even if the
-    // block scope is inside a with.
+    // For 'let' and 'const' declared variables in harmony mode the
+    // initialization is in the same scope as the declaration. Thus dynamic
+    // lookups are unnecessary even if the block scope is inside a with.
     if (value != NULL) {
-      bool in_with = (mode == VAR) ? inside_with() : false;
-      VariableProxy* proxy =
-          initialization_scope->NewUnresolved(name, in_with);
+      VariableProxy* proxy = initialization_scope->NewUnresolved(name);
       Assignment* assignment =
           new(zone()) Assignment(isolate(), init_op, proxy, value, position);
-      if (block) {
-        block->AddStatement(new(zone()) ExpressionStatement(assignment));
-      }
+      block->AddStatement(new(zone()) ExpressionStatement(assignment));
     }
 
     if (fni_ != NULL) fni_->Leave();
@@ -2105,10 +2136,14 @@
   Expression* expr = ParseExpression(true, CHECK_OK);
   Expect(Token::RPAREN, CHECK_OK);
 
-  ++with_nesting_level_;
   top_scope_->DeclarationScope()->RecordWithStatement();
-  Statement* stmt = ParseStatement(labels, CHECK_OK);
-  --with_nesting_level_;
+  Scope* with_scope = NewScope(top_scope_, WITH_SCOPE);
+  Statement* stmt;
+  { SaveScope save_scope(this, with_scope);
+    with_scope->set_start_position(scanner().peek_location().beg_pos);
+    stmt = ParseStatement(labels, CHECK_OK);
+    with_scope->set_end_position(scanner().location().end_pos);
+  }
   return new(zone()) WithStatement(expr, stmt);
 }
 
@@ -2233,6 +2268,8 @@
     Consume(Token::CATCH);
 
     Expect(Token::LPAREN, CHECK_OK);
+    catch_scope = NewScope(top_scope_, CATCH_SCOPE);
+    catch_scope->set_start_position(scanner().location().beg_pos);
     name = ParseIdentifier(CHECK_OK);
 
     if (top_scope_->is_strict_mode() && IsEvalOrArguments(name)) {
@@ -2245,21 +2282,15 @@
 
     if (peek() == Token::LBRACE) {
       Target target(&this->target_stack_, &catch_collector);
-      catch_scope = NewScope(top_scope_, Scope::CATCH_SCOPE, inside_with());
-      if (top_scope_->is_strict_mode()) {
-        catch_scope->EnableStrictMode();
-      }
       VariableMode mode = harmony_scoping_ ? LET : VAR;
       catch_variable = catch_scope->DeclareLocal(name, mode);
 
-      Scope* saved_scope = top_scope_;
-      top_scope_ = catch_scope;
+      SaveScope save_scope(this, catch_scope);
       catch_block = ParseBlock(NULL, CHECK_OK);
-      top_scope_ = saved_scope;
     } else {
       Expect(Token::LBRACE, CHECK_OK);
     }
-
+    catch_scope->set_end_position(scanner().location().end_pos);
     tok = peek();
   }
 
@@ -2365,16 +2396,22 @@
 
   Statement* init = NULL;
 
+  // Create an in-between scope for let-bound iteration variables.
+  Scope* saved_scope = top_scope_;
+  Scope* for_scope = NewScope(top_scope_, BLOCK_SCOPE);
+  top_scope_ = for_scope;
+
   Expect(Token::FOR, CHECK_OK);
   Expect(Token::LPAREN, CHECK_OK);
+  for_scope->set_start_position(scanner().location().beg_pos);
   if (peek() != Token::SEMICOLON) {
     if (peek() == Token::VAR || peek() == Token::CONST) {
       Handle<String> name;
       Block* variable_statement =
-          ParseVariableDeclarations(kForStatement, &name, CHECK_OK);
+          ParseVariableDeclarations(kForStatement, NULL, &name, CHECK_OK);
 
       if (peek() == Token::IN && !name.is_null()) {
-        VariableProxy* each = top_scope_->NewUnresolved(name, inside_with());
+        VariableProxy* each = top_scope_->NewUnresolved(name);
         ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
         Target target(&this->target_stack_, loop);
 
@@ -2387,12 +2424,73 @@
         Block* result = new(zone()) Block(isolate(), NULL, 2, false);
         result->AddStatement(variable_statement);
         result->AddStatement(loop);
+        top_scope_ = saved_scope;
+        for_scope->set_end_position(scanner().location().end_pos);
+        for_scope = for_scope->FinalizeBlockScope();
+        ASSERT(for_scope == NULL);
         // Parsed for-in loop w/ variable/const declaration.
         return result;
       } else {
         init = variable_statement;
       }
+    } else if (peek() == Token::LET) {
+      Handle<String> name;
+      VariableDeclarationProperties decl_props = kHasNoInitializers;
+      Block* variable_statement =
+          ParseVariableDeclarations(kForStatement,
+                                    &decl_props,
+                                    &name,
+                                    CHECK_OK);
+      bool accept_IN = !name.is_null() && decl_props != kHasInitializers;
+      if (peek() == Token::IN && accept_IN) {
+        // Rewrite a for-in statement of the form
+        //
+        //   for (let x in e) b
+        //
+        // into
+        //
+        //   <let x' be a temporary variable>
+        //   for (x' in e) {
+        //     let x;
+        //     x = x';
+        //     b;
+        //   }
 
+        // TODO(keuchel): Move the temporary variable to the block scope, after
+        // implementing stack allocated block scoped variables.
+        Variable* temp = top_scope_->DeclarationScope()->NewTemporary(name);
+        VariableProxy* temp_proxy = new(zone()) VariableProxy(isolate(), temp);
+        VariableProxy* each = top_scope_->NewUnresolved(name);
+        ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
+        Target target(&this->target_stack_, loop);
+
+        Expect(Token::IN, CHECK_OK);
+        Expression* enumerable = ParseExpression(true, CHECK_OK);
+        Expect(Token::RPAREN, CHECK_OK);
+
+        Statement* body = ParseStatement(NULL, CHECK_OK);
+        Block* body_block = new(zone()) Block(isolate(), NULL, 3, false);
+        Assignment* assignment = new(zone()) Assignment(isolate(),
+                                                        Token::ASSIGN,
+                                                        each,
+                                                        temp_proxy,
+                                                        RelocInfo::kNoPosition);
+        Statement* assignment_statement =
+            new(zone()) ExpressionStatement(assignment);
+        body_block->AddStatement(variable_statement);
+        body_block->AddStatement(assignment_statement);
+        body_block->AddStatement(body);
+        loop->Initialize(temp_proxy, enumerable, body_block);
+        top_scope_ = saved_scope;
+        for_scope->set_end_position(scanner().location().end_pos);
+        for_scope = for_scope->FinalizeBlockScope();
+        body_block->set_block_scope(for_scope);
+        // Parsed for-in loop w/ let declaration.
+        return loop;
+
+      } else {
+        init = variable_statement;
+      }
     } else {
       Expression* expression = ParseExpression(false, CHECK_OK);
       if (peek() == Token::IN) {
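
The rewrite above lets 'for (let x in e)' iterate over a function-scoped temporary while re-declaring x inside the loop's block, so the binding should not leak out of the loop.  Sketch:

    // d8 --harmony
    for (let key in {a: 1, b: 2}) {
      print(key);          // "a", then "b"
    }
    print(typeof key);     // "undefined": key is confined to the loop body
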
@@ -2414,6 +2512,10 @@
 
         Statement* body = ParseStatement(NULL, CHECK_OK);
         if (loop) loop->Initialize(expression, enumerable, body);
+        top_scope_ = saved_scope;
+        for_scope->set_end_position(scanner().location().end_pos);
+        for_scope = for_scope->FinalizeBlockScope();
+        ASSERT(for_scope == NULL);
         // Parsed for-in loop.
         return loop;
 
@@ -2444,8 +2546,31 @@
   Expect(Token::RPAREN, CHECK_OK);
 
   Statement* body = ParseStatement(NULL, CHECK_OK);
-  if (loop) loop->Initialize(init, cond, next, body);
-  return loop;
+  top_scope_ = saved_scope;
+  for_scope->set_end_position(scanner().location().end_pos);
+  for_scope = for_scope->FinalizeBlockScope();
+  if (for_scope != NULL) {
+    // Rewrite a for statement of the form
+    //
+    //   for (let x = i; c; n) b
+    //
+    // into
+    //
+    //   {
+    //     let x = i;
+    //     for (; c; n) b
+    //   }
+    ASSERT(init != NULL);
+    Block* result = new(zone()) Block(isolate(), NULL, 2, false);
+    result->AddStatement(init);
+    result->AddStatement(loop);
+    result->set_block_scope(for_scope);
+    if (loop) loop->Initialize(NULL, cond, next, body);
+    return result;
+  } else {
+    if (loop) loop->Initialize(init, cond, next, body);
+    return loop;
+  }
 }
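
Similarly, 'for (let x = i; c; n) b' is now wrapped in an enclosing block that owns the let binding, so the declaration is scoped to the loop rather than to the surrounding function.  Sketch:

    // d8 --harmony
    for (let i = 0; i < 3; i++) {
      // i is visible here
    }
    print(typeof i);       // "undefined": the binding stays inside the loop's block
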
 
 
@@ -3065,9 +3190,7 @@
     case Token::FUTURE_STRICT_RESERVED_WORD: {
       Handle<String> name = ParseIdentifier(CHECK_OK);
       if (fni_ != NULL) fni_->PushVariableName(name);
-      result = top_scope_->NewUnresolved(name,
-                                         inside_with(),
-                                         scanner().location().beg_pos);
+      result = top_scope_->NewUnresolved(name, scanner().location().beg_pos);
       break;
     }
 
@@ -3184,9 +3307,11 @@
   // Update the scope information before the pre-parsing bailout.
   int literal_index = lexical_scope_->NextMaterializedLiteralIndex();
 
-  // Allocate a fixed array with all the literals.
-  Handle<FixedArray> literals =
+  // Allocate a fixed array to hold all the object literals.
+  Handle<FixedArray> object_literals =
       isolate()->factory()->NewFixedArray(values->length(), TENURED);
+  Handle<FixedDoubleArray> double_literals;
+  ElementsKind elements_kind = FAST_SMI_ONLY_ELEMENTS;
 
   // Fill in the literals.
   bool is_simple = true;
@@ -3198,19 +3323,75 @@
     }
     Handle<Object> boilerplate_value = GetBoilerplateValue(values->at(i));
     if (boilerplate_value->IsUndefined()) {
-      literals->set_the_hole(i);
+      object_literals->set_the_hole(i);
+      if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+        double_literals->set_the_hole(i);
+      }
       is_simple = false;
     } else {
-      literals->set(i, *boilerplate_value);
+      // Examine each literal element, and adjust the ElementsKind if the
+      // literal element is not of a type that can be stored in the current
+      // ElementsKind.  Start with FAST_SMI_ONLY_ELEMENTS, and transition to
+      // FAST_DOUBLE_ELEMENTS and FAST_ELEMENTS as necessary.  Always remember
+      // the tagged value, no matter what the ElementsKind is in case we
+      // ultimately end up in FAST_ELEMENTS.
+      object_literals->set(i, *boilerplate_value);
+      if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+        // Smi only elements. Notice if a transition to FAST_DOUBLE_ELEMENTS or
+        // FAST_ELEMENTS is required.
+        if (!boilerplate_value->IsSmi()) {
+          if (boilerplate_value->IsNumber() && FLAG_smi_only_arrays) {
+            // Allocate a double array on the FAST_DOUBLE_ELEMENTS transition to
+            // avoid over-allocating in TENURED space.
+            double_literals = isolate()->factory()->NewFixedDoubleArray(
+                values->length(), TENURED);
+            // Copy the contents of the FAST_SMI_ONLY_ELEMENTS array to the
+            // FAST_DOUBLE_ELEMENTS array so that they are in sync.
+            for (int j = 0; j < i; ++j) {
+              Object* smi_value = object_literals->get(j);
+              if (smi_value->IsTheHole()) {
+                double_literals->set_the_hole(j);
+              } else {
+                double_literals->set(j, Smi::cast(smi_value)->value());
+              }
+            }
+            double_literals->set(i, boilerplate_value->Number());
+            elements_kind = FAST_DOUBLE_ELEMENTS;
+          } else {
+            elements_kind = FAST_ELEMENTS;
+          }
+        }
+      } else if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+        // Continue to store double values into FAST_DOUBLE_ELEMENTS arrays
+        // until the first value is seen that can't be stored as a double.
+        if (boilerplate_value->IsNumber()) {
+          double_literals->set(i, boilerplate_value->Number());
+        } else {
+          elements_kind = FAST_ELEMENTS;
+        }
+      }
     }
   }
 
   // Simple and shallow arrays can be lazily copied, we transform the
   // elements array to a copy-on-write array.
-  if (is_simple && depth == 1 && values->length() > 0) {
-    literals->set_map(isolate()->heap()->fixed_cow_array_map());
+  if (is_simple && depth == 1 && values->length() > 0 &&
+      elements_kind != FAST_DOUBLE_ELEMENTS) {
+    object_literals->set_map(isolate()->heap()->fixed_cow_array_map());
   }
 
+  Handle<FixedArrayBase> element_values = elements_kind == FAST_DOUBLE_ELEMENTS
+      ? Handle<FixedArrayBase>(double_literals)
+      : Handle<FixedArrayBase>(object_literals);
+
+  // Remember both the literal's constant values and the ElementsKind
+  // in a 2-element FixedArray.
+  Handle<FixedArray> literals =
+      isolate()->factory()->NewFixedArray(2, TENURED);
+
+  literals->set(0, Smi::FromInt(elements_kind));
+  literals->set(1, *element_values);
+
   return new(zone()) ArrayLiteral(
       isolate(), literals, values, literal_index, is_simple, depth);
 }
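
The array literal boilerplate now records an ElementsKind next to the constant values, starting at FAST_SMI_ONLY_ELEMENTS and widening to FAST_DOUBLE_ELEMENTS or FAST_ELEMENTS as the elements demand.  Roughly, and only observable indirectly (the double transition assumes --smi-only-arrays is enabled):

    var smis    = [1, 2, 3];         // stays FAST_SMI_ONLY_ELEMENTS
    var doubles = [1, 2.5, 3];       // 2.5 forces FAST_DOUBLE_ELEMENTS
    var mixed   = [1, "two", {}];    // non-numbers force FAST_ELEMENTS
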
@@ -3715,13 +3896,11 @@
   // hoisted. In harmony block scoping mode they are block scoped, so they
   // are not hoisted.
   Scope* scope = (type == FunctionLiteral::DECLARATION && !harmony_scoping_)
-      ? NewScope(top_scope_->DeclarationScope(), Scope::FUNCTION_SCOPE, false)
-      : NewScope(top_scope_, Scope::FUNCTION_SCOPE, inside_with());
+      ? NewScope(top_scope_->DeclarationScope(), FUNCTION_SCOPE)
+      : NewScope(top_scope_, FUNCTION_SCOPE);
   ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(8);
   int materialized_literal_count;
   int expected_property_count;
-  int start_pos;
-  int end_pos;
   bool only_simple_this_property_assignments;
   Handle<FixedArray> this_property_assignments;
   bool has_duplicate_parameters = false;
@@ -3732,7 +3911,7 @@
     //  FormalParameterList ::
     //    '(' (Identifier)*[','] ')'
     Expect(Token::LPAREN, CHECK_OK);
-    start_pos = scanner().location().beg_pos;
+    scope->set_start_position(scanner().location().beg_pos);
     Scanner::Location name_loc = Scanner::Location::invalid();
     Scanner::Location dupe_loc = Scanner::Location::invalid();
     Scanner::Location reserved_loc = Scanner::Location::invalid();
@@ -3778,13 +3957,21 @@
     // future we can change the AST to only refer to VariableProxies
     // instead of Variables and Proxies as is the case now.
     if (type == FunctionLiteral::NAMED_EXPRESSION) {
-      Variable* fvar = top_scope_->DeclareFunctionVar(function_name);
-      VariableProxy* fproxy =
-          top_scope_->NewUnresolved(function_name, inside_with());
+      VariableMode fvar_mode;
+      Token::Value fvar_init_op;
+      if (harmony_scoping_) {
+        fvar_mode = CONST_HARMONY;
+        fvar_init_op = Token::INIT_CONST_HARMONY;
+      } else {
+        fvar_mode = CONST;
+        fvar_init_op = Token::INIT_CONST;
+      }
+      Variable* fvar = top_scope_->DeclareFunctionVar(function_name, fvar_mode);
+      VariableProxy* fproxy = top_scope_->NewUnresolved(function_name);
       fproxy->BindTo(fvar);
       body->Add(new(zone()) ExpressionStatement(
           new(zone()) Assignment(isolate(),
-                                 Token::INIT_CONST,
+                                 fvar_init_op,
                                  fproxy,
                                  new(zone()) ThisFunction(isolate()),
                                  RelocInfo::kNoPosition)));
@@ -3808,18 +3995,18 @@
         // compile after all.
         is_lazily_compiled = false;
       } else {
-        end_pos = entry.end_pos();
-        if (end_pos <= function_block_pos) {
+        scope->set_end_position(entry.end_pos());
+        if (scope->end_position() <= function_block_pos) {
           // End position greater than end of stream is safe, and hard to check.
           ReportInvalidPreparseData(function_name, CHECK_OK);
         }
         isolate()->counters()->total_preparse_skipped()->Increment(
-            end_pos - function_block_pos);
+            scope->end_position() - function_block_pos);
         // Seek to position just before terminal '}'.
-        scanner().SeekForward(end_pos - 1);
+        scanner().SeekForward(scope->end_position() - 1);
         materialized_literal_count = entry.literal_count();
         expected_property_count = entry.property_count();
-        if (entry.strict_mode()) top_scope_->EnableStrictMode();
+        if (entry.strict_mode()) top_scope_->SetStrictModeFlag(kStrictMode);
         only_simple_this_property_assignments = false;
         this_property_assignments = isolate()->factory()->empty_fixed_array();
         Expect(Token::RBRACE, CHECK_OK);
@@ -3836,12 +4023,13 @@
       this_property_assignments = lexical_scope.this_property_assignments();
 
       Expect(Token::RBRACE, CHECK_OK);
-      end_pos = scanner().location().end_pos;
+      scope->set_end_position(scanner().location().end_pos);
     }
 
     // Validate strict mode.
     if (top_scope_->is_strict_mode()) {
       if (IsEvalOrArguments(function_name)) {
+        int start_pos = scope->start_position();
         int position = function_token_position != RelocInfo::kNoPosition
             ? function_token_position
             : (start_pos > 0 ? start_pos - 1 : start_pos);
@@ -3864,6 +4052,7 @@
         return NULL;
       }
       if (name_is_strict_reserved) {
+        int start_pos = scope->start_position();
         int position = function_token_position != RelocInfo::kNoPosition
             ? function_token_position
             : (start_pos > 0 ? start_pos - 1 : start_pos);
@@ -3879,7 +4068,9 @@
         *ok = false;
         return NULL;
       }
-      CheckOctalLiteral(start_pos, end_pos, CHECK_OK);
+      CheckOctalLiteral(scope->start_position(),
+                        scope->end_position(),
+                        CHECK_OK);
     }
   }
 
@@ -3897,8 +4088,6 @@
                                   only_simple_this_property_assignments,
                                   this_property_assignments,
                                   num_parameters,
-                                  start_pos,
-                                  end_pos,
                                   type,
                                   has_duplicate_parameters);
   function_literal->set_function_token_position(function_token_position);
@@ -5119,17 +5308,16 @@
 
 // Create a Scanner for the preparser to use as input, and preparse the source.
 static ScriptDataImpl* DoPreParse(UC16CharacterStream* source,
-                                  bool allow_lazy,
-                                  ParserRecorder* recorder,
-                                  bool harmony_scoping) {
+                                  int flags,
+                                  ParserRecorder* recorder) {
   Isolate* isolate = Isolate::Current();
   JavaScriptScanner scanner(isolate->unicode_cache());
-  scanner.SetHarmonyScoping(harmony_scoping);
+  scanner.SetHarmonyScoping((flags & kHarmonyScoping) != 0);
   scanner.Initialize(source);
   intptr_t stack_limit = isolate->stack_guard()->real_climit();
   if (!preparser::PreParser::PreParseProgram(&scanner,
                                              recorder,
-                                             allow_lazy,
+                                             flags,
                                              stack_limit)) {
     isolate->StackOverflow();
     return NULL;
@@ -5146,25 +5334,28 @@
 // even if the preparser data is only used once.
 ScriptDataImpl* ParserApi::PartialPreParse(UC16CharacterStream* source,
                                            v8::Extension* extension,
-                                           bool harmony_scoping) {
+                                           int flags) {
   bool allow_lazy = FLAG_lazy && (extension == NULL);
   if (!allow_lazy) {
     // Partial preparsing is only about lazily compiled functions.
     // If we don't allow lazy compilation, the log data will be empty.
     return NULL;
   }
+  flags |= kAllowLazy;
   PartialParserRecorder recorder;
-  return DoPreParse(source, allow_lazy, &recorder, harmony_scoping);
+  return DoPreParse(source, flags, &recorder);
 }
 
 
 ScriptDataImpl* ParserApi::PreParse(UC16CharacterStream* source,
                                     v8::Extension* extension,
-                                    bool harmony_scoping) {
+                                    int flags) {
   Handle<Script> no_script;
-  bool allow_lazy = FLAG_lazy && (extension == NULL);
+  if (FLAG_lazy && (extension == NULL)) {
+    flags |= kAllowLazy;
+  }
   CompleteParserRecorder recorder;
-  return DoPreParse(source, allow_lazy, &recorder, harmony_scoping);
+  return DoPreParse(source, flags, &recorder);
 }
 
 
@@ -5227,7 +5418,7 @@
       Handle<String> source = Handle<String>(String::cast(script->source()));
       result = parser.ParseProgram(source,
                                    info->is_global(),
-                                   info->StrictMode());
+                                   info->strict_mode_flag());
     }
   }
   info->SetFunction(result);
diff --git a/src/parser.h b/src/parser.h
index 359bb38..268b094 100644
--- a/src/parser.h
+++ b/src/parser.h
@@ -33,6 +33,7 @@
 #include "preparse-data-format.h"
 #include "preparse-data.h"
 #include "scopes.h"
+#include "preparser.h"
 
 namespace v8 {
 namespace internal {
@@ -43,6 +44,7 @@
 class PositionStack;
 class Target;
 class LexicalScope;
+class SaveScope;
 
 template <typename T> class ZoneListWrapper;
 
@@ -164,13 +166,13 @@
   // Generic preparser generating full preparse data.
   static ScriptDataImpl* PreParse(UC16CharacterStream* source,
                                   v8::Extension* extension,
-                                  bool harmony_scoping);
+                                  int flags);
 
   // Preparser that only does preprocessing that makes sense if only used
   // immediately after.
   static ScriptDataImpl* PartialPreParse(UC16CharacterStream* source,
                                          v8::Extension* extension,
-                                         bool harmony_scoping);
+                                         int flags);
 };
 
 // ----------------------------------------------------------------------------
@@ -459,6 +461,12 @@
     kForStatement
   };
 
+  // Whether a list of variable declarations includes any initializers.
+  enum VariableDeclarationProperties {
+    kHasInitializers,
+    kHasNoInitializers
+  };
+
   Isolate* isolate() { return isolate_; }
   Zone* zone() { return isolate_->zone(); }
 
@@ -473,7 +481,7 @@
   void ReportInvalidPreparseData(Handle<String> name, bool* ok);
   void ReportMessage(const char* message, Vector<const char*> args);
 
-  bool inside_with() const { return with_nesting_level_ > 0; }
+  bool inside_with() const { return top_scope_->inside_with(); }
   JavaScriptScanner& scanner()  { return scanner_; }
   Mode mode() const { return mode_; }
   ScriptDataImpl* pre_data() const { return pre_data_; }
@@ -492,10 +500,10 @@
   Statement* ParseFunctionDeclaration(bool* ok);
   Statement* ParseNativeDeclaration(bool* ok);
   Block* ParseBlock(ZoneStringList* labels, bool* ok);
-  Block* ParseScopedBlock(ZoneStringList* labels, bool* ok);
   Block* ParseVariableStatement(VariableDeclarationContext var_context,
                                 bool* ok);
   Block* ParseVariableDeclarations(VariableDeclarationContext var_context,
+                                   VariableDeclarationProperties* decl_props,
                                    Handle<String>* out,
                                    bool* ok);
   Statement* ParseExpressionOrLabelledStatement(ZoneStringList* labels,
@@ -515,6 +523,9 @@
   TryStatement* ParseTryStatement(bool* ok);
   DebuggerStatement* ParseDebuggerStatement(bool* ok);
 
+  // Support for harmony block scoped bindings.
+  Block* ParseScopedBlock(ZoneStringList* labels, bool* ok);
+
   Expression* ParseExpression(bool accept_IN, bool* ok);
   Expression* ParseAssignmentExpression(bool accept_IN, bool* ok);
   Expression* ParseConditionalExpression(bool accept_IN, bool* ok);
@@ -669,7 +680,7 @@
     return &empty;
   }
 
-  Scope* NewScope(Scope* parent, Scope::Type type, bool inside_with);
+  Scope* NewScope(Scope* parent, ScopeType type);
 
   Handle<String> LookupSymbol(int symbol_id);
 
@@ -714,7 +725,6 @@
   JavaScriptScanner scanner_;
 
   Scope* top_scope_;
-  int with_nesting_level_;
 
   LexicalScope* lexical_scope_;
   Mode mode_;
@@ -734,6 +744,7 @@
   bool harmony_scoping_;
 
   friend class LexicalScope;
+  friend class SaveScope;
 };
 
 
diff --git a/src/preparser-api.cc b/src/preparser-api.cc
index 899489e..25c7a82 100644
--- a/src/preparser-api.cc
+++ b/src/preparser-api.cc
@@ -188,7 +188,7 @@
   preparser::PreParser::PreParseResult result =
       preparser::PreParser::PreParseProgram(&scanner,
                                             &recorder,
-                                            true,
+                                            internal::kAllowLazy,
                                             stack_limit);
   if (result == preparser::PreParser::kPreParseStackOverflow) {
     return PreParserData::StackOverflow();
diff --git a/src/preparser.cc b/src/preparser.cc
index 9f8e1ee..3313658 100644
--- a/src/preparser.cc
+++ b/src/preparser.cc
@@ -125,11 +125,13 @@
   // In harmony mode we allow additionally the following productions
   // SourceElement:
   //    LetDeclaration
+  //    ConstDeclaration
 
   switch (peek()) {
     case i::Token::FUNCTION:
       return ParseFunctionDeclaration(ok);
     case i::Token::LET:
+    case i::Token::CONST:
       return ParseVariableStatement(kSourceElement, ok);
     default:
       return ParseStatement(ok);
@@ -240,7 +242,7 @@
       i::Scanner::Location start_location = scanner_->peek_location();
       Statement statement = ParseFunctionDeclaration(CHECK_OK);
       i::Scanner::Location end_location = scanner_->location();
-      if (strict_mode()) {
+      if (strict_mode() || harmony_scoping_) {
         ReportMessageAt(start_location.beg_pos, end_location.end_pos,
                         "strict_function", NULL);
         *ok = false;
@@ -312,6 +314,7 @@
 
   Statement result = ParseVariableDeclarations(var_context,
                                                NULL,
+                                               NULL,
                                                CHECK_OK);
   ExpectSemicolon(CHECK_OK);
   return result;
@@ -325,15 +328,37 @@
 // of 'for-in' loops.
 PreParser::Statement PreParser::ParseVariableDeclarations(
     VariableDeclarationContext var_context,
+    VariableDeclarationProperties* decl_props,
     int* num_decl,
     bool* ok) {
   // VariableDeclarations ::
   //   ('var' | 'const') (Identifier ('=' AssignmentExpression)?)+[',']
-
+  //
+  // The ES6 Draft Rev3 specifies the following grammar for const declarations
+  //
+  // ConstDeclaration ::
+  //   const ConstBinding (',' ConstBinding)* ';'
+  // ConstBinding ::
+  //   Identifier '=' AssignmentExpression
+  //
+  // TODO(ES6):
+  // ConstBinding ::
+  //   BindingPattern '=' AssignmentExpression
+  bool require_initializer = false;
   if (peek() == i::Token::VAR) {
     Consume(i::Token::VAR);
   } else if (peek() == i::Token::CONST) {
-    if (strict_mode()) {
+    if (harmony_scoping_) {
+      if (var_context != kSourceElement &&
+          var_context != kForStatement) {
+        i::Scanner::Location location = scanner_->peek_location();
+        ReportMessageAt(location.beg_pos, location.end_pos,
+                        "unprotected_const", NULL);
+        *ok = false;
+        return Statement::Default();
+      }
+      require_initializer = true;
+    } else if (strict_mode()) {
       i::Scanner::Location location = scanner_->peek_location();
       ReportMessageAt(location, "strict_const", NULL);
       *ok = false;
@@ -372,9 +397,10 @@
       return Statement::Default();
     }
     nvars++;
-    if (peek() == i::Token::ASSIGN) {
+    if (peek() == i::Token::ASSIGN || require_initializer) {
       Expect(i::Token::ASSIGN, CHECK_OK);
       ParseAssignmentExpression(var_context != kForStatement, CHECK_OK);
+      if (decl_props != NULL) *decl_props = kHasInitializers;
     }
   } while (peek() == i::Token::COMMA);
 
@@ -569,9 +595,14 @@
   if (peek() != i::Token::SEMICOLON) {
     if (peek() == i::Token::VAR || peek() == i::Token::CONST ||
         peek() == i::Token::LET) {
+      bool is_let = peek() == i::Token::LET;
       int decl_count;
-      ParseVariableDeclarations(kForStatement, &decl_count, CHECK_OK);
-      if (peek() == i::Token::IN && decl_count == 1) {
+      VariableDeclarationProperties decl_props = kHasNoInitializers;
+      ParseVariableDeclarations(
+          kForStatement, &decl_props, &decl_count, CHECK_OK);
+      bool accept_IN = decl_count == 1 &&
+          !(is_let && decl_props == kHasInitializers);
+      if (peek() == i::Token::IN && accept_IN) {
         Expect(i::Token::IN, CHECK_OK);
         ParseExpression(true, CHECK_OK);
         Expect(i::Token::RPAREN, CHECK_OK);
@@ -1353,8 +1384,11 @@
 PreParser::Expression PreParser::ParseV8Intrinsic(bool* ok) {
   // CallRuntime ::
   //   '%' Identifier Arguments
-
   Expect(i::Token::MOD, CHECK_OK);
+  if (!allow_natives_syntax_) {
+    *ok = false;
+    return Expression::Default();
+  }
   ParseIdentifier(CHECK_OK);
   ParseArguments(ok);
 
diff --git a/src/preparser.h b/src/preparser.h
index cb1d5fb..6a0b97a 100644
--- a/src/preparser.h
+++ b/src/preparser.h
@@ -118,9 +118,12 @@
   // during parsing.
   static PreParseResult PreParseProgram(i::JavaScriptScanner* scanner,
                                         i::ParserRecorder* log,
-                                        bool allow_lazy,
+                                        int flags,
                                         uintptr_t stack_limit) {
-    return PreParser(scanner, log, stack_limit, allow_lazy).PreParse();
+    bool allow_lazy = (flags & i::kAllowLazy) != 0;
+    bool allow_natives_syntax = (flags & i::kAllowNativesSyntax) != 0;
+    return PreParser(scanner, log, stack_limit,
+                     allow_lazy, allow_natives_syntax).PreParse();
   }
 
  private:
@@ -179,6 +182,12 @@
     kForStatement
   };
 
+  // Whether a list of variable declarations includes any initializers.
+  enum VariableDeclarationProperties {
+    kHasInitializers,
+    kHasNoInitializers
+  };
+
   class Expression;
 
   class Identifier {
@@ -399,6 +408,16 @@
 
   typedef int Arguments;
 
+  // The Strict Mode (ECMA-262 5th edition, 4.2.2).
+  enum StrictModeFlag {
+    kNonStrictMode,
+    kStrictMode,
+    // This value is never used, but is needed to prevent GCC 4.5 from failing
+    // to compile when we assert that a flag is either kNonStrictMode or
+    // kStrictMode.
+    kInvalidStrictFlag
+  };
+
   class Scope {
    public:
     Scope(Scope** variable, ScopeType type)
@@ -408,7 +427,8 @@
           materialized_literal_count_(0),
           expected_properties_(0),
           with_nesting_count_(0),
-          strict_((prev_ != NULL) && prev_->is_strict()) {
+          strict_mode_flag_((prev_ != NULL) ? prev_->strict_mode_flag()
+                            : kNonStrictMode) {
       *variable = this;
     }
     ~Scope() { *variable_ = prev_; }
@@ -418,8 +438,13 @@
     int expected_properties() { return expected_properties_; }
     int materialized_literal_count() { return materialized_literal_count_; }
     bool IsInsideWith() { return with_nesting_count_ != 0; }
-    bool is_strict() { return strict_; }
-    void set_strict() { strict_ = true; }
+    bool is_strict_mode() { return strict_mode_flag_ == kStrictMode; }
+    StrictModeFlag strict_mode_flag() {
+      return strict_mode_flag_;
+    }
+    void set_strict_mode_flag(StrictModeFlag strict_mode_flag) {
+      strict_mode_flag_ = strict_mode_flag;
+    }
     void EnterWith() { with_nesting_count_++; }
     void LeaveWith() { with_nesting_count_--; }
 
@@ -430,14 +455,15 @@
     int materialized_literal_count_;
     int expected_properties_;
     int with_nesting_count_;
-    bool strict_;
+    StrictModeFlag strict_mode_flag_;
   };
 
   // Private constructor only used in PreParseProgram.
   PreParser(i::JavaScriptScanner* scanner,
             i::ParserRecorder* log,
             uintptr_t stack_limit,
-            bool allow_lazy)
+            bool allow_lazy,
+            bool allow_natives_syntax)
       : scanner_(scanner),
         log_(log),
         scope_(NULL),
@@ -445,7 +471,8 @@
         strict_mode_violation_location_(i::Scanner::Location::invalid()),
         strict_mode_violation_type_(NULL),
         stack_overflow_(false),
-        allow_lazy_(true),
+        allow_lazy_(allow_lazy),
+        allow_natives_syntax_(allow_natives_syntax),
         parenthesized_function_(false),
         harmony_scoping_(scanner->HarmonyScoping()) { }
 
@@ -459,7 +486,7 @@
     if (stack_overflow_) return kPreParseStackOverflow;
     if (!ok) {
       ReportUnexpectedToken(scanner_->current_token());
-    } else if (scope_->is_strict()) {
+    } else if (scope_->is_strict_mode()) {
       CheckOctalLiteral(start_position, scanner_->location().end_pos, &ok);
     }
     return kPreParseSuccess;
@@ -493,6 +520,7 @@
   Statement ParseVariableStatement(VariableDeclarationContext var_context,
                                    bool* ok);
   Statement ParseVariableDeclarations(VariableDeclarationContext var_context,
+                                      VariableDeclarationProperties* decl_props,
                                       int* num_decl,
                                       bool* ok);
   Statement ParseExpressionOrLabelledStatement(bool* ok);
@@ -563,10 +591,10 @@
   bool peek_any_identifier();
 
   void set_strict_mode() {
-    scope_->set_strict();
+    scope_->set_strict_mode_flag(kStrictMode);
   }
 
-  bool strict_mode() { return scope_->is_strict(); }
+  bool strict_mode() { return scope_->strict_mode_flag() == kStrictMode; }
 
   void Consume(i::Token::Value token) { Next(); }
 
@@ -607,6 +635,7 @@
   const char* strict_mode_violation_type_;
   bool stack_overflow_;
   bool allow_lazy_;
+  bool allow_natives_syntax_;
   bool parenthesized_function_;
   bool harmony_scoping_;
 };
diff --git a/src/profile-generator.cc b/src/profile-generator.cc
index bae35c8..9812c26 100644
--- a/src/profile-generator.cc
+++ b/src/profile-generator.cc
@@ -1930,9 +1930,11 @@
       SetInternalReference(js_fun, entry,
                            "context", js_fun->unchecked_context(),
                            JSFunction::kContextOffset);
-      TagObject(js_fun->literals(), "(function literals)");
+      TagObject(js_fun->literals_or_bindings(),
+                "(function literals_or_bindings)");
       SetInternalReference(js_fun, entry,
-                           "literals", js_fun->literals(),
+                           "literals_or_bindings",
+                           js_fun->literals_or_bindings(),
                            JSFunction::kLiteralsOffset);
     }
     TagObject(js_obj->properties(), "(object properties)");
@@ -1949,6 +1951,10 @@
       SetInternalReference(obj, entry, 1, cs->first());
       SetInternalReference(obj, entry, 2, cs->second());
     }
+    if (obj->IsSlicedString()) {
+      SlicedString* ss = SlicedString::cast(obj);
+      SetInternalReference(obj, entry, "parent", ss->parent());
+    }
     extract_indexed_refs = false;
   } else if (obj->IsGlobalContext()) {
     Context* context = Context::cast(obj);
@@ -2164,15 +2170,16 @@
 
 
 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
-  if (object->IsJSFunction()) return HEAP->closure_symbol();
+  Heap* heap = object->GetHeap();
+  if (object->IsJSFunction()) return heap->closure_symbol();
   String* constructor_name = object->constructor_name();
-  if (constructor_name == HEAP->Object_symbol()) {
+  if (constructor_name == heap->Object_symbol()) {
     // Look up an immediate "constructor" property, if it is a function,
     // return its name. This is for instances of binding objects, which
     // have prototype constructor type "Object".
     Object* constructor_prop = NULL;
-    LookupResult result;
-    object->LocalLookupRealNamedProperty(HEAP->constructor_symbol(), &result);
+    LookupResult result(heap->isolate());
+    object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
     if (result.IsProperty()) {
       constructor_prop = result.GetLazyValue();
     }
diff --git a/src/property.cc b/src/property.cc
index 7cc2df5..6e043e2 100644
--- a/src/property.cc
+++ b/src/property.cc
@@ -31,6 +31,15 @@
 namespace internal {
 
 
+void LookupResult::Iterate(ObjectVisitor* visitor) {
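+  // Visit the holder of this result and of every older LookupResult chained
+  // through next_ so the GC can update the raw object pointers.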
+  LookupResult* current = this;  // Could be NULL.
+  while (current != NULL) {
+    visitor->VisitPointer(BitCast<Object**>(&current->holder_));
+    current = current->next_;
+  }
+}
+
+
 #ifdef OBJECT_PRINT
 void LookupResult::Print(FILE* out) {
   if (!IsFound()) {
diff --git a/src/property.h b/src/property.h
index ee2e8c8..ffea41e 100644
--- a/src/property.h
+++ b/src/property.h
@@ -164,10 +164,20 @@
 
 class LookupResult BASE_EMBEDDED {
  public:
-  LookupResult()
-      : lookup_type_(NOT_FOUND),
+  explicit LookupResult(Isolate* isolate)
+      : isolate_(isolate),
+        next_(isolate->top_lookup_result()),
+        lookup_type_(NOT_FOUND),
+        holder_(NULL),
         cacheable_(true),
-        details_(NONE, NORMAL) {}
+        details_(NONE, NORMAL) {
+    isolate->SetTopLookupResult(this);
+  }
+
+  ~LookupResult() {
+    ASSERT(isolate_->top_lookup_result() == this);
+    isolate_->SetTopLookupResult(next_);
+  }
 
   void DescriptorResult(JSObject* holder, PropertyDetails details, int number) {
     lookup_type_ = DESCRIPTOR_TYPE;
@@ -215,6 +225,7 @@
 
   void NotFound() {
     lookup_type_ = NOT_FOUND;
+    holder_ = NULL;
   }
 
   JSObject* holder() {
@@ -346,7 +357,12 @@
     return holder()->GetNormalizedProperty(this);
   }
 
+  void Iterate(ObjectVisitor* visitor);
+
  private:
+  Isolate* isolate_;
+  LookupResult* next_;
+
   // Where did we find the result;
   enum {
     NOT_FOUND,
diff --git a/src/proxy.js b/src/proxy.js
index a51f09a..3cd467f 100644
--- a/src/proxy.js
+++ b/src/proxy.js
@@ -32,7 +32,10 @@
 $Proxy.create = function(handler, proto) {
   if (!IS_SPEC_OBJECT(handler))
     throw MakeTypeError("handler_non_object", ["create"])
-  if (!IS_SPEC_OBJECT(proto)) proto = null  // Mozilla does this...
+  if (IS_UNDEFINED(proto))
+    proto = null
+  else if (!(IS_SPEC_OBJECT(proto) || proto === null))
+    throw MakeTypeError("proto_non_object", ["create"])
   return %CreateJSProxy(handler, proto)
 }
 
@@ -41,20 +44,20 @@
     throw MakeTypeError("handler_non_object", ["create"])
   if (!IS_SPEC_FUNCTION(callTrap))
     throw MakeTypeError("trap_function_expected", ["createFunction", "call"])
-  var construct
   if (IS_UNDEFINED(constructTrap)) {
-    construct = DerivedConstructTrap(callTrap)
+    constructTrap = DerivedConstructTrap(callTrap)
   } else if (IS_SPEC_FUNCTION(constructTrap)) {
-    construct = function() {
-      // Make sure the trap receives 'undefined' as this.
-      return %Apply(constructTrap, void 0, arguments, 0, %_ArgumentsLength());
+    // Make sure the trap receives 'undefined' as this.
+    var construct = constructTrap
+    constructTrap = function() {
+      return %Apply(construct, void 0, arguments, 0, %_ArgumentsLength());
     }
   } else {
     throw MakeTypeError("trap_function_expected",
                         ["createFunction", "construct"])
   }
   return %CreateJSFunctionProxy(
-    handler, callTrap, construct, $Function.prototype)
+    handler, callTrap, constructTrap, $Function.prototype)
 }
 
 
@@ -153,9 +156,32 @@
   var enumerableNames = []
   for (var i = 0, count = 0; i < names.length; ++i) {
     var name = names[i]
-    if (this.getOwnPropertyDescriptor(TO_STRING_INLINE(name)).enumerable) {
+    var desc = this.getOwnPropertyDescriptor(TO_STRING_INLINE(name))
+    if (!IS_UNDEFINED(desc) && desc.enumerable) {
       enumerableNames[count++] = names[i]
     }
   }
   return enumerableNames
 }
+
+function DerivedEnumerateTrap() {
+  var names = this.getPropertyNames()
+  var enumerableNames = []
+  for (var i = 0, count = 0; i < names.length; ++i) {
+    var name = names[i]
+    var desc = this.getPropertyDescriptor(TO_STRING_INLINE(name))
+    if (!IS_UNDEFINED(desc) && desc.enumerable) {
+      enumerableNames[count++] = names[i]
+    }
+  }
+  return enumerableNames
+}
+
+function ProxyEnumerate(proxy) {
+  var handler = %GetHandler(proxy)
+  if (IS_UNDEFINED(handler.enumerate)) {
+    return %Apply(DerivedEnumerateTrap, handler, [], 0, 0)
+  } else {
+    return ToStringArray(handler.enumerate(), "enumerate")
+  }
+}
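
With the proxy.js changes above, Proxy.create type-checks its prototype argument, createFunction normalizes the construct trap, and enumeration can fall back to a derived trap.  A sketch under --harmony, assuming for-in over a proxy is routed to ProxyEnumerate (the runtime side is not shown in this hunk):

    // d8 --harmony
    var handler = {
      getPropertyNames: function() { return ["x", "y"]; },
      getPropertyDescriptor: function(name) {
        return { value: name, enumerable: true, configurable: true };
      }
    };
    var p = Proxy.create(handler);
    for (var name in p) print(name);   // "x", "y" via DerivedEnumerateTrap
    Proxy.create(handler, 42);         // TypeError: proto must be an object or null
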
diff --git a/src/regexp.js b/src/regexp.js
index 0ab86f3..f373ceb 100644
--- a/src/regexp.js
+++ b/src/regexp.js
@@ -174,13 +174,6 @@
                         ['RegExp.prototype.exec', this]);
   }
 
-  if (%_ArgumentsLength() === 0) {
-    var regExpInput = LAST_INPUT(lastMatchInfo);
-    if (IS_UNDEFINED(regExpInput)) {
-      throw MakeError('no_input_to_regexp', [this]);
-    }
-    string = regExpInput;
-  }
   string = TO_STRING_INLINE(string);
   var lastIndex = this.lastIndex;
 
@@ -229,14 +222,6 @@
     throw MakeTypeError('incompatible_method_receiver',
                         ['RegExp.prototype.test', this]);
   }
-  if (%_ArgumentsLength() == 0) {
-    var regExpInput = LAST_INPUT(lastMatchInfo);
-    if (IS_UNDEFINED(regExpInput)) {
-      throw MakeError('no_input_to_regexp', [this]);
-    }
-    string = regExpInput;
-  }
-
   string = TO_STRING_INLINE(string);
 
   var lastIndex = this.lastIndex;
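
With the no-argument special case removed, RegExp.prototype.test and exec simply stringify a missing argument to "undefined" instead of reusing the last regexp input, matching JSC:

    var re = /ned/;
    print(re.test());      // true: the missing argument becomes the string "undefined"
    print(/a/.exec());     // null: "undefined" contains no "a"
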
diff --git a/src/runtime.cc b/src/runtime.cc
index e0f507e..9c23c2c 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -432,64 +432,77 @@
   // Create the JSArray.
   Handle<JSFunction> constructor(
       JSFunction::GlobalContextFromLiterals(*literals)->array_function());
-  Handle<Object> object = isolate->factory()->NewJSObject(constructor);
+  Handle<JSArray> object =
+      Handle<JSArray>::cast(isolate->factory()->NewJSObject(constructor));
 
-  if (elements->length() > kSmiOnlyLiteralMinimumLength) {
-    Handle<Map> smi_array_map = isolate->factory()->GetElementsTransitionMap(
-        Handle<JSObject>::cast(object),
-        FAST_SMI_ONLY_ELEMENTS);
-    HeapObject::cast(*object)->set_map(*smi_array_map);
+  ElementsKind constant_elements_kind =
+      static_cast<ElementsKind>(Smi::cast(elements->get(0))->value());
+  Handle<FixedArrayBase> constant_elements_values(
+      FixedArrayBase::cast(elements->get(1)));
+
+  ASSERT(FLAG_smi_only_arrays || constant_elements_kind == FAST_ELEMENTS ||
+         constant_elements_kind == FAST_SMI_ONLY_ELEMENTS);
+  bool allow_literal_kind_transition = FLAG_smi_only_arrays &&
+      constant_elements_kind > object->GetElementsKind();
+
+  if (!FLAG_smi_only_arrays &&
+      constant_elements_values->length() > kSmiOnlyLiteralMinimumLength &&
+      constant_elements_kind != object->GetElementsKind()) {
+    allow_literal_kind_transition = true;
   }
 
-  const bool is_cow =
-      (elements->map() == isolate->heap()->fixed_cow_array_map());
-  Handle<FixedArray> copied_elements =
-      is_cow ? elements : isolate->factory()->CopyFixedArray(elements);
+  // If the ElementsKind of the constant values of the array literal is less
+  // specific than the ElementsKind of the boilerplate array object, change the
+  // boilerplate array object's map to reflect that kind.
+  if (allow_literal_kind_transition) {
+    Handle<Map> transitioned_array_map =
+        isolate->factory()->GetElementsTransitionMap(object,
+                                                     constant_elements_kind);
+    object->set_map(*transitioned_array_map);
+  }
 
-  Handle<FixedArray> content = Handle<FixedArray>::cast(copied_elements);
-  bool has_non_smi = false;
-  if (is_cow) {
-    // Copy-on-write arrays must be shallow (and simple).
-    for (int i = 0; i < content->length(); i++) {
-      Object* current = content->get(i);
-      ASSERT(!current->IsFixedArray());
-      if (!current->IsSmi() && !current->IsTheHole()) {
-        has_non_smi = true;
-      }
-    }
-#if DEBUG
-    for (int i = 0; i < content->length(); i++) {
-      ASSERT(!content->get(i)->IsFixedArray());
-    }
-#endif
+  Handle<FixedArrayBase> copied_elements_values;
+  if (constant_elements_kind == FAST_DOUBLE_ELEMENTS) {
+    ASSERT(FLAG_smi_only_arrays);
+    copied_elements_values = isolate->factory()->CopyFixedDoubleArray(
+        Handle<FixedDoubleArray>::cast(constant_elements_values));
   } else {
-    for (int i = 0; i < content->length(); i++) {
-      Object* current = content->get(i);
-      if (current->IsFixedArray()) {
-        // The value contains the constant_properties of a
-        // simple object or array literal.
-        Handle<FixedArray> fa(FixedArray::cast(content->get(i)));
-        Handle<Object> result =
-            CreateLiteralBoilerplate(isolate, literals, fa);
-        if (result.is_null()) return result;
-        content->set(i, *result);
-        has_non_smi = true;
-      } else {
-        if (!current->IsSmi() && !current->IsTheHole()) {
-          has_non_smi = true;
+    ASSERT(constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+           constant_elements_kind == FAST_ELEMENTS);
+    const bool is_cow =
+        (constant_elements_values->map() ==
+         isolate->heap()->fixed_cow_array_map());
+    if (is_cow) {
+      copied_elements_values = constant_elements_values;
+#if DEBUG
+      Handle<FixedArray> fixed_array_values =
+          Handle<FixedArray>::cast(copied_elements_values);
+      for (int i = 0; i < fixed_array_values->length(); i++) {
+        ASSERT(!fixed_array_values->get(i)->IsFixedArray());
+      }
+#endif
+    } else {
+      Handle<FixedArray> fixed_array_values =
+          Handle<FixedArray>::cast(constant_elements_values);
+      Handle<FixedArray> fixed_array_values_copy =
+          isolate->factory()->CopyFixedArray(fixed_array_values);
+      copied_elements_values = fixed_array_values_copy;
+      for (int i = 0; i < fixed_array_values->length(); i++) {
+        Object* current = fixed_array_values->get(i);
+        if (current->IsFixedArray()) {
+          // The value contains the constant_properties of a
+          // simple object or array literal.
+          Handle<FixedArray> fa(FixedArray::cast(fixed_array_values->get(i)));
+          Handle<Object> result =
+              CreateLiteralBoilerplate(isolate, literals, fa);
+          if (result.is_null()) return result;
+          fixed_array_values_copy->set(i, *result);
         }
       }
     }
   }
-
-  // Set the elements.
-  Handle<JSArray> js_object(Handle<JSArray>::cast(object));
-  isolate->factory()->SetContent(js_object, content);
-
-  if (has_non_smi && js_object->HasFastSmiOnlyElements()) {
-    isolate->factory()->EnsureCanContainNonSmiElements(js_object);
-  }
-
+  object->set_elements(*copied_elements_values);
+  object->set_length(Smi::FromInt(copied_elements_values->length()));
   return object;
 }
 
@@ -704,6 +717,82 @@
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetInitialize) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 1);
+  CONVERT_ARG_CHECKED(JSSet, holder, 0);
+  Handle<ObjectHashSet> table = isolate->factory()->NewObjectHashSet(0);
+  holder->set_table(*table);
+  return *holder;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAdd) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 2);
+  CONVERT_ARG_CHECKED(JSSet, holder, 0);
+  Handle<Object> key(args[1]);
+  Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
+  table = ObjectHashSetAdd(table, key);
+  holder->set_table(*table);
+  return isolate->heap()->undefined_symbol();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetHas) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 2);
+  CONVERT_ARG_CHECKED(JSSet, holder, 0);
+  Handle<Object> key(args[1]);
+  Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
+  return isolate->heap()->ToBoolean(table->Contains(*key));
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetDelete) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 2);
+  CONVERT_ARG_CHECKED(JSSet, holder, 0);
+  Handle<Object> key(args[1]);
+  Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
+  table = ObjectHashSetRemove(table, key);
+  holder->set_table(*table);
+  return isolate->heap()->undefined_symbol();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_MapInitialize) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 1);
+  CONVERT_ARG_CHECKED(JSMap, holder, 0);
+  Handle<ObjectHashTable> table = isolate->factory()->NewObjectHashTable(0);
+  holder->set_table(*table);
+  return *holder;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_MapGet) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 2);
+  CONVERT_ARG_CHECKED(JSMap, holder, 0);
+  Handle<Object> key(args[1]);
+  return ObjectHashTable::cast(holder->table())->Lookup(*key);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_MapSet) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 3);
+  CONVERT_ARG_CHECKED(JSMap, holder, 0);
+  Handle<Object> key(args[1]);
+  Handle<Object> value(args[2]);
+  Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
+  Handle<ObjectHashTable> new_table = PutIntoObjectHashTable(table, key, value);
+  holder->set_table(*new_table);
+  return *value;
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapInitialize) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
@@ -961,7 +1050,7 @@
   HandleScope scope(isolate);
   Handle<FixedArray> elms = isolate->factory()->NewFixedArray(DESCRIPTOR_SIZE);
   Handle<JSArray> desc = isolate->factory()->NewJSArrayWithElements(elms);
-  LookupResult result;
+  LookupResult result(isolate);
   CONVERT_ARG_CHECKED(JSObject, obj, 0);
   CONVERT_ARG_CHECKED(String, name, 1);
 
@@ -992,7 +1081,7 @@
       case JSObject::INTERCEPTED_ELEMENT:
       case JSObject::FAST_ELEMENT: {
         elms->set(IS_ACCESSOR_INDEX, heap->false_value());
-        Handle<Object> value = GetElement(obj, index);
+        Handle<Object> value = Object::GetElement(obj, index);
         RETURN_IF_EMPTY_HANDLE(isolate, value);
         elms->set(VALUE_INDEX, *value);
         elms->set(WRITABLE_INDEX, heap->true_value());
@@ -1036,7 +1125,7 @@
           case NORMAL: {
             // This is a data property.
             elms->set(IS_ACCESSOR_INDEX, heap->false_value());
-            Handle<Object> value = GetElement(obj, index);
+            Handle<Object> value = Object::GetElement(obj, index);
             ASSERT(!value.is_null());
             elms->set(VALUE_INDEX, *value);
             elms->set(WRITABLE_INDEX, heap->ToBoolean(!details.IsReadOnly()));
@@ -1240,7 +1329,7 @@
     if (value->IsUndefined() || is_const_property) {
       // Lookup the property in the global object, and don't set the
       // value of the variable if the property is already there.
-      LookupResult lookup;
+      LookupResult lookup(isolate);
       global->Lookup(*name, &lookup);
       if (lookup.IsProperty()) {
         // We found an existing property. Unless it was an interceptor
@@ -1267,7 +1356,7 @@
       value = function;
     }
 
-    LookupResult lookup;
+    LookupResult lookup(isolate);
     global->LocalLookup(*name, &lookup);
 
     // Compute the property attributes. According to ECMA-262, section
@@ -1275,10 +1364,10 @@
     // non-deletable. However, neither SpiderMonkey nor KJS creates the
     // property as read-only, so we don't either.
     int attr = NONE;
-    if ((flags & kDeclareGlobalsEvalFlag) == 0) {
+    if (!DeclareGlobalsEvalFlag::decode(flags)) {
       attr |= DONT_DELETE;
     }
-    bool is_native = (flags & kDeclareGlobalsNativeFlag) != 0;
+    bool is_native = DeclareGlobalsNativeFlag::decode(flags);
     if (is_const_property || (is_native && is_function_declaration)) {
       attr |= READ_ONLY;
     }
@@ -1303,9 +1392,7 @@
                                                               value,
                                                               attributes));
     } else {
-      StrictModeFlag strict_mode =
-          ((flags & kDeclareGlobalsStrictModeFlag) != 0) ? kStrictMode
-                                                         : kNonStrictMode;
+      StrictModeFlag strict_mode = DeclareGlobalsStrictModeFlag::decode(flags);
       RETURN_IF_EMPTY_HANDLE(isolate,
                              SetProperty(global,
                                          name,
@@ -1399,7 +1486,7 @@
     // not real JSObjects.
     if (initial_value->IsTheHole() &&
         !object->IsJSContextExtensionObject()) {
-      LookupResult lookup;
+      LookupResult lookup(isolate);
       object->Lookup(*name, &lookup);
       if (lookup.IsProperty() && (lookup.type() == CALLBACKS)) {
         return ThrowRedeclarationError(isolate, "const", name);
@@ -1443,7 +1530,7 @@
   // Note that objects can have hidden prototypes, so we need to traverse
   // the whole chain of hidden prototypes to do a 'local' lookup.
   Object* object = global;
-  LookupResult lookup;
+  LookupResult lookup(isolate);
   while (object->IsJSObject() &&
          JSObject::cast(object)->map()->is_hidden_prototype()) {
     JSObject* raw_holder = JSObject::cast(object);
@@ -1497,7 +1584,7 @@
   // add it as a local property even in case of callbacks in the
   // prototype chain (this rules out using SetProperty).
   // We use SetLocalPropertyIgnoreAttributes instead
-  LookupResult lookup;
+  LookupResult lookup(isolate);
   global->LocalLookup(*name, &lookup);
   if (!lookup.IsProperty()) {
     return global->SetLocalPropertyIgnoreAttributes(*name,
@@ -1614,7 +1701,7 @@
     // This is the property that was introduced by the const declaration.
     // Set it if it hasn't been set before.  NOTE: We cannot use
     // GetProperty() to get the current value as it 'unholes' the value.
-    LookupResult lookup;
+    LookupResult lookup(isolate);
     object->LocalLookupRealNamedProperty(*name, &lookup);
     ASSERT(lookup.IsProperty());  // the property was declared
     ASSERT(lookup.IsReadOnly());  // and it was declared as read-only
@@ -1663,19 +1750,6 @@
 }
 
 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_NonSmiElementStored) {
-  ASSERT(args.length() == 1);
-  CONVERT_ARG_CHECKED(JSObject, object, 0);
-  if (object->HasFastSmiOnlyElements()) {
-    MaybeObject* maybe_map = object->GetElementsTransitionMap(FAST_ELEMENTS);
-    Map* map;
-    if (!maybe_map->To<Map>(&map)) return maybe_map;
-    object->set_map(Map::cast(map));
-  }
-  return *object;
-}
-
-
 RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExec) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 4);
@@ -1930,15 +2004,6 @@
 }
 
 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetBound) {
-  HandleScope scope(isolate);
-  ASSERT(args.length() == 1);
-
-  CONVERT_CHECKED(JSFunction, fun, args[0]);
-  fun->shared()->set_bound(true);
-  return isolate->heap()->undefined_value();
-}
-
 RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionRemovePrototype) {
   NoHandleAllocation ha;
   ASSERT(args.length() == 1);
@@ -2017,24 +2082,6 @@
 }
 
 
-// Creates a local, readonly, property called length with the correct
-// length (when read by the user). This effectively overwrites the
-// interceptor used to normally provide the length.
-RUNTIME_FUNCTION(MaybeObject*, Runtime_BoundFunctionSetLength) {
-  NoHandleAllocation ha;
-  ASSERT(args.length() == 2);
-  CONVERT_CHECKED(JSFunction, fun, args[0]);
-  CONVERT_CHECKED(Smi, length, args[1]);
-  MaybeObject* maybe_name =
-      isolate->heap()->AllocateStringFromAscii(CStrVector("length"));
-  String* name;
-  if (!maybe_name->To(&name)) return maybe_name;
-  PropertyAttributes attr =
-      static_cast<PropertyAttributes>(DONT_DELETE | DONT_ENUM | READ_ONLY);
-  return fun->AddProperty(name, length, attr, kNonStrictMode);
-}
-
-
 RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetPrototype) {
   NoHandleAllocation ha;
   ASSERT(args.length() == 2);
@@ -2137,13 +2184,12 @@
     Handle<JSFunction> fun = Handle<JSFunction>::cast(code);
     Handle<SharedFunctionInfo> shared(fun->shared());
 
-    if (!EnsureCompiled(shared, KEEP_EXCEPTION)) {
+    if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
       return Failure::Exception();
     }
     // Since we don't store the source for this we should never
     // optimize this.
     shared->code()->set_optimizable(false);
-
     // Set the code, scope info, formal parameter count,
     // and the length of the target function.
     target->shared()->set_code(shared->code());
@@ -4069,11 +4115,6 @@
     return prototype->GetElement(index);
   }
 
-  return GetElement(object, index);
-}
-
-
-MaybeObject* Runtime::GetElement(Handle<Object> object, uint32_t index) {
   return object->GetElement(index);
 }
 
@@ -4162,7 +4203,7 @@
         return value->IsTheHole() ? isolate->heap()->undefined_value() : value;
       }
       // Lookup cache miss.  Perform lookup and update the cache if appropriate.
-      LookupResult result;
+      LookupResult result(isolate);
       receiver->LocalLookup(key, &result);
       if (result.IsProperty() && result.type() == FIELD) {
         int offset = result.GetFieldIndex();
@@ -4217,7 +4258,7 @@
   int unchecked = flag_attr->value();
   RUNTIME_ASSERT((unchecked & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0);
   RUNTIME_ASSERT(!obj->IsNull());
-  LookupResult result;
+  LookupResult result(isolate);
   obj->LocalLookupRealNamedProperty(name, &result);
 
   PropertyAttributes attr = static_cast<PropertyAttributes>(unchecked);
@@ -4259,11 +4300,11 @@
   uint32_t index;
   bool is_element = name->AsArrayIndex(&index);
 
-  // Special case for elements if any of the flags are true.
+  // Special case for elements if any of the flags might be involved.
   // If elements are in fast case we always implicitly assume that:
   // DONT_DELETE: false, DONT_ENUM: false, READ_ONLY: false.
-  if (((unchecked & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) &&
-      is_element) {
+  if (is_element && (attr != NONE ||
+      js_object->HasLocalElement(index) == JSObject::DICTIONARY_ELEMENT)) {
     // Normalize the elements to enable attributes on the property.
     if (js_object->IsJSGlobalProxy()) {
       // We do not need to do access checks here since these has already
@@ -4301,7 +4342,7 @@
     return *obj_value;
   }
 
-  LookupResult result;
+  LookupResult result(isolate);
   js_object->LocalLookupRealNamedProperty(*name, &result);
 
   // To be compatible with safari we do not change the value on API objects
@@ -4568,6 +4609,39 @@
 }
 
 
+MaybeObject* TransitionElements(Handle<Object> object,
+                                ElementsKind to_kind,
+                                Isolate* isolate) {
+  HandleScope scope(isolate);
+  if (!object->IsJSObject()) return isolate->ThrowIllegalOperation();
+  ElementsKind from_kind =
+      Handle<JSObject>::cast(object)->map()->elements_kind();
+  if (Map::IsValidElementsTransition(from_kind, to_kind)) {
+    Handle<Object> result =
+        TransitionElementsKind(Handle<JSObject>::cast(object), to_kind);
+    if (result.is_null()) return isolate->ThrowIllegalOperation();
+    return *result;
+  }
+  return isolate->ThrowIllegalOperation();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_TransitionElementsSmiToDouble) {
+  NoHandleAllocation ha;
+  RUNTIME_ASSERT(args.length() == 1);
+  Handle<Object> object = args.at<Object>(0);
+  return TransitionElements(object, FAST_DOUBLE_ELEMENTS, isolate);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_TransitionElementsDoubleToObject) {
+  NoHandleAllocation ha;
+  RUNTIME_ASSERT(args.length() == 1);
+  Handle<Object> object = args.at<Object>(0);
+  return TransitionElements(object, FAST_ELEMENTS, isolate);
+}
+
+
 // Set the native flag on the function.
 // This is used to decide if we should transform null and undefined
 // into the global object when doing call and apply.
@@ -4751,8 +4825,11 @@
 RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNames) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
-  CONVERT_ARG_CHECKED(JSObject, object, 0);
-  return *GetKeysFor(object);
+  CONVERT_ARG_CHECKED(JSReceiver, object, 0);
+  bool threw = false;
+  Handle<JSArray> result = GetKeysFor(object, &threw);
+  if (threw) return Failure::Exception();
+  return *result;
 }
 
 
@@ -4764,14 +4841,16 @@
 RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNamesFast) {
   ASSERT(args.length() == 1);
 
-  CONVERT_CHECKED(JSObject, raw_object, args[0]);
+  CONVERT_CHECKED(JSReceiver, raw_object, args[0]);
 
   if (raw_object->IsSimpleEnum()) return raw_object->map();
 
   HandleScope scope(isolate);
-  Handle<JSObject> object(raw_object);
-  Handle<FixedArray> content = GetKeysInFixedArrayFor(object,
-                                                      INCLUDE_PROTOS);
+  Handle<JSReceiver> object(raw_object);
+  bool threw = false;
+  Handle<FixedArray> content =
+      GetKeysInFixedArrayFor(object, INCLUDE_PROTOS, &threw);
+  if (threw) return Failure::Exception();
 
   // Test again, since cache may have been built by preceding call.
   if (object->IsSimpleEnum()) return object->map();
@@ -4968,8 +5047,11 @@
     object = Handle<JSObject>::cast(proto);
   }
 
-  Handle<FixedArray> contents = GetKeysInFixedArrayFor(object,
-                                                       LOCAL_ONLY);
+  bool threw = false;
+  Handle<FixedArray> contents =
+      GetKeysInFixedArrayFor(object, LOCAL_ONLY, &threw);
+  if (threw) return Failure::Exception();
+
   // Some fast paths through GetKeysInFixedArrayFor reuse a cached
   // property array and since the result is mutable we have to create
   // a fresh clone on each invocation.
@@ -7762,14 +7844,21 @@
   int year, month, day;
   DateYMDFromTime(static_cast<int>(floor(t / 86400000)), year, month, day);
 
-  RUNTIME_ASSERT(res_array->elements()->map() ==
-                 isolate->heap()->fixed_array_map());
-  FixedArray* elms = FixedArray::cast(res_array->elements());
-  RUNTIME_ASSERT(elms->length() == 3);
+  FixedArrayBase* elms_base = FixedArrayBase::cast(res_array->elements());
+  RUNTIME_ASSERT(elms_base->length() == 3);
+  RUNTIME_ASSERT(res_array->GetElementsKind() <= FAST_DOUBLE_ELEMENTS);
 
-  elms->set(0, Smi::FromInt(year));
-  elms->set(1, Smi::FromInt(month));
-  elms->set(2, Smi::FromInt(day));
+  if (res_array->HasFastDoubleElements()) {
+    FixedDoubleArray* elms = FixedDoubleArray::cast(res_array->elements());
+    elms->set(0, year);
+    elms->set(1, month);
+    elms->set(2, day);
+  } else {
+    FixedArray* elms = FixedArray::cast(res_array->elements());
+    elms->set(0, Smi::FromInt(year));
+    elms->set(1, Smi::FromInt(month));
+    elms->set(2, Smi::FromInt(day));
+  }
 
   return isolate->heap()->undefined_value();
 }
@@ -7926,8 +8015,11 @@
 }
 
 
-static SmartArrayPointer<Handle<Object> > GetNonBoundArguments(
-    int bound_argc,
+// Find the arguments of the JavaScript function invocation that called
+// into C++ code. Collect these in a newly allocated array of handles (possibly
+// prefixed by a number of empty handles).
+static SmartArrayPointer<Handle<Object> > GetCallerArguments(
+    int prefix_argc,
     int* total_argc) {
   // Find frame containing arguments passed to the caller.
   JavaScriptFrameIterator it;
@@ -7943,12 +8035,12 @@
                                             inlined_frame_index,
                                             &args_slots);
 
-    *total_argc = bound_argc + args_count;
+    *total_argc = prefix_argc + args_count;
     SmartArrayPointer<Handle<Object> > param_data(
         NewArray<Handle<Object> >(*total_argc));
     for (int i = 0; i < args_count; i++) {
       Handle<Object> val = args_slots[i].GetValue();
-      param_data[bound_argc + i] = val;
+      param_data[prefix_argc + i] = val;
     }
     return param_data;
   } else {
@@ -7956,49 +8048,131 @@
     frame = it.frame();
     int args_count = frame->ComputeParametersCount();
 
-    *total_argc = bound_argc + args_count;
+    *total_argc = prefix_argc + args_count;
     SmartArrayPointer<Handle<Object> > param_data(
         NewArray<Handle<Object> >(*total_argc));
     for (int i = 0; i < args_count; i++) {
       Handle<Object> val = Handle<Object>(frame->GetParameter(i));
-      param_data[bound_argc + i] = val;
+      param_data[prefix_argc + i] = val;
     }
     return param_data;
   }
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionBindArguments) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 4);
+  CONVERT_ARG_CHECKED(JSFunction, bound_function, 0);
+  RUNTIME_ASSERT(args[3]->IsNumber());
+  Handle<Object> bindee = args.at<Object>(1);
+
+  // TODO(lrn): Create bound function in C++ code from premade shared info.
+  bound_function->shared()->set_bound(true);
+  // Get all arguments of calling function (Function.prototype.bind).
+  int argc = 0;
+  SmartArrayPointer<Handle<Object> > arguments = GetCallerArguments(0, &argc);
+  // Don't count the this-arg.
+  if (argc > 0) {
+    ASSERT(*arguments[0] == args[2]);
+    argc--;
+  } else {
+    ASSERT(args[2]->IsUndefined());
+  }
+  // Initialize array of bindings (function, this, and any existing arguments
+  // if the function was already bound).
+  Handle<FixedArray> new_bindings;
+  int i;
+  if (bindee->IsJSFunction() && JSFunction::cast(*bindee)->shared()->bound()) {
+    Handle<FixedArray> old_bindings(
+        JSFunction::cast(*bindee)->function_bindings());
+    new_bindings =
+        isolate->factory()->NewFixedArray(old_bindings->length() + argc);
+    bindee = Handle<Object>(old_bindings->get(JSFunction::kBoundFunctionIndex));
+    i = 0;
+    for (int n = old_bindings->length(); i < n; i++) {
+      new_bindings->set(i, old_bindings->get(i));
+    }
+  } else {
+    int array_size = JSFunction::kBoundArgumentsStartIndex + argc;
+    new_bindings = isolate->factory()->NewFixedArray(array_size);
+    new_bindings->set(JSFunction::kBoundFunctionIndex, *bindee);
+    new_bindings->set(JSFunction::kBoundThisIndex, args[2]);
+    i = 2;
+  }
+  // Copy arguments, skipping the first which is "this_arg".
+  for (int j = 0; j < argc; j++, i++) {
+    new_bindings->set(i, *arguments[j + 1]);
+  }
+  new_bindings->set_map(isolate->heap()->fixed_cow_array_map());
+  bound_function->set_function_bindings(*new_bindings);
+
+  // Update length.
+  Handle<String> length_symbol = isolate->factory()->length_symbol();
+  Handle<Object> new_length(args.at<Object>(3));
+  PropertyAttributes attr =
+      static_cast<PropertyAttributes>(DONT_DELETE | DONT_ENUM | READ_ONLY);
+  ForceSetProperty(bound_function, length_symbol, new_length, attr);
+  return *bound_function;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_BoundFunctionGetBindings) {
+  HandleScope handles(isolate);
+  ASSERT(args.length() == 1);
+  CONVERT_ARG_CHECKED(JSObject, callable, 0);
+  if (callable->IsJSFunction()) {
+    Handle<JSFunction> function = Handle<JSFunction>::cast(callable);
+    if (function->shared()->bound()) {
+      Handle<FixedArray> bindings(function->function_bindings());
+      ASSERT(bindings->map() == isolate->heap()->fixed_cow_array_map());
+      return *isolate->factory()->NewJSArrayWithElements(bindings);
+    }
+  }
+  return isolate->heap()->undefined_value();
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObjectFromBound) {
   HandleScope scope(isolate);
-  ASSERT(args.length() == 2);
+  ASSERT(args.length() == 1);
   // First argument is a function to use as a constructor.
   CONVERT_ARG_CHECKED(JSFunction, function, 0);
+  RUNTIME_ASSERT(function->shared()->bound());
 
-  // Second argument is either null or an array of bound arguments.
-  Handle<FixedArray> bound_args;
-  int bound_argc = 0;
-  if (!args[1]->IsNull()) {
-    CONVERT_ARG_CHECKED(JSArray, params, 1);
-    RUNTIME_ASSERT(params->HasFastTypeElements());
-    bound_args = Handle<FixedArray>(FixedArray::cast(params->elements()));
-    bound_argc = Smi::cast(params->length())->value();
-  }
+  // The argument is a bound function. Extract its bound arguments
+  // and callable.
+  Handle<FixedArray> bound_args =
+      Handle<FixedArray>(FixedArray::cast(function->function_bindings()));
+  int bound_argc = bound_args->length() - JSFunction::kBoundArgumentsStartIndex;
+  Handle<Object> bound_function(
+      JSReceiver::cast(bound_args->get(JSFunction::kBoundFunctionIndex)));
+  ASSERT(!bound_function->IsJSFunction() ||
+         !Handle<JSFunction>::cast(bound_function)->shared()->bound());
 
   int total_argc = 0;
   SmartArrayPointer<Handle<Object> > param_data =
-      GetNonBoundArguments(bound_argc, &total_argc);
+      GetCallerArguments(bound_argc, &total_argc);
   for (int i = 0; i < bound_argc; i++) {
-    Handle<Object> val = Handle<Object>(bound_args->get(i));
-    param_data[i] = val;
+    param_data[i] = Handle<Object>(bound_args->get(
+        JSFunction::kBoundArgumentsStartIndex + i));
   }
 
+  if (!bound_function->IsJSFunction()) {
+    bool exception_thrown;
+    bound_function = Execution::TryGetConstructorDelegate(bound_function,
+                                                          &exception_thrown);
+    if (exception_thrown) return Failure::Exception();
+  }
+  ASSERT(bound_function->IsJSFunction());
+
   bool exception = false;
   Handle<Object> result =
-      Execution::New(function, total_argc, *param_data, &exception);
+      Execution::New(Handle<JSFunction>::cast(bound_function),
+                     total_argc, *param_data, &exception);
   if (exception) {
-      return Failure::Exception();
+    return Failure::Exception();
   }
-
   ASSERT(!result.is_null());
   return *result;
 }
@@ -8011,7 +8185,8 @@
     prototype = Handle<Object>(function->instance_prototype(), isolate);
   }
   if (function->shared()->CanGenerateInlineConstructor(*prototype)) {
-    ConstructStubCompiler compiler;
+    HandleScope scope(isolate);
+    ConstructStubCompiler compiler(isolate);
     MaybeObject* code = compiler.CompileConstructStub(*function);
     if (!code->IsFailure()) {
       function->shared()->set_construct_stub(
@@ -8075,9 +8250,11 @@
   // available. We cannot use EnsureCompiled because that forces a
   // compilation through the shared function info which makes it
   // impossible for us to optimize.
-  Handle<SharedFunctionInfo> shared(function->shared(), isolate);
-  if (!function->is_compiled()) CompileLazy(function, CLEAR_EXCEPTION);
+  if (!function->is_compiled()) {
+    JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
+  }
 
+  Handle<SharedFunctionInfo> shared(function->shared(), isolate);
   if (!function->has_initial_map() &&
       shared->IsInobjectSlackTrackingInProgress()) {
     // The tracking is already in progress for another function. We can only
@@ -8128,7 +8305,7 @@
 
   // Compile the target function.
   ASSERT(!function->is_compiled());
-  if (!CompileLazy(function, KEEP_EXCEPTION)) {
+  if (!JSFunction::CompileLazy(function, KEEP_EXCEPTION)) {
     return Failure::Exception();
   }
 
@@ -8165,7 +8342,9 @@
     function->ReplaceCode(function->shared()->code());
     return function->code();
   }
-  if (CompileOptimized(function, AstNode::kNoNumber, CLEAR_EXCEPTION)) {
+  if (JSFunction::CompileOptimized(function,
+                                   AstNode::kNoNumber,
+                                   CLEAR_EXCEPTION)) {
     return function->code();
   }
   if (FLAG_trace_opt) {
@@ -8406,7 +8585,7 @@
     // Try to compile the optimized code.  A true return value from
     // CompileOptimized means that compilation succeeded, not necessarily
     // that optimization succeeded.
-    if (CompileOptimized(function, ast_id, CLEAR_EXCEPTION) &&
+    if (JSFunction::CompileOptimized(function, ast_id, CLEAR_EXCEPTION) &&
         function->IsOptimized()) {
       DeoptimizationInputData* data = DeoptimizationInputData::cast(
           function->code()->deoptimization_data());
@@ -8762,13 +8941,26 @@
     Handle<Object> receiver = isolate->factory()->the_hole_value();
     Object* value = Context::cast(*holder)->get(index);
     // Check for uninitialized bindings.
-    if (binding_flags == MUTABLE_CHECK_INITIALIZED && value->IsTheHole()) {
-      Handle<Object> reference_error =
-          isolate->factory()->NewReferenceError("not_defined",
-                                                HandleVector(&name, 1));
-      return MakePair(isolate->Throw(*reference_error), NULL);
-    } else {
-      return MakePair(Unhole(isolate->heap(), value, attributes), *receiver);
+    switch (binding_flags) {
+      case MUTABLE_CHECK_INITIALIZED:
+      case IMMUTABLE_CHECK_INITIALIZED_HARMONY:
+        if (value->IsTheHole()) {
+          Handle<Object> reference_error =
+              isolate->factory()->NewReferenceError("not_defined",
+                                                    HandleVector(&name, 1));
+          return MakePair(isolate->Throw(*reference_error), NULL);
+        }
+        // FALLTHROUGH
+      case MUTABLE_IS_INITIALIZED:
+      case IMMUTABLE_IS_INITIALIZED:
+      case IMMUTABLE_IS_INITIALIZED_HARMONY:
+        ASSERT(!value->IsTheHole());
+        return MakePair(value, *receiver);
+      case IMMUTABLE_CHECK_INITIALIZED:
+        return MakePair(Unhole(isolate->heap(), value, attributes), *receiver);
+      case MISSING_BINDING:
+        UNREACHABLE();
+        return MakePair(NULL, NULL);
     }
   }
 
@@ -8947,42 +9139,6 @@
 }
 
 
-// NOTE: These PrintXXX functions are defined for all builds (not just
-// DEBUG builds) because we may want to be able to trace function
-// calls in all modes.
-static void PrintString(String* str) {
-  // not uncommon to have empty strings
-  if (str->length() > 0) {
-    SmartArrayPointer<char> s =
-        str->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
-    PrintF("%s", *s);
-  }
-}
-
-
-static void PrintObject(Object* obj) {
-  if (obj->IsSmi()) {
-    PrintF("%d", Smi::cast(obj)->value());
-  } else if (obj->IsString() || obj->IsSymbol()) {
-    PrintString(String::cast(obj));
-  } else if (obj->IsNumber()) {
-    PrintF("%g", obj->Number());
-  } else if (obj->IsFailure()) {
-    PrintF("<failure>");
-  } else if (obj->IsUndefined()) {
-    PrintF("<undefined>");
-  } else if (obj->IsNull()) {
-    PrintF("<null>");
-  } else if (obj->IsTrue()) {
-    PrintF("<true>");
-  } else if (obj->IsFalse()) {
-    PrintF("<false>");
-  } else {
-    PrintF("%p", reinterpret_cast<void*>(obj));
-  }
-}
-
-
 static int StackSize() {
   int n = 0;
   for (JavaScriptFrameIterator it; !it.done(); it.Advance()) n++;
@@ -9001,38 +9157,33 @@
   }
 
   if (result == NULL) {
-    // constructor calls
-    JavaScriptFrameIterator it;
-    JavaScriptFrame* frame = it.frame();
-    if (frame->IsConstructor()) PrintF("new ");
-    // function name
-    Object* fun = frame->function();
-    if (fun->IsJSFunction()) {
-      PrintObject(JSFunction::cast(fun)->shared()->name());
-    } else {
-      PrintObject(fun);
-    }
-    // function arguments
-    // (we are intentionally only printing the actually
-    // supplied parameters, not all parameters required)
-    PrintF("(this=");
-    PrintObject(frame->receiver());
-    const int length = frame->ComputeParametersCount();
-    for (int i = 0; i < length; i++) {
-      PrintF(", ");
-      PrintObject(frame->GetParameter(i));
-    }
-    PrintF(") {\n");
-
+    JavaScriptFrame::PrintTop(stdout, true, false);
+    PrintF(" {\n");
   } else {
     // function result
     PrintF("} -> ");
-    PrintObject(result);
+    result->ShortPrint();
     PrintF("\n");
   }
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_TraceElementsKindTransition) {
+  ASSERT(args.length() == 5);
+  CONVERT_ARG_CHECKED(JSObject, obj, 0);
+  CONVERT_SMI_ARG_CHECKED(from_kind, 1);
+  CONVERT_ARG_CHECKED(FixedArrayBase, from_elements, 2);
+  CONVERT_SMI_ARG_CHECKED(to_kind, 3);
+  CONVERT_ARG_CHECKED(FixedArrayBase, to_elements, 4);
+  NoHandleAllocation ha;
+  PrintF("*");
+  obj->PrintElementsTransition(stdout,
+      static_cast<ElementsKind>(from_kind), *from_elements,
+      static_cast<ElementsKind>(to_kind), *to_elements);
+  return isolate->heap()->undefined_value();
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_TraceEnter) {
   ASSERT(args.length() == 0);
   NoHandleAllocation ha;
@@ -9781,8 +9932,8 @@
         } else if (receiver->HasElement(j)) {
           // Call GetElement on receiver, not its prototype, or getters won't
           // have the correct receiver.
-          element_value = GetElement(receiver, j);
-          if (element_value.is_null()) return false;
+          element_value = Object::GetElement(receiver, j);
+          RETURN_IF_EMPTY_HANDLE_VALUE(isolate, element_value, false);
           visitor->visit(j, element_value);
         }
       }
@@ -9800,8 +9951,8 @@
       while (j < n) {
         HandleScope loop_scope;
         uint32_t index = indices[j];
-        Handle<Object> element = GetElement(receiver, index);
-        if (element.is_null()) return false;
+        Handle<Object> element = Object::GetElement(receiver, index);
+        RETURN_IF_EMPTY_HANDLE_VALUE(isolate, element, false);
         visitor->visit(index, element);
         // Skip to next different index (i.e., omit duplicates).
         do {
@@ -10051,9 +10202,9 @@
   }
 
   Handle<JSObject> jsobject = Handle<JSObject>::cast(object);
-  Handle<Object> tmp1 = GetElement(jsobject, index1);
+  Handle<Object> tmp1 = Object::GetElement(jsobject, index1);
   RETURN_IF_EMPTY_HANDLE(isolate, tmp1);
-  Handle<Object> tmp2 = GetElement(jsobject, index2);
+  Handle<Object> tmp2 = Object::GetElement(jsobject, index2);
   RETURN_IF_EMPTY_HANDLE(isolate, tmp2);
 
   RETURN_IF_EMPTY_HANDLE(isolate,
@@ -10078,7 +10229,11 @@
   if (array->elements()->IsDictionary()) {
     // Create an array and get all the keys into it, then remove all the
     // keys that are not integers in the range 0 to length-1.
-    Handle<FixedArray> keys = GetKeysInFixedArrayFor(array, INCLUDE_PROTOS);
+    bool threw = false;
+    Handle<FixedArray> keys =
+        GetKeysInFixedArrayFor(array, INCLUDE_PROTOS, &threw);
+    if (threw) return Failure::Exception();
+
     int keys_length = keys->length();
     for (int i = 0; i < keys_length; i++) {
       Object* key = keys->get(i);
@@ -10303,7 +10458,7 @@
   // Try local lookup on each of the objects.
   Handle<JSObject> jsproto = obj;
   for (int i = 0; i < length; i++) {
-    LookupResult result;
+    LookupResult result(isolate);
     jsproto->LocalLookup(*name, &result);
     if (result.IsProperty()) {
       // LookupResult is not GC safe as it holds raw object pointers.
@@ -10360,7 +10515,7 @@
   CONVERT_ARG_CHECKED(JSObject, obj, 0);
   CONVERT_ARG_CHECKED(String, name, 1);
 
-  LookupResult result;
+  LookupResult result(isolate);
   obj->Lookup(*name, &result);
   if (result.IsProperty()) {
     return DebugLookupResultValue(isolate->heap(), *obj, *name, &result, NULL);
@@ -10897,7 +11052,11 @@
       if (function_context->has_extension() &&
           !function_context->IsGlobalContext()) {
         Handle<JSObject> ext(JSObject::cast(function_context->extension()));
-        Handle<FixedArray> keys = GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS);
+        bool threw = false;
+        Handle<FixedArray> keys =
+            GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS, &threw);
+        if (threw) return Handle<JSObject>();
+
         for (int i = 0; i < keys->length(); i++) {
           // Names of variables introduced by eval are strings.
           ASSERT(keys->get(i)->IsString());
@@ -10945,7 +11104,11 @@
   // be variables introduced by eval.
   if (context->has_extension()) {
     Handle<JSObject> ext(JSObject::cast(context->extension()));
-    Handle<FixedArray> keys = GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS);
+    bool threw = false;
+    Handle<FixedArray> keys =
+        GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS, &threw);
+    if (threw) return Handle<JSObject>();
+
     for (int i = 0; i < keys->length(); i++) {
       // Names of variables introduced by eval are strings.
       ASSERT(keys->get(i)->IsString());
@@ -11010,9 +11173,10 @@
 }
 
 
-// Iterate over the actual scopes visible from a stack frame. All scopes are
+// Iterate over the actual scopes visible from a stack frame. The iteration
+// proceeds from the innermost visible nested scope outwards. All scopes are
 // backed by an actual context except the local scope, which is inserted
-// "artifically" in the context chain.
+// "artificially" in the context chain.
 class ScopeIterator {
  public:
   enum ScopeType {
@@ -11032,28 +11196,52 @@
       inlined_frame_index_(inlined_frame_index),
       function_(JSFunction::cast(frame->function())),
       context_(Context::cast(frame->context())),
-      local_done_(false),
-      at_local_(false) {
+      nested_scope_chain_(4) {
 
-    // Check whether the first scope is actually a local scope.
-    // If there is a stack slot for .result then this local scope has been
-    // created for evaluating top level code and it is not a real local scope.
+    // Catch the case when the debugger stops in an internal function.
+    Handle<SharedFunctionInfo> shared_info(function_->shared());
+    if (shared_info->script() == isolate->heap()->undefined_value()) {
+      while (context_->closure() == *function_) {
+        context_ = Handle<Context>(context_->previous(), isolate_);
+      }
+      return;
+    }
+
+    // Check whether we are in global code or function code. If there is a stack
+    // slot for .result then this function has been created for evaluating
+    // global code and it is not a real function.
     // Checking for the existence of .result seems fragile, but the scope info
     // saved with the code object does not otherwise have that information.
-    int index = function_->shared()->scope_info()->
+    int index = shared_info->scope_info()->
         StackSlotIndex(isolate_->heap()->result_symbol());
+
+    // Reparse the code and analyze the scopes.
+    ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
+    Handle<Script> script(Script::cast(shared_info->script()));
+    Scope* scope;
     if (index >= 0) {
-      local_done_ = true;
-    } else if (context_->IsGlobalContext() ||
-               context_->IsFunctionContext()) {
-      at_local_ = true;
-    } else if (context_->closure() != *function_) {
-      // The context_ is a block or with or catch block from the outer function.
-      ASSERT(context_->IsWithContext() ||
-             context_->IsCatchContext() ||
-             context_->IsBlockContext());
-      at_local_ = true;
+      // Global code
+      CompilationInfo info(script);
+      info.MarkAsGlobal();
+      bool result = ParserApi::Parse(&info);
+      ASSERT(result);
+      result = Scope::Analyze(&info);
+      ASSERT(result);
+      scope = info.function()->scope();
+    } else {
+      // Function code
+      CompilationInfo info(shared_info);
+      bool result = ParserApi::Parse(&info);
+      ASSERT(result);
+      result = Scope::Analyze(&info);
+      ASSERT(result);
+      scope = info.function()->scope();
     }
+
+    // Retrieve the scope chain for the current position.
+    int statement_position =
+        shared_info->code()->SourceStatementPosition(frame_->pc());
+    scope->GetNestedScopeChain(&nested_scope_chain_, statement_position);
   }
 
   // More scopes?
@@ -11061,40 +11249,48 @@
 
   // Move to the next scope.
   void Next() {
-    // If at a local scope mark the local scope as passed.
-    if (at_local_) {
-      at_local_ = false;
-      local_done_ = true;
-
-      // If the current context is not associated with the local scope the
-      // current context is the next real scope, so don't move to the next
-      // context in this case.
-      if (context_->closure() != *function_) {
-        return;
-      }
-    }
-
-    // The global scope is always the last in the chain.
-    if (context_->IsGlobalContext()) {
+    ScopeType scope_type = Type();
+    if (scope_type == ScopeTypeGlobal) {
+      // The global scope is always the last in the chain.
+      ASSERT(context_->IsGlobalContext());
       context_ = Handle<Context>();
       return;
     }
-
-    // Move to the next context.
-    context_ = Handle<Context>(context_->previous(), isolate_);
-
-    // If passing the local scope indicate that the current scope is now the
-    // local scope.
-    if (!local_done_ &&
-        (context_->IsGlobalContext() || context_->IsFunctionContext())) {
-      at_local_ = true;
+    if (nested_scope_chain_.is_empty()) {
+      context_ = Handle<Context>(context_->previous(), isolate_);
+    } else {
+      if (nested_scope_chain_.last()->HasContext()) {
+        context_ = Handle<Context>(context_->previous(), isolate_);
+      }
+      nested_scope_chain_.RemoveLast();
     }
   }
 
   // Return the type of the current scope.
   ScopeType Type() {
-    if (at_local_) {
-      return ScopeTypeLocal;
+    if (!nested_scope_chain_.is_empty()) {
+      Handle<SerializedScopeInfo> scope_info = nested_scope_chain_.last();
+      switch (scope_info->Type()) {
+        case FUNCTION_SCOPE:
+          ASSERT(context_->IsFunctionContext() ||
+                 !scope_info->HasContext());
+          return ScopeTypeLocal;
+        case GLOBAL_SCOPE:
+          ASSERT(context_->IsGlobalContext());
+          return ScopeTypeGlobal;
+        case WITH_SCOPE:
+          ASSERT(context_->IsWithContext());
+          return ScopeTypeWith;
+        case CATCH_SCOPE:
+          ASSERT(context_->IsCatchContext());
+          return ScopeTypeCatch;
+        case BLOCK_SCOPE:
+          ASSERT(!scope_info->HasContext() ||
+                 context_->IsBlockContext());
+          return ScopeTypeBlock;
+        case EVAL_SCOPE:
+          UNREACHABLE();
+      }
     }
     if (context_->IsGlobalContext()) {
       ASSERT(context_->global()->IsGlobalObject());
@@ -11120,6 +11316,7 @@
         return Handle<JSObject>(CurrentContext()->global());
       case ScopeIterator::ScopeTypeLocal:
         // Materialize the content of the local scope into a JSObject.
+        ASSERT(nested_scope_chain_.length() == 1);
         return MaterializeLocalScope(isolate_, frame_, inlined_frame_index_);
       case ScopeIterator::ScopeTypeWith:
         // Return the with object.
@@ -11136,13 +11333,30 @@
     return Handle<JSObject>();
   }
 
+  Handle<SerializedScopeInfo> CurrentScopeInfo() {
+    if (!nested_scope_chain_.is_empty()) {
+      return nested_scope_chain_.last();
+    } else if (context_->IsBlockContext()) {
+      return Handle<SerializedScopeInfo>(
+          SerializedScopeInfo::cast(context_->extension()));
+    } else if (context_->IsFunctionContext()) {
+      return Handle<SerializedScopeInfo>(
+          context_->closure()->shared()->scope_info());
+    }
+    return Handle<SerializedScopeInfo>::null();
+  }
+
   // Return the context for this scope. For the local context there might not
   // be an actual context.
   Handle<Context> CurrentContext() {
-    if (at_local_ && context_->closure() != *function_) {
+    if (Type() == ScopeTypeGlobal ||
+        nested_scope_chain_.is_empty()) {
+      return context_;
+    } else if (nested_scope_chain_.last()->HasContext()) {
+      return context_;
+    } else {
       return Handle<Context>();
     }
-    return context_;
   }
 
 #ifdef DEBUG
@@ -11205,8 +11419,7 @@
   int inlined_frame_index_;
   Handle<JSFunction> function_;
   Handle<Context> context_;
-  bool local_done_;
-  bool at_local_;
+  List<Handle<SerializedScopeInfo> > nested_scope_chain_;
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(ScopeIterator);
 };
@@ -11521,7 +11734,7 @@
     if (!done) {
       // If the candidate is not compiled compile it to reveal any inner
       // functions which might contain the requested source position.
-      CompileLazyShared(target, KEEP_EXCEPTION);
+      SharedFunctionInfo::CompileLazy(target, KEEP_EXCEPTION);
     }
   }  // End while loop.
 
@@ -11669,46 +11882,65 @@
 
 // Creates a copy of the with context chain. The copy of the context chain is
 // is linked to the function context supplied.
-static Handle<Context> CopyWithContextChain(Isolate* isolate,
-                                            Handle<JSFunction> function,
-                                            Handle<Context> current,
-                                            Handle<Context> base) {
-  // At the end of the chain. Return the base context to link to.
-  if (current->IsFunctionContext() || current->IsGlobalContext()) {
-    return base;
+static Handle<Context> CopyNestedScopeContextChain(Isolate* isolate,
+                                                   Handle<JSFunction> function,
+                                                   Handle<Context> base,
+                                                   JavaScriptFrame* frame,
+                                                   int inlined_frame_index) {
+  HandleScope scope(isolate);
+  List<Handle<SerializedScopeInfo> > scope_chain;
+  List<Handle<Context> > context_chain;
+
+  ScopeIterator it(isolate, frame, inlined_frame_index);
+  for (; it.Type() != ScopeIterator::ScopeTypeGlobal &&
+         it.Type() != ScopeIterator::ScopeTypeLocal ; it.Next()) {
+    ASSERT(!it.Done());
+    scope_chain.Add(it.CurrentScopeInfo());
+    context_chain.Add(it.CurrentContext());
   }
 
-  // Recursively copy the with and catch contexts.
-  HandleScope scope(isolate);
-  Handle<Context> previous(current->previous());
-  Handle<Context> new_previous =
-      CopyWithContextChain(isolate, function, previous, base);
-  Handle<Context> new_current;
-  if (current->IsCatchContext()) {
-    Handle<String> name(String::cast(current->extension()));
-    Handle<Object> thrown_object(current->get(Context::THROWN_OBJECT_INDEX));
-    new_current =
-        isolate->factory()->NewCatchContext(function,
-                                            new_previous,
-                                            name,
-                                            thrown_object);
-  } else if (current->IsBlockContext()) {
-    Handle<SerializedScopeInfo> scope_info(
-        SerializedScopeInfo::cast(current->extension()));
-    new_current =
-        isolate->factory()->NewBlockContext(function, new_previous, scope_info);
-    // Copy context slots.
-    int num_context_slots = scope_info->NumberOfContextSlots();
-    for (int i = Context::MIN_CONTEXT_SLOTS; i < num_context_slots; ++i) {
-      new_current->set(i, current->get(i));
+  // At the end of the chain. Return the base context to link to.
+  Handle<Context> context = base;
+
+  // Iteratively copy and/or materialize the nested contexts.
+  while (!scope_chain.is_empty()) {
+    Handle<SerializedScopeInfo> scope_info = scope_chain.RemoveLast();
+    Handle<Context> current = context_chain.RemoveLast();
+    ASSERT(!(scope_info->HasContext() & current.is_null()));
+
+    if (scope_info->Type() == CATCH_SCOPE) {
+      Handle<String> name(String::cast(current->extension()));
+      Handle<Object> thrown_object(current->get(Context::THROWN_OBJECT_INDEX));
+      context =
+          isolate->factory()->NewCatchContext(function,
+                                              context,
+                                              name,
+                                              thrown_object);
+    } else if (scope_info->Type() == BLOCK_SCOPE) {
+      // Materialize the contents of the block scope into a JSObject.
+      Handle<JSObject> block_scope_object =
+          MaterializeBlockScope(isolate, current);
+      if (block_scope_object.is_null()) {
+        return Handle<Context>::null();
+      }
+      // Allocate a new function context for the debug evaluation and set the
+      // extension object.
+      Handle<Context> new_context =
+          isolate->factory()->NewFunctionContext(Context::MIN_CONTEXT_SLOTS,
+                                                 function);
+      new_context->set_extension(*block_scope_object);
+      new_context->set_previous(*context);
+      context = new_context;
+    } else {
+      ASSERT(scope_info->Type() == WITH_SCOPE);
+      ASSERT(current->IsWithContext());
+      Handle<JSObject> extension(JSObject::cast(current->extension()));
+      context =
+          isolate->factory()->NewWithContext(function, context, extension);
     }
-  } else {
-    ASSERT(current->IsWithContext());
-    Handle<JSObject> extension(JSObject::cast(current->extension()));
-    new_current =
-        isolate->factory()->NewWithContext(function, new_previous, extension);
   }
-  return scope.CloseAndEscape(new_current);
+
+  return scope.CloseAndEscape(context);
 }
 
 
@@ -11846,7 +12078,11 @@
   if (scope_info->HasHeapAllocatedLocals()) {
     function_context = Handle<Context>(frame_context->declaration_context());
   }
-  context = CopyWithContextChain(isolate, go_between, frame_context, context);
+  context = CopyNestedScopeContextChain(isolate,
+                                        go_between,
+                                        context,
+                                        frame,
+                                        inlined_frame_index);
 
   if (additional_context->IsJSObject()) {
     Handle<JSObject> extension = Handle<JSObject>::cast(additional_context);
@@ -12245,7 +12481,7 @@
   // Get the function and make sure it is compiled.
   CONVERT_ARG_CHECKED(JSFunction, func, 0);
   Handle<SharedFunctionInfo> shared(func->shared());
-  if (!EnsureCompiled(shared, KEEP_EXCEPTION)) {
+  if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
     return Failure::Exception();
   }
   func->code()->PrintLn();
@@ -12261,7 +12497,7 @@
   // Get the function and make sure it is compiled.
   CONVERT_ARG_CHECKED(JSFunction, func, 0);
   Handle<SharedFunctionInfo> shared(func->shared());
-  if (!EnsureCompiled(shared, KEEP_EXCEPTION)) {
+  if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
     return Failure::Exception();
   }
   shared->construct_stub()->PrintLn();
@@ -12867,34 +13103,32 @@
                                   Object* caller,
                                   bool* seen_caller) {
   // Only display JS frames.
-  if (!raw_frame->is_java_script())
+  if (!raw_frame->is_java_script()) {
     return false;
+  }
   JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame);
   Object* raw_fun = frame->function();
   // Not sure when this can happen but skip it just in case.
-  if (!raw_fun->IsJSFunction())
+  if (!raw_fun->IsJSFunction()) {
     return false;
+  }
   if ((raw_fun == caller) && !(*seen_caller)) {
     *seen_caller = true;
     return false;
   }
   // Skip all frames until we've seen the caller.
   if (!(*seen_caller)) return false;
-  // Also, skip the most obvious builtin calls. We recognize builtins
-  // as (1) functions called with the builtins object as the receiver and
-  // as (2) functions from native scripts called with undefined as the
-  // receiver (direct calls to helper functions in the builtins
-  // code). Some builtin calls (such as Number.ADD which is invoked
-  // using 'call') are very difficult to recognize so we're leaving
-  // them in for now.
-  if (frame->receiver()->IsJSBuiltinsObject()) {
-    return false;
-  }
-  JSFunction* fun = JSFunction::cast(raw_fun);
-  Object* raw_script = fun->shared()->script();
-  if (frame->receiver()->IsUndefined() && raw_script->IsScript()) {
-    int script_type = Script::cast(raw_script)->type()->value();
-    return script_type != Script::TYPE_NATIVE;
+  // Also, skip non-visible built-in functions and any call with the builtins
+  // object as receiver, so as to not reveal either the builtins object or
+  // an internal function.
+  // The --builtins-in-stack-traces command line flag allows including
+  // internal call sites in the stack trace for debugging purposes.
+  if (!FLAG_builtins_in_stack_traces) {
+    JSFunction* fun = JSFunction::cast(raw_fun);
+    if (frame->receiver()->IsJSBuiltinsObject() ||
+        (fun->IsBuiltin() && !fun->shared()->native())) {
+      return false;
+    }
   }
   return true;
 }
@@ -13041,7 +13275,9 @@
   }
 
 #ifdef DEBUG
-  cache_handle->JSFunctionResultCacheVerify();
+  if (FLAG_verify_heap) {
+    cache_handle->JSFunctionResultCacheVerify();
+  }
 #endif
 
   // Function invocation may have cleared the cache.  Reread all the data.
@@ -13070,7 +13306,9 @@
   cache_handle->set_finger_index(index);
 
 #ifdef DEBUG
-  cache_handle->JSFunctionResultCacheVerify();
+  if (FLAG_verify_heap) {
+    cache_handle->JSFunctionResultCacheVerify();
+  }
 #endif
 
   return *value;
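The Runtime_Set* and Runtime_Map* entry points above back the Harmony Set and Map objects (behind --harmony). At the JavaScript level the behaviour they enable is roughly the following; the prototype methods themselves are installed by the library files, not by this hunk:

    var s = new Set();
    s.add("x");
    s.has("x");      // true
    s.delete("x");
    s.has("x");      // false

    var m = new Map();
    var key = {};
    m.set(key, 1);
    m.get(key);      // 1
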
diff --git a/src/runtime.h b/src/runtime.h
index ed9c2b8..67fc628 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -211,14 +211,14 @@
   /* Reflection */ \
   F(FunctionSetInstanceClassName, 2, 1) \
   F(FunctionSetLength, 2, 1) \
-  F(BoundFunctionSetLength, 2, 1)    \
   F(FunctionSetPrototype, 2, 1) \
   F(FunctionSetReadOnlyPrototype, 1, 1) \
   F(FunctionGetName, 1, 1) \
   F(FunctionSetName, 2, 1) \
   F(FunctionNameShouldPrintAsAnonymous, 1, 1) \
   F(FunctionMarkNameShouldPrintAsAnonymous, 1, 1) \
-  F(FunctionSetBound, 1, 1) \
+  F(FunctionBindArguments, 4, 1) \
+  F(BoundFunctionGetBindings, 1, 1) \
   F(FunctionRemovePrototype, 1, 1) \
   F(FunctionGetSourceCode, 1, 1) \
   F(FunctionGetScript, 1, 1) \
@@ -278,7 +278,7 @@
   \
   /* Literals */ \
   F(MaterializeRegExpLiteral, 4, 1)\
-  F(CreateArrayLiteralBoilerplate, 3, 1) \
+  F(CreateArrayLiteralBoilerplate, 4, 1) \
   F(CloneLiteralBoilerplate, 1, 1) \
   F(CloneShallowLiteralBoilerplate, 1, 1) \
   F(CreateObjectLiteral, 4, 1) \
@@ -296,6 +296,17 @@
   F(GetConstructTrap, 1, 1) \
   F(Fix, 1, 1) \
   \
+  /* Harmony sets */ \
+  F(SetInitialize, 1, 1) \
+  F(SetAdd, 2, 1) \
+  F(SetHas, 2, 1) \
+  F(SetDelete, 2, 1) \
+  \
+  /* Harmony maps */ \
+  F(MapInitialize, 1, 1) \
+  F(MapGet, 2, 1) \
+  F(MapSet, 3, 1) \
+  \
   /* Harmony weakmaps */ \
   F(WeakMapInitialize, 1, 1) \
   F(WeakMapGet, 2, 1) \
@@ -304,7 +315,7 @@
   /* Statements */ \
   F(NewClosure, 3, 1) \
   F(NewObject, 1, 1) \
-  F(NewObjectFromBound, 2, 1) \
+  F(NewObjectFromBound, 1, 1) \
   F(FinalizeInstanceSize, 1, 1) \
   F(Throw, 1, 1) \
   F(ReThrow, 1, 1) \
@@ -330,11 +341,10 @@
   F(InitializeConstContextSlot, 3, 1) \
   F(OptimizeObjectForAddingMultipleProperties, 2, 1) \
   \
-  /* Arrays */ \
-  F(NonSmiElementStored, 1, 1) \
   /* Debugging */ \
   F(DebugPrint, 1, 1) \
   F(DebugTrace, 0, 1) \
+  F(TraceElementsKindTransition, 5, 1) \
   F(TraceEnter, 0, 1) \
   F(TraceExit, 1, 1) \
   F(Abort, 2, 1) \
@@ -370,6 +380,8 @@
   F(HasExternalUnsignedIntElements, 1, 1) \
   F(HasExternalFloatElements, 1, 1) \
   F(HasExternalDoubleElements, 1, 1) \
+  F(TransitionElementsSmiToDouble, 1, 1) \
+  F(TransitionElementsDoubleToObject, 1, 1) \
   F(HaveSameMap, 2, 1) \
   /* profiler */ \
   F(ProfilerResume, 0, 1) \
@@ -628,16 +640,14 @@
 
   static bool IsUpperCaseChar(RuntimeState* runtime_state, uint16_t ch);
 
-  // TODO(1240886): The following three methods are *not* handle safe,
-  // but accept handle arguments. This seems fragile.
+  // TODO(1240886): Some of the following methods are *not* handle safe, but
+  // accept handle arguments. This seems fragile.
 
   // Support getting the characters in a string using [] notation as
   // in Firefox/SpiderMonkey, Safari and Opera.
   MUST_USE_RESULT static MaybeObject* GetElementOrCharAt(Isolate* isolate,
                                                          Handle<Object> object,
                                                          uint32_t index);
-  MUST_USE_RESULT static MaybeObject* GetElement(Handle<Object> object,
-                                                 uint32_t index);
 
   MUST_USE_RESULT static MaybeObject* SetObjectProperty(
       Isolate* isolate,
@@ -677,11 +687,9 @@
 //---------------------------------------------------------------------------
 // Constants used by interface to runtime functions.
 
-enum kDeclareGlobalsFlags {
-  kDeclareGlobalsEvalFlag = 1 << 0,
-  kDeclareGlobalsStrictModeFlag = 1 << 1,
-  kDeclareGlobalsNativeFlag = 1 << 2
-};
+class DeclareGlobalsEvalFlag:       public BitField<bool,           0, 1> {};
+class DeclareGlobalsStrictModeFlag: public BitField<StrictModeFlag, 1, 1> {};
+class DeclareGlobalsNativeFlag:     public BitField<bool,           2, 1> {};
 
 } }  // namespace v8::internal
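
The runtime.h change above replaces the OR-ed kDeclareGlobals* enum with BitField classes, so a bool, a StrictModeFlag and another bool occupy fixed bit ranges of a single small integer passed to the runtime call. A minimal sketch of how such a BitField packs and unpacks values (hand-rolled here; V8's own BitField template has additional checks):

#include <cassert>

enum StrictModeFlag { kNonStrictMode = 0, kStrictMode = 1 };

// Minimal BitField: encodes a value of type T into bits [shift, shift + size).
template <class T, int shift, int size>
struct BitField {
  static int encode(T value) { return static_cast<int>(value) << shift; }
  static T decode(int bits) {
    return static_cast<T>((bits >> shift) & ((1 << size) - 1));
  }
};

class DeclareGlobalsEvalFlag       : public BitField<bool,           0, 1> {};
class DeclareGlobalsStrictModeFlag : public BitField<StrictModeFlag, 1, 1> {};
class DeclareGlobalsNativeFlag     : public BitField<bool,           2, 1> {};

int main() {
  // Pack all three flags into one integer (a Smi in the actual runtime call).
  int flags = DeclareGlobalsEvalFlag::encode(true) |
              DeclareGlobalsStrictModeFlag::encode(kStrictMode) |
              DeclareGlobalsNativeFlag::encode(false);
  assert(DeclareGlobalsEvalFlag::decode(flags) == true);
  assert(DeclareGlobalsStrictModeFlag::decode(flags) == kStrictMode);
  assert(DeclareGlobalsNativeFlag::decode(flags) == false);
}
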
 
diff --git a/src/runtime.js b/src/runtime.js
index a12f6c7..108b928 100644
--- a/src/runtime.js
+++ b/src/runtime.js
@@ -375,6 +375,12 @@
     return 1;
   }
 
+  // Check if the function is bound; if so, get [[BoundFunction]] from it
+  // and use that instead of F.
+  var bindings = %BoundFunctionGetBindings(F);
+  if (bindings) {
+    F = bindings[kBoundFunctionIndex];  // Always a non-bound function.
+  }
   // Get the prototype of F; if it is not an object, throw an error.
   var O = F.prototype;
   if (!IS_SPEC_OBJECT(O)) {
@@ -386,13 +392,6 @@
 }
 
 
-// Get an array of property keys for the given object. Used in
-// for-in statements.
-function GET_KEYS() {
-  return %GetPropertyNames(this);
-}
-
-
 // Filter a given key against an object by checking if the object
 // has a property with the given key; return the key as a string if
 // it has. Otherwise returns 0 (smi). Used in for-in statements.
@@ -463,7 +462,7 @@
   }
 
   // Make sure the arguments list has the right type.
-  if (args != null && !IS_ARRAY(args) && !IS_ARGUMENTS(args)) {
+  if (args != null && !IS_SPEC_OBJECT(args)) {
     throw %MakeTypeError('apply_wrong_args', []);
   }
 
diff --git a/src/scanner.h b/src/scanner.h
index 6651c38..a2e64a9 100644
--- a/src/scanner.h
+++ b/src/scanner.h
@@ -41,6 +41,17 @@
 namespace v8 {
 namespace internal {
 
+
+// General collection of bit-flags that can be passed to scanners and
+// parsers to signify their (initial) mode of operation.
+enum ParsingFlags {
+  kNoParsingFlags = 0,
+  kAllowLazy = 1,
+  kAllowNativesSyntax = 2,
+  kHarmonyScoping = 4
+};
+
+
 // Returns the value (0 .. 15) of a hexadecimal character c.
 // If c is not a legal hexadecimal character, returns a value < 0.
 inline int HexValue(uc32 c) {
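
Because the ParsingFlags values above are distinct powers of two, callers can OR several of them into one int and the scanner or parser can test individual bits. A tiny illustration (the ConfigureScanner function is hypothetical; only the flag-testing pattern matters):

#include <iostream>

enum ParsingFlags {
  kNoParsingFlags = 0,
  kAllowLazy = 1,
  kAllowNativesSyntax = 2,
  kHarmonyScoping = 4
};

// Hypothetical consumer of the combined flags.
void ConfigureScanner(int flags) {
  if (flags & kAllowLazy)          std::cout << "lazy parsing allowed\n";
  if (flags & kAllowNativesSyntax) std::cout << "%-natives syntax allowed\n";
  if (flags & kHarmonyScoping)     std::cout << "harmony scoping enabled\n";
}

int main() {
  ConfigureScanner(kAllowLazy | kHarmonyScoping);
}
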
diff --git a/src/scopeinfo.cc b/src/scopeinfo.cc
index 1aa5160..8ea5f1e 100644
--- a/src/scopeinfo.cc
+++ b/src/scopeinfo.cc
@@ -51,6 +51,7 @@
     : function_name_(FACTORY->empty_symbol()),
       calls_eval_(scope->calls_eval()),
       is_strict_mode_(scope->is_strict_mode()),
+      type_(scope->type()),
       parameters_(scope->num_parameters()),
       stack_slots_(scope->num_stack_slots()),
       context_slots_(scope->num_heap_slots()),
@@ -138,7 +139,7 @@
       ASSERT(proxy->var()->index() - Context::MIN_CONTEXT_SLOTS ==
              context_modes_.length());
       context_slots_.Add(FACTORY->empty_symbol());
-      context_modes_.Add(INTERNAL);
+      context_modes_.Add(proxy->var()->mode());
     }
   }
 }
@@ -150,6 +151,10 @@
 //
 // - calls eval boolean flag
 //
+// - is strict mode scope
+//
+// - scope type
+//
 // - number of variables in the context object (smi) (= function context
 //   slot index + 1)
 // - list of pairs (name, Var mode) of context-allocated variables (starting
@@ -181,8 +186,9 @@
 //   present)
 
 
-static inline Object** ReadInt(Object** p, int* x) {
-  *x = (reinterpret_cast<Smi*>(*p++))->value();
+template <class T>
+static inline Object** ReadInt(Object** p, T* x) {
+  *x = static_cast<T>((reinterpret_cast<Smi*>(*p++))->value());
   return p;
 }
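
With the scope type added, the fixed header of a serialized scope info has four entries (function name, calls-eval flag, strict-mode flag, scope type) followed by length-prefixed tables; that is why Serialize() later in this file reserves extra_slots = 1 + 1 + 1 + 1 + 3 and ContextEntriesAddr() now skips four slots. A toy sketch of the same layout, using plain ints in place of tagged object pointers and ignoring the per-variable mode entries:

#include <cassert>
#include <vector>

enum ScopeType { FUNCTION_SCOPE = 0, GLOBAL_SCOPE, EVAL_SCOPE,
                 WITH_SCOPE, CATCH_SCOPE, BLOCK_SCOPE };  // toy ordering

// Toy layout: [name][calls_eval][is_strict][type][#ctx]...[#params]...[#stack]...
struct ToyScopeInfo {
  int name_id;
  bool calls_eval;
  bool is_strict;
  ScopeType type;
  std::vector<int> context_slots, parameters, stack_slots;
};

static void WriteList(std::vector<int>* out, const std::vector<int>& list) {
  out->push_back(static_cast<int>(list.size()));
  out->insert(out->end(), list.begin(), list.end());
}

std::vector<int> Serialize(const ToyScopeInfo& info) {
  std::vector<int> data;
  data.push_back(info.name_id);
  data.push_back(info.calls_eval);
  data.push_back(info.is_strict);
  data.push_back(info.type);            // the newly added header entry
  WriteList(&data, info.context_slots);
  WriteList(&data, info.parameters);
  WriteList(&data, info.stack_slots);
  return data;
}

int main() {
  ToyScopeInfo info = {7, false, true, BLOCK_SCOPE, {1, 2}, {3}, {}};
  std::vector<int> data = Serialize(info);
  assert(data[3] == BLOCK_SCOPE);  // scope type lives at offset 3 (data_start() + 3)
  assert(data[4] == 2);            // context entries start at offset 4
}
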
 
@@ -193,20 +199,21 @@
 }
 
 
-static inline Object** ReadSymbol(Object** p, Handle<String>* s) {
-  *s = Handle<String>(reinterpret_cast<String*>(*p++));
+template <class T>
+static inline Object** ReadObject(Object** p, Handle<T>* s) {
+  *s = Handle<T>::cast(Handle<Object>(*p++));
   return p;
 }
 
 
-template <class Allocator>
-static Object** ReadList(Object** p, List<Handle<String>, Allocator >* list) {
+template <class Allocator, class T>
+static Object** ReadList(Object** p, List<Handle<T>, Allocator >* list) {
   ASSERT(list->is_empty());
   int n;
   p = ReadInt(p, &n);
   while (n-- > 0) {
-    Handle<String> s;
-    p = ReadSymbol(p, &s);
+    Handle<T> s;
+    p = ReadObject(p, &s);
     list->Add(s);
   }
   return p;
@@ -223,7 +230,7 @@
   while (n-- > 0) {
     Handle<String> s;
     int m;
-    p = ReadSymbol(p, &s);
+    p = ReadObject(p, &s);
     p = ReadInt(p, &m);
     list->Add(s);
     modes->Add(static_cast<VariableMode>(m));
@@ -242,9 +249,10 @@
   if (data->length() > 0) {
     Object** p0 = data->data_start();
     Object** p = p0;
-    p = ReadSymbol(p, &function_name_);
+    p = ReadObject(p, &function_name_);
     p = ReadBool(p, &calls_eval_);
     p = ReadBool(p, &is_strict_mode_);
+    p = ReadInt(p, &type_);
     p = ReadList<Allocator>(p, &context_slots_, &context_modes_);
     p = ReadList<Allocator>(p, &parameters_);
     p = ReadList<Allocator>(p, &stack_slots_);
@@ -265,18 +273,19 @@
 }
 
 
-static inline Object** WriteSymbol(Object** p, Handle<String> s) {
+template <class T>
+static inline Object** WriteObject(Object** p, Handle<T> s) {
   *p++ = *s;
   return p;
 }
 
 
-template <class Allocator>
-static Object** WriteList(Object** p, List<Handle<String>, Allocator >* list) {
+template <class Allocator, class T>
+static Object** WriteList(Object** p, List<Handle<T>, Allocator >* list) {
   const int n = list->length();
   p = WriteInt(p, n);
   for (int i = 0; i < n; i++) {
-    p = WriteSymbol(p, list->at(i));
+    p = WriteObject(p, list->at(i));
   }
   return p;
 }
@@ -289,7 +298,7 @@
   const int n = list->length();
   p = WriteInt(p, n);
   for (int i = 0; i < n; i++) {
-    p = WriteSymbol(p, list->at(i));
+    p = WriteObject(p, list->at(i));
     p = WriteInt(p, modes->at(i));
   }
   return p;
@@ -298,8 +307,9 @@
 
 template<class Allocator>
 Handle<SerializedScopeInfo> ScopeInfo<Allocator>::Serialize() {
-  // function name, calls eval, is_strict_mode, length for 3 tables:
-  const int extra_slots = 1 + 1 + 1 + 3;
+  // function name, calls eval, is_strict_mode, scope type,
+  // length for 3 tables:
+  const int extra_slots = 1 + 1 + 1 + 1 + 3;
   int length = extra_slots +
                context_slots_.length() * 2 +
                parameters_.length() +
@@ -311,9 +321,10 @@
 
   Object** p0 = data->data_start();
   Object** p = p0;
-  p = WriteSymbol(p, function_name_);
+  p = WriteObject(p, function_name_);
   p = WriteBool(p, calls_eval_);
   p = WriteBool(p, is_strict_mode_);
+  p = WriteInt(p, type_);
   p = WriteList(p, &context_slots_, &context_modes_);
   p = WriteList(p, &parameters_);
   p = WriteList(p, &stack_slots_);
@@ -361,8 +372,8 @@
 
 Object** SerializedScopeInfo::ContextEntriesAddr() {
   ASSERT(length() > 0);
-  // +3 for function name, calls eval, strict mode.
-  return data_start() + 3;
+  // +4 for function name, calls eval, strict mode, scope type.
+  return data_start() + 4;
 }
 
 
@@ -406,6 +417,16 @@
 }
 
 
+ScopeType SerializedScopeInfo::Type() {
+  ASSERT(length() > 0);
+  // +3 for function name, calls eval, strict mode.
+  Object** p = data_start() + 3;
+  ScopeType type;
+  p = ReadInt(p, &type);
+  return type;
+}
+
+
 int SerializedScopeInfo::NumberOfStackSlots() {
   if (length() > 0) {
     Object** p = StackSlotEntriesAddr();
@@ -439,6 +460,12 @@
 }
 
 
+bool SerializedScopeInfo::HasContext() {
+  return HasHeapAllocatedLocals() ||
+      Type() == WITH_SCOPE;
+}
+
+
 int SerializedScopeInfo::StackSlotIndex(String* name) {
   ASSERT(name->IsSymbol());
   if (length() > 0) {
@@ -513,16 +540,24 @@
 }
 
 
-int SerializedScopeInfo::FunctionContextSlotIndex(String* name) {
+int SerializedScopeInfo::FunctionContextSlotIndex(String* name,
+                                                  VariableMode* mode) {
   ASSERT(name->IsSymbol());
   if (length() > 0) {
     Object** p = data_start();
     if (*p == name) {
       p = ContextEntriesAddr();
       int number_of_context_slots;
-      ReadInt(p, &number_of_context_slots);
+      p = ReadInt(p, &number_of_context_slots);
       ASSERT(number_of_context_slots != 0);
       // The function context slot is the last entry.
+      if (mode != NULL) {
+        // Seek to context slot entry.
+        p += (number_of_context_slots - 1) * 2;
+        // Seek to mode.
+        ++p;
+        ReadInt(p, mode);
+      }
       return number_of_context_slots + Context::MIN_CONTEXT_SLOTS - 1;
     }
   }
diff --git a/src/scopeinfo.h b/src/scopeinfo.h
index 03f321b..eeb3047 100644
--- a/src/scopeinfo.h
+++ b/src/scopeinfo.h
@@ -35,17 +35,10 @@
 namespace v8 {
 namespace internal {
 
-// Scope information represents information about a functions's
-// scopes (currently only one, because we don't do any inlining)
-// and the allocation of the scope's variables. Scope information
-// is stored in a compressed form in FixedArray objects and is used
+// ScopeInfo represents information about different scopes of a source
+// program and the allocation of the scope's variables. Scope information
+// is stored in a compressed form in SerializedScopeInfo objects and is used
 // at runtime (stack dumps, deoptimization, etc.).
-//
-// Historical note: In other VMs built by this team, ScopeInfo was
-// usually called DebugInfo since the information was used (among
-// other things) for on-demand debugging (Self, Smalltalk). However,
-// DebugInfo seems misleading, since this information is primarily used
-// in debugging-unrelated contexts.
 
 // Forward defined as
 // template <class Allocator = FreeStoreAllocationPolicy> class ScopeInfo;
@@ -83,6 +76,7 @@
   Handle<String> LocalName(int i) const;
   int NumberOfLocals() const;
 
+  ScopeType type() const { return type_; }
   // --------------------------------------------------------------------------
   // Debugging support
 
@@ -94,6 +88,7 @@
   Handle<String> function_name_;
   bool calls_eval_;
   bool is_strict_mode_;
+  ScopeType type_;
   List<Handle<String>, Allocator > parameters_;
   List<Handle<String>, Allocator > stack_slots_;
   List<Handle<String>, Allocator > context_slots_;
diff --git a/src/scopes.cc b/src/scopes.cc
index e67b7f8..3167c4d 100644
--- a/src/scopes.cc
+++ b/src/scopes.cc
@@ -114,7 +114,7 @@
 
 
 // Dummy constructor
-Scope::Scope(Type type)
+Scope::Scope(ScopeType type)
     : isolate_(Isolate::Current()),
       inner_scopes_(0),
       variables_(false),
@@ -127,7 +127,7 @@
 }
 
 
-Scope::Scope(Scope* outer_scope, Type type)
+Scope::Scope(Scope* outer_scope, ScopeType type)
     : isolate_(Isolate::Current()),
       inner_scopes_(4),
       variables_(),
@@ -146,7 +146,7 @@
 
 
 Scope::Scope(Scope* inner_scope,
-             Type type,
+             ScopeType type,
              Handle<SerializedScopeInfo> scope_info)
     : isolate_(Isolate::Current()),
       inner_scopes_(4),
@@ -156,9 +156,8 @@
       unresolved_(16),
       decls_(4),
       already_resolved_(true) {
-  ASSERT(!scope_info.is_null());
   SetDefaults(type, NULL, scope_info);
-  if (scope_info->HasHeapAllocatedLocals()) {
+  if (!scope_info.is_null() && scope_info->HasHeapAllocatedLocals()) {
     num_heap_slots_ = scope_info_->NumberOfContextSlots();
   }
   AddInnerScope(inner_scope);
@@ -186,7 +185,7 @@
 }
 
 
-void Scope::SetDefaults(Type type,
+void Scope::SetDefaults(ScopeType type,
                         Scope* outer_scope,
                         Handle<SerializedScopeInfo> scope_info) {
   outer_scope_ = outer_scope;
@@ -201,16 +200,17 @@
   scope_contains_with_ = false;
   scope_calls_eval_ = false;
   // Inherit the strict mode from the parent scope.
-  strict_mode_ = (outer_scope != NULL) && outer_scope->strict_mode_;
-  outer_scope_calls_eval_ = false;
+  strict_mode_flag_ = (outer_scope != NULL)
+      ? outer_scope->strict_mode_flag_ : kNonStrictMode;
   outer_scope_calls_non_strict_eval_ = false;
   inner_scope_calls_eval_ = false;
-  outer_scope_is_eval_scope_ = false;
   force_eager_compilation_ = false;
   num_var_or_const_ = 0;
   num_stack_slots_ = 0;
   num_heap_slots_ = 0;
   scope_info_ = scope_info;
+  start_position_ = RelocInfo::kNoPosition;
+  end_position_ = RelocInfo::kNoPosition;
 }
 
 
@@ -224,30 +224,31 @@
   bool contains_with = false;
   while (!context->IsGlobalContext()) {
     if (context->IsWithContext()) {
+      Scope* with_scope = new Scope(current_scope, WITH_SCOPE,
+                                    Handle<SerializedScopeInfo>::null());
+      current_scope = with_scope;
       // All the inner scopes are inside a with.
       contains_with = true;
       for (Scope* s = innermost_scope; s != NULL; s = s->outer_scope()) {
         s->scope_inside_with_ = true;
       }
+    } else if (context->IsFunctionContext()) {
+      SerializedScopeInfo* scope_info =
+          context->closure()->shared()->scope_info();
+      current_scope = new Scope(current_scope, FUNCTION_SCOPE,
+                                Handle<SerializedScopeInfo>(scope_info));
+    } else if (context->IsBlockContext()) {
+      SerializedScopeInfo* scope_info =
+          SerializedScopeInfo::cast(context->extension());
+      current_scope = new Scope(current_scope, BLOCK_SCOPE,
+                                Handle<SerializedScopeInfo>(scope_info));
     } else {
-      if (context->IsFunctionContext()) {
-        SerializedScopeInfo* scope_info =
-            context->closure()->shared()->scope_info();
-        current_scope = new Scope(current_scope, FUNCTION_SCOPE,
-            Handle<SerializedScopeInfo>(scope_info));
-      } else if (context->IsBlockContext()) {
-        SerializedScopeInfo* scope_info =
-            SerializedScopeInfo::cast(context->extension());
-        current_scope = new Scope(current_scope, BLOCK_SCOPE,
-            Handle<SerializedScopeInfo>(scope_info));
-      } else {
-        ASSERT(context->IsCatchContext());
-        String* name = String::cast(context->extension());
-        current_scope = new Scope(current_scope, Handle<String>(name));
-      }
-      if (contains_with) current_scope->RecordWithStatement();
-      if (innermost_scope == NULL) innermost_scope = current_scope;
+      ASSERT(context->IsCatchContext());
+      String* name = String::cast(context->extension());
+      current_scope = new Scope(current_scope, Handle<String>(name));
     }
+    if (contains_with) current_scope->RecordWithStatement();
+    if (innermost_scope == NULL) innermost_scope = current_scope;
 
     // Forget about a with when we move to a context for a different function.
     if (context->previous()->closure() != context->closure()) {
@@ -281,15 +282,15 @@
 }
 
 
-void Scope::Initialize(bool inside_with) {
+void Scope::Initialize() {
   ASSERT(!already_resolved());
 
   // Add this scope as a new inner scope of the outer scope.
   if (outer_scope_ != NULL) {
     outer_scope_->inner_scopes_.Add(this);
-    scope_inside_with_ = outer_scope_->scope_inside_with_ || inside_with;
+    scope_inside_with_ = outer_scope_->scope_inside_with_ || is_with_scope();
   } else {
-    scope_inside_with_ = inside_with;
+    scope_inside_with_ = is_with_scope();
   }
 
   // Declare convenience variables.
@@ -300,13 +301,7 @@
   // instead load them directly from the stack. Currently, the only
   // such parameter is 'this' which is passed on the stack when
   // invoking scripts
-  if (is_catch_scope() || is_block_scope()) {
-    ASSERT(outer_scope() != NULL);
-    receiver_ = outer_scope()->receiver();
-  } else {
-    ASSERT(is_function_scope() ||
-           is_global_scope() ||
-           is_eval_scope());
+  if (is_declaration_scope()) {
     Variable* var =
         variables_.Declare(this,
                            isolate_->factory()->this_symbol(),
@@ -315,6 +310,9 @@
                            Variable::THIS);
     var->AllocateTo(Variable::PARAMETER, -1);
     receiver_ = var;
+  } else {
+    ASSERT(outer_scope() != NULL);
+    receiver_ = outer_scope()->receiver();
   }
 
   if (is_function_scope()) {
@@ -381,7 +379,7 @@
     index = scope_info_->ParameterIndex(*name);
     if (index < 0) {
       // Check the function name.
-      index = scope_info_->FunctionContextSlotIndex(*name);
+      index = scope_info_->FunctionContextSlotIndex(*name, NULL);
       if (index < 0) return NULL;
     }
   }
@@ -404,10 +402,10 @@
 }
 
 
-Variable* Scope::DeclareFunctionVar(Handle<String> name) {
+Variable* Scope::DeclareFunctionVar(Handle<String> name, VariableMode mode) {
   ASSERT(is_function_scope() && function_ == NULL);
   Variable* function_var =
-      new Variable(this, name, CONST, true, Variable::NORMAL);
+      new Variable(this, name, mode, true, Variable::NORMAL);
   function_ = new(isolate_->zone()) VariableProxy(isolate_, function_var);
   return function_var;
 }
@@ -427,7 +425,10 @@
   // This function handles VAR and CONST modes.  DYNAMIC variables are
   // introduced during variable allocation, INTERNAL variables are allocated
   // explicitly, and TEMPORARY variables are allocated via NewTemporary().
-  ASSERT(mode == VAR || mode == CONST || mode == LET);
+  ASSERT(mode == VAR ||
+         mode == CONST ||
+         mode == CONST_HARMONY ||
+         mode == LET);
   ++num_var_or_const_;
   return variables_.Declare(this, name, mode, true, Variable::NORMAL);
 }
@@ -441,15 +442,13 @@
 }
 
 
-VariableProxy* Scope::NewUnresolved(Handle<String> name,
-                                    bool inside_with,
-                                    int position) {
+VariableProxy* Scope::NewUnresolved(Handle<String> name, int position) {
   // Note that we must not share the unresolved variables with
   // the same name because they may be removed selectively via
   // RemoveUnresolved().
   ASSERT(!already_resolved());
   VariableProxy* proxy = new(isolate_->zone()) VariableProxy(
-      isolate_, name, false, inside_with, position);
+      isolate_, name, false, position);
   unresolved_.Add(proxy);
   return proxy;
 }
@@ -505,17 +504,19 @@
     Declaration* decl = decls_[i];
     if (decl->mode() != VAR) continue;
     Handle<String> name = decl->proxy()->name();
-    bool cond = true;
-    for (Scope* scope = decl->scope(); cond ; scope = scope->outer_scope_) {
+
+    // Iterate through all scopes until and including the declaration scope.
+    Scope* previous = NULL;
+    Scope* current = decl->scope();
+    do {
       // There is a conflict if there exists a non-VAR binding.
-      Variable* other_var = scope->variables_.Lookup(name);
+      Variable* other_var = current->variables_.Lookup(name);
       if (other_var != NULL && other_var->mode() != VAR) {
         return decl;
       }
-
-      // Include declaration scope in the iteration but stop after.
-      if (!scope->is_block_scope() && !scope->is_catch_scope()) cond = false;
-    }
+      previous = current;
+      current = current->outer_scope_;
+    } while (!previous->is_declaration_scope());
   }
   return NULL;
 }
@@ -563,16 +564,11 @@
   // this information in the ScopeInfo and then use it here (by traversing
   // the call chain stack, at compile time).
 
-  bool eval_scope = is_eval_scope();
-  bool outer_scope_calls_eval = false;
   bool outer_scope_calls_non_strict_eval = false;
   if (!is_global_scope()) {
-    context->ComputeEvalScopeInfo(&outer_scope_calls_eval,
-                                  &outer_scope_calls_non_strict_eval);
+    context->ComputeEvalScopeInfo(&outer_scope_calls_non_strict_eval);
   }
-  PropagateScopeInfo(outer_scope_calls_eval,
-                     outer_scope_calls_non_strict_eval,
-                     eval_scope);
+  PropagateScopeInfo(outer_scope_calls_non_strict_eval);
 
   // 2) Resolve variables.
   Scope* global_scope = NULL;
@@ -625,8 +621,7 @@
 
 Scope* Scope::DeclarationScope() {
   Scope* scope = this;
-  while (scope->is_catch_scope() ||
-         scope->is_block_scope()) {
+  while (!scope->is_declaration_scope()) {
     scope = scope->outer_scope();
   }
   return scope;
@@ -641,14 +636,33 @@
 }
 
 
+void Scope::GetNestedScopeChain(
+    List<Handle<SerializedScopeInfo> >* chain,
+    int position) {
+  chain->Add(Handle<SerializedScopeInfo>(GetSerializedScopeInfo()));
+
+  for (int i = 0; i < inner_scopes_.length(); i++) {
+    Scope* scope = inner_scopes_[i];
+    int beg_pos = scope->start_position();
+    int end_pos = scope->end_position();
+    ASSERT(beg_pos >= 0 && end_pos >= 0);
+    if (beg_pos <= position && position <= end_pos) {
+      scope->GetNestedScopeChain(chain, position);
+      return;
+    }
+  }
+}
+
+
 #ifdef DEBUG
-static const char* Header(Scope::Type type) {
+static const char* Header(ScopeType type) {
   switch (type) {
-    case Scope::EVAL_SCOPE: return "eval";
-    case Scope::FUNCTION_SCOPE: return "function";
-    case Scope::GLOBAL_SCOPE: return "global";
-    case Scope::CATCH_SCOPE: return "catch";
-    case Scope::BLOCK_SCOPE: return "block";
+    case EVAL_SCOPE: return "eval";
+    case FUNCTION_SCOPE: return "function";
+    case GLOBAL_SCOPE: return "global";
+    case CATCH_SCOPE: return "catch";
+    case BLOCK_SCOPE: return "block";
+    case WITH_SCOPE: return "with";
   }
   UNREACHABLE();
   return NULL;
@@ -748,14 +762,10 @@
   if (scope_inside_with_) Indent(n1, "// scope inside 'with'\n");
   if (scope_contains_with_) Indent(n1, "// scope contains 'with'\n");
   if (scope_calls_eval_) Indent(n1, "// scope calls 'eval'\n");
-  if (outer_scope_calls_eval_) Indent(n1, "// outer scope calls 'eval'\n");
   if (outer_scope_calls_non_strict_eval_) {
     Indent(n1, "// outer scope calls 'eval' in non-strict context\n");
   }
   if (inner_scope_calls_eval_) Indent(n1, "// inner scope calls 'eval'\n");
-  if (outer_scope_is_eval_scope_) {
-    Indent(n1, "// outer scope is 'eval' scope\n");
-  }
   if (num_stack_slots_ > 0) { Indent(n1, "// ");
   PrintF("%d stack slots\n", num_stack_slots_); }
   if (num_heap_slots_ > 0) { Indent(n1, "// ");
@@ -809,74 +819,68 @@
 }
 
 
-// Lookup a variable starting with this scope. The result is either
-// the statically resolved variable belonging to an outer scope, or
-// NULL. It may be NULL because a) we couldn't find a variable, or b)
-// because the variable is just a guess (and may be shadowed by
-// another variable that is introduced dynamically via an 'eval' call
-// or a 'with' statement).
 Variable* Scope::LookupRecursive(Handle<String> name,
-                                 bool from_inner_scope,
-                                 Variable** invalidated_local) {
-  // If we find a variable, but the current scope calls 'eval', the found
-  // variable may not be the correct one (the 'eval' may introduce a
-  // property with the same name). In that case, remember that the variable
-  // found is just a guess.
-  bool guess = scope_calls_eval_;
-
+                                 Handle<Context> context,
+                                 BindingKind* binding_kind) {
+  ASSERT(binding_kind != NULL);
   // Try to find the variable in this scope.
   Variable* var = LocalLookup(name);
 
+  // We found a variable and we are done. (Even if there is an 'eval' in
+  // this scope which introduces the same variable again, the resulting
+  // variable remains the same.)
   if (var != NULL) {
-    // We found a variable. If this is not an inner lookup, we are done.
-    // (Even if there is an 'eval' in this scope which introduces the
-    // same variable again, the resulting variable remains the same.
-    // Note that enclosing 'with' statements are handled at the call site.)
-    if (!from_inner_scope)
-      return var;
+    *binding_kind = BOUND;
+    return var;
+  }
 
-  } else {
-    // We did not find a variable locally. Check against the function variable,
-    // if any. We can do this for all scopes, since the function variable is
-    // only present - if at all - for function scopes.
-    //
-    // This lookup corresponds to a lookup in the "intermediate" scope sitting
-    // between this scope and the outer scope. (ECMA-262, 3rd., requires that
-    // the name of named function literal is kept in an intermediate scope
-    // in between this scope and the next outer scope.)
-    if (function_ != NULL && function_->name().is_identical_to(name)) {
-      var = function_->var();
+  // We did not find a variable locally. Check against the function variable,
+  // if any. We can do this for all scopes, since the function variable is
+  // only present - if at all - for function scopes.
+  //
+  // This lookup corresponds to a lookup in the "intermediate" scope sitting
+  // between this scope and the outer scope. (ECMA-262, 3rd., requires that
+  // the name of a named function literal is kept in an intermediate scope
+  // in between this scope and the next outer scope.)
+  *binding_kind = UNBOUND;
+  if (function_ != NULL && function_->name().is_identical_to(name)) {
+    var = function_->var();
+    *binding_kind = BOUND;
+  } else if (outer_scope_ != NULL) {
+    var = outer_scope_->LookupRecursive(name, context, binding_kind);
+    if (*binding_kind == BOUND) var->MarkAsAccessedFromInnerScope();
+  }
 
-    } else if (outer_scope_ != NULL) {
-      var = outer_scope_->LookupRecursive(name, true, invalidated_local);
-      // We may have found a variable in an outer scope. However, if
-      // the current scope is inside a 'with', the actual variable may
-      // be a property introduced via the 'with' statement. Then, the
-      // variable we may have found is just a guess.
-      if (scope_inside_with_)
-        guess = true;
+  if (is_with_scope()) {
+    // The current scope is a with scope, so the variable binding can not be
+    // statically resolved. However, note that it was necessary to do a lookup
+    // in the outer scope anyway, because if a binding exists in an outer scope,
+    // the associated variable has to be marked as potentially being accessed
+    // from inside of an inner with scope (the property may not be in the 'with'
+    // object).
+    *binding_kind = DYNAMIC_LOOKUP;
+    return NULL;
+  } else if (is_eval_scope()) {
+    // No local binding was found, no 'with' statements have been encountered
+    // and the code is executed as part of a call to 'eval'. The calling context
+    // contains scope information that we can use to determine if the variable
+    // is global, i.e. the calling context chain contains neither a binding
+    // for the name nor any 'with' contexts.
+    ASSERT(*binding_kind == UNBOUND);
+    *binding_kind = context->GlobalIfNotShadowedByEval(name)
+        ? UNBOUND_EVAL_SHADOWED : DYNAMIC_LOOKUP;
+    return NULL;
+  } else if (calls_non_strict_eval()) {
+    // A variable binding may have been found in an outer scope, but the current
+    // scope makes a non-strict 'eval' call, so the found variable may not be
+    // the correct one (the 'eval' may introduce a binding with the same name).
+    // In that case, change the lookup result to reflect this situation.
+    if (*binding_kind == BOUND) {
+      *binding_kind = BOUND_EVAL_SHADOWED;
+    } else if (*binding_kind == UNBOUND) {
+      *binding_kind = UNBOUND_EVAL_SHADOWED;
     }
-
-    // If we did not find a variable, we are done.
-    if (var == NULL)
-      return NULL;
   }
-
-  ASSERT(var != NULL);
-
-  // If this is a lookup from an inner scope, mark the variable.
-  if (from_inner_scope) {
-    var->MarkAsAccessedFromInnerScope();
-  }
-
-  // If the variable we have found is just a guess, invalidate the
-  // result. If the found variable is local, record that fact so we
-  // can generate fast code to get it if it is not shadowed by eval.
-  if (guess) {
-    if (!var->is_global()) *invalidated_local = var;
-    var = NULL;
-  }
-
   return var;
 }
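
The rewritten LookupRecursive above replaces the old guess/invalidated-local bookkeeping with a single BindingKind result (documented in scopes.h further down). A compact sketch of the classification over a simplified scope chain; the eval-scope and named-function-literal cases are omitted and the types are stand-ins, not V8's:

#include <cstddef>
#include <iostream>
#include <map>
#include <string>

enum BindingKind {
  BOUND, BOUND_EVAL_SHADOWED, UNBOUND, UNBOUND_EVAL_SHADOWED, DYNAMIC_LOOKUP
};

struct Scope {
  Scope* outer;
  bool is_with;
  bool calls_non_strict_eval;
  std::map<std::string, int> locals;  // name -> dummy "variable"

  // Simplified analogue of Scope::LookupRecursive: returns a variable id
  // (or -1) and reports how the reference is bound via *kind.
  int Lookup(const std::string& name, BindingKind* kind) {
    std::map<std::string, int>::iterator it = locals.find(name);
    if (it != locals.end()) { *kind = BOUND; return it->second; }
    *kind = UNBOUND;
    int var = -1;
    if (outer != NULL) var = outer->Lookup(name, kind);
    if (is_with) { *kind = DYNAMIC_LOOKUP; return -1; }
    if (calls_non_strict_eval) {
      if (*kind == BOUND) *kind = BOUND_EVAL_SHADOWED;
      else if (*kind == UNBOUND) *kind = UNBOUND_EVAL_SHADOWED;
    }
    return var;
  }
};

int main() {
  Scope global = {NULL, false, false, {{"x", 1}}};
  Scope evaler = {&global, false, true, {}};        // calls non-strict eval
  Scope inner  = {&evaler, false, false, {{"y", 2}}};

  BindingKind kind;
  inner.Lookup("y", &kind);   // BOUND: found locally
  std::cout << kind << "\n";  // 0
  inner.Lookup("x", &kind);   // BOUND_EVAL_SHADOWED: found, but eval in between
  std::cout << kind << "\n";  // 1
  inner.Lookup("z", &kind);   // UNBOUND_EVAL_SHADOWED: not found, eval in between
  std::cout << kind << "\n";  // 3
}
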
 
@@ -891,71 +895,44 @@
   if (proxy->var() != NULL) return;
 
   // Otherwise, try to resolve the variable.
-  Variable* invalidated_local = NULL;
-  Variable* var = LookupRecursive(proxy->name(), false, &invalidated_local);
+  BindingKind binding_kind;
+  Variable* var = LookupRecursive(proxy->name(), context, &binding_kind);
+  switch (binding_kind) {
+    case BOUND:
+      // We found a variable binding.
+      break;
 
-  if (proxy->inside_with()) {
-    // If we are inside a local 'with' statement, all bets are off
-    // and we cannot resolve the proxy to a local variable even if
-    // we found an outer matching variable.
-    // Note that we must do a lookup anyway, because if we find one,
-    // we must mark that variable as potentially accessed from this
-    // inner scope (the property may not be in the 'with' object).
-    var = NonLocal(proxy->name(), DYNAMIC);
-
-  } else {
-    // We are not inside a local 'with' statement.
-
-    if (var == NULL) {
-      // We did not find the variable. We have a global variable
-      // if we are in the global scope (we know already that we
-      // are outside a 'with' statement) or if there is no way
-      // that the variable might be introduced dynamically (through
-      // a local or outer eval() call, or an outer 'with' statement),
-      // or we don't know about the outer scope (because we are
-      // in an eval scope).
-      if (is_global_scope() ||
-          !(scope_inside_with_ || outer_scope_is_eval_scope_ ||
-            scope_calls_eval_ || outer_scope_calls_eval_)) {
-        // We must have a global variable.
-        ASSERT(global_scope != NULL);
-        var = global_scope->DeclareGlobal(proxy->name());
-
-      } else if (scope_inside_with_) {
-        // If we are inside a with statement we give up and look up
-        // the variable at runtime.
-        var = NonLocal(proxy->name(), DYNAMIC);
-
-      } else if (invalidated_local != NULL) {
-        // No with statements are involved and we found a local
-        // variable that might be shadowed by eval introduced
-        // variables.
-        var = NonLocal(proxy->name(), DYNAMIC_LOCAL);
-        var->set_local_if_not_shadowed(invalidated_local);
-
-      } else if (outer_scope_is_eval_scope_) {
-        // No with statements and we did not find a local and the code
-        // is executed with a call to eval.  The context contains
-        // scope information that we can use to determine if the
-        // variable is global if it is not shadowed by eval-introduced
-        // variables.
-        if (context->GlobalIfNotShadowedByEval(proxy->name())) {
-          var = NonLocal(proxy->name(), DYNAMIC_GLOBAL);
-
-        } else {
-          var = NonLocal(proxy->name(), DYNAMIC);
-        }
-
-      } else {
-        // No with statements and we did not find a local and the code
-        // is not executed with a call to eval.  We know that this
-        // variable is global unless it is shadowed by eval-introduced
-        // variables.
+    case BOUND_EVAL_SHADOWED:
+      // We found a variable binding that might be shadowed
+      // by 'eval' introduced variable bindings.
+      if (var->is_global()) {
         var = NonLocal(proxy->name(), DYNAMIC_GLOBAL);
+      } else {
+        Variable* invalidated = var;
+        var = NonLocal(proxy->name(), DYNAMIC_LOCAL);
+        var->set_local_if_not_shadowed(invalidated);
       }
-    }
+      break;
+
+    case UNBOUND:
+      // No binding has been found. Declare a variable in global scope.
+      ASSERT(global_scope != NULL);
+      var = global_scope->DeclareGlobal(proxy->name());
+      break;
+
+    case UNBOUND_EVAL_SHADOWED:
+      // No binding has been found. But some scope makes a
+      // non-strict 'eval' call.
+      var = NonLocal(proxy->name(), DYNAMIC_GLOBAL);
+      break;
+
+    case DYNAMIC_LOOKUP:
+      // The variable could not be resolved statically.
+      var = NonLocal(proxy->name(), DYNAMIC);
+      break;
   }
 
+  ASSERT(var != NULL);
   proxy->BindTo(var);
 }
 
@@ -976,31 +953,17 @@
 }
 
 
-bool Scope::PropagateScopeInfo(bool outer_scope_calls_eval,
-                               bool outer_scope_calls_non_strict_eval,
-                               bool outer_scope_is_eval_scope) {
-  if (outer_scope_calls_eval) {
-    outer_scope_calls_eval_ = true;
-  }
-
+bool Scope::PropagateScopeInfo(bool outer_scope_calls_non_strict_eval) {
   if (outer_scope_calls_non_strict_eval) {
     outer_scope_calls_non_strict_eval_ = true;
   }
 
-  if (outer_scope_is_eval_scope) {
-    outer_scope_is_eval_scope_ = true;
-  }
-
-  bool calls_eval = scope_calls_eval_ || outer_scope_calls_eval_;
-  bool is_eval = is_eval_scope() || outer_scope_is_eval_scope_;
   bool calls_non_strict_eval =
       (scope_calls_eval_ && !is_strict_mode()) ||
       outer_scope_calls_non_strict_eval_;
   for (int i = 0; i < inner_scopes_.length(); i++) {
     Scope* inner_scope = inner_scopes_[i];
-    if (inner_scope->PropagateScopeInfo(calls_eval,
-                                        calls_non_strict_eval,
-                                        is_eval)) {
+    if (inner_scope->PropagateScopeInfo(calls_non_strict_eval)) {
       inner_scope_calls_eval_ = true;
     }
     if (inner_scope->force_eager_compilation_) {
diff --git a/src/scopes.h b/src/scopes.h
index 7e789b8..a141887 100644
--- a/src/scopes.h
+++ b/src/scopes.h
@@ -89,15 +89,7 @@
   // ---------------------------------------------------------------------------
   // Construction
 
-  enum Type {
-    EVAL_SCOPE,      // The top-level scope for an eval source.
-    FUNCTION_SCOPE,  // The top-level scope for a function.
-    GLOBAL_SCOPE,    // The top-level scope for a program or a top-level eval.
-    CATCH_SCOPE,     // The scope introduced by catch.
-    BLOCK_SCOPE      // The scope introduced by a new block.
-  };
-
-  Scope(Scope* outer_scope, Type type);
+  Scope(Scope* outer_scope, ScopeType type);
 
   // Compute top scope and allocate variables. For lazy compilation the top
   // scope only contains the single lazily compiled function, so this
@@ -110,7 +102,7 @@
   // The scope name is only used for printing/debugging.
   void SetScopeName(Handle<String> scope_name) { scope_name_ = scope_name; }
 
-  void Initialize(bool inside_with);
+  void Initialize();
 
   // Checks if the block scope is redundant, i.e. it does not contain any
   // block scoped declarations. In that case it is removed from the scope
@@ -130,7 +122,7 @@
   // Declare the function variable for a function literal. This variable
   // is in an intermediate scope between this function scope and the
   // outer scope. Only possible for function scopes; at most one variable.
-  Variable* DeclareFunctionVar(Handle<String> name);
+  Variable* DeclareFunctionVar(Handle<String> name, VariableMode mode);
 
   // Declare a parameter in this scope.  When there are duplicated
   // parameters the rightmost one 'wins'.  However, the implementation
@@ -149,7 +141,6 @@
 
   // Create a new unresolved variable.
   VariableProxy* NewUnresolved(Handle<String> name,
-                               bool inside_with,
                                int position = RelocInfo::kNoPosition);
 
   // Remove an unresolved variable. During parsing, an unresolved variable
@@ -199,11 +190,42 @@
   void RecordWithStatement() { scope_contains_with_ = true; }
 
   // Inform the scope that the corresponding code contains an eval call.
-  void RecordEvalCall() { scope_calls_eval_ = true; }
+  void RecordEvalCall() { if (!is_global_scope()) scope_calls_eval_ = true; }
 
-  // Enable strict mode for the scope (unless disabled by a global flag).
-  void EnableStrictMode() {
-    strict_mode_ = FLAG_strict_mode;
+  // Set the strict mode flag (unless disabled by a global flag).
+  void SetStrictModeFlag(StrictModeFlag strict_mode_flag) {
+    strict_mode_flag_ = FLAG_strict_mode ? strict_mode_flag : kNonStrictMode;
+  }
+
+  // Position in the source where this scope begins and ends.
+  //
+  // * For the scope of a with statement
+  //     with (obj) stmt
+  //   start position: start position of first token of 'stmt'
+  //   end position: end position of last token of 'stmt'
+  // * For the scope of a block
+  //     { stmts }
+  //   start position: start position of '{'
+  //   end position: end position of '}'
+  // * For the scope of a function literal or declaration
+  //     function fun(a,b) { stmts }
+  //   start position: start position of '('
+  //   end position: end position of '}'
+  // * For the scope of a catch block
+  //     try { stmts } catch(e) { stmts }
+  //   start position: start position of '('
+  //   end position: end position of ')'
+  // * For the scope of a for-statement
+  //     for (let x ...) stmt
+  //   start position: start position of '('
+  //   end position: end position of last token of 'stmt'
+  int start_position() const { return start_position_; }
+  void set_start_position(int statement_pos) {
+    start_position_ = statement_pos;
+  }
+  int end_position() const { return end_position_; }
+  void set_end_position(int statement_pos) {
+    end_position_ = statement_pos;
   }
 
   // ---------------------------------------------------------------------------
@@ -215,14 +237,20 @@
   bool is_global_scope() const { return type_ == GLOBAL_SCOPE; }
   bool is_catch_scope() const { return type_ == CATCH_SCOPE; }
   bool is_block_scope() const { return type_ == BLOCK_SCOPE; }
-  bool is_strict_mode() const { return strict_mode_; }
+  bool is_with_scope() const { return type_ == WITH_SCOPE; }
+  bool is_declaration_scope() const {
+    return is_eval_scope() || is_function_scope() || is_global_scope();
+  }
+  bool is_strict_mode() const { return strict_mode_flag() == kStrictMode; }
   bool is_strict_mode_eval_scope() const {
     return is_eval_scope() && is_strict_mode();
   }
 
   // Information about which scopes calls eval.
   bool calls_eval() const { return scope_calls_eval_; }
-  bool outer_scope_calls_eval() const { return outer_scope_calls_eval_; }
+  bool calls_non_strict_eval() {
+    return scope_calls_eval_ && !is_strict_mode();
+  }
   bool outer_scope_calls_non_strict_eval() const {
     return outer_scope_calls_non_strict_eval_;
   }
@@ -238,6 +266,12 @@
   // ---------------------------------------------------------------------------
   // Accessors.
 
+  // The type of this scope.
+  ScopeType type() const { return type_; }
+
+  // The strict mode of this scope.
+  StrictModeFlag strict_mode_flag() const { return strict_mode_flag_; }
+
   // The variable corresponding the 'this' value.
   Variable* receiver() { return receiver_; }
 
@@ -264,6 +298,8 @@
   // Declarations list.
   ZoneList<Declaration*>* declarations() { return &decls_; }
 
+  // Inner scope list.
+  ZoneList<Scope*>* inner_scopes() { return &inner_scopes_; }
 
   // ---------------------------------------------------------------------------
   // Variable allocation.
@@ -307,6 +343,13 @@
 
   Handle<SerializedScopeInfo> GetSerializedScopeInfo();
 
+  // Get the chain of nested scopes within this scope for the source statement
+  // position. The scopes will be added to the list from the outermost scope to
+  // the innermost scope. Only nested block, catch or with scopes are tracked
+  // and will be returned, but no inner function scopes.
+  void GetNestedScopeChain(List<Handle<SerializedScopeInfo> >* chain,
+                           int statement_position);
+
   // ---------------------------------------------------------------------------
   // Strict mode support.
   bool IsDeclared(Handle<String> name) {
@@ -330,7 +373,7 @@
  protected:
   friend class ParserFactory;
 
-  explicit Scope(Type type);
+  explicit Scope(ScopeType type);
 
   Isolate* const isolate_;
 
@@ -339,7 +382,7 @@
   ZoneList<Scope*> inner_scopes_;  // the immediately enclosed inner scopes
 
   // The scope type.
-  Type type_;
+  ScopeType type_;
 
   // Debugging support.
   Handle<String> scope_name_;
@@ -380,13 +423,14 @@
   // the 'eval' call site this scope is the declaration scope.
   bool scope_calls_eval_;
   // This scope is a strict mode scope.
-  bool strict_mode_;
+  StrictModeFlag strict_mode_flag_;
+  // Source positions.
+  int start_position_;
+  int end_position_;
 
   // Computed via PropagateScopeInfo.
-  bool outer_scope_calls_eval_;
   bool outer_scope_calls_non_strict_eval_;
   bool inner_scope_calls_eval_;
-  bool outer_scope_is_eval_scope_;
   bool force_eager_compilation_;
 
   // True if it doesn't need scope resolution (e.g., if the scope was
@@ -396,7 +440,7 @@
   // Computed as variables are declared.
   int num_var_or_const_;
 
-  // Computed via AllocateVariables; function scopes only.
+  // Computed via AllocateVariables; function, block and catch scopes only.
   int num_stack_slots_;
   int num_heap_slots_;
 
@@ -409,9 +453,57 @@
   Variable* NonLocal(Handle<String> name, VariableMode mode);
 
   // Variable resolution.
+  // Possible results of a recursive variable lookup telling if and how a
+  // variable is bound. These are returned in the output parameter *binding_kind
+  // of the LookupRecursive function.
+  enum BindingKind {
+    // The variable reference could be statically resolved to a variable binding
+    // which is returned. There is no 'with' statement between the reference and
+    // the binding and no scope between the reference scope (inclusive) and
+    // binding scope (exclusive) makes a non-strict 'eval' call.
+    BOUND,
+
+    // The variable reference could be statically resolved to a variable binding
+    // which is returned. There is no 'with' statement between the reference and
+    // the binding, but some scope between the reference scope (inclusive) and
+    // binding scope (exclusive) makes a non-strict 'eval' call that might
+    // introduce variable bindings shadowing the found one. Thus the
+    // found variable binding is just a guess.
+    BOUND_EVAL_SHADOWED,
+
+    // The variable reference could not be statically resolved to any binding
+    // and thus should be considered referencing a global variable. NULL is
+    // returned. The variable reference is not inside any 'with' statement and
+    // no scope between the reference scope (inclusive) and global scope
+    // (exclusive) makes a non-strict 'eval' call.
+    UNBOUND,
+
+    // The variable reference could not be statically resolved to any binding.
+    // NULL is returned. The variable reference is not inside any 'with'
+    // statement, but some scope between the reference scope (inclusive) and
+    // global scope (exclusive) makes a non-strict 'eval' call that might
+    // introduce a variable binding. Thus the reference should be
+    // considered referencing a global variable unless it is shadowed by an
+    // 'eval' introduced binding.
+    UNBOUND_EVAL_SHADOWED,
+
+    // The variable could not be statically resolved and needs to be looked up
+    // dynamically. NULL is returned. There are two possible reasons:
+    // * A 'with' statement has been encountered and there is no variable
+    //   binding for the name between the variable reference and the 'with'.
+    //   The variable potentially references a property of the 'with' object.
+    // * The code is being executed as part of a call to 'eval' and the calling
+    //   context chain contains either a variable binding for the name or it
+    //   contains a 'with' context.
+    DYNAMIC_LOOKUP
+  };
+
+  // Lookup a variable reference given by name recursively starting with this
+  // scope. If the code is executed because of a call to 'eval', the context
+  // parameter should be set to the calling context of 'eval'.
   Variable* LookupRecursive(Handle<String> name,
-                            bool from_inner_function,
-                            Variable** invalidated_local);
+                            Handle<Context> context,
+                            BindingKind* binding_kind);
   void ResolveVariable(Scope* global_scope,
                        Handle<Context> context,
                        VariableProxy* proxy);
@@ -419,9 +511,7 @@
                                    Handle<Context> context);
 
   // Scope analysis.
-  bool PropagateScopeInfo(bool outer_scope_calls_eval,
-                          bool outer_scope_calls_non_strict_eval,
-                          bool outer_scope_is_eval_scope);
+  bool PropagateScopeInfo(bool outer_scope_calls_non_strict_eval);
   bool HasTrivialContext() const;
 
   // Predicates.
@@ -438,8 +528,10 @@
   void AllocateVariablesRecursively();
 
  private:
-  // Construct a function or block scope based on the scope info.
-  Scope(Scope* inner_scope, Type type, Handle<SerializedScopeInfo> scope_info);
+  // Construct a scope based on the scope info.
+  Scope(Scope* inner_scope,
+        ScopeType type,
+        Handle<SerializedScopeInfo> scope_info);
 
   // Construct a catch scope with a binding for the name.
   Scope(Scope* inner_scope, Handle<String> catch_variable_name);
@@ -451,7 +543,7 @@
     }
   }
 
-  void SetDefaults(Type type,
+  void SetDefaults(ScopeType type,
                    Scope* outer_scope,
                    Handle<SerializedScopeInfo> scope_info);
 };
diff --git a/src/serialize.cc b/src/serialize.cc
index 84ab94a..ba7b2a5 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -318,10 +318,10 @@
 
 
   // Miscellaneous
-  Add(ExternalReference::roots_address(isolate).address(),
+  Add(ExternalReference::roots_array_start(isolate).address(),
       UNCLASSIFIED,
       3,
-      "Heap::roots_address()");
+      "Heap::roots_array_start()");
   Add(ExternalReference::address_of_stack_limit(isolate).address(),
       UNCLASSIFIED,
       4,
@@ -490,6 +490,10 @@
       UNCLASSIFIED,
       44,
       "canonical_nan");
+  Add(ExternalReference::address_of_the_hole_nan().address(),
+      UNCLASSIFIED,
+      45,
+      "the_hole_nan");
 }
 
 
@@ -753,8 +757,13 @@
 void Deserializer::ReadChunk(Object** current,
                              Object** limit,
                              int source_space,
-                             Address address) {
+                             Address current_object_address) {
   Isolate* const isolate = isolate_;
+  bool write_barrier_needed = (current_object_address != NULL &&
+                               source_space != NEW_SPACE &&
+                               source_space != CELL_SPACE &&
+                               source_space != CODE_SPACE &&
+                               source_space != OLD_DATA_SPACE);
   while (current < limit) {
     int data = source_->Get();
     switch (data) {
@@ -774,9 +783,7 @@
         if (where == kNewObject && how == kPlain && within == kStartOfObject) {\
           ASSIGN_DEST_SPACE(space_number)                                      \
           ReadObject(space_number, dest_space, current);                       \
-          emit_write_barrier = (space_number == NEW_SPACE &&                   \
-                                source_space != NEW_SPACE &&                   \
-                                source_space != CELL_SPACE);                   \
+          emit_write_barrier = (space_number == NEW_SPACE);                    \
         } else {                                                               \
           Object* new_object = NULL;  /* May not be a real Object pointer. */  \
           if (where == kNewObject) {                                           \
@@ -784,27 +791,25 @@
             ReadObject(space_number, dest_space, &new_object);                 \
           } else if (where == kRootArray) {                                    \
             int root_id = source_->GetInt();                                   \
-            new_object = isolate->heap()->roots_address()[root_id];            \
+            new_object = isolate->heap()->roots_array_start()[root_id];        \
+            emit_write_barrier = isolate->heap()->InNewSpace(new_object);      \
           } else if (where == kPartialSnapshotCache) {                         \
             int cache_index = source_->GetInt();                               \
             new_object = isolate->serialize_partial_snapshot_cache()           \
                 [cache_index];                                                 \
+            emit_write_barrier = isolate->heap()->InNewSpace(new_object);      \
           } else if (where == kExternalReference) {                            \
             int reference_id = source_->GetInt();                              \
             Address address = external_reference_decoder_->                    \
                 Decode(reference_id);                                          \
             new_object = reinterpret_cast<Object*>(address);                   \
           } else if (where == kBackref) {                                      \
-            emit_write_barrier = (space_number == NEW_SPACE &&                 \
-                                  source_space != NEW_SPACE &&                 \
-                                  source_space != CELL_SPACE);                 \
+            emit_write_barrier = (space_number == NEW_SPACE);                  \
             new_object = GetAddressFromEnd(data & kSpaceMask);                 \
           } else {                                                             \
             ASSERT(where == kFromStart);                                       \
             if (offset_from_start == kUnknownOffsetFromStart) {                \
-              emit_write_barrier = (space_number == NEW_SPACE &&               \
-                                    source_space != NEW_SPACE &&               \
-                                    source_space != CELL_SPACE);               \
+              emit_write_barrier = (space_number == NEW_SPACE);                \
               new_object = GetAddressFromStart(data & kSpaceMask);             \
             } else {                                                           \
               Address object_address = pages_[space_number][0] +               \
@@ -831,12 +836,14 @@
             *current = new_object;                                             \
           }                                                                    \
         }                                                                      \
-        if (emit_write_barrier) {                                              \
-          isolate->heap()->RecordWrite(address, static_cast<int>(              \
-              reinterpret_cast<Address>(current) - address));                  \
+        if (emit_write_barrier && write_barrier_needed) {                      \
+          Address current_address = reinterpret_cast<Address>(current);        \
+          isolate->heap()->RecordWrite(                                        \
+              current_object_address,                                          \
+              static_cast<int>(current_address - current_object_address));     \
         }                                                                      \
         if (!current_was_incremented) {                                        \
-          current++;   /* Increment current if it wasn't done above. */        \
+          current++;                                                           \
         }                                                                      \
         break;                                                                 \
       }                                                                        \
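The hoisted write_barrier_needed flag in ReadChunk above splits the barrier decision in two: a per-object part that depends only on the space being deserialized into, and a per-slot part that checks whether the written value is a new-space object; RecordWrite runs only when both hold. A schematic sketch of the two predicates (space names mirror the code, values are illustrative):

#include <iostream>

enum Space { NEW_SPACE, OLD_POINTER_SPACE, OLD_DATA_SPACE, CODE_SPACE,
             CELL_SPACE, MAP_SPACE, LO_SPACE };

// Computed once per deserialized object: matches the hoisted condition above.
bool WriteBarrierNeeded(Space source_space, bool has_object_address) {
  return has_object_address &&
         source_space != NEW_SPACE &&
         source_space != CELL_SPACE &&
         source_space != CODE_SPACE &&
         source_space != OLD_DATA_SPACE;
}

// Computed per written slot: only references into new space matter.
bool EmitWriteBarrier(Space referenced_space) {
  return referenced_space == NEW_SPACE;
}

int main() {
  // A pointer from an old-pointer-space object to a new-space object must be
  // recorded; anything else is skipped.
  std::cout << (WriteBarrierNeeded(OLD_POINTER_SPACE, true) &&
                EmitWriteBarrier(NEW_SPACE)) << "\n";        // 1
  std::cout << (WriteBarrierNeeded(NEW_SPACE, true) &&
                EmitWriteBarrier(NEW_SPACE)) << "\n";        // 0
}
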
@@ -883,11 +890,17 @@
   CASE_STATEMENT(where, how, within, kLargeCode)                               \
   CASE_BODY(where, how, within, kLargeCode, kUnknownOffsetFromStart)
 
-#define EMIT_COMMON_REFERENCE_PATTERNS(pseudo_space_number,                    \
-                                       space_number,                           \
-                                       offset_from_start)                      \
-  CASE_STATEMENT(kFromStart, kPlain, kStartOfObject, pseudo_space_number)      \
-  CASE_BODY(kFromStart, kPlain, kStartOfObject, space_number, offset_from_start)
+#define FOUR_CASES(byte_code)             \
+  case byte_code:                         \
+  case byte_code + 1:                     \
+  case byte_code + 2:                     \
+  case byte_code + 3:
+
+#define SIXTEEN_CASES(byte_code)          \
+  FOUR_CASES(byte_code)                   \
+  FOUR_CASES(byte_code + 4)               \
+  FOUR_CASES(byte_code + 8)               \
+  FOUR_CASES(byte_code + 12)
 
       // We generate 15 cases and bodies that process special tags that combine
       // the raw data tag and the length into one byte.
@@ -911,6 +924,38 @@
         break;
       }
 
+      SIXTEEN_CASES(kRootArrayLowConstants)
+      SIXTEEN_CASES(kRootArrayHighConstants) {
+        int root_id = RootArrayConstantFromByteCode(data);
+        Object* object = isolate->heap()->roots_array_start()[root_id];
+        ASSERT(!isolate->heap()->InNewSpace(object));
+        *current++ = object;
+        break;
+      }
+
+      case kRepeat: {
+        int repeats = source_->GetInt();
+        Object* object = current[-1];
+        ASSERT(!isolate->heap()->InNewSpace(object));
+        for (int i = 0; i < repeats; i++) current[i] = object;
+        current += repeats;
+        break;
+      }
+
+      STATIC_ASSERT(kRootArrayNumberOfConstantEncodings ==
+                    Heap::kOldSpaceRoots);
+      STATIC_ASSERT(kMaxRepeats == 12);
+      FOUR_CASES(kConstantRepeat)
+      FOUR_CASES(kConstantRepeat + 4)
+      FOUR_CASES(kConstantRepeat + 8) {
+        int repeats = RepeatsForCode(data);
+        Object* object = current[-1];
+        ASSERT(!isolate->heap()->InNewSpace(object));
+        for (int i = 0; i < repeats; i++) current[i] = object;
+        current += repeats;
+        break;
+      }
+
       // Deserialize a new object and write a pointer to it to the current
       // object.
       ONE_PER_SPACE(kNewObject, kPlain, kStartOfObject)
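
The kRepeat and kConstantRepeat cases above add a simple run-length encoding to the snapshot format: the previously written word is duplicated a given number of times, with small counts folded directly into the byte code (kMaxRepeats == 12). A standalone sketch of the decode side, with ordinary ints standing in for object words and an invented byte-code base (V8's actual opcode values and exact code-to-count mapping may differ):

#include <cassert>
#include <vector>

const int kConstantRepeat = 0xd0;  // illustrative byte-code base, not V8's value
const int kMaxRepeats = 12;

// Analogue of RepeatsForCode(): the repeat count is folded into the byte code.
int RepeatsForCode(int byte_code) { return byte_code - kConstantRepeat + 1; }

// Decode a stream of (plain word | repeat op) into a flat vector of words.
void Decode(const std::vector<int>& stream, std::vector<int>* out) {
  for (size_t i = 0; i < stream.size(); ++i) {
    int b = stream[i];
    if (b >= kConstantRepeat && b < kConstantRepeat + kMaxRepeats) {
      int repeats = RepeatsForCode(b);
      int last = out->back();             // current[-1] in the deserializer
      for (int r = 0; r < repeats; ++r) out->push_back(last);
    } else {
      out->push_back(b);                  // a plain word
    }
  }
}

int main() {
  // "7" followed by a repeat-of-3 op expands to four 7s in the output.
  std::vector<int> stream;
  stream.push_back(7);
  stream.push_back(kConstantRepeat + 2);  // RepeatsForCode -> 3
  std::vector<int> out;
  Decode(stream, &out);
  assert(out.size() == 4);
  for (size_t i = 0; i < out.size(); ++i) assert(out[i] == 7);
}
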
@@ -936,9 +981,6 @@
       // start and write a pointer to its first instruction to the current code
       // object.
       ALL_SPACES(kFromStart, kFromCode, kFirstInstruction)
-      // Find an already deserialized object at one of the predetermined popular
-      // offsets from the start and write a pointer to it in the current object.
-      COMMON_REFERENCE_PATTERNS(EMIT_COMMON_REFERENCE_PATTERNS)
       // Find an object in the roots array and write a pointer to it to the
       // current object.
       CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0)
@@ -980,7 +1022,6 @@
 #undef CASE_BODY
 #undef ONE_PER_SPACE
 #undef ALL_SPACES
-#undef EMIT_COMMON_REFERENCE_PATTERNS
 #undef ASSIGN_DEST_SPACE
 
       case kNewPage: {
@@ -1067,7 +1108,8 @@
     : sink_(sink),
       current_root_index_(0),
       external_reference_encoder_(new ExternalReferenceEncoder),
-      large_object_total_(0) {
+      large_object_total_(0),
+      root_index_wave_front_(0) {
   // The serializer is meant to be used only to generate initial heap images
   // from a context in which there is only one isolate.
   ASSERT(Isolate::Current()->IsDefaultIsolate());
@@ -1124,6 +1166,10 @@
   Isolate* isolate = Isolate::Current();
 
   for (Object** current = start; current < end; current++) {
+    if (start == isolate->heap()->roots_array_start()) {
+      root_index_wave_front_ =
+          Max(root_index_wave_front_, static_cast<intptr_t>(current - start));
+    }
     if (reinterpret_cast<Address>(current) ==
         isolate->heap()->store_buffer()->TopAddress()) {
       sink_->Put(kSkip, "Skip");
@@ -1191,10 +1237,12 @@
 }
 
 
-int PartialSerializer::RootIndex(HeapObject* heap_object) {
-  for (int i = 0; i < Heap::kRootListLength; i++) {
-    Object* root = HEAP->roots_address()[i];
-    if (root == heap_object) return i;
+int Serializer::RootIndex(HeapObject* heap_object) {
+  Heap* heap = HEAP;
+  if (heap->InNewSpace(heap_object)) return kInvalidRootIndex;
+  for (int i = 0; i < root_index_wave_front_; i++) {
+    Object* root = heap->roots_array_start()[i];
+    if (!root->IsSmi() && root == heap_object) return i;
   }
   return kInvalidRootIndex;
 }
@@ -1230,18 +1278,8 @@
   // all objects) then we should shift out the bits that are always 0.
   if (!SpaceIsLarge(space)) address >>= kObjectAlignmentBits;
   if (from_start) {
-#define COMMON_REFS_CASE(pseudo_space, actual_space, offset)                   \
-    if (space == actual_space && address == offset &&                          \
-        how_to_code == kPlain && where_to_point == kStartOfObject) {           \
-      sink_->Put(kFromStart + how_to_code + where_to_point +                   \
-                 pseudo_space, "RefSer");                                      \
-    } else  /* NOLINT */
-    COMMON_REFERENCE_PATTERNS(COMMON_REFS_CASE)
-#undef COMMON_REFS_CASE
-    {  /* NOLINT */
-      sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer");
-      sink_->PutInt(address, "address");
-    }
+    sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer");
+    sink_->PutInt(address, "address");
   } else {
     sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer");
     sink_->PutInt(address, "address");
@@ -1256,6 +1294,12 @@
   CHECK(o->IsHeapObject());
   HeapObject* heap_object = HeapObject::cast(o);
 
+  int root_index;
+  if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) {
+    PutRoot(root_index, heap_object, how_to_code, where_to_point);
+    return;
+  }
+
   if (address_mapper_.IsMapped(heap_object)) {
     int space = SpaceOfAlreadySerializedObject(heap_object);
     int address = address_mapper_.MappedTo(heap_object);
@@ -1286,6 +1330,28 @@
 }
 
 
+void Serializer::PutRoot(int root_index,
+                         HeapObject* object,
+                         SerializerDeserializer::HowToCode how_to_code,
+                         SerializerDeserializer::WhereToPoint where_to_point) {
+  if (how_to_code == kPlain &&
+      where_to_point == kStartOfObject &&
+      root_index < kRootArrayNumberOfConstantEncodings &&
+      !HEAP->InNewSpace(object)) {
+    if (root_index < kRootArrayNumberOfLowConstantEncodings) {
+      sink_->Put(kRootArrayLowConstants + root_index, "RootLoConstant");
+    } else {
+      sink_->Put(kRootArrayHighConstants + root_index -
+                     kRootArrayNumberOfLowConstantEncodings,
+                 "RootHiConstant");
+    }
+  } else {
+    sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
+    sink_->PutInt(root_index, "root_index");
+  }
+}
+
+
 void PartialSerializer::SerializeObject(
     Object* o,
     HowToCode how_to_code,
@@ -1295,8 +1361,7 @@
 
   int root_index;
   if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) {
-    sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
-    sink_->PutInt(root_index, "root_index");
+    PutRoot(root_index, heap_object, how_to_code, where_to_point);
     return;
   }
 
@@ -1374,9 +1439,33 @@
     if (current < end) OutputRawData(reinterpret_cast<Address>(current));
 
     while (current < end && !(*current)->IsSmi()) {
-      serializer_->SerializeObject(*current, kPlain, kStartOfObject);
-      bytes_processed_so_far_ += kPointerSize;
-      current++;
+      HeapObject* current_contents = HeapObject::cast(*current);
+      int root_index = serializer_->RootIndex(current_contents);
+      // Repeats are not subject to the write barrier so there are only some
+      // objects that can be used in a repeat encoding.  These are the early
+      // ones in the root array that are never in new space.
+      if (current != start &&
+          root_index != kInvalidRootIndex &&
+          root_index < kRootArrayNumberOfConstantEncodings &&
+          current_contents == current[-1]) {
+        ASSERT(!HEAP->InNewSpace(current_contents));
+        int repeat_count = 1;
+        while (current < end - 1 && current[repeat_count] == current_contents) {
+          repeat_count++;
+        }
+        current += repeat_count;
+        bytes_processed_so_far_ += repeat_count * kPointerSize;
+        if (repeat_count > kMaxRepeats) {
+          sink_->Put(kRepeat, "SerializeRepeats");
+          sink_->PutInt(repeat_count, "SerializeRepeats");
+        } else {
+          sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats");
+        }
+      } else {
+        serializer_->SerializeObject(current_contents, kPlain, kStartOfObject);
+        bytes_processed_so_far_ += kPointerSize;
+        current++;
+      }
     }
   }
 }
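
For reference (not part of the patch), a minimal self-contained sketch of the repeat byte codes used above: 0x74-0x7f encode short runs of 1-12 identical words in a single byte, while longer runs fall back to kRepeat (0x73) followed by an explicit count, mirroring how the serializer loop above chooses between CodeForRepeats and kRepeat.

// Illustrative sketch only; constants mirror the values added to
// src/serialize.h in this patch.
#include <cassert>
#include <cstdio>

static const int kRepeat = 0x73;                 // count follows as an int
static const int kConstantRepeat = 0x74;         // 0x74..0x7f: count in the byte
static const int kMaxRepeats = 0x7f - 0x73;      // == 12

static int CodeForRepeats(int repeats) {
  assert(repeats >= 1 && repeats <= kMaxRepeats);
  return 0x73 + repeats;
}

static int RepeatsForCode(int byte_code) {
  assert(byte_code >= kConstantRepeat && byte_code <= 0x7f);
  return byte_code - 0x73;
}

int main() {
  // A run of 5 identical root-array words is emitted as the single byte 0x78.
  assert(CodeForRepeats(5) == 0x78 && RepeatsForCode(0x78) == 5);
  // A run longer than 12 uses the two-part form: the kRepeat tag plus a count.
  int run = 40;
  if (run > kMaxRepeats) {
    printf("emit byte 0x%x then count %d\n", kRepeat, run);
  }
  return 0;
}
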
diff --git a/src/serialize.h b/src/serialize.h
index c070923..49695ec 100644
--- a/src/serialize.h
+++ b/src/serialize.h
@@ -187,24 +187,6 @@
 };
 
 
-// It is very common to have a reference to objects at certain offsets in the
-// heap.  These offsets have been determined experimentally.  We code
-// references to such objects in a single byte that encodes the way the pointer
-// is written (only plain pointers allowed), the space number and the offset.
-// This only works for objects in the first page of a space.  Don't use this for
-// things in newspace since it bypasses the write barrier.
-
-static const int k64 = (sizeof(uintptr_t) - 4) / 4;
-
-#define COMMON_REFERENCE_PATTERNS(f)                               \
-  f(kNumberOfSpaces, 2, (11 - k64))                                \
-  f((kNumberOfSpaces + 1), 2, 0)                                   \
-  f((kNumberOfSpaces + 2), 2, (142 - 16 * k64))                    \
-  f((kNumberOfSpaces + 3), 2, (74 - 15 * k64))                     \
-  f((kNumberOfSpaces + 4), 2, 5)                                   \
-  f((kNumberOfSpaces + 5), 1, 135)                                 \
-  f((kNumberOfSpaces + 6), 2, (228 - 39 * k64))
-
 #define COMMON_RAW_LENGTHS(f)        \
   f(1, 1)  \
   f(2, 2)  \
@@ -242,7 +224,7 @@
     // 0xd-0xf                         Free.
     kBackref = 0x10,                 // Object is described relative to end.
     // 0x11-0x18                       One per space.
-    // 0x19-0x1f                       Common backref offsets.
+    // 0x19-0x1f                       Free.
     kFromStart = 0x20,              // Object is described relative to start.
     // 0x21-0x28                       One per space.
     // 0x29-0x2f                       Free.
@@ -279,9 +261,29 @@
   // is referred to from external strings in the snapshot.
   static const int kNativesStringResource = 0x71;
   static const int kNewPage = 0x72;
-  // 0x73-0x7f                            Free.
-  // 0xb0-0xbf                            Free.
-  // 0xf0-0xff                            Free.
+  static const int kRepeat = 0x73;
+  static const int kConstantRepeat = 0x74;
+  // 0x74-0x7f            Repeat last word (subtract 0x73 to get the count).
+  static const int kMaxRepeats = 0x7f - 0x73;
+  static int CodeForRepeats(int repeats) {
+    ASSERT(repeats >= 1 && repeats <= kMaxRepeats);
+    return 0x73 + repeats;
+  }
+  static int RepeatsForCode(int byte_code) {
+    ASSERT(byte_code >= kConstantRepeat && byte_code <= 0x7f);
+    return byte_code - 0x73;
+  }
+  static const int kRootArrayLowConstants = 0xb0;
+  // 0xb0-0xbf            Things from the first 16 elements of the root array.
+  static const int kRootArrayHighConstants = 0xf0;
+  // 0xf0-0xff            Things from the next 16 elements of the root array.
+  static const int kRootArrayNumberOfConstantEncodings = 0x20;
+  static const int kRootArrayNumberOfLowConstantEncodings = 0x10;
+  static int RootArrayConstantFromByteCode(int byte_code) {
+    int constant = (byte_code & 0xf) | ((byte_code & 0x40) >> 2);
+    ASSERT(constant >= 0 && constant < kRootArrayNumberOfConstantEncodings);
+    return constant;
+  }
 
 
   static const int kLargeData = LAST_SPACE;
@@ -354,7 +356,13 @@
     UNREACHABLE();
   }
 
-  void ReadChunk(Object** start, Object** end, int space, Address address);
+  // Fills in some heap data in an area from start to end (non-inclusive).  The
+  // space id is used for the write barrier.  The object_address is the address
+  // of the object we are writing into, or NULL if we are not writing into an
+  // object, ie if we are writing a series of tagged values that are not on the
+  // heap.
+  void ReadChunk(
+      Object** start, Object** end, int space, Address object_address);
   HeapObject* GetAddressFromStart(int space);
   inline HeapObject* GetAddressFromEnd(int space);
   Address Allocate(int space_number, Space* space, int size);
@@ -475,14 +483,22 @@
   static void TooLateToEnableNow() { too_late_to_enable_now_ = true; }
   static bool enabled() { return serialization_enabled_; }
   SerializationAddressMapper* address_mapper() { return &address_mapper_; }
+  void PutRoot(
+      int index, HeapObject* object, HowToCode how, WhereToPoint where);
 #ifdef DEBUG
   virtual void Synchronize(const char* tag);
 #endif
 
  protected:
   static const int kInvalidRootIndex = -1;
-  virtual int RootIndex(HeapObject* heap_object) = 0;
+
+  int RootIndex(HeapObject* heap_object);
   virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) = 0;
+  intptr_t root_index_wave_front() { return root_index_wave_front_; }
+  void set_root_index_wave_front(intptr_t value) {
+    ASSERT(value >= root_index_wave_front_);
+    root_index_wave_front_ = value;
+  }
 
   class ObjectSerializer : public ObjectVisitor {
    public:
@@ -558,6 +574,7 @@
   static bool too_late_to_enable_now_;
   int large_object_total_;
   SerializationAddressMapper address_mapper_;
+  intptr_t root_index_wave_front_;
 
   friend class ObjectSerializer;
   friend class Deserializer;
@@ -572,6 +589,7 @@
                     SnapshotByteSink* sink)
     : Serializer(sink),
       startup_serializer_(startup_snapshot_serializer) {
+    set_root_index_wave_front(Heap::kStrongRootListLength);
   }
 
   // Serialize the objects reachable from a single object pointer.
@@ -581,7 +599,6 @@
                                WhereToPoint where_to_point);
 
  protected:
-  virtual int RootIndex(HeapObject* o);
   virtual int PartialSnapshotCacheIndex(HeapObject* o);
   virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) {
     // Scripts should be referred only through shared function infos.  We can't
@@ -606,7 +623,7 @@
   explicit StartupSerializer(SnapshotByteSink* sink) : Serializer(sink) {
     // Clear the cache of objects used by the partial snapshot.  After the
     // strong roots have been serialized we can create a partial snapshot
-    // which will repopulate the cache with objects neede by that partial
+    // which will repopulate the cache with objects needed by that partial
     // snapshot.
     Isolate::Current()->set_serialize_partial_snapshot_cache_length(0);
   }
@@ -625,7 +642,6 @@
   }
 
  private:
-  virtual int RootIndex(HeapObject* o) { return kInvalidRootIndex; }
   virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) {
     return false;
   }
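
For reference (not part of the patch), a small sketch of the single-byte root-array constant encoding defined above: bytes 0xb0-0xbf name the first 16 roots and 0xf0-0xff the next 16, and the decoder recombines the low nibble with bit 0x40 of the byte code to recover an index in 0..31.

// Illustrative sketch only; Encode mirrors Serializer::PutRoot and Decode
// mirrors RootArrayConstantFromByteCode from this patch.
#include <cassert>

static const int kRootArrayLowConstants = 0xb0;
static const int kRootArrayHighConstants = 0xf0;
static const int kRootArrayNumberOfLowConstantEncodings = 0x10;
static const int kRootArrayNumberOfConstantEncodings = 0x20;

static int Encode(int root_index) {
  // One byte for each of the first 32 old-space roots.
  return (root_index < kRootArrayNumberOfLowConstantEncodings)
      ? kRootArrayLowConstants + root_index
      : kRootArrayHighConstants + root_index -
            kRootArrayNumberOfLowConstantEncodings;
}

static int Decode(int byte_code) {
  // 0xb? has bit 0x40 clear (indices 0..15); 0xf? has it set, adding 16.
  return (byte_code & 0xf) | ((byte_code & 0x40) >> 2);
}

int main() {
  for (int i = 0; i < kRootArrayNumberOfConstantEncodings; i++) {
    assert(Decode(Encode(i)) == i);  // round-trips for all 32 constants
  }
  assert(Encode(3) == 0xb3 && Encode(19) == 0xf3);
  return 0;
}
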
diff --git a/src/spaces-inl.h b/src/spaces-inl.h
index d9e6053..1973b3a 100644
--- a/src/spaces-inl.h
+++ b/src/spaces-inl.h
@@ -257,16 +257,12 @@
   if (new_top > allocation_info_.limit) return NULL;
 
   allocation_info_.top = new_top;
-  ASSERT(allocation_info_.VerifyPagedAllocation());
-  ASSERT(current_top != NULL);
   return HeapObject::FromAddress(current_top);
 }
 
 
 // Raw allocation.
 MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) {
-  ASSERT(HasBeenSetup());
-  ASSERT_OBJECT_SIZE(size_in_bytes);
   HeapObject* object = AllocateLinearly(size_in_bytes);
   if (object != NULL) {
     if (identity() == CODE_SPACE) {
diff --git a/src/spaces.cc b/src/spaces.cc
index 61b3181..f467f71 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -95,10 +95,6 @@
   cur_end_ = end;
   page_mode_ = mode;
   size_func_ = size_f;
-
-#ifdef DEBUG
-  Verify();
-#endif
 }
 
 
@@ -123,13 +119,6 @@
 }
 
 
-#ifdef DEBUG
-void HeapObjectIterator::Verify() {
-  // TODO(gc): We should do something here.
-}
-#endif
-
-
 // -----------------------------------------------------------------------------
 // CodeRange
 
@@ -1909,11 +1898,24 @@
 
 bool NewSpace::ReserveSpace(int bytes) {
   // We can't reliably unpack a partial snapshot that needs more new space
-  // space than the minimum NewSpace size.
+  // than the minimum NewSpace size.  The limit can be set lower than
+  // the end of new space either because there is more space on the next page
+  // or because we have lowered the limit in order to get periodic incremental
+  // marking.  The most reliable way to ensure that there is linear space is
+  // to do the allocation, then rewind the limit.
   ASSERT(bytes <= InitialCapacity());
-  Address limit = allocation_info_.limit;
+  MaybeObject* maybe = AllocateRawInternal(bytes);
+  Object* object = NULL;
+  if (!maybe->ToObject(&object)) return false;
+  HeapObject* allocation = HeapObject::cast(object);
   Address top = allocation_info_.top;
-  return limit - top >= bytes;
+  if ((top - bytes) == allocation->address()) {
+    allocation_info_.top = allocation->address();
+    return true;
+  }
+  // There may be a borderline case here where the allocation succeeded, but
+  // the limit and top have moved on to a new page.  In that case we try again.
+  return ReserveSpace(bytes);
 }
 
 
@@ -2278,8 +2280,11 @@
 // -----------------------------------------------------------------------------
 // LargeObjectSpace
 
-LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
+LargeObjectSpace::LargeObjectSpace(Heap* heap,
+                                   intptr_t max_capacity,
+                                   AllocationSpace id)
     : Space(heap, id, NOT_EXECUTABLE),  // Managed on a per-allocation basis
+      max_capacity_(max_capacity),
       first_page_(NULL),
       size_(0),
       page_count_(0),
@@ -2319,6 +2324,10 @@
     return Failure::RetryAfterGC(identity());
   }
 
+  if (Size() + object_size > max_capacity_) {
+    return Failure::RetryAfterGC(identity());
+  }
+
   LargePage* page = heap()->isolate()->memory_allocator()->
       AllocateLargePage(object_size, executable, this);
   if (page == NULL) return Failure::RetryAfterGC(identity());
diff --git a/src/spaces.h b/src/spaces.h
index ce8e382..45e008c 100644
--- a/src/spaces.h
+++ b/src/spaces.h
@@ -459,7 +459,6 @@
     live_byte_count_ = 0;
   }
   void IncrementLiveBytes(int by) {
-    ASSERT_LE(static_cast<unsigned>(live_byte_count_), size_);
     if (FLAG_gc_verbose) {
       printf("UpdateLiveBytes:%p:%x%c=%x->%x\n",
              static_cast<void*>(this), live_byte_count_,
@@ -642,7 +641,6 @@
   // [page_addr + kObjectStartOffset .. page_addr + kPageSize].
   INLINE(static Page* FromAllocationTop(Address top)) {
     Page* p = FromAddress(top - kPointerSize);
-    ASSERT_PAGE_OFFSET(p->Offset(top));
     return p;
   }
 
@@ -666,7 +664,6 @@
   // Returns the offset of a given address to this page.
   INLINE(int Offset(Address a)) {
     int offset = static_cast<int>(a - address());
-    ASSERT_PAGE_OFFSET(offset);
     return offset;
   }
 
@@ -1134,11 +1131,6 @@
                          Address end,
                          PageMode mode,
                          HeapObjectCallback size_func);
-
-#ifdef DEBUG
-  // Verifies whether fields have valid values.
-  void Verify();
-#endif
 };
 
 
@@ -1741,7 +1733,6 @@
         reinterpret_cast<Address>(reinterpret_cast<uintptr_t>(address_in_page) &
                                   ~Page::kPageAlignmentMask);
     NewSpacePage* page = reinterpret_cast<NewSpacePage*>(page_start);
-    ASSERT(page->InNewSpace());
     return page;
   }
 
@@ -1818,7 +1809,6 @@
 
   // Returns the start address of the current page of the space.
   Address page_low() {
-    ASSERT(anchor_.next_page() != &anchor_);
     return current_page_->body();
   }
 
@@ -2084,7 +2074,7 @@
 
   // Return the current capacity of a semispace.
   intptr_t EffectiveCapacity() {
-    ASSERT(to_space_.Capacity() == from_space_.Capacity());
+    SLOW_ASSERT(to_space_.Capacity() == from_space_.Capacity());
     return (to_space_.Capacity() / Page::kPageSize) * Page::kObjectAreaSize;
   }
 
@@ -2100,10 +2090,9 @@
     return Capacity();
   }
 
-  // Return the available bytes without growing or switching page in the
-  // active semispace.
+  // Return the available bytes without growing.
   intptr_t Available() {
-    return allocation_info_.limit - allocation_info_.top;
+    return Capacity() - Size();
   }
 
   // Return the maximum capacity of a semispace.
@@ -2317,9 +2306,9 @@
 // For contiguous spaces, top should be in the space (or at the end) and limit
 // should be the end of the space.
 #define ASSERT_SEMISPACE_ALLOCATION_INFO(info, space) \
-  ASSERT((space).page_low() <= (info).top             \
-         && (info).top <= (space).page_high()         \
-         && (info).limit <= (space).page_high())
+  SLOW_ASSERT((space).page_low() <= (info).top             \
+              && (info).top <= (space).page_high()         \
+              && (info).limit <= (space).page_high())
 
 
 // -----------------------------------------------------------------------------
@@ -2447,7 +2436,7 @@
 
 class LargeObjectSpace : public Space {
  public:
-  LargeObjectSpace(Heap* heap, AllocationSpace id);
+  LargeObjectSpace(Heap* heap, intptr_t max_capacity, AllocationSpace id);
   virtual ~LargeObjectSpace() {}
 
   // Initializes internal data structures.
@@ -2517,6 +2506,7 @@
   bool SlowContains(Address addr) { return !FindObject(addr)->IsFailure(); }
 
  private:
+  intptr_t max_capacity_;
   // The head of the linked list of large object chunks.
   LargePage* first_page_;
   intptr_t size_;  // allocated bytes
diff --git a/src/store-buffer-inl.h b/src/store-buffer-inl.h
index 34f35a4..dd65cbc 100644
--- a/src/store-buffer-inl.h
+++ b/src/store-buffer-inl.h
@@ -55,10 +55,10 @@
 
 void StoreBuffer::EnterDirectlyIntoStoreBuffer(Address addr) {
   if (store_buffer_rebuilding_enabled_) {
-    ASSERT(!heap_->cell_space()->Contains(addr));
-    ASSERT(!heap_->code_space()->Contains(addr));
-    ASSERT(!heap_->old_data_space()->Contains(addr));
-    ASSERT(!heap_->new_space()->Contains(addr));
+    SLOW_ASSERT(!heap_->cell_space()->Contains(addr) &&
+                !heap_->code_space()->Contains(addr) &&
+                !heap_->old_data_space()->Contains(addr) &&
+                !heap_->new_space()->Contains(addr));
     Address* top = old_top_;
     *top++ = addr;
     old_top_ = top;
diff --git a/src/store-buffer.cc b/src/store-buffer.cc
index ab810e4..7c8b5f2 100644
--- a/src/store-buffer.cc
+++ b/src/store-buffer.cc
@@ -401,7 +401,9 @@
 
 void StoreBuffer::GCEpilogue() {
   during_gc_ = false;
-  Verify();
+  if (FLAG_verify_heap) {
+    Verify();
+  }
 }
 
 
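The stub-cache.cc changes that follow convert the StubCache::Compute* entry points from raw MaybeObject* returns to Handle<Code>, adding small wrappers that route not-yet-handlified compiler calls through CALL_HEAP_FUNCTION. As a rough sketch of the wrapping idea only, with every name below a made-up stand-in rather than the real macro internals: run the raw call, retry once after a GC if allocation failed, and hand the caller a handle instead of a raw result.

// Illustrative, hypothetical sketch; not the real CALL_HEAP_FUNCTION code.
#include <functional>

struct Code {};                            // stand-in for v8::internal::Code
struct CodeHandle { Code* location; };     // stand-in for Handle<Code>
struct RawResult { bool ok; Code* code; }; // stand-in for MaybeObject*

CodeHandle RetryAndWrap(const std::function<RawResult()>& raw_compile,
                        const std::function<void()>& collect_garbage) {
  RawResult r = raw_compile();
  if (!r.ok) {              // allocation failed: free space, then retry once
    collect_garbage();
    r = raw_compile();
  }
  return CodeHandle{r.ok ? r.code : nullptr};
}
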
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 67451f2..139bc2d 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -109,8 +109,8 @@
 }
 
 
-MaybeObject* StubCache::ComputeLoadNonexistent(String* name,
-                                               JSObject* receiver) {
+Handle<Code> StubCache::ComputeLoadNonexistent(Handle<String> name,
+                                               Handle<JSObject> receiver) {
   ASSERT(receiver->IsGlobalObject() || receiver->HasFastProperties());
   // If no global objects are present in the prototype chain, the load
   // nonexistent IC stub can be shared for all names for a given map
@@ -118,385 +118,328 @@
   // there are global objects involved, we need to check global
   // property cells in the stub and therefore the stub will be
   // specific to the name.
-  String* cache_name = heap()->empty_string();
+  Handle<String> cache_name = factory()->empty_string();
   if (receiver->IsGlobalObject()) cache_name = name;
-  JSObject* last = receiver;
+  Handle<JSObject> last = receiver;
   while (last->GetPrototype() != heap()->null_value()) {
-    last = JSObject::cast(last->GetPrototype());
+    last = Handle<JSObject>(JSObject::cast(last->GetPrototype()));
     if (last->IsGlobalObject()) cache_name = name;
   }
   // Compile the stub that is either shared for all names or
   // name specific if there are global objects involved.
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::LOAD_IC, NONEXISTENT);
-  Object* code = receiver->map()->FindInCodeCache(cache_name, flags);
-  if (code->IsUndefined()) {
-    LoadStubCompiler compiler;
-    { MaybeObject* maybe_code =
-          compiler.CompileLoadNonexistent(cache_name, receiver, last);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), cache_name));
-    GDBJIT(AddCode(GDBJITInterface::LOAD_IC, cache_name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(cache_name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*cache_name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  LoadStubCompiler compiler(isolate_);
+  Handle<Code> code =
+      compiler.CompileLoadNonexistent(cache_name, receiver, last);
+  PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *cache_name));
+  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *cache_name, *code));
+  JSObject::UpdateMapCodeCache(receiver, cache_name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeLoadField(String* name,
-                                         JSObject* receiver,
-                                         JSObject* holder,
+Handle<Code> StubCache::ComputeLoadField(Handle<String> name,
+                                         Handle<JSObject> receiver,
+                                         Handle<JSObject> holder,
                                          int field_index) {
-  ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
+  ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
   Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, FIELD);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    LoadStubCompiler compiler;
-    { MaybeObject* maybe_code =
-          compiler.CompileLoadField(receiver, holder, field_index, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  LoadStubCompiler compiler(isolate_);
+  Handle<Code> code =
+      compiler.CompileLoadField(receiver, holder, field_index, name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeLoadCallback(String* name,
-                                            JSObject* receiver,
-                                            JSObject* holder,
-                                            AccessorInfo* callback) {
+Handle<Code> LoadStubCompiler::CompileLoadCallback(
+    Handle<String> name,
+    Handle<JSObject> object,
+    Handle<JSObject> holder,
+    Handle<AccessorInfo> callback) {
+  CALL_HEAP_FUNCTION(isolate(),
+                     (set_failure(NULL),
+                      CompileLoadCallback(*name, *object, *holder, *callback)),
+                     Code);
+}
+
+
+Handle<Code> StubCache::ComputeLoadCallback(Handle<String> name,
+                                            Handle<JSObject> receiver,
+                                            Handle<JSObject> holder,
+                                            Handle<AccessorInfo> callback) {
   ASSERT(v8::ToCData<Address>(callback->getter()) != 0);
-  ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
+  ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
   Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, CALLBACKS);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    LoadStubCompiler compiler;
-    { MaybeObject* maybe_code =
-          compiler.CompileLoadCallback(name, receiver, holder, callback);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  LoadStubCompiler compiler(isolate_);
+  Handle<Code> code =
+      compiler.CompileLoadCallback(name, receiver, holder, callback);
+  PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeLoadConstant(String* name,
-                                            JSObject* receiver,
-                                            JSObject* holder,
-                                            Object* value) {
-  ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
+Handle<Code> StubCache::ComputeLoadConstant(Handle<String> name,
+                                            Handle<JSObject> receiver,
+                                            Handle<JSObject> holder,
+                                            Handle<Object> value) {
+  ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::LOAD_IC, CONSTANT_FUNCTION);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    LoadStubCompiler compiler;
-    { MaybeObject* maybe_code =
-          compiler.CompileLoadConstant(receiver, holder, value, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  LoadStubCompiler compiler(isolate_);
+  Handle<Code> code =
+        compiler.CompileLoadConstant(receiver, holder, value, name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeLoadInterceptor(String* name,
-                                               JSObject* receiver,
-                                               JSObject* holder) {
-  ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
+Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object,
+                                                      Handle<JSObject> holder,
+                                                      Handle<String> name) {
+  CALL_HEAP_FUNCTION(isolate(),
+                     (set_failure(NULL),
+                      CompileLoadInterceptor(*object, *holder, *name)),
+                     Code);
+}
+
+
+Handle<Code> StubCache::ComputeLoadInterceptor(Handle<String> name,
+                                               Handle<JSObject> receiver,
+                                               Handle<JSObject> holder) {
+  ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
   Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, INTERCEPTOR);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    LoadStubCompiler compiler;
-    { MaybeObject* maybe_code =
-          compiler.CompileLoadInterceptor(receiver, holder, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  LoadStubCompiler compiler(isolate_);
+  Handle<Code> code =
+        compiler.CompileLoadInterceptor(receiver, holder, name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeLoadNormal() {
-  return isolate_->builtins()->builtin(Builtins::kLoadIC_Normal);
+Handle<Code> StubCache::ComputeLoadNormal() {
+  return isolate_->builtins()->LoadIC_Normal();
 }
 
 
-MaybeObject* StubCache::ComputeLoadGlobal(String* name,
-                                          JSObject* receiver,
-                                          GlobalObject* holder,
-                                          JSGlobalPropertyCell* cell,
+Handle<Code> StubCache::ComputeLoadGlobal(Handle<String> name,
+                                          Handle<JSObject> receiver,
+                                          Handle<GlobalObject> holder,
+                                          Handle<JSGlobalPropertyCell> cell,
                                           bool is_dont_delete) {
-  ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
+  ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
   Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    LoadStubCompiler compiler;
-    { MaybeObject* maybe_code = compiler.CompileLoadGlobal(receiver,
-                                                           holder,
-                                                           cell,
-                                                           name,
-                                                           is_dont_delete);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  LoadStubCompiler compiler(isolate_);
+  Handle<Code> code =
+      compiler.CompileLoadGlobal(receiver, holder, cell, name, is_dont_delete);
+  PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeKeyedLoadField(String* name,
-                                              JSObject* receiver,
-                                              JSObject* holder,
+Handle<Code> StubCache::ComputeKeyedLoadField(Handle<String> name,
+                                              Handle<JSObject> receiver,
+                                              Handle<JSObject> holder,
                                               int field_index) {
-  ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
+  ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
   Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, FIELD);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    KeyedLoadStubCompiler compiler;
-    { MaybeObject* maybe_code =
-          compiler.CompileLoadField(name, receiver, holder, field_index);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  KeyedLoadStubCompiler compiler(isolate_);
+  Handle<Code> code =
+      compiler.CompileLoadField(name, receiver, holder, field_index);
+  PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeKeyedLoadConstant(String* name,
-                                                 JSObject* receiver,
-                                                 JSObject* holder,
-                                                 Object* value) {
-  ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
+Handle<Code> StubCache::ComputeKeyedLoadConstant(Handle<String> name,
+                                                 Handle<JSObject> receiver,
+                                                 Handle<JSObject> holder,
+                                                 Handle<Object> value) {
+  ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CONSTANT_FUNCTION);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    KeyedLoadStubCompiler compiler;
-    { MaybeObject* maybe_code =
-          compiler.CompileLoadConstant(name, receiver, holder, value);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  KeyedLoadStubCompiler compiler(isolate_);
+  Handle<Code> code =
+      compiler.CompileLoadConstant(name, receiver, holder, value);
+  PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeKeyedLoadInterceptor(String* name,
-                                                    JSObject* receiver,
-                                                    JSObject* holder) {
-  ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
+Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
+    Handle<JSObject> object,
+    Handle<JSObject> holder,
+    Handle<String> name) {
+  CALL_HEAP_FUNCTION(isolate(),
+                     (set_failure(NULL),
+                      CompileLoadInterceptor(*object, *holder, *name)),
+                     Code);
+}
+
+
+Handle<Code> StubCache::ComputeKeyedLoadInterceptor(Handle<String> name,
+                                                    Handle<JSObject> receiver,
+                                                    Handle<JSObject> holder) {
+  ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, INTERCEPTOR);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    KeyedLoadStubCompiler compiler;
-    { MaybeObject* maybe_code =
-          compiler.CompileLoadInterceptor(receiver, holder, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  KeyedLoadStubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileLoadInterceptor(receiver, holder, name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeKeyedLoadCallback(String* name,
-                                                 JSObject* receiver,
-                                                 JSObject* holder,
-                                                 AccessorInfo* callback) {
-  ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
+Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
+    Handle<String> name,
+    Handle<JSObject> object,
+    Handle<JSObject> holder,
+    Handle<AccessorInfo> callback) {
+  CALL_HEAP_FUNCTION(isolate(),
+                     (set_failure(NULL),
+                      CompileLoadCallback(*name, *object, *holder, *callback)),
+                     Code);
+}
+
+
+Handle<Code> StubCache::ComputeKeyedLoadCallback(
+    Handle<String> name,
+    Handle<JSObject> receiver,
+    Handle<JSObject> holder,
+    Handle<AccessorInfo> callback) {
+  ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    KeyedLoadStubCompiler compiler;
-    { MaybeObject* maybe_code =
-          compiler.CompileLoadCallback(name, receiver, holder, callback);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  KeyedLoadStubCompiler compiler(isolate_);
+  Handle<Code> code =
+      compiler.CompileLoadCallback(name, receiver, holder, callback);
+  PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-
-MaybeObject* StubCache::ComputeKeyedLoadArrayLength(String* name,
-                                                    JSArray* receiver) {
+Handle<Code> StubCache::ComputeKeyedLoadArrayLength(Handle<String> name,
+                                                    Handle<JSArray> receiver) {
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
-  ASSERT(receiver->IsJSObject());
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    KeyedLoadStubCompiler compiler;
-    { MaybeObject* maybe_code = compiler.CompileLoadArrayLength(name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  KeyedLoadStubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileLoadArrayLength(name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeKeyedLoadStringLength(String* name,
-                                                     String* receiver) {
+Handle<Code> StubCache::ComputeKeyedLoadStringLength(Handle<String> name,
+                                                     Handle<String> receiver) {
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
-  Map* map = receiver->map();
-  Object* code = map->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    KeyedLoadStubCompiler compiler;
-    { MaybeObject* maybe_code = compiler.CompileLoadStringLength(name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result = map->UpdateCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Map> map(receiver->map());
+  Handle<Object> probe(map->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  KeyedLoadStubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileLoadStringLength(name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code));
+  Map::UpdateCodeCache(map, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeKeyedLoadFunctionPrototype(
-    String* name,
-    JSFunction* receiver) {
+Handle<Code> StubCache::ComputeKeyedLoadFunctionPrototype(
+    Handle<String> name,
+    Handle<JSFunction> receiver) {
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    KeyedLoadStubCompiler compiler;
-    { MaybeObject* maybe_code = compiler.CompileLoadFunctionPrototype(name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  KeyedLoadStubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileLoadFunctionPrototype(name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeStoreField(String* name,
-                                          JSObject* receiver,
+Handle<Code> StubCache::ComputeStoreField(Handle<String> name,
+                                          Handle<JSObject> receiver,
                                           int field_index,
-                                          Map* transition,
+                                          Handle<Map> transition,
                                           StrictModeFlag strict_mode) {
-  PropertyType type = (transition == NULL) ? FIELD : MAP_TRANSITION;
+  PropertyType type = (transition.is_null()) ? FIELD : MAP_TRANSITION;
   Code::Flags flags = Code::ComputeMonomorphicFlags(
       Code::STORE_IC, type, strict_mode);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    StoreStubCompiler compiler(strict_mode);
-    { MaybeObject* maybe_code =
-          compiler.CompileStoreField(receiver, field_index, transition, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  StoreStubCompiler compiler(isolate_, strict_mode);
+  Handle<Code> code =
+      compiler.CompileStoreField(receiver, field_index, transition, name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeKeyedLoadOrStoreElement(
-    JSObject* receiver,
+Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
+    Handle<JSObject> receiver,
     KeyedIC::StubKind stub_kind,
     StrictModeFlag strict_mode) {
   Code::Flags flags =
@@ -505,189 +448,159 @@
                                      : Code::KEYED_STORE_IC,
           NORMAL,
           strict_mode);
-  String* name = NULL;
+  Handle<String> name;
   switch (stub_kind) {
     case KeyedIC::LOAD:
-      name = isolate()->heap()->KeyedLoadElementMonomorphic_symbol();
+      name = isolate()->factory()->KeyedLoadElementMonomorphic_symbol();
       break;
     case KeyedIC::STORE_NO_TRANSITION:
-      name = isolate()->heap()->KeyedStoreElementMonomorphic_symbol();
+      name = isolate()->factory()->KeyedStoreElementMonomorphic_symbol();
       break;
     default:
       UNREACHABLE();
       break;
   }
-  Object* maybe_code = receiver->map()->FindInCodeCache(name, flags);
-  if (!maybe_code->IsUndefined()) return Code::cast(maybe_code);
+  Handle<Map> receiver_map(receiver->map());
+  Handle<Object> probe(receiver_map->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
 
-  Map* receiver_map = receiver->map();
-  MaybeObject* maybe_new_code = NULL;
+  Handle<Code> code;
   switch (stub_kind) {
     case KeyedIC::LOAD: {
-      KeyedLoadStubCompiler compiler;
-      maybe_new_code = compiler.CompileLoadElement(receiver_map);
+      KeyedLoadStubCompiler compiler(isolate_);
+      code = compiler.CompileLoadElement(receiver_map);
       break;
     }
     case KeyedIC::STORE_NO_TRANSITION: {
-      KeyedStoreStubCompiler compiler(strict_mode);
-      maybe_new_code = compiler.CompileStoreElement(receiver_map);
+      KeyedStoreStubCompiler compiler(isolate_, strict_mode);
+      code = compiler.CompileStoreElement(receiver_map);
       break;
     }
     default:
       UNREACHABLE();
       break;
   }
-  Code* code = NULL;
-  if (!maybe_new_code->To(&code)) return maybe_new_code;
+
+  ASSERT(!code.is_null());
 
   if (stub_kind == KeyedIC::LOAD) {
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG,
-                            Code::cast(code), 0));
+    PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, 0));
   } else {
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::KEYED_STORE_IC_TAG,
-                            Code::cast(code), 0));
+    PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, *code, 0));
   }
-  ASSERT(code->IsCode());
-  Object* result;
-  { MaybeObject* maybe_result =
-        receiver->UpdateMapCodeCache(name, Code::cast(code));
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeStoreNormal(StrictModeFlag strict_mode) {
-  return isolate_->builtins()->builtin((strict_mode == kStrictMode)
-                            ? Builtins::kStoreIC_Normal_Strict
-                            : Builtins::kStoreIC_Normal);
+Handle<Code> StubCache::ComputeStoreNormal(StrictModeFlag strict_mode) {
+  return (strict_mode == kStrictMode)
+      ? isolate_->builtins()->Builtins::StoreIC_Normal_Strict()
+      : isolate_->builtins()->Builtins::StoreIC_Normal();
 }
 
 
-MaybeObject* StubCache::ComputeStoreGlobal(String* name,
-                                           GlobalObject* receiver,
-                                           JSGlobalPropertyCell* cell,
+Handle<Code> StubCache::ComputeStoreGlobal(Handle<String> name,
+                                           Handle<GlobalObject> receiver,
+                                           Handle<JSGlobalPropertyCell> cell,
                                            StrictModeFlag strict_mode) {
   Code::Flags flags = Code::ComputeMonomorphicFlags(
       Code::STORE_IC, NORMAL, strict_mode);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    StoreStubCompiler compiler(strict_mode);
-    { MaybeObject* maybe_code =
-          compiler.CompileStoreGlobal(receiver, cell, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  StoreStubCompiler compiler(isolate_, strict_mode);
+  Handle<Code> code = compiler.CompileStoreGlobal(receiver, cell, name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeStoreCallback(
-    String* name,
-    JSObject* receiver,
-    AccessorInfo* callback,
-    StrictModeFlag strict_mode) {
+Handle<Code> StubCache::ComputeStoreCallback(Handle<String> name,
+                                             Handle<JSObject> receiver,
+                                             Handle<AccessorInfo> callback,
+                                             StrictModeFlag strict_mode) {
   ASSERT(v8::ToCData<Address>(callback->setter()) != 0);
   Code::Flags flags = Code::ComputeMonomorphicFlags(
       Code::STORE_IC, CALLBACKS, strict_mode);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    StoreStubCompiler compiler(strict_mode);
-    { MaybeObject* maybe_code =
-          compiler.CompileStoreCallback(receiver, callback, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  StoreStubCompiler compiler(isolate_, strict_mode);
+  Handle<Code> code = compiler.CompileStoreCallback(receiver, callback, name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeStoreInterceptor(
-    String* name,
-    JSObject* receiver,
-    StrictModeFlag strict_mode) {
+Handle<Code> StubCache::ComputeStoreInterceptor(Handle<String> name,
+                                                Handle<JSObject> receiver,
+                                                StrictModeFlag strict_mode) {
   Code::Flags flags = Code::ComputeMonomorphicFlags(
       Code::STORE_IC, INTERCEPTOR, strict_mode);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    StoreStubCompiler compiler(strict_mode);
-    { MaybeObject* maybe_code =
-          compiler.CompileStoreInterceptor(receiver, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate_,
-            CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  StoreStubCompiler compiler(isolate_, strict_mode);
+  Handle<Code> code = compiler.CompileStoreInterceptor(receiver, name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
 
-
-MaybeObject* StubCache::ComputeKeyedStoreField(String* name,
-                                               JSObject* receiver,
+Handle<Code> StubCache::ComputeKeyedStoreField(Handle<String> name,
+                                               Handle<JSObject> receiver,
                                                int field_index,
-                                               Map* transition,
+                                               Handle<Map> transition,
                                                StrictModeFlag strict_mode) {
-  PropertyType type = (transition == NULL) ? FIELD : MAP_TRANSITION;
+  PropertyType type = (transition.is_null()) ? FIELD : MAP_TRANSITION;
   Code::Flags flags = Code::ComputeMonomorphicFlags(
       Code::KEYED_STORE_IC, type, strict_mode);
-  Object* code = receiver->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    KeyedStoreStubCompiler compiler(strict_mode);
-    { MaybeObject* maybe_code =
-          compiler.CompileStoreField(receiver, field_index, transition, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    PROFILE(isolate(),
-            CodeCreateEvent(Logger::KEYED_STORE_IC_TAG,
-                            Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          receiver->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  KeyedStoreStubCompiler compiler(isolate(), strict_mode);
+  Handle<Code> code =
+      compiler.CompileStoreField(receiver, field_index, transition, name);
+  PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(receiver, name, code);
   return code;
 }
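Note: every handlified StubCache::Compute* helper above follows the same probe-compile-install shape.  A minimal sketch of that shape, stripped of the flag computation and the PROFILE/GDBJIT bookkeeping; the helper name ComputeMapCachedStub is hypothetical, while the calls it makes are the ones used in this patch.

    // Illustrative sketch only (not part of the patch): the common shape of
    // the handlified StubCache::Compute* helpers above.
    Handle<Code> ComputeMapCachedStub(Handle<JSObject> receiver,
                                      Handle<String> name,
                                      Code::Flags flags,
                                      StrictModeFlag strict_mode) {
      // 1. Probe the receiver map's code cache; a hit is returned directly.
      Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
      if (probe->IsCode()) return Handle<Code>::cast(probe);

      // 2. Miss: compile a fresh stub with a handle-based stub compiler.
      StoreStubCompiler compiler(isolate_, strict_mode);
      Handle<Code> code = compiler.CompileStoreInterceptor(receiver, name);

      // 3. Install the new stub in the map's code cache for later probes.
      JSObject::UpdateMapCodeCache(receiver, name, code);
      return code;
    }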
 
+
 #define CALL_LOGGER_TAG(kind, type) \
     (kind == Code::CALL_IC ? Logger::type : Logger::KEYED_##type)
 
-MaybeObject* StubCache::ComputeCallConstant(int argc,
+Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
+                                                   Handle<JSObject> holder,
+                                                   Handle<JSFunction> function,
+                                                   Handle<String> name,
+                                                   CheckType check) {
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      (set_failure(NULL),
+       CompileCallConstant(*object, *holder, *function, *name, check)),
+      Code);
+}
+
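The Handle<Code> wrapper for CompileCallConstant above (and the matching CompileCallInterceptor and CompileCallGlobal wrappers later in this file) is a transitional shim: it clears the compiler's failure slot and runs the existing raw-pointer implementation under CALL_HEAP_FUNCTION, which retries the call after a GC when it reports an allocation failure and returns the result as a handle.  A rough sketch of what the shim boils down to, for orientation only; the real macro expansion lives in the handles machinery and differs in detail.

    // Rough, simplified expansion of the CALL_HEAP_FUNCTION-based wrapper
    // above (illustrative only; see the macro's actual definition).
    Handle<Code> CompileCallConstantHandlified(Handle<Object> object,
                                               Handle<JSObject> holder,
                                               Handle<JSFunction> function,
                                               Handle<String> name,
                                               CheckType check) {
      for (;;) {
        set_failure(NULL);  // forget any failure left by a previous attempt
        MaybeObject* maybe =
            CompileCallConstant(*object, *holder, *function, *name, check);
        Object* result;
        if (maybe->ToObject(&result)) {
          return Handle<Code>(Code::cast(result));  // success: handlify
        }
        if (!maybe->IsRetryAfterGC()) break;  // a genuine failure, give up
        // Allocation failed: collect garbage and retry the compilation.
        isolate()->heap()->CollectGarbage(
            Failure::cast(maybe)->allocation_space());
      }
      return Handle<Code>::null();
    }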
+
+Handle<Code> StubCache::ComputeCallConstant(int argc,
                                             Code::Kind kind,
-                                            Code::ExtraICState extra_ic_state,
-                                            String* name,
-                                            Object* object,
-                                            JSObject* holder,
-                                            JSFunction* function) {
+                                            Code::ExtraICState extra_state,
+                                            Handle<String> name,
+                                            Handle<Object> object,
+                                            Handle<JSObject> holder,
+                                            Handle<JSFunction> function) {
   // Compute the check type and the map.
   InlineCacheHolderFlag cache_holder =
-      IC::GetCodeCacheForObject(object, holder);
-  JSObject* map_holder = IC::GetCodeCacheHolder(object, cache_holder);
+      IC::GetCodeCacheForObject(*object, *holder);
+  Handle<JSObject> map_holder(IC::GetCodeCacheHolder(*object, cache_holder));
 
   // Compute check type based on receiver/holder.
   CheckType check = RECEIVER_MAP_CHECK;
@@ -699,51 +612,36 @@
     check = BOOLEAN_CHECK;
   }
 
-  Code::Flags flags = Code::ComputeMonomorphicFlags(kind,
-                                                    CONSTANT_FUNCTION,
-                                                    extra_ic_state,
-                                                    cache_holder,
-                                                    argc);
-  Object* code = map_holder->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    // If the function hasn't been compiled yet, we cannot do it now
-    // because it may cause GC. To avoid this issue, we return an
-    // internal error which will make sure we do not update any
-    // caches.
-    if (!function->is_compiled()) return Failure::InternalError();
-    // Compile the stub - only create stubs for fully compiled functions.
-    CallStubCompiler compiler(argc, kind, extra_ic_state, cache_holder);
-    { MaybeObject* maybe_code =
-          compiler.CompileCallConstant(object, holder, function, name, check);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    Code::cast(code)->set_check_type(check);
-    ASSERT_EQ(flags, Code::cast(code)->flags());
-    PROFILE(isolate_,
-            CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
-                            Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          map_holder->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Code::Flags flags =
+      Code::ComputeMonomorphicFlags(kind, CONSTANT_FUNCTION, extra_state,
+                                    cache_holder, argc);
+  Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  CallStubCompiler compiler(isolate_, argc, kind, extra_state, cache_holder);
+  Handle<Code> code =
+      compiler.CompileCallConstant(object, holder, function, name, check);
+  code->set_check_type(check);
+  ASSERT_EQ(flags, code->flags());
+  PROFILE(isolate_,
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::CALL_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(map_holder, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeCallField(int argc,
+Handle<Code> StubCache::ComputeCallField(int argc,
                                          Code::Kind kind,
-                                         Code::ExtraICState extra_ic_state,
-                                         String* name,
-                                         Object* object,
-                                         JSObject* holder,
+                                         Code::ExtraICState extra_state,
+                                         Handle<String> name,
+                                         Handle<Object> object,
+                                         Handle<JSObject> holder,
                                          int index) {
   // Compute the check type and the map.
   InlineCacheHolderFlag cache_holder =
-      IC::GetCodeCacheForObject(object, holder);
-  JSObject* map_holder = IC::GetCodeCacheHolder(object, cache_holder);
+      IC::GetCodeCacheForObject(*object, *holder);
+  Handle<JSObject> map_holder(IC::GetCodeCacheHolder(*object, cache_holder));
 
   // TODO(1233596): We cannot do receiver map check for non-JS objects
   // because they may be represented as immediates without a
@@ -752,47 +650,45 @@
     object = holder;
   }
 
-  Code::Flags flags = Code::ComputeMonomorphicFlags(kind,
-                                                    FIELD,
-                                                    extra_ic_state,
-                                                    cache_holder,
-                                                    argc);
-  Object* code = map_holder->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    CallStubCompiler compiler(argc, kind, extra_ic_state, cache_holder);
-    { MaybeObject* maybe_code =
-          compiler.CompileCallField(JSObject::cast(object),
-                                    holder,
-                                    index,
-                                    name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    ASSERT_EQ(flags, Code::cast(code)->flags());
-    PROFILE(isolate_,
-            CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
-                            Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          map_holder->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Code::Flags flags =
+      Code::ComputeMonomorphicFlags(kind, FIELD, extra_state,
+                                    cache_holder, argc);
+  Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  CallStubCompiler compiler(isolate_, argc, kind, extra_state, cache_holder);
+  Handle<Code> code =
+      compiler.CompileCallField(Handle<JSObject>::cast(object),
+                                holder, index, name);
+  ASSERT_EQ(flags, code->flags());
+  PROFILE(isolate_,
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::CALL_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(map_holder, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeCallInterceptor(
-    int argc,
-    Code::Kind kind,
-    Code::ExtraICState extra_ic_state,
-    String* name,
-    Object* object,
-    JSObject* holder) {
+Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
+                                                      Handle<JSObject> holder,
+                                                      Handle<String> name) {
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      (set_failure(NULL), CompileCallInterceptor(*object, *holder, *name)),
+      Code);
+}
+
+
+Handle<Code> StubCache::ComputeCallInterceptor(int argc,
+                                               Code::Kind kind,
+                                               Code::ExtraICState extra_state,
+                                               Handle<String> name,
+                                               Handle<Object> object,
+                                               Handle<JSObject> holder) {
   // Compute the check type and the map.
   InlineCacheHolderFlag cache_holder =
-      IC::GetCodeCacheForObject(object, holder);
-  JSObject* map_holder = IC::GetCodeCacheHolder(object, cache_holder);
+      IC::GetCodeCacheForObject(*object, *holder);
+  Handle<JSObject> map_holder(IC::GetCodeCacheHolder(*object, cache_holder));
 
   // TODO(1233596): We cannot do receiver map check for non-JS objects
   // because they may be represented as immediates without a
@@ -801,134 +697,75 @@
     object = holder;
   }
 
-  Code::Flags flags = Code::ComputeMonomorphicFlags(kind,
-                                                    INTERCEPTOR,
-                                                    extra_ic_state,
-                                                    cache_holder,
-                                                    argc);
-  Object* code = map_holder->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    CallStubCompiler compiler(argc, kind, extra_ic_state, cache_holder);
-    { MaybeObject* maybe_code =
-          compiler.CompileCallInterceptor(JSObject::cast(object), holder, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    ASSERT_EQ(flags, Code::cast(code)->flags());
-    PROFILE(isolate(),
-            CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
-                            Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          map_holder->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+  Code::Flags flags =
+      Code::ComputeMonomorphicFlags(kind, INTERCEPTOR, extra_state,
+                                    cache_holder, argc);
+  Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  CallStubCompiler compiler(isolate(), argc, kind, extra_state, cache_holder);
+  Handle<Code> code =
+      compiler.CompileCallInterceptor(Handle<JSObject>::cast(object),
+                                      holder, name);
+  ASSERT_EQ(flags, code->flags());
+  PROFILE(isolate(),
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::CALL_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(map_holder, name, code);
   return code;
 }
 
 
-MaybeObject* StubCache::ComputeCallNormal(int argc,
-                                          Code::Kind kind,
-                                          Code::ExtraICState extra_ic_state,
-                                          String* name,
-                                          JSObject* receiver) {
-  Object* code;
-  { MaybeObject* maybe_code = ComputeCallNormal(argc, kind, extra_ic_state);
-    if (!maybe_code->ToObject(&code)) return maybe_code;
-  }
-  return code;
+Handle<Code> CallStubCompiler::CompileCallGlobal(
+    Handle<JSObject> object,
+    Handle<GlobalObject> holder,
+    Handle<JSGlobalPropertyCell> cell,
+    Handle<JSFunction> function,
+    Handle<String> name) {
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      (set_failure(NULL),
+       CompileCallGlobal(*object, *holder, *cell, *function, *name)),
+      Code);
 }
 
 
-MaybeObject* StubCache::ComputeCallGlobal(int argc,
+Handle<Code> StubCache::ComputeCallGlobal(int argc,
                                           Code::Kind kind,
-                                          Code::ExtraICState extra_ic_state,
-                                          String* name,
-                                          JSObject* receiver,
-                                          GlobalObject* holder,
-                                          JSGlobalPropertyCell* cell,
-                                          JSFunction* function) {
+                                          Code::ExtraICState extra_state,
+                                          Handle<String> name,
+                                          Handle<JSObject> receiver,
+                                          Handle<GlobalObject> holder,
+                                          Handle<JSGlobalPropertyCell> cell,
+                                          Handle<JSFunction> function) {
   InlineCacheHolderFlag cache_holder =
-      IC::GetCodeCacheForObject(receiver, holder);
-  JSObject* map_holder = IC::GetCodeCacheHolder(receiver, cache_holder);
-  Code::Flags flags = Code::ComputeMonomorphicFlags(kind,
-                                                    NORMAL,
-                                                    extra_ic_state,
-                                                    cache_holder,
-                                                    argc);
-  Object* code = map_holder->map()->FindInCodeCache(name, flags);
-  if (code->IsUndefined()) {
-    // If the function hasn't been compiled yet, we cannot do it now
-    // because it may cause GC. To avoid this issue, we return an
-    // internal error which will make sure we do not update any
-    // caches.
-    if (!function->is_compiled()) return Failure::InternalError();
-    CallStubCompiler compiler(argc, kind, extra_ic_state, cache_holder);
-    { MaybeObject* maybe_code =
-          compiler.CompileCallGlobal(receiver, holder, cell, function, name);
-      if (!maybe_code->ToObject(&code)) return maybe_code;
-    }
-    ASSERT_EQ(flags, Code::cast(code)->flags());
-    PROFILE(isolate(),
-            CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
-                            Code::cast(code), name));
-    GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code)));
-    Object* result;
-    { MaybeObject* maybe_result =
-          map_holder->UpdateMapCodeCache(name, Code::cast(code));
-      if (!maybe_result->ToObject(&result)) return maybe_result;
-    }
-  }
+      IC::GetCodeCacheForObject(*receiver, *holder);
+  Handle<JSObject> map_holder(IC::GetCodeCacheHolder(*receiver, cache_holder));
+  Code::Flags flags =
+      Code::ComputeMonomorphicFlags(kind, NORMAL, extra_state,
+                                    cache_holder, argc);
+  Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags));
+  if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+  CallStubCompiler compiler(isolate(), argc, kind, extra_state, cache_holder);
+  Handle<Code> code =
+      compiler.CompileCallGlobal(receiver, holder, cell, function, name);
+  ASSERT_EQ(flags, code->flags());
+  PROFILE(isolate(),
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::CALL_IC, *name, *code));
+  JSObject::UpdateMapCodeCache(map_holder, name, code);
   return code;
 }
 
 
-static Object* GetProbeValue(Isolate* isolate, Code::Flags flags) {
-  // Use raw_unchecked... so we don't get assert failures during GC.
-  NumberDictionary* dictionary =
-      isolate->heap()->raw_unchecked_non_monomorphic_cache();
-  int entry = dictionary->FindEntry(isolate, flags);
-  if (entry != -1) return dictionary->ValueAt(entry);
-  return isolate->heap()->raw_unchecked_undefined_value();
-}
-
-
-MUST_USE_RESULT static MaybeObject* ProbeCache(Isolate* isolate,
-                                               Code::Flags flags) {
-  Heap* heap = isolate->heap();
-  Object* probe = GetProbeValue(isolate, flags);
-  if (probe != heap->undefined_value()) return probe;
-  // Seed the cache with an undefined value to make sure that any
-  // generated code object can always be inserted into the cache
-  // without causing  allocation failures.
-  Object* result;
-  { MaybeObject* maybe_result =
-        heap->non_monomorphic_cache()->AtNumberPut(flags,
-                                                   heap->undefined_value());
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
-  heap->public_set_non_monomorphic_cache(NumberDictionary::cast(result));
-  return probe;
-}
-
-
-static MaybeObject* FillCache(Isolate* isolate, MaybeObject* maybe_code) {
-  Object* code;
-  if (maybe_code->ToObject(&code)) {
-    if (code->IsCode()) {
-      Heap* heap = isolate->heap();
-      int entry = heap->non_monomorphic_cache()->FindEntry(
-          Code::cast(code)->flags());
-      // The entry must be present see comment in ProbeCache.
-      ASSERT(entry != -1);
-      ASSERT(heap->non_monomorphic_cache()->ValueAt(entry) ==
-             heap->undefined_value());
-      heap->non_monomorphic_cache()->ValueAtPut(entry, code);
-      CHECK(GetProbeValue(isolate, Code::cast(code)->flags()) == code);
-    }
-  }
-  return maybe_code;
+static void FillCache(Isolate* isolate, Handle<Code> code) {
+  Handle<NumberDictionary> dictionary =
+      NumberDictionarySet(isolate->factory()->non_monomorphic_cache(),
+                          code->flags(),
+                          code,
+                          PropertyDetails(NONE, NORMAL));
+  isolate->heap()->public_set_non_monomorphic_cache(*dictionary);
 }
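The call initialize, pre-monomorphic, normal, megamorphic, miss, and debug-break stubs computed below are shared per code flags rather than per receiver map, so they are cached in the isolate-wide non_monomorphic_cache (a NumberDictionary keyed by Code::Flags) instead of a map's code cache.  A minimal sketch of the lookup-or-compile-and-fill shape those Compute* functions share; ComputeSharedStub is a hypothetical name and CompileCallMiss stands in for whichever Compile* entry point applies.

    // Illustrative sketch only: shared (non-monomorphic) stubs are cached by
    // their Code::Flags in the isolate-wide non_monomorphic_cache.
    Handle<Code> ComputeSharedStub(Code::Flags flags) {
      Handle<NumberDictionary> cache =
          isolate_->factory()->non_monomorphic_cache();
      int entry = cache->FindEntry(isolate_, flags);
      if (entry != -1) {
        // Hit: the value stored under these flags is always a Code object.
        return Handle<Code>(Code::cast(cache->ValueAt(entry)));
      }
      StubCompiler compiler(isolate_);
      Handle<Code> code = compiler.CompileCallMiss(flags);
      FillCache(isolate_, code);  // insert under code->flags() and republish
      return code;
    }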
 
 
@@ -938,188 +775,198 @@
   Code::ExtraICState extra_state =
       CallICBase::StringStubState::encode(DEFAULT_STRING_STUB) |
       CallICBase::Contextual::encode(mode == RelocInfo::CODE_TARGET_CONTEXT);
-  Code::Flags flags = Code::ComputeFlags(kind,
-                                         UNINITIALIZED,
-                                         extra_state,
-                                         NORMAL,
-                                         argc);
-  Object* result = ProbeCache(isolate(), flags)->ToObjectUnchecked();
-  ASSERT(result != heap()->undefined_value());
+  Code::Flags flags =
+      Code::ComputeFlags(kind, UNINITIALIZED, extra_state, NORMAL, argc);
+
+  // Use raw_unchecked... so we don't get assert failures during GC.
+  NumberDictionary* dictionary =
+      isolate()->heap()->raw_unchecked_non_monomorphic_cache();
+  int entry = dictionary->FindEntry(isolate(), flags);
+  ASSERT(entry != -1);
+  Object* code = dictionary->ValueAt(entry);
   // This might be called during the marking phase of the collector,
   // hence the unchecked cast.
-  return reinterpret_cast<Code*>(result);
+  return reinterpret_cast<Code*>(code);
 }
 
 
-MaybeObject* StubCache::ComputeCallInitialize(int argc,
+Handle<Code> StubCache::ComputeCallInitialize(int argc,
                                               RelocInfo::Mode mode,
                                               Code::Kind kind) {
   Code::ExtraICState extra_state =
       CallICBase::StringStubState::encode(DEFAULT_STRING_STUB) |
       CallICBase::Contextual::encode(mode == RelocInfo::CODE_TARGET_CONTEXT);
-  Code::Flags flags = Code::ComputeFlags(kind,
-                                         UNINITIALIZED,
-                                         extra_state,
-                                         NORMAL,
-                                         argc);
-  Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
-    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
-  }
-  if (!probe->IsUndefined()) return probe;
-  StubCompiler compiler;
-  return FillCache(isolate_, compiler.CompileCallInitialize(flags));
+  Code::Flags flags =
+      Code::ComputeFlags(kind, UNINITIALIZED, extra_state, NORMAL, argc);
+  Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+  int entry = cache->FindEntry(isolate_, flags);
+  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
+
+  StubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileCallInitialize(flags);
+  FillCache(isolate_, code);
+  return code;
 }
 
 
-Handle<Code> StubCache::ComputeCallInitialize(int argc,
-                                              RelocInfo::Mode mode) {
-  CALL_HEAP_FUNCTION(isolate_,
-                     ComputeCallInitialize(argc, mode, Code::CALL_IC),
-                     Code);
+Handle<Code> StubCache::ComputeCallInitialize(int argc, RelocInfo::Mode mode) {
+  return ComputeCallInitialize(argc, mode, Code::CALL_IC);
 }
 
 
 Handle<Code> StubCache::ComputeKeyedCallInitialize(int argc) {
-  CALL_HEAP_FUNCTION(
-      isolate_,
-      ComputeCallInitialize(argc, RelocInfo::CODE_TARGET, Code::KEYED_CALL_IC),
-      Code);
+  return ComputeCallInitialize(argc, RelocInfo::CODE_TARGET,
+                               Code::KEYED_CALL_IC);
 }
 
 
-MaybeObject* StubCache::ComputeCallPreMonomorphic(
+Handle<Code> StubCache::ComputeCallPreMonomorphic(
     int argc,
     Code::Kind kind,
-    Code::ExtraICState extra_ic_state) {
-  Code::Flags flags = Code::ComputeFlags(kind,
-                                         PREMONOMORPHIC,
-                                         extra_ic_state,
-                                         NORMAL,
-                                         argc);
-  Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
-    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
-  }
-  if (!probe->IsUndefined()) return probe;
-  StubCompiler compiler;
-  return FillCache(isolate_, compiler.CompileCallPreMonomorphic(flags));
+    Code::ExtraICState extra_state) {
+  Code::Flags flags =
+      Code::ComputeFlags(kind, PREMONOMORPHIC, extra_state, NORMAL, argc);
+  Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+  int entry = cache->FindEntry(isolate_, flags);
+  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
+
+  StubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileCallPreMonomorphic(flags);
+  FillCache(isolate_, code);
+  return code;
 }
 
 
-MaybeObject* StubCache::ComputeCallNormal(int argc,
+Handle<Code> StubCache::ComputeCallNormal(int argc,
                                           Code::Kind kind,
-                                          Code::ExtraICState extra_ic_state) {
-  Code::Flags flags = Code::ComputeFlags(kind,
-                                         MONOMORPHIC,
-                                         extra_ic_state,
-                                         NORMAL,
-                                         argc);
-  Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
-    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
-  }
-  if (!probe->IsUndefined()) return probe;
-  StubCompiler compiler;
-  return FillCache(isolate_, compiler.CompileCallNormal(flags));
+                                          Code::ExtraICState extra_state) {
+  Code::Flags flags =
+      Code::ComputeFlags(kind, MONOMORPHIC, extra_state, NORMAL, argc);
+  Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+  int entry = cache->FindEntry(isolate_, flags);
+  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
+
+  StubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileCallNormal(flags);
+  FillCache(isolate_, code);
+  return code;
 }
 
 
-MaybeObject* StubCache::ComputeCallArguments(int argc, Code::Kind kind) {
+Handle<Code> StubCache::ComputeCallArguments(int argc, Code::Kind kind) {
   ASSERT(kind == Code::KEYED_CALL_IC);
-  Code::Flags flags = Code::ComputeFlags(kind,
-                                         MEGAMORPHIC,
-                                         Code::kNoExtraICState,
-                                         NORMAL,
-                                         argc);
-  Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
-    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
-  }
-  if (!probe->IsUndefined()) return probe;
-  StubCompiler compiler;
-  return FillCache(isolate_, compiler.CompileCallArguments(flags));
+  Code::Flags flags =
+      Code::ComputeFlags(kind, MEGAMORPHIC, Code::kNoExtraICState,
+                         NORMAL, argc);
+  Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+  int entry = cache->FindEntry(isolate_, flags);
+  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
+
+  StubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileCallArguments(flags);
+  FillCache(isolate_, code);
+  return code;
 }
 
 
-MaybeObject* StubCache::ComputeCallMegamorphic(
+Handle<Code> StubCache::ComputeCallMegamorphic(
     int argc,
     Code::Kind kind,
-    Code::ExtraICState extra_ic_state) {
-  Code::Flags flags = Code::ComputeFlags(kind,
-                                         MEGAMORPHIC,
-                                         extra_ic_state,
-                                         NORMAL,
-                                         argc);
-  Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
-    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
-  }
-  if (!probe->IsUndefined()) return probe;
-  StubCompiler compiler;
-  return FillCache(isolate_, compiler.CompileCallMegamorphic(flags));
+    Code::ExtraICState extra_state) {
+  Code::Flags flags =
+      Code::ComputeFlags(kind, MEGAMORPHIC, extra_state,
+                         NORMAL, argc);
+  Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+  int entry = cache->FindEntry(isolate_, flags);
+  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
+
+  StubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileCallMegamorphic(flags);
+  FillCache(isolate_, code);
+  return code;
 }
 
 
-MaybeObject* StubCache::ComputeCallMiss(int argc,
+Handle<Code> StubCache::ComputeCallMiss(int argc,
                                         Code::Kind kind,
-                                        Code::ExtraICState extra_ic_state) {
+                                        Code::ExtraICState extra_state) {
   // MONOMORPHIC_PROTOTYPE_FAILURE state is used to make sure that miss stubs
   // and monomorphic stubs are not mixed up together in the stub cache.
-  Code::Flags flags = Code::ComputeFlags(kind,
-                                         MONOMORPHIC_PROTOTYPE_FAILURE,
-                                         extra_ic_state,
-                                         NORMAL,
-                                         argc,
-                                         OWN_MAP);
-  Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
-    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
-  }
-  if (!probe->IsUndefined()) return probe;
-  StubCompiler compiler;
-  return FillCache(isolate_, compiler.CompileCallMiss(flags));
+  Code::Flags flags =
+      Code::ComputeFlags(kind, MONOMORPHIC_PROTOTYPE_FAILURE, extra_state,
+                         NORMAL, argc, OWN_MAP);
+  Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+  int entry = cache->FindEntry(isolate_, flags);
+  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
+
+  StubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileCallMiss(flags);
+  FillCache(isolate_, code);
+  return code;
+}
+
+
+// The CallStubCompiler needs a version of ComputeCallMiss that does not
+// perform GC.  This function is temporary, because the stub cache uses
+// handles but the stub compiler does not yet.
+MaybeObject* StubCache::TryComputeCallMiss(int argc,
+                                           Code::Kind kind,
+                                           Code::ExtraICState extra_state) {
+  Code::Flags flags =
+      Code::ComputeFlags(kind, MONOMORPHIC_PROTOTYPE_FAILURE, extra_state,
+                         NORMAL, argc, OWN_MAP);
+  NumberDictionary* cache = isolate_->heap()->non_monomorphic_cache();
+  int entry = cache->FindEntry(isolate_, flags);
+  if (entry != -1) return cache->ValueAt(entry);
+
+  StubCompiler compiler(isolate_);
+  Code* code = NULL;
+  MaybeObject* maybe_code = compiler.TryCompileCallMiss(flags);
+  if (!maybe_code->To(&code)) return maybe_code;
+
+  NumberDictionary* new_cache = NULL;
+  MaybeObject* maybe_new_cache = cache->AtNumberPut(flags, code);
+  if (!maybe_new_cache->To(&new_cache)) return maybe_new_cache;
+  isolate_->heap()->public_set_non_monomorphic_cache(new_cache);
+
+  return code;
 }
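TryComputeCallMiss above keeps the old MaybeObject protocol for the one caller, the not-yet-handlified CallStubCompiler, that cannot tolerate GC here: every allocating step may return a retry-after-GC failure, which has to be propagated to the caller rather than dereferenced.  A short sketch of that propagation idiom, the same one used throughout the code this patch removes; the function name is hypothetical.

    // Illustrative sketch only: the MaybeObject propagation idiom preserved
    // by the temporary Try* entry points.
    MaybeObject* TryComputeExample(Isolate* isolate, Code::Flags flags) {
      StubCompiler compiler(isolate);
      Code* code = NULL;
      MaybeObject* maybe_code = compiler.TryCompileCallMiss(flags);
      // On allocation failure, hand the failure straight back; no GC may run
      // while raw pointers such as 'code' are live in this function.
      if (!maybe_code->To(&code)) return maybe_code;
      return code;
    }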
 
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-MaybeObject* StubCache::ComputeCallDebugBreak(
-    int argc,
-    Code::Kind kind) {
+Handle<Code> StubCache::ComputeCallDebugBreak(int argc,
+                                              Code::Kind kind) {
   // Extra IC state is irrelevant for debug break ICs. They jump to
   // the actual call ic to carry out the work.
-  Code::Flags flags = Code::ComputeFlags(kind,
-                                         DEBUG_BREAK,
-                                         Code::kNoExtraICState,
-                                         NORMAL,
-                                         argc);
-  Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
-    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
-  }
-  if (!probe->IsUndefined()) return probe;
-  StubCompiler compiler;
-  return FillCache(isolate_, compiler.CompileCallDebugBreak(flags));
+  Code::Flags flags =
+      Code::ComputeFlags(kind, DEBUG_BREAK, Code::kNoExtraICState,
+                         NORMAL, argc);
+  Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+  int entry = cache->FindEntry(isolate_, flags);
+  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
+
+  StubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileCallDebugBreak(flags);
+  FillCache(isolate_, code);
+  return code;
 }
 
 
-MaybeObject* StubCache::ComputeCallDebugPrepareStepIn(
-    int argc,
-    Code::Kind kind) {
+Handle<Code> StubCache::ComputeCallDebugPrepareStepIn(int argc,
+                                                      Code::Kind kind) {
   // Extra IC state is irrelevant for debug break ICs. They jump to
   // the actual call ic to carry out the work.
-  Code::Flags flags = Code::ComputeFlags(kind,
-                                         DEBUG_PREPARE_STEP_IN,
-                                         Code::kNoExtraICState,
-                                         NORMAL,
-                                         argc);
-  Object* probe;
-  { MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
-    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
-  }
-  if (!probe->IsUndefined()) return probe;
-  StubCompiler compiler;
-  return FillCache(isolate_, compiler.CompileCallDebugPrepareStepIn(flags));
+  Code::Flags flags =
+      Code::ComputeFlags(kind, DEBUG_PREPARE_STEP_IN, Code::kNoExtraICState,
+                         NORMAL, argc);
+  Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+  int entry = cache->FindEntry(isolate_, flags);
+  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
+
+  StubCompiler compiler(isolate_);
+  Handle<Code> code = compiler.CompileCallDebugPrepareStepIn(flags);
+  FillCache(isolate_, code);
+  return code;
 }
 #endif
 
@@ -1384,62 +1231,47 @@
 }
 
 
-MaybeObject* StubCompiler::CompileCallInitialize(Code::Flags flags) {
-  HandleScope scope(isolate());
+Handle<Code> StubCompiler::CompileCallInitialize(Code::Flags flags) {
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
-  Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags);
+  Code::ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags);
   if (kind == Code::CALL_IC) {
-    CallIC::GenerateInitialize(masm(), argc, extra_ic_state);
+    CallIC::GenerateInitialize(masm(), argc, extra_state);
   } else {
     KeyedCallIC::GenerateInitialize(masm(), argc);
   }
-  Object* result;
-  { MaybeObject* maybe_result =
-        GetCodeWithFlags(flags, "CompileCallInitialize");
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  Handle<Code> code = GetCodeWithFlags(flags, "CompileCallInitialize");
   isolate()->counters()->call_initialize_stubs()->Increment();
-  Code* code = Code::cast(result);
-  USE(code);
   PROFILE(isolate(),
           CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_INITIALIZE_TAG),
-                          code, code->arguments_count()));
-  GDBJIT(AddCode(GDBJITInterface::CALL_INITIALIZE, Code::cast(code)));
-  return result;
+                          *code, code->arguments_count()));
+  GDBJIT(AddCode(GDBJITInterface::CALL_INITIALIZE, *code));
+  return code;
 }
 
 
-MaybeObject* StubCompiler::CompileCallPreMonomorphic(Code::Flags flags) {
-  HandleScope scope(isolate());
+Handle<Code> StubCompiler::CompileCallPreMonomorphic(Code::Flags flags) {
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   // The code of the PreMonomorphic stub is the same as the code
   // of the Initialized stub.  They just differ on the code object flags.
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
-  Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags);
+  Code::ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags);
   if (kind == Code::CALL_IC) {
-    CallIC::GenerateInitialize(masm(), argc, extra_ic_state);
+    CallIC::GenerateInitialize(masm(), argc, extra_state);
   } else {
     KeyedCallIC::GenerateInitialize(masm(), argc);
   }
-  Object* result;
-  { MaybeObject* maybe_result =
-        GetCodeWithFlags(flags, "CompileCallPreMonomorphic");
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  Handle<Code> code = GetCodeWithFlags(flags, "CompileCallPreMonomorphic");
   isolate()->counters()->call_premonomorphic_stubs()->Increment();
-  Code* code = Code::cast(result);
-  USE(code);
   PROFILE(isolate(),
           CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_PRE_MONOMORPHIC_TAG),
-                          code, code->arguments_count()));
-  GDBJIT(AddCode(GDBJITInterface::CALL_PRE_MONOMORPHIC, Code::cast(code)));
-  return result;
+                          *code, code->arguments_count()));
+  GDBJIT(AddCode(GDBJITInterface::CALL_PRE_MONOMORPHIC, *code));
+  return code;
 }
 
 
-MaybeObject* StubCompiler::CompileCallNormal(Code::Flags flags) {
-  HandleScope scope(isolate());
+Handle<Code> StubCompiler::CompileCallNormal(Code::Flags flags) {
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
   if (kind == Code::CALL_IC) {
@@ -1450,79 +1282,81 @@
   } else {
     KeyedCallIC::GenerateNormal(masm(), argc);
   }
-  Object* result;
-  { MaybeObject* maybe_result = GetCodeWithFlags(flags, "CompileCallNormal");
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  Handle<Code> code = GetCodeWithFlags(flags, "CompileCallNormal");
   isolate()->counters()->call_normal_stubs()->Increment();
-  Code* code = Code::cast(result);
-  USE(code);
   PROFILE(isolate(),
           CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_NORMAL_TAG),
-                          code, code->arguments_count()));
-  GDBJIT(AddCode(GDBJITInterface::CALL_NORMAL, Code::cast(code)));
-  return result;
+                          *code, code->arguments_count()));
+  GDBJIT(AddCode(GDBJITInterface::CALL_NORMAL, *code));
+  return code;
 }
 
 
-MaybeObject* StubCompiler::CompileCallMegamorphic(Code::Flags flags) {
-  HandleScope scope(isolate());
+Handle<Code> StubCompiler::CompileCallMegamorphic(Code::Flags flags) {
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
-  Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags);
+  Code::ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags);
   if (kind == Code::CALL_IC) {
-    CallIC::GenerateMegamorphic(masm(), argc, extra_ic_state);
+    CallIC::GenerateMegamorphic(masm(), argc, extra_state);
   } else {
     KeyedCallIC::GenerateMegamorphic(masm(), argc);
   }
-  Object* result;
-  { MaybeObject* maybe_result =
-        GetCodeWithFlags(flags, "CompileCallMegamorphic");
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  Handle<Code> code = GetCodeWithFlags(flags, "CompileCallMegamorphic");
   isolate()->counters()->call_megamorphic_stubs()->Increment();
-  Code* code = Code::cast(result);
-  USE(code);
   PROFILE(isolate(),
           CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MEGAMORPHIC_TAG),
-                          code, code->arguments_count()));
-  GDBJIT(AddCode(GDBJITInterface::CALL_MEGAMORPHIC, Code::cast(code)));
-  return result;
+                          *code, code->arguments_count()));
+  GDBJIT(AddCode(GDBJITInterface::CALL_MEGAMORPHIC, *code));
+  return code;
 }
 
 
-MaybeObject* StubCompiler::CompileCallArguments(Code::Flags flags) {
-  HandleScope scope(isolate());
+Handle<Code> StubCompiler::CompileCallArguments(Code::Flags flags) {
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   KeyedCallIC::GenerateNonStrictArguments(masm(), argc);
-  Code::Kind kind = Code::ExtractKindFromFlags(flags);
-  Object* result;
-  { MaybeObject* maybe_result =
-        GetCodeWithFlags(flags, "CompileCallArguments");
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
-  Code* code = Code::cast(result);
-  USE(code);
+  Handle<Code> code = GetCodeWithFlags(flags, "CompileCallArguments");
   PROFILE(isolate(),
-          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MEGAMORPHIC_TAG),
-                          code, code->arguments_count()));
-  GDBJIT(AddCode(GDBJITInterface::CALL_MEGAMORPHIC, Code::cast(code)));
-  return result;
+          CodeCreateEvent(CALL_LOGGER_TAG(Code::ExtractKindFromFlags(flags),
+                                          CALL_MEGAMORPHIC_TAG),
+                          *code, code->arguments_count()));
+  GDBJIT(AddCode(GDBJITInterface::CALL_MEGAMORPHIC, *code));
+  return code;
 }
 
 
-MaybeObject* StubCompiler::CompileCallMiss(Code::Flags flags) {
+Handle<Code> StubCompiler::CompileCallMiss(Code::Flags flags) {
+  int argc = Code::ExtractArgumentsCountFromFlags(flags);
+  Code::Kind kind = Code::ExtractKindFromFlags(flags);
+  Code::ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags);
+  if (kind == Code::CALL_IC) {
+    CallIC::GenerateMiss(masm(), argc, extra_state);
+  } else {
+    KeyedCallIC::GenerateMiss(masm(), argc);
+  }
+  Handle<Code> code = GetCodeWithFlags(flags, "CompileCallMiss");
+  isolate()->counters()->call_megamorphic_stubs()->Increment();
+  PROFILE(isolate(),
+          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MISS_TAG),
+                          *code, code->arguments_count()));
+  GDBJIT(AddCode(GDBJITInterface::CALL_MISS, *code));
+  return code;
+}
+
+
+// TODO(kmillikin): This annoying raw pointer implementation should be
+// eliminated when the stub compiler no longer needs it.
+MaybeObject* StubCompiler::TryCompileCallMiss(Code::Flags flags) {
   HandleScope scope(isolate());
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
-  Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags);
+  Code::ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags);
   if (kind == Code::CALL_IC) {
-    CallIC::GenerateMiss(masm(), argc, extra_ic_state);
+    CallIC::GenerateMiss(masm(), argc, extra_state);
   } else {
     KeyedCallIC::GenerateMiss(masm(), argc);
   }
   Object* result;
-  { MaybeObject* maybe_result = GetCodeWithFlags(flags, "CompileCallMiss");
+  { MaybeObject* maybe_result = TryGetCodeWithFlags(flags, "CompileCallMiss");
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   isolate()->counters()->call_megamorphic_stubs()->Increment();
@@ -1537,29 +1371,20 @@
 
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-MaybeObject* StubCompiler::CompileCallDebugBreak(Code::Flags flags) {
-  HandleScope scope(isolate());
+Handle<Code> StubCompiler::CompileCallDebugBreak(Code::Flags flags) {
   Debug::GenerateCallICDebugBreak(masm());
-  Object* result;
-  { MaybeObject* maybe_result =
-        GetCodeWithFlags(flags, "CompileCallDebugBreak");
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
-  Code* code = Code::cast(result);
-  USE(code);
-  Code::Kind kind = Code::ExtractKindFromFlags(flags);
-  USE(kind);
+  Handle<Code> code = GetCodeWithFlags(flags, "CompileCallDebugBreak");
   PROFILE(isolate(),
-          CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_DEBUG_BREAK_TAG),
-                          code, code->arguments_count()));
-  return result;
+          CodeCreateEvent(CALL_LOGGER_TAG(Code::ExtractKindFromFlags(flags),
+                                          CALL_DEBUG_BREAK_TAG),
+                          *code, code->arguments_count()));
+  return code;
 }
 
 
-MaybeObject* StubCompiler::CompileCallDebugPrepareStepIn(Code::Flags flags) {
-  HandleScope scope(isolate());
-  // Use the same code for the the step in preparations as we do for
-  // the miss case.
+Handle<Code> StubCompiler::CompileCallDebugPrepareStepIn(Code::Flags flags) {
+  // Use the same code for the step-in preparations as we do for the
+  // miss case.
   int argc = Code::ExtractArgumentsCountFromFlags(flags);
   Code::Kind kind = Code::ExtractKindFromFlags(flags);
   if (kind == Code::CALL_IC) {
@@ -1568,26 +1393,42 @@
   } else {
     KeyedCallIC::GenerateMiss(masm(), argc);
   }
-  Object* result;
-  { MaybeObject* maybe_result =
-        GetCodeWithFlags(flags, "CompileCallDebugPrepareStepIn");
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
-  Code* code = Code::cast(result);
-  USE(code);
+  Handle<Code> code = GetCodeWithFlags(flags, "CompileCallDebugPrepareStepIn");
   PROFILE(isolate(),
           CodeCreateEvent(
               CALL_LOGGER_TAG(kind, CALL_DEBUG_PREPARE_STEP_IN_TAG),
-              code,
+              *code,
               code->arguments_count()));
-  return result;
+  return code;
 }
-#endif
+#endif  // ENABLE_DEBUGGER_SUPPORT
 
 #undef CALL_LOGGER_TAG
 
-MaybeObject* StubCompiler::GetCodeWithFlags(Code::Flags flags,
+
+Handle<Code> StubCompiler::GetCodeWithFlags(Code::Flags flags,
                                             const char* name) {
+  // Create code object in the heap.
+  CodeDesc desc;
+  masm_.GetCode(&desc);
+  Handle<Code> code = factory()->NewCode(desc, flags, masm_.CodeObject());
+#ifdef ENABLE_DISASSEMBLER
+  if (FLAG_print_code_stubs) code->Disassemble(name);
+#endif
+  return code;
+}
+
+
+Handle<Code> StubCompiler::GetCodeWithFlags(Code::Flags flags,
+                                            Handle<String> name) {
+  return (FLAG_print_code_stubs && !name.is_null())
+      ? GetCodeWithFlags(flags, *name->ToCString())
+      : GetCodeWithFlags(flags, reinterpret_cast<char*>(NULL));
+}
+
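GetCodeWithFlags above is the point where an assembled stub becomes a heap object: the MacroAssembler buffer is finalized into a CodeDesc, handed to the factory, and, in disassembler-enabled builds, dumped when --print-code-stubs is passed.  A small caller-side sketch, using only names that appear in this patch; the stub name string is just an example.

    // Illustrative use of the handlified GetCodeWithFlags inside a stub
    // compiler method, after masm() has been filled with generated code.
    Code::Flags flags = Code::ComputeFlags(Code::STUB);
    Handle<Code> code = GetCodeWithFlags(flags, "ExampleStub");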
+
+MaybeObject* StubCompiler::TryGetCodeWithFlags(Code::Flags flags,
+                                               const char* name) {
   // Check for allocation failures during stub compilation.
   if (failure_->IsFailure()) return failure_;
 
@@ -1604,11 +1445,12 @@
 }
 
 
-MaybeObject* StubCompiler::GetCodeWithFlags(Code::Flags flags, String* name) {
-  if (FLAG_print_code_stubs && (name != NULL)) {
-    return GetCodeWithFlags(flags, *name->ToCString());
+MaybeObject* StubCompiler::TryGetCodeWithFlags(Code::Flags flags,
+                                               String* name) {
+  if (FLAG_print_code_stubs && name != NULL) {
+    return TryGetCodeWithFlags(flags, *name->ToCString());
   }
-  return GetCodeWithFlags(flags, reinterpret_cast<char*>(NULL));
+  return TryGetCodeWithFlags(flags, reinterpret_cast<char*>(NULL));
 }
 
 
@@ -1626,10 +1468,20 @@
 }
 
 
-
-MaybeObject* LoadStubCompiler::GetCode(PropertyType type, String* name) {
+Handle<Code> LoadStubCompiler::GetCode(PropertyType type, Handle<String> name) {
   Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, type);
-  MaybeObject* result = GetCodeWithFlags(flags, name);
+  Handle<Code> code = GetCodeWithFlags(flags, name);
+  PROFILE(isolate(), CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code));
+  return code;
+}
+
+
+// TODO(ulan): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* LoadStubCompiler::TryGetCode(PropertyType type, String* name) {
+  Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, type);
+  MaybeObject* result = TryGetCodeWithFlags(flags, name);
   if (!result->IsFailure()) {
     PROFILE(isolate(),
             CodeCreateEvent(Logger::LOAD_IC_TAG,
@@ -1643,12 +1495,25 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::GetCode(PropertyType type,
+Handle<Code> KeyedLoadStubCompiler::GetCode(PropertyType type,
+                                            Handle<String> name,
+                                            InlineCacheState state) {
+  Code::Flags flags = Code::ComputeFlags(
+      Code::KEYED_LOAD_IC, state, Code::kNoExtraICState, type);
+  Handle<Code> code = GetCodeWithFlags(flags, name);
+  PROFILE(isolate(), CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code));
+  return code;
+}
+
+
+// TODO(ulan): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* KeyedLoadStubCompiler::TryGetCode(PropertyType type,
                                             String* name,
                                             InlineCacheState state) {
   Code::Flags flags = Code::ComputeFlags(
       Code::KEYED_LOAD_IC, state, Code::kNoExtraICState, type);
-  MaybeObject* result = GetCodeWithFlags(flags, name);
+  MaybeObject* result = TryGetCodeWithFlags(flags, name);
   if (!result->IsFailure()) {
     PROFILE(isolate(),
             CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG,
@@ -1662,39 +1527,26 @@
 }
 
 
-MaybeObject* StoreStubCompiler::GetCode(PropertyType type, String* name) {
+Handle<Code> StoreStubCompiler::GetCode(PropertyType type,
+                                        Handle<String> name) {
   Code::Flags flags =
       Code::ComputeMonomorphicFlags(Code::STORE_IC, type, strict_mode_);
-  MaybeObject* result = GetCodeWithFlags(flags, name);
-  if (!result->IsFailure()) {
-    PROFILE(isolate(),
-            CodeCreateEvent(Logger::STORE_IC_TAG,
-                            Code::cast(result->ToObjectUnchecked()),
-                            name));
-    GDBJIT(AddCode(GDBJITInterface::STORE_IC,
-                   name,
-                   Code::cast(result->ToObjectUnchecked())));
-  }
-  return result;
+  Handle<Code> code = GetCodeWithFlags(flags, name);
+  PROFILE(isolate(), CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code));
+  return code;
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::GetCode(PropertyType type,
-                                             String* name,
+Handle<Code> KeyedStoreStubCompiler::GetCode(PropertyType type,
+                                             Handle<String> name,
                                              InlineCacheState state) {
   Code::Flags flags =
       Code::ComputeFlags(Code::KEYED_STORE_IC, state, strict_mode_, type);
-  MaybeObject* result = GetCodeWithFlags(flags, name);
-  if (!result->IsFailure()) {
-    PROFILE(isolate(),
-            CodeCreateEvent(Logger::KEYED_STORE_IC_TAG,
-                            Code::cast(result->ToObjectUnchecked()),
-                            name));
-    GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC,
-                   name,
-                   Code::cast(result->ToObjectUnchecked())));
-  }
-  return result;
+  Handle<Code> code = GetCodeWithFlags(flags, name);
+  PROFILE(isolate(), CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, *code, *name));
+  GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC, *name, *code));
+  return code;
 }
 
 
@@ -1704,13 +1556,15 @@
 }
 
 
-CallStubCompiler::CallStubCompiler(int argc,
+CallStubCompiler::CallStubCompiler(Isolate* isolate,
+                                   int argc,
                                    Code::Kind kind,
-                                   Code::ExtraICState extra_ic_state,
+                                   Code::ExtraICState extra_state,
                                    InlineCacheHolderFlag cache_holder)
-    : arguments_(argc),
+    : StubCompiler(isolate),
+      arguments_(argc),
       kind_(kind),
-      extra_ic_state_(extra_ic_state),
+      extra_state_(extra_state),
       cache_holder_(cache_holder) {
 }
 
@@ -1763,30 +1617,54 @@
 }
 
 
-MaybeObject* CallStubCompiler::GetCode(PropertyType type, String* name) {
+Handle<Code> CallStubCompiler::GetCode(PropertyType type, Handle<String> name) {
   int argc = arguments_.immediate();
   Code::Flags flags = Code::ComputeMonomorphicFlags(kind_,
                                                     type,
-                                                    extra_ic_state_,
+                                                    extra_state_,
                                                     cache_holder_,
                                                     argc);
   return GetCodeWithFlags(flags, name);
 }
 
 
-MaybeObject* CallStubCompiler::GetCode(JSFunction* function) {
+Handle<Code> CallStubCompiler::GetCode(Handle<JSFunction> function) {
+  Handle<String> function_name;
+  if (function->shared()->name()->IsString()) {
+    function_name = Handle<String>(String::cast(function->shared()->name()));
+  }
+  return GetCode(CONSTANT_FUNCTION, function_name);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* CallStubCompiler::TryGetCode(PropertyType type, String* name) {
+  int argc = arguments_.immediate();
+  Code::Flags flags = Code::ComputeMonomorphicFlags(kind_,
+                                                    type,
+                                                    extra_state_,
+                                                    cache_holder_,
+                                                    argc);
+  return TryGetCodeWithFlags(flags, name);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* CallStubCompiler::TryGetCode(JSFunction* function) {
   String* function_name = NULL;
   if (function->shared()->name()->IsString()) {
     function_name = String::cast(function->shared()->name());
   }
-  return GetCode(CONSTANT_FUNCTION, function_name);
+  return TryGetCode(CONSTANT_FUNCTION, function_name);
 }
 
 
 MaybeObject* ConstructStubCompiler::GetCode() {
   Code::Flags flags = Code::ComputeFlags(Code::STUB);
   Object* result;
-  { MaybeObject* maybe_result = GetCodeWithFlags(flags, "ConstructStub");
+  { MaybeObject* maybe_result = TryGetCodeWithFlags(flags, "ConstructStub");
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Code* code = Code::cast(result);
diff --git a/src/stub-cache.h b/src/stub-cache.h
index d9ec88f..11fdb89 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -76,207 +76,171 @@
 
   // Computes the right stub matching. Inserts the result in the
   // cache before returning.  This might compile a stub if needed.
-  MUST_USE_RESULT MaybeObject* ComputeLoadNonexistent(
-      String* name,
-      JSObject* receiver);
+  Handle<Code> ComputeLoadNonexistent(Handle<String> name,
+                                      Handle<JSObject> receiver);
 
-  MUST_USE_RESULT MaybeObject* ComputeLoadField(String* name,
-                                                JSObject* receiver,
-                                                JSObject* holder,
-                                                int field_index);
+  Handle<Code> ComputeLoadField(Handle<String> name,
+                                Handle<JSObject> receiver,
+                                Handle<JSObject> holder,
+                                int field_index);
 
-  MUST_USE_RESULT MaybeObject* ComputeLoadCallback(
-      String* name,
-      JSObject* receiver,
-      JSObject* holder,
-      AccessorInfo* callback);
+  Handle<Code> ComputeLoadCallback(Handle<String> name,
+                                   Handle<JSObject> receiver,
+                                   Handle<JSObject> holder,
+                                   Handle<AccessorInfo> callback);
 
-  MUST_USE_RESULT MaybeObject* ComputeLoadConstant(String* name,
-                                                   JSObject* receiver,
-                                                   JSObject* holder,
-                                                   Object* value);
+  Handle<Code> ComputeLoadConstant(Handle<String> name,
+                                   Handle<JSObject> receiver,
+                                   Handle<JSObject> holder,
+                                   Handle<Object> value);
 
-  MUST_USE_RESULT MaybeObject* ComputeLoadInterceptor(
-      String* name,
-      JSObject* receiver,
-      JSObject* holder);
+  Handle<Code> ComputeLoadInterceptor(Handle<String> name,
+                                      Handle<JSObject> receiver,
+                                      Handle<JSObject> holder);
 
-  MUST_USE_RESULT MaybeObject* ComputeLoadNormal();
+  Handle<Code> ComputeLoadNormal();
 
-
-  MUST_USE_RESULT MaybeObject* ComputeLoadGlobal(
-      String* name,
-      JSObject* receiver,
-      GlobalObject* holder,
-      JSGlobalPropertyCell* cell,
-      bool is_dont_delete);
-
+  Handle<Code> ComputeLoadGlobal(Handle<String> name,
+                                 Handle<JSObject> receiver,
+                                 Handle<GlobalObject> holder,
+                                 Handle<JSGlobalPropertyCell> cell,
+                                 bool is_dont_delete);
 
   // ---
 
-  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadField(String* name,
-                                                     JSObject* receiver,
-                                                     JSObject* holder,
-                                                     int field_index);
+  Handle<Code> ComputeKeyedLoadField(Handle<String> name,
+                                     Handle<JSObject> receiver,
+                                     Handle<JSObject> holder,
+                                     int field_index);
 
-  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadCallback(
-      String* name,
-      JSObject* receiver,
-      JSObject* holder,
-      AccessorInfo* callback);
+  Handle<Code> ComputeKeyedLoadCallback(Handle<String> name,
+                                        Handle<JSObject> receiver,
+                                        Handle<JSObject> holder,
+                                        Handle<AccessorInfo> callback);
 
-  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadConstant(
-      String* name,
-      JSObject* receiver,
-      JSObject* holder,
-      Object* value);
+  Handle<Code> ComputeKeyedLoadConstant(Handle<String> name,
+                                        Handle<JSObject> receiver,
+                                        Handle<JSObject> holder,
+                                        Handle<Object> value);
 
-  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadInterceptor(
-      String* name,
-      JSObject* receiver,
-      JSObject* holder);
+  Handle<Code> ComputeKeyedLoadInterceptor(Handle<String> name,
+                                           Handle<JSObject> receiver,
+                                           Handle<JSObject> holder);
 
-  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadArrayLength(
-      String* name,
-      JSArray* receiver);
+  Handle<Code> ComputeKeyedLoadArrayLength(Handle<String> name,
+                                           Handle<JSArray> receiver);
 
-  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadStringLength(
-      String* name,
-      String* receiver);
+  Handle<Code> ComputeKeyedLoadStringLength(Handle<String> name,
+                                            Handle<String> receiver);
 
-  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadFunctionPrototype(
-      String* name,
-      JSFunction* receiver);
+  Handle<Code> ComputeKeyedLoadFunctionPrototype(Handle<String> name,
+                                                 Handle<JSFunction> receiver);
 
   // ---
 
-  MUST_USE_RESULT MaybeObject* ComputeStoreField(
-      String* name,
-      JSObject* receiver,
-      int field_index,
-      Map* transition,
-      StrictModeFlag strict_mode);
+  Handle<Code> ComputeStoreField(Handle<String> name,
+                                 Handle<JSObject> receiver,
+                                 int field_index,
+                                 Handle<Map> transition,
+                                 StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT MaybeObject* ComputeStoreNormal(
-      StrictModeFlag strict_mode);
+  Handle<Code> ComputeStoreNormal(StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT MaybeObject* ComputeStoreGlobal(
-      String* name,
-      GlobalObject* receiver,
-      JSGlobalPropertyCell* cell,
-      StrictModeFlag strict_mode);
+  Handle<Code> ComputeStoreGlobal(Handle<String> name,
+                                  Handle<GlobalObject> receiver,
+                                  Handle<JSGlobalPropertyCell> cell,
+                                  StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT MaybeObject* ComputeStoreCallback(
-      String* name,
-      JSObject* receiver,
-      AccessorInfo* callback,
-      StrictModeFlag strict_mode);
+  Handle<Code> ComputeStoreCallback(Handle<String> name,
+                                    Handle<JSObject> receiver,
+                                    Handle<AccessorInfo> callback,
+                                    StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT MaybeObject* ComputeStoreInterceptor(
-      String* name,
-      JSObject* receiver,
-      StrictModeFlag strict_mode);
+  Handle<Code> ComputeStoreInterceptor(Handle<String> name,
+                                       Handle<JSObject> receiver,
+                                       StrictModeFlag strict_mode);
 
   // ---
 
-  MUST_USE_RESULT MaybeObject* ComputeKeyedStoreField(
-      String* name,
-      JSObject* receiver,
-      int field_index,
-      Map* transition,
-      StrictModeFlag strict_mode);
+  Handle<Code> ComputeKeyedStoreField(Handle<String> name,
+                                      Handle<JSObject> receiver,
+                                      int field_index,
+                                      Handle<Map> transition,
+                                      StrictModeFlag strict_mode);
 
-  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadOrStoreElement(
-      JSObject* receiver,
-      KeyedIC::StubKind stub_kind,
-      StrictModeFlag strict_mode);
+  Handle<Code> ComputeKeyedLoadOrStoreElement(Handle<JSObject> receiver,
+                                              KeyedIC::StubKind stub_kind,
+                                              StrictModeFlag strict_mode);
 
   // ---
 
-  MUST_USE_RESULT MaybeObject* ComputeCallField(
-      int argc,
-      Code::Kind,
-      Code::ExtraICState extra_ic_state,
-      String* name,
-      Object* object,
-      JSObject* holder,
-      int index);
+  Handle<Code> ComputeCallField(int argc,
+                                Code::Kind,
+                                Code::ExtraICState extra_state,
+                                Handle<String> name,
+                                Handle<Object> object,
+                                Handle<JSObject> holder,
+                                int index);
 
-  MUST_USE_RESULT MaybeObject* ComputeCallConstant(
-      int argc,
-      Code::Kind,
-      Code::ExtraICState extra_ic_state,
-      String* name,
-      Object* object,
-      JSObject* holder,
-      JSFunction* function);
+  Handle<Code> ComputeCallConstant(int argc,
+                                   Code::Kind,
+                                   Code::ExtraICState extra_state,
+                                   Handle<String> name,
+                                   Handle<Object> object,
+                                   Handle<JSObject> holder,
+                                   Handle<JSFunction> function);
 
-  MUST_USE_RESULT MaybeObject* ComputeCallNormal(
-      int argc,
-      Code::Kind,
-      Code::ExtraICState extra_ic_state,
-      String* name,
-      JSObject* receiver);
+  Handle<Code> ComputeCallInterceptor(int argc,
+                                      Code::Kind,
+                                      Code::ExtraICState extra_state,
+                                      Handle<String> name,
+                                      Handle<Object> object,
+                                      Handle<JSObject> holder);
 
-  MUST_USE_RESULT MaybeObject* ComputeCallInterceptor(
-      int argc,
-      Code::Kind,
-      Code::ExtraICState extra_ic_state,
-      String* name,
-      Object* object,
-      JSObject* holder);
-
-  MUST_USE_RESULT MaybeObject* ComputeCallGlobal(
-      int argc,
-      Code::Kind,
-      Code::ExtraICState extra_ic_state,
-      String* name,
-      JSObject* receiver,
-      GlobalObject* holder,
-      JSGlobalPropertyCell* cell,
-      JSFunction* function);
+  Handle<Code> ComputeCallGlobal(int argc,
+                                 Code::Kind,
+                                 Code::ExtraICState extra_state,
+                                 Handle<String> name,
+                                 Handle<JSObject> receiver,
+                                 Handle<GlobalObject> holder,
+                                 Handle<JSGlobalPropertyCell> cell,
+                                 Handle<JSFunction> function);
 
   // ---
 
-  MUST_USE_RESULT MaybeObject* ComputeCallInitialize(int argc,
-                                                     RelocInfo::Mode mode,
-                                                     Code::Kind kind);
-
-  Handle<Code> ComputeCallInitialize(int argc,
-                                     RelocInfo::Mode mode);
+  Handle<Code> ComputeCallInitialize(int argc, RelocInfo::Mode mode);
 
   Handle<Code> ComputeKeyedCallInitialize(int argc);
 
-  MUST_USE_RESULT MaybeObject* ComputeCallPreMonomorphic(
-      int argc,
-      Code::Kind kind,
-      Code::ExtraICState extra_ic_state);
+  Handle<Code> ComputeCallPreMonomorphic(int argc,
+                                         Code::Kind kind,
+                                         Code::ExtraICState extra_state);
 
-  MUST_USE_RESULT MaybeObject* ComputeCallNormal(int argc,
-                                                 Code::Kind kind,
-                                                 Code::ExtraICState state);
+  Handle<Code> ComputeCallNormal(int argc,
+                                 Code::Kind kind,
+                                 Code::ExtraICState state);
 
-  MUST_USE_RESULT MaybeObject* ComputeCallArguments(int argc,
-                                                    Code::Kind kind);
+  Handle<Code> ComputeCallArguments(int argc, Code::Kind kind);
 
-  MUST_USE_RESULT MaybeObject* ComputeCallMegamorphic(int argc,
-                                                      Code::Kind kind,
-                                                      Code::ExtraICState state);
+  Handle<Code> ComputeCallMegamorphic(int argc,
+                                      Code::Kind kind,
+                                      Code::ExtraICState state);
 
-  MUST_USE_RESULT MaybeObject* ComputeCallMiss(int argc,
-                                               Code::Kind kind,
-                                               Code::ExtraICState state);
+  Handle<Code> ComputeCallMiss(int argc,
+                               Code::Kind kind,
+                               Code::ExtraICState state);
+
+  MUST_USE_RESULT MaybeObject* TryComputeCallMiss(int argc,
+                                                  Code::Kind kind,
+                                                  Code::ExtraICState state);
 
   // Finds the Code object stored in the Heap::non_monomorphic_cache().
-  MUST_USE_RESULT Code* FindCallInitialize(int argc,
-                                           RelocInfo::Mode mode,
-                                           Code::Kind kind);
+  Code* FindCallInitialize(int argc, RelocInfo::Mode mode, Code::Kind kind);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  MUST_USE_RESULT MaybeObject* ComputeCallDebugBreak(int argc, Code::Kind kind);
+  Handle<Code> ComputeCallDebugBreak(int argc, Code::Kind kind);
 
-  MUST_USE_RESULT MaybeObject* ComputeCallDebugPrepareStepIn(int argc,
-                                                             Code::Kind kind);
+  Handle<Code> ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind);
 #endif
 
   // Update cache for entry hash(name, map).
@@ -330,16 +294,14 @@
 
   Isolate* isolate() { return isolate_; }
   Heap* heap() { return isolate()->heap(); }
+  Factory* factory() { return isolate()->factory(); }
 
  private:
   explicit StubCache(Isolate* isolate);
 
-  friend class Isolate;
-  friend class SCTableReference;
-  static const int kPrimaryTableSize = 2048;
-  static const int kSecondaryTableSize = 512;
-  Entry primary_[kPrimaryTableSize];
-  Entry secondary_[kSecondaryTableSize];
+  Handle<Code> ComputeCallInitialize(int argc,
+                                     RelocInfo::Mode mode,
+                                     Code::Kind kind);
 
   // Computes the hashed offsets for primary and secondary caches.
   static int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
@@ -384,8 +346,16 @@
         reinterpret_cast<Address>(table) + (offset << shift_amount));
   }
 
+  static const int kPrimaryTableSize = 2048;
+  static const int kSecondaryTableSize = 512;
+
+  Entry primary_[kPrimaryTableSize];
+  Entry secondary_[kSecondaryTableSize];
   Isolate* isolate_;
 
+  friend class Isolate;
+  friend class SCTableReference;
+
   DISALLOW_COPY_AND_ASSIGN(StubCache);
 };
 
@@ -407,21 +377,26 @@
 DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadPropertyWithInterceptor);
 
 
-// The stub compiler compiles stubs for the stub cache.
+// The stub compilers compile stubs for the stub cache.
 class StubCompiler BASE_EMBEDDED {
  public:
-  StubCompiler()
-      : scope_(), masm_(Isolate::Current(), NULL, 256), failure_(NULL) { }
+  explicit StubCompiler(Isolate* isolate)
+      : isolate_(isolate), masm_(isolate, NULL, 256), failure_(NULL) { }
 
-  MUST_USE_RESULT MaybeObject* CompileCallInitialize(Code::Flags flags);
-  MUST_USE_RESULT MaybeObject* CompileCallPreMonomorphic(Code::Flags flags);
-  MUST_USE_RESULT MaybeObject* CompileCallNormal(Code::Flags flags);
-  MUST_USE_RESULT MaybeObject* CompileCallMegamorphic(Code::Flags flags);
-  MUST_USE_RESULT MaybeObject* CompileCallArguments(Code::Flags flags);
-  MUST_USE_RESULT MaybeObject* CompileCallMiss(Code::Flags flags);
+  // Functions to compile either CallIC or KeyedCallIC.  The specific kind
+  // is extracted from the code flags.
+  Handle<Code> CompileCallInitialize(Code::Flags flags);
+  Handle<Code> CompileCallPreMonomorphic(Code::Flags flags);
+  Handle<Code> CompileCallNormal(Code::Flags flags);
+  Handle<Code> CompileCallMegamorphic(Code::Flags flags);
+  Handle<Code> CompileCallArguments(Code::Flags flags);
+  Handle<Code> CompileCallMiss(Code::Flags flags);
+
+  MUST_USE_RESULT MaybeObject* TryCompileCallMiss(Code::Flags flags);
+
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  MUST_USE_RESULT MaybeObject* CompileCallDebugBreak(Code::Flags flags);
-  MUST_USE_RESULT MaybeObject* CompileCallDebugPrepareStepIn(Code::Flags flags);
+  Handle<Code> CompileCallDebugBreak(Code::Flags flags);
+  Handle<Code> CompileCallDebugPrepareStepIn(Code::Flags flags);
 #endif
 
   // Static functions for generating parts of stubs.
@@ -441,8 +416,10 @@
                                                         Label* miss);
 
   static void GenerateFastPropertyLoad(MacroAssembler* masm,
-                                       Register dst, Register src,
-                                       JSObject* holder, int index);
+                                       Register dst,
+                                       Register src,
+                                       Handle<JSObject> holder,
+                                       int index);
 
   static void GenerateLoadArrayLength(MacroAssembler* masm,
                                       Register receiver,
@@ -463,9 +440,9 @@
                                             Label* miss_label);
 
   static void GenerateStoreField(MacroAssembler* masm,
-                                 JSObject* object,
+                                 Handle<JSObject> object,
                                  int index,
-                                 Map* transition,
+                                 Handle<Map> transition,
                                  Register receiver_reg,
                                  Register name_reg,
                                  Register scratch,
@@ -491,7 +468,30 @@
   // The function can optionally (when save_at_depth !=
   // kInvalidProtoDepth) save the object at the given depth by moving
   // it to [esp + kPointerSize].
+  Register CheckPrototypes(Handle<JSObject> object,
+                           Register object_reg,
+                           Handle<JSObject> holder,
+                           Register holder_reg,
+                           Register scratch1,
+                           Register scratch2,
+                           Handle<String> name,
+                           Label* miss) {
+    return CheckPrototypes(object, object_reg, holder, holder_reg, scratch1,
+                           scratch2, name, kInvalidProtoDepth, miss);
+  }
 
+  Register CheckPrototypes(Handle<JSObject> object,
+                           Register object_reg,
+                           Handle<JSObject> holder,
+                           Register holder_reg,
+                           Register scratch1,
+                           Register scratch2,
+                           Handle<String> name,
+                           int save_at_depth,
+                           Label* miss);
+
+  // TODO(kmillikin): Eliminate this function when the stub cache is fully
+  // handlified.
   Register CheckPrototypes(JSObject* object,
                            Register object_reg,
                            JSObject* holder,
@@ -504,6 +504,8 @@
                            scratch2, name, kInvalidProtoDepth, miss);
   }
 
+  // TODO(kmillikin): Eliminate this function when the stub cache is fully
+  // handlified.
   Register CheckPrototypes(JSObject* object,
                            Register object_reg,
                            JSObject* holder,
@@ -515,20 +517,25 @@
                            Label* miss);
 
  protected:
-  MaybeObject* GetCodeWithFlags(Code::Flags flags, const char* name);
-  MaybeObject* GetCodeWithFlags(Code::Flags flags, String* name);
+  Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name);
+  Handle<Code> GetCodeWithFlags(Code::Flags flags, Handle<String> name);
+
+  MUST_USE_RESULT MaybeObject* TryGetCodeWithFlags(Code::Flags flags,
+                                                   const char* name);
+  MUST_USE_RESULT MaybeObject* TryGetCodeWithFlags(Code::Flags flags,
+                                                   String* name);
 
   MacroAssembler* masm() { return &masm_; }
   void set_failure(Failure* failure) { failure_ = failure; }
 
-  void GenerateLoadField(JSObject* object,
-                         JSObject* holder,
+  void GenerateLoadField(Handle<JSObject> object,
+                         Handle<JSObject> holder,
                          Register receiver,
                          Register scratch1,
                          Register scratch2,
                          Register scratch3,
                          int index,
-                         String* name,
+                         Handle<String> name,
                          Label* miss);
 
   MaybeObject* GenerateLoadCallback(JSObject* object,
@@ -542,14 +549,14 @@
                                     String* name,
                                     Label* miss);
 
-  void GenerateLoadConstant(JSObject* object,
-                            JSObject* holder,
+  void GenerateLoadConstant(Handle<JSObject> object,
+                            Handle<JSObject> holder,
                             Register receiver,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
-                            Object* value,
-                            String* name,
+                            Handle<Object> value,
+                            Handle<String> name,
                             Label* miss);
 
   void GenerateLoadInterceptor(JSObject* object,
@@ -567,12 +574,12 @@
                                     String* name,
                                     LookupResult* lookup);
 
-  Isolate* isolate() { return scope_.isolate(); }
+  Isolate* isolate() { return isolate_; }
   Heap* heap() { return isolate()->heap(); }
   Factory* factory() { return isolate()->factory(); }
 
  private:
-  HandleScope scope_;
+  Isolate* isolate_;
   MacroAssembler masm_;
   Failure* failure_;
 };
@@ -580,70 +587,95 @@
 
 class LoadStubCompiler: public StubCompiler {
  public:
-  MUST_USE_RESULT MaybeObject* CompileLoadNonexistent(String* name,
-                                                      JSObject* object,
-                                                      JSObject* last);
+  explicit LoadStubCompiler(Isolate* isolate) : StubCompiler(isolate) { }
 
-  MUST_USE_RESULT MaybeObject* CompileLoadField(JSObject* object,
-                                                JSObject* holder,
-                                                int index,
-                                                String* name);
+  Handle<Code> CompileLoadNonexistent(Handle<String> name,
+                                      Handle<JSObject> object,
+                                      Handle<JSObject> last);
+
+  Handle<Code> CompileLoadField(Handle<JSObject> object,
+                                Handle<JSObject> holder,
+                                int index,
+                                Handle<String> name);
+
+  Handle<Code> CompileLoadCallback(Handle<String> name,
+                                   Handle<JSObject> object,
+                                   Handle<JSObject> holder,
+                                   Handle<AccessorInfo> callback);
 
   MUST_USE_RESULT MaybeObject* CompileLoadCallback(String* name,
                                                    JSObject* object,
                                                    JSObject* holder,
                                                    AccessorInfo* callback);
 
-  MUST_USE_RESULT MaybeObject* CompileLoadConstant(JSObject* object,
-                                                   JSObject* holder,
-                                                   Object* value,
-                                                   String* name);
+  Handle<Code> CompileLoadConstant(Handle<JSObject> object,
+                                   Handle<JSObject> holder,
+                                   Handle<Object> value,
+                                   Handle<String> name);
+
+  Handle<Code> CompileLoadInterceptor(Handle<JSObject> object,
+                                      Handle<JSObject> holder,
+                                      Handle<String> name);
 
   MUST_USE_RESULT MaybeObject* CompileLoadInterceptor(JSObject* object,
                                                       JSObject* holder,
                                                       String* name);
 
-  MUST_USE_RESULT MaybeObject* CompileLoadGlobal(JSObject* object,
-                                                 GlobalObject* holder,
-                                                 JSGlobalPropertyCell* cell,
-                                                 String* name,
-                                                 bool is_dont_delete);
+  Handle<Code> CompileLoadGlobal(Handle<JSObject> object,
+                                 Handle<GlobalObject> holder,
+                                 Handle<JSGlobalPropertyCell> cell,
+                                 Handle<String> name,
+                                 bool is_dont_delete);
 
  private:
-  MUST_USE_RESULT MaybeObject* GetCode(PropertyType type, String* name);
+  MUST_USE_RESULT MaybeObject* TryGetCode(PropertyType type, String* name);
+
+  Handle<Code> GetCode(PropertyType type, Handle<String> name);
 };
 
 
 class KeyedLoadStubCompiler: public StubCompiler {
  public:
-  MUST_USE_RESULT MaybeObject* CompileLoadField(String* name,
-                                                JSObject* object,
-                                                JSObject* holder,
-                                                int index);
+  explicit KeyedLoadStubCompiler(Isolate* isolate) : StubCompiler(isolate) { }
+
+  Handle<Code> CompileLoadField(Handle<String> name,
+                                Handle<JSObject> object,
+                                Handle<JSObject> holder,
+                                int index);
+
+  Handle<Code> CompileLoadCallback(Handle<String> name,
+                                   Handle<JSObject> object,
+                                   Handle<JSObject> holder,
+                                   Handle<AccessorInfo> callback);
 
   MUST_USE_RESULT MaybeObject* CompileLoadCallback(String* name,
                                                    JSObject* object,
                                                    JSObject* holder,
                                                    AccessorInfo* callback);
 
-  MUST_USE_RESULT MaybeObject* CompileLoadConstant(String* name,
-                                                   JSObject* object,
-                                                   JSObject* holder,
-                                                   Object* value);
+  Handle<Code> CompileLoadConstant(Handle<String> name,
+                                   Handle<JSObject> object,
+                                   Handle<JSObject> holder,
+                                   Handle<Object> value);
+
+  Handle<Code> CompileLoadInterceptor(Handle<JSObject> object,
+                                      Handle<JSObject> holder,
+                                      Handle<String> name);
 
   MUST_USE_RESULT MaybeObject* CompileLoadInterceptor(JSObject* object,
                                                       JSObject* holder,
                                                       String* name);
 
-  MUST_USE_RESULT MaybeObject* CompileLoadArrayLength(String* name);
-  MUST_USE_RESULT MaybeObject* CompileLoadStringLength(String* name);
-  MUST_USE_RESULT MaybeObject* CompileLoadFunctionPrototype(String* name);
+  Handle<Code> CompileLoadArrayLength(Handle<String> name);
 
-  MUST_USE_RESULT MaybeObject* CompileLoadElement(Map* receiver_map);
+  Handle<Code> CompileLoadStringLength(Handle<String> name);
 
-  MUST_USE_RESULT MaybeObject* CompileLoadPolymorphic(
-      MapList* receiver_maps,
-      CodeList* handler_ics);
+  Handle<Code> CompileLoadFunctionPrototype(Handle<String> name);
+
+  Handle<Code> CompileLoadElement(Handle<Map> receiver_map);
+
+  Handle<Code> CompileLoadPolymorphic(MapHandleList* receiver_maps,
+                                      CodeHandleList* handler_ics);
 
   static void GenerateLoadExternalArray(MacroAssembler* masm,
                                         ElementsKind elements_kind);
@@ -655,34 +687,40 @@
   static void GenerateLoadDictionaryElement(MacroAssembler* masm);
 
  private:
-  MaybeObject* GetCode(PropertyType type,
-                       String* name,
+  MaybeObject* TryGetCode(PropertyType type,
+                          String* name,
+                          InlineCacheState state = MONOMORPHIC);
+
+  Handle<Code> GetCode(PropertyType type,
+                       Handle<String> name,
                        InlineCacheState state = MONOMORPHIC);
 };
 
 
 class StoreStubCompiler: public StubCompiler {
  public:
-  explicit StoreStubCompiler(StrictModeFlag strict_mode)
-    : strict_mode_(strict_mode) { }
+  StoreStubCompiler(Isolate* isolate, StrictModeFlag strict_mode)
+    : StubCompiler(isolate), strict_mode_(strict_mode) { }
 
-  MUST_USE_RESULT MaybeObject* CompileStoreField(JSObject* object,
-                                                 int index,
-                                                 Map* transition,
-                                                 String* name);
 
-  MUST_USE_RESULT MaybeObject* CompileStoreCallback(JSObject* object,
-                                                    AccessorInfo* callbacks,
-                                                    String* name);
-  MUST_USE_RESULT MaybeObject* CompileStoreInterceptor(JSObject* object,
-                                                       String* name);
-  MUST_USE_RESULT MaybeObject* CompileStoreGlobal(GlobalObject* object,
-                                                  JSGlobalPropertyCell* holder,
-                                                  String* name);
+  Handle<Code> CompileStoreField(Handle<JSObject> object,
+                                 int index,
+                                 Handle<Map> transition,
+                                 Handle<String> name);
 
+  Handle<Code> CompileStoreCallback(Handle<JSObject> object,
+                                    Handle<AccessorInfo> callback,
+                                    Handle<String> name);
+
+  Handle<Code> CompileStoreInterceptor(Handle<JSObject> object,
+                                       Handle<String> name);
+
+  Handle<Code> CompileStoreGlobal(Handle<GlobalObject> object,
+                                  Handle<JSGlobalPropertyCell> holder,
+                                  Handle<String> name);
 
  private:
-  MaybeObject* GetCode(PropertyType type, String* name);
+  Handle<Code> GetCode(PropertyType type, Handle<String> name);
 
   StrictModeFlag strict_mode_;
 };
@@ -690,20 +728,19 @@
 
 class KeyedStoreStubCompiler: public StubCompiler {
  public:
-  explicit KeyedStoreStubCompiler(StrictModeFlag strict_mode)
-    : strict_mode_(strict_mode) { }
+  KeyedStoreStubCompiler(Isolate* isolate, StrictModeFlag strict_mode)
+    : StubCompiler(isolate), strict_mode_(strict_mode) { }
 
-  MUST_USE_RESULT MaybeObject* CompileStoreField(JSObject* object,
-                                                 int index,
-                                                 Map* transition,
-                                                 String* name);
+  Handle<Code> CompileStoreField(Handle<JSObject> object,
+                                 int index,
+                                 Handle<Map> transition,
+                                 Handle<String> name);
 
-  MUST_USE_RESULT MaybeObject* CompileStoreElement(Map* receiver_map);
+  Handle<Code> CompileStoreElement(Handle<Map> receiver_map);
 
-  MUST_USE_RESULT MaybeObject* CompileStorePolymorphic(
-      MapList* receiver_maps,
-      CodeList* handler_stubs,
-      MapList* transitioned_maps);
+  Handle<Code> CompileStorePolymorphic(MapHandleList* receiver_maps,
+                                       CodeHandleList* handler_stubs,
+                                       MapHandleList* transitioned_maps);
 
   static void GenerateStoreFastElement(MacroAssembler* masm,
                                        bool is_js_array,
@@ -718,8 +755,8 @@
   static void GenerateStoreDictionaryElement(MacroAssembler* masm);
 
  private:
-  MaybeObject* GetCode(PropertyType type,
-                       String* name,
+  Handle<Code> GetCode(PropertyType type,
+                       Handle<String> name,
                        InlineCacheState state = MONOMORPHIC);
 
   StrictModeFlag strict_mode_;
@@ -742,35 +779,48 @@
 
 class CallStubCompiler: public StubCompiler {
  public:
-  CallStubCompiler(int argc,
+  CallStubCompiler(Isolate* isolate,
+                   int argc,
                    Code::Kind kind,
-                   Code::ExtraICState extra_ic_state,
+                   Code::ExtraICState extra_state,
                    InlineCacheHolderFlag cache_holder);
 
-  MUST_USE_RESULT MaybeObject* CompileCallField(
-      JSObject* object,
-      JSObject* holder,
-      int index,
-      String* name);
+  Handle<Code> CompileCallField(Handle<JSObject> object,
+                                Handle<JSObject> holder,
+                                int index,
+                                Handle<String> name);
 
-  MUST_USE_RESULT MaybeObject* CompileCallConstant(
-      Object* object,
-      JSObject* holder,
-      JSFunction* function,
-      String* name,
-      CheckType check);
+  Handle<Code> CompileCallConstant(Handle<Object> object,
+                                   Handle<JSObject> holder,
+                                   Handle<JSFunction> function,
+                                   Handle<String> name,
+                                   CheckType check);
 
-  MUST_USE_RESULT MaybeObject* CompileCallInterceptor(
-      JSObject* object,
-      JSObject* holder,
-      String* name);
+  MUST_USE_RESULT MaybeObject* CompileCallConstant(Object* object,
+                                                   JSObject* holder,
+                                                   JSFunction* function,
+                                                   String* name,
+                                                   CheckType check);
 
-  MUST_USE_RESULT MaybeObject* CompileCallGlobal(
-      JSObject* object,
-      GlobalObject* holder,
-      JSGlobalPropertyCell* cell,
-      JSFunction* function,
-      String* name);
+  Handle<Code> CompileCallInterceptor(Handle<JSObject> object,
+                                      Handle<JSObject> holder,
+                                      Handle<String> name);
+
+  MUST_USE_RESULT MaybeObject* CompileCallInterceptor(JSObject* object,
+                                                      JSObject* holder,
+                                                      String* name);
+
+  Handle<Code> CompileCallGlobal(Handle<JSObject> object,
+                                 Handle<GlobalObject> holder,
+                                 Handle<JSGlobalPropertyCell> cell,
+                                 Handle<JSFunction> function,
+                                 Handle<String> name);
+
+  MUST_USE_RESULT MaybeObject* CompileCallGlobal(JSObject* object,
+                                                 GlobalObject* holder,
+                                                 JSGlobalPropertyCell* cell,
+                                                 JSFunction* function,
+                                                 String* name);
 
   static bool HasCustomCallGenerator(JSFunction* function);
 
@@ -803,18 +853,20 @@
 
   const ParameterCount arguments_;
   const Code::Kind kind_;
-  const Code::ExtraICState extra_ic_state_;
+  const Code::ExtraICState extra_state_;
   const InlineCacheHolderFlag cache_holder_;
 
   const ParameterCount& arguments() { return arguments_; }
 
-  MUST_USE_RESULT MaybeObject* GetCode(PropertyType type, String* name);
+  Handle<Code> GetCode(PropertyType type, Handle<String> name);
+  Handle<Code> GetCode(Handle<JSFunction> function);
 
-  // Convenience function. Calls GetCode above passing
-  // CONSTANT_FUNCTION type and the name of the given function.
-  MUST_USE_RESULT MaybeObject* GetCode(JSFunction* function);
+  // TODO(kmillikin): Eliminate these functions when the stub cache is fully
+  // handlified.
+  MUST_USE_RESULT MaybeObject* TryGetCode(PropertyType type, String* name);
+  MUST_USE_RESULT MaybeObject* TryGetCode(JSFunction* function);
 
-  void GenerateNameCheck(String* name, Label* miss);
+  void GenerateNameCheck(Handle<String> name, Label* miss);
 
   void GenerateGlobalReceiverCheck(JSObject* object,
                                    JSObject* holder,
@@ -827,15 +879,18 @@
                                     JSFunction* function,
                                     Label* miss);
 
-  // Generates a jump to CallIC miss stub. Returns Failure if the jump cannot
-  // be generated.
-  MUST_USE_RESULT MaybeObject* GenerateMissBranch();
+  // Generates a jump to CallIC miss stub.
+  void GenerateMissBranch();
+
+  // TODO(kmillikin): Eliminate this function when the stub cache is fully
+  // handlified.
+  MUST_USE_RESULT MaybeObject* TryGenerateMissBranch();
 };
 
 
 class ConstructStubCompiler: public StubCompiler {
  public:
-  explicit ConstructStubCompiler() {}
+  explicit ConstructStubCompiler(Isolate* isolate) : StubCompiler(isolate) { }
 
   MUST_USE_RESULT MaybeObject* CompileConstructStub(JSFunction* function);
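
The Handle<Code>-returning declarations above are the handlified replacements for the MaybeObject*-returning ones; the remaining Try* entry points keep the raw-pointer protocol until the stub cache is fully handlified (see the TODO(kmillikin) comments). As a rough illustration of why a handle can be held across allocation while a raw pointer cannot, here is a minimal, self-contained C++ sketch; Handle, Object and the "relocation" step are toy stand-ins, not V8's real classes:

#include <cstdio>

struct Object { int value; };

// A handle stores the address of a slot that the collector keeps up to
// date, so dereferencing stays valid even after the object is moved.
template <typename T>
class Handle {
 public:
  explicit Handle(T** slot) : slot_(slot) {}
  T* operator*() const { return *slot_; }
 private:
  T** slot_;
};

int main() {
  Object a{41};
  Object* slot = &a;            // slot tracked by the "collector"
  Handle<Object> handle(&slot);

  Object b{42};
  slot = &b;                    // the object is "relocated"; a raw Object*
                                // taken earlier would now point at stale data
  std::printf("%d\n", (*handle)->value);  // prints 42
  return 0;
}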
 
diff --git a/src/token.h b/src/token.h
index de4972d..7a2156c 100644
--- a/src/token.h
+++ b/src/token.h
@@ -73,6 +73,7 @@
   T(INIT_VAR, "=init_var", 2)  /* AST-use only. */                      \
   T(INIT_LET, "=init_let", 2)  /* AST-use only. */                      \
   T(INIT_CONST, "=init_const", 2)  /* AST-use only. */                  \
+  T(INIT_CONST_HARMONY, "=init_const_harmony", 2)  /* AST-use only. */  \
   T(ASSIGN, "=", 2)                                                     \
   T(ASSIGN_BIT_OR, "|=", 2)                                             \
   T(ASSIGN_BIT_XOR, "^=", 2)                                            \
diff --git a/src/type-info.cc b/src/type-info.cc
index a4b16f4..afec71a 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -423,6 +423,14 @@
 }
 
 
+static void AddMapIfMissing(Handle<Map> map, SmallMapList* list) {
+  for (int i = 0; i < list->length(); ++i) {
+    if (list->at(i).is_identical_to(map)) return;
+  }
+  list->Add(map);
+}
+
+
 void TypeFeedbackOracle::CollectKeyedReceiverTypes(unsigned ast_id,
                                                    SmallMapList* types) {
   Handle<Object> object = GetInfo(ast_id);
@@ -436,7 +444,7 @@
       RelocInfo* info = it.rinfo();
       Object* object = info->target_object();
       if (object->IsMap()) {
-        types->Add(Handle<Map>(Map::cast(object)));
+        AddMapIfMissing(Handle<Map>(Map::cast(object)), types);
       }
     }
   }
@@ -496,61 +504,56 @@
 
 void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) {
   for (int i = 0; i < infos->length(); i++) {
-    Address target_address = (*infos)[i].target_address();
+    RelocInfo reloc_entry = (*infos)[i];
+    Address target_address = reloc_entry.target_address();
     unsigned ast_id = static_cast<unsigned>((*infos)[i].data());
-    ProcessTargetAt(target_address, ast_id);
-  }
-}
-
-
-void TypeFeedbackOracle::ProcessTargetAt(Address target_address,
-                                         unsigned ast_id) {
-  Code* target = Code::GetCodeFromTargetAddress(target_address);
-  switch (target->kind()) {
-    case Code::LOAD_IC:
-    case Code::STORE_IC:
-    case Code::CALL_IC:
-    case Code::KEYED_CALL_IC:
-      if (target->ic_state() == MONOMORPHIC) {
-        if (target->kind() == Code::CALL_IC &&
-            target->check_type() != RECEIVER_MAP_CHECK) {
-          SetInfo(ast_id, Smi::FromInt(target->check_type()));
-        } else {
-          Object* map = target->FindFirstMap();
-          SetInfo(ast_id, map == NULL ? static_cast<Object*>(target) : map);
+    Code* target = Code::GetCodeFromTargetAddress(target_address);
+    switch (target->kind()) {
+      case Code::LOAD_IC:
+      case Code::STORE_IC:
+      case Code::CALL_IC:
+      case Code::KEYED_CALL_IC:
+        if (target->ic_state() == MONOMORPHIC) {
+          if (target->kind() == Code::CALL_IC &&
+              target->check_type() != RECEIVER_MAP_CHECK) {
+            SetInfo(ast_id, Smi::FromInt(target->check_type()));
+          } else {
+            Object* map = target->FindFirstMap();
+            SetInfo(ast_id, map == NULL ? static_cast<Object*>(target) : map);
+          }
+        } else if (target->ic_state() == MEGAMORPHIC) {
+          SetInfo(ast_id, target);
         }
-      } else if (target->ic_state() == MEGAMORPHIC) {
-        SetInfo(ast_id, target);
-      }
-      break;
+        break;
 
-    case Code::KEYED_LOAD_IC:
-    case Code::KEYED_STORE_IC:
-      if (target->ic_state() == MONOMORPHIC ||
-          target->ic_state() == MEGAMORPHIC) {
-        SetInfo(ast_id, target);
-      }
-      break;
-
-    case Code::UNARY_OP_IC:
-    case Code::BINARY_OP_IC:
-    case Code::COMPARE_IC:
-    case Code::TO_BOOLEAN_IC:
-      SetInfo(ast_id, target);
-      break;
-
-    case Code::STUB:
-      if (target->major_key() == CodeStub::CallFunction &&
-          target->has_function_cache()) {
-        Object* value = CallFunctionStub::GetCachedValue(target_address);
-        if (value->IsJSFunction()) {
-          SetInfo(ast_id, value);
+      case Code::KEYED_LOAD_IC:
+      case Code::KEYED_STORE_IC:
+        if (target->ic_state() == MONOMORPHIC ||
+            target->ic_state() == MEGAMORPHIC) {
+          SetInfo(ast_id, target);
         }
-      }
-      break;
+        break;
 
-    default:
-      break;
+      case Code::UNARY_OP_IC:
+      case Code::BINARY_OP_IC:
+      case Code::COMPARE_IC:
+      case Code::TO_BOOLEAN_IC:
+        SetInfo(ast_id, target);
+        break;
+
+      case Code::STUB:
+        if (target->major_key() == CodeStub::CallFunction &&
+            target->has_function_cache()) {
+          Object* value = CallFunctionStub::GetCachedValue(reloc_entry.pc());
+          if (value->IsJSFunction()) {
+            SetInfo(ast_id, value);
+          }
+        }
+        break;
+
+      default:
+        break;
+    }
   }
 }
 
diff --git a/src/type-info.h b/src/type-info.h
index 0ba10aa..2c3543e 100644
--- a/src/type-info.h
+++ b/src/type-info.h
@@ -277,7 +277,6 @@
                           byte* old_start,
                           byte* new_start);
   void ProcessRelocInfos(ZoneList<RelocInfo>* infos);
-  void ProcessTargetAt(Address target_address, unsigned ast_id);
 
   // Returns an element from the backing store. Returns undefined if
   // there is no information.
diff --git a/src/utils.h b/src/utils.h
index a523118..2e6cfbd 100644
--- a/src/utils.h
+++ b/src/utils.h
@@ -143,6 +143,16 @@
 }
 
 
+// Compare function to compare the object pointer value of two
+// handlified objects. The handles are passed as pointers to the
+// handles.
+template<typename T> class Handle;  // Forward declaration.
+template <typename T>
+static int HandleObjectPointerCompare(const Handle<T>* a, const Handle<T>* b) {
+  return Compare<T*>(*(*a), *(*b));
+}
+
+
 // Returns the smallest power of two which is >= x. If you pass in a
 // number that is already a power of two, it is returned as is.
 // Implementation is from "Hacker's Delight" by Henry S. Warren, Jr.,
@@ -168,7 +178,6 @@
 
 template <typename T, typename U>
 static inline bool IsAligned(T value, U alignment) {
-  ASSERT(IsPowerOf2(alignment));
   return (value & (alignment - 1)) == 0;
 }
 
@@ -257,6 +266,18 @@
 }
 
 
+static inline uint32_t ComputeLongHash(uint64_t key) {
+  uint64_t hash = key;
+  hash = ~hash + (hash << 18);  // hash = (hash << 18) - hash - 1;
+  hash = hash ^ (hash >> 31);
+  hash = hash * 21;  // hash = (hash + (hash << 2)) + (hash << 4);
+  hash = hash ^ (hash >> 11);
+  hash = hash + (hash << 6);
+  hash = hash ^ (hash >> 22);
+  return (uint32_t) hash;
+}
+
+
 static inline uint32_t ComputePointerHash(void* ptr) {
   return ComputeIntegerHash(
       static_cast<uint32_t>(reinterpret_cast<intptr_t>(ptr)));
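
ComputeLongHash above folds a 64-bit key down to 32 bits with a fixed sequence of shift, xor and multiply steps, in the same spirit as ComputeIntegerHash for 32-bit keys. Because the function has no V8 dependencies it can be exercised directly; a small standalone driver, assuming nothing beyond the standard library:

#include <cstdint>
#include <cstdio>

// Same mixing steps as the ComputeLongHash added to src/utils.h above.
static inline uint32_t ComputeLongHash(uint64_t key) {
  uint64_t hash = key;
  hash = ~hash + (hash << 18);  // hash = (hash << 18) - hash - 1;
  hash = hash ^ (hash >> 31);
  hash = hash * 21;             // hash = (hash + (hash << 2)) + (hash << 4);
  hash = hash ^ (hash >> 11);
  hash = hash + (hash << 6);
  hash = hash ^ (hash >> 22);
  return static_cast<uint32_t>(hash);
}

int main() {
  for (uint64_t key = 0; key < 4; ++key) {
    std::printf("%llu -> %u\n",
                static_cast<unsigned long long>(key), ComputeLongHash(key));
  }
  return 0;
}
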
diff --git a/src/v8.cc b/src/v8.cc
index a04114e..66c65e7 100644
--- a/src/v8.cc
+++ b/src/v8.cc
@@ -63,7 +63,7 @@
     FLAG_harmony_typeof = true;
     FLAG_harmony_scoping = true;
     FLAG_harmony_proxies = true;
-    FLAG_harmony_weakmaps = true;
+    FLAG_harmony_collections = true;
   }
 
   InitializeOncePerProcess();
@@ -150,9 +150,10 @@
 
 
 // Used by JavaScript APIs
-uint32_t V8::Random(Isolate* isolate) {
-  ASSERT(isolate == Isolate::Current());
-  return random_base(isolate->random_seed());
+uint32_t V8::Random(Context* context) {
+  ASSERT(context->IsGlobalContext());
+  ByteArray* seed = context->random_seed();
+  return random_base(reinterpret_cast<uint32_t*>(seed->GetDataStartAddress()));
 }
 
 
@@ -182,8 +183,9 @@
 } double_int_union;
 
 
-Object* V8::FillHeapNumberWithRandom(Object* heap_number, Isolate* isolate) {
-  uint64_t random_bits = Random(isolate);
+Object* V8::FillHeapNumberWithRandom(Object* heap_number,
+                                     Context* context) {
+  uint64_t random_bits = Random(context);
   // Make a double* from address (heap_number + sizeof(double)).
   double_int_union* r = reinterpret_cast<double_int_union*>(
       reinterpret_cast<char*>(heap_number) +
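
V8::Random now reads its state out of a ByteArray stored on the global context instead of out of shared isolate state, which is what makes Math.random per-context. A minimal sketch of the state-per-context idea, using a stand-in Context struct and a deliberately simple generator; V8's actual random_base mixing is not reproduced here:

#include <cstdint>
#include <cstdio>

// Stand-in for a global context that owns its own random state.
struct Context {
  uint32_t random_seed[2];
};

// Toy generator: only the "state lives in the context" part mirrors the
// change above, not the actual algorithm.
static uint32_t Random(Context* context) {
  uint32_t* seed = context->random_seed;
  seed[0] ^= seed[0] << 13;
  seed[0] ^= seed[0] >> 17;
  seed[0] ^= seed[0] << 5;
  seed[1] = seed[1] * 1664525u + 1013904223u;
  return seed[0] ^ seed[1];
}

int main() {
  Context a{{12345u, 67890u}};
  Context b{{12345u, 67890u}};
  uint32_t a1 = Random(&a);
  uint32_t a2 = Random(&a);
  uint32_t b1 = Random(&b);
  // a1 == b1 (same seed), while a2 differs: each context advances its
  // own state independently of the other.
  std::printf("%u %u %u\n", a1, a2, b1);
  return 0;
}
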
diff --git a/src/v8.h b/src/v8.h
index 2e039d4..01feefc 100644
--- a/src/v8.h
+++ b/src/v8.h
@@ -96,14 +96,14 @@
   // generation.
   static void SetEntropySource(EntropySource source);
   // Random number generation support. Not cryptographically safe.
-  static uint32_t Random(Isolate* isolate);
+  static uint32_t Random(Context* context);
   // We use random numbers internally in memory allocation and in the
   // compilers for security. In order to prevent information leaks we
   // use a separate random state for internal random number
   // generation.
   static uint32_t RandomPrivate(Isolate* isolate);
   static Object* FillHeapNumberWithRandom(Object* heap_number,
-                                          Isolate* isolate);
+                                          Context* context);
 
   // Idle notification directly from the API.
   static bool IdleNotification();
diff --git a/src/v8globals.h b/src/v8globals.h
index 09d26d2..f4703ff 100644
--- a/src/v8globals.h
+++ b/src/v8globals.h
@@ -509,6 +509,16 @@
 };
 
 
+enum ScopeType {
+  EVAL_SCOPE,      // The top-level scope for an eval source.
+  FUNCTION_SCOPE,  // The top-level scope for a function.
+  GLOBAL_SCOPE,    // The top-level scope for a program or a top-level eval.
+  CATCH_SCOPE,     // The scope introduced by catch.
+  BLOCK_SCOPE,     // The scope introduced by a new block.
+  WITH_SCOPE       // The scope introduced by with.
+};
+
+
 static const uint32_t kHoleNanUpper32 = 0x7FFFFFFF;
 static const uint32_t kHoleNanLower32 = 0xFFFFFFFF;
 static const uint32_t kNaNOrInfinityLowerBoundUpper32 = 0x7FF00000;
@@ -521,11 +531,13 @@
 
 enum VariableMode {
   // User declared variables:
-  VAR,       // declared via 'var', and 'function' declarations
+  VAR,             // declared via 'var', and 'function' declarations
 
-  CONST,     // declared via 'const' declarations
+  CONST,           // declared via 'const' declarations
 
-  LET,       // declared via 'let' declarations
+  CONST_HARMONY,   // declared via 'const' declarations in harmony mode
+
+  LET,             // declared via 'let' declarations
 
   // Variables introduced by the compiler:
   DYNAMIC,         // always require dynamic lookup (we don't know
@@ -547,6 +559,13 @@
                    // in a context
 };
 
+
+enum ClearExceptionFlag {
+  KEEP_EXCEPTION,
+  CLEAR_EXCEPTION
+};
+
+
 } }  // namespace v8::internal
 
 #endif  // V8_V8GLOBALS_H_
diff --git a/src/v8natives.js b/src/v8natives.js
index dee3032..e6669d5 100644
--- a/src/v8natives.js
+++ b/src/v8natives.js
@@ -373,6 +373,7 @@
 
 // ES5 8.10.3.
 function IsGenericDescriptor(desc) {
+  if (IS_UNDEFINED(desc)) return false;
   return !(IsAccessorDescriptor(desc) || IsDataDescriptor(desc));
 }
 
@@ -704,7 +705,7 @@
     if (should_throw) {
       throw MakeTypeError("define_disallowed", [p]);
     } else {
-      return;
+      return false;
     }
   }
 
@@ -734,7 +735,7 @@
         if (should_throw) {
           throw MakeTypeError("redefine_disallowed", [p]);
         } else {
-          return;
+          return false;
         }
       }
       // Step 8
@@ -744,7 +745,7 @@
           if (should_throw) {
             throw MakeTypeError("redefine_disallowed", [p]);
           } else {
-            return;
+            return false;
           }
         }
         // Step 10a
@@ -753,7 +754,7 @@
             if (should_throw) {
               throw MakeTypeError("redefine_disallowed", [p]);
             } else {
-              return;
+              return false;
             }
           }
           if (!current.isWritable() && desc.hasValue() &&
@@ -761,7 +762,7 @@
             if (should_throw) {
               throw MakeTypeError("redefine_disallowed", [p]);
             } else {
-              return;
+              return false;
             }
           }
         }
@@ -771,14 +772,14 @@
             if (should_throw) {
               throw MakeTypeError("redefine_disallowed", [p]);
             } else {
-              return;
+              return false;
             }
           }
           if (desc.hasGetter() && !SameValue(desc.getGet(),current.getGet())) {
             if (should_throw) {
               throw MakeTypeError("redefine_disallowed", [p]);
             } else {
-              return;
+              return false;
             }
           }
         }
@@ -881,7 +882,7 @@
       if (should_throw) {
         throw MakeTypeError("define_disallowed", [p]);
       } else {
-        return;
+        return false;
       }
     }
     if (index >= length) {
@@ -936,14 +937,14 @@
   }
   var n = ToUint32(obj.length);
   var array = new $Array(n);
-  var names = {}
+  var names = {}  // TODO(rossberg): use sets once they are ready.
   for (var index = 0; index < n; index++) {
     var s = ToString(obj[index]);
     if (s in names) {
       throw MakeTypeError("proxy_repeated_prop_name", [obj, trap, s])
     }
     array[index] = s;
-    names.s = 0;
+    names[s] = 0;
   }
   return array;
 }
@@ -1078,10 +1079,12 @@
     throw MakeTypeError("obj_ctor_property_non_object", ["defineProperties"]);
   var props = ToObject(properties);
   var names = GetOwnEnumerablePropertyNames(props);
+  var descriptors = new InternalArray();
   for (var i = 0; i < names.length; i++) {
-    var name = names[i];
-    var desc = ToPropertyDescriptor(props[name]);
-    DefineOwnProperty(obj, name, desc, true);
+    descriptors.push(ToPropertyDescriptor(props[names[i]]));
+  }
+  for (var i = 0; i < names.length; i++) {
+    DefineOwnProperty(obj, names[i], descriptors[i], true);
   }
   return obj;
 }
@@ -1517,53 +1520,53 @@
 // ES5 15.3.4.5
 function FunctionBind(this_arg) { // Length is 1.
   if (!IS_SPEC_FUNCTION(this)) {
-      throw new $TypeError('Bind must be called on a function');
+    throw new $TypeError('Bind must be called on a function');
   }
-  // this_arg is not an argument that should be bound.
-  var argc_bound = (%_ArgumentsLength() || 1) - 1;
-  var fn = this;
-
-  if (argc_bound == 0) {
-    var result = function() {
-      if (%_IsConstructCall()) {
-        // %NewObjectFromBound implicitly uses arguments passed to this
-        // function. We do not pass the arguments object explicitly to avoid
-        // materializing it and guarantee that this function will be optimized.
-        return %NewObjectFromBound(fn, null);
-      }
-      return %Apply(fn, this_arg, arguments, 0, %_ArgumentsLength());
-    };
-  } else {
-    var bound_args = new InternalArray(argc_bound);
-    for(var i = 0; i < argc_bound; i++) {
-      bound_args[i] = %_Arguments(i+1);
+  var boundFunction = function () {
+    // Poison .arguments and .caller, but is otherwise not detectable.
+    "use strict";
+    // This function must not use any object literals (Object, Array, RegExp),
+    // since the literals-array is being used to store the bound data.
+    if (%_IsConstructCall()) {
+      return %NewObjectFromBound(boundFunction);
     }
+    var bindings = %BoundFunctionGetBindings(boundFunction);
 
-    var result = function() {
-      // If this is a construct call we use a special runtime method
-      // to generate the actual object using the bound function.
-      if (%_IsConstructCall()) {
-        // %NewObjectFromBound implicitly uses arguments passed to this
-        // function. We do not pass the arguments object explicitly to avoid
-        // materializing it and guarantee that this function will be optimized.
-        return %NewObjectFromBound(fn, bound_args);
-      }
+    var argc = %_ArgumentsLength();
+    if (argc == 0) {
+      return %Apply(bindings[0], bindings[1], bindings, 2, bindings.length - 2);
+    }
+    if (bindings.length === 2) {
+      return %Apply(bindings[0], bindings[1], arguments, 0, argc);
+    }
+    var bound_argc = bindings.length - 2;
+    var argv = new InternalArray(bound_argc + argc);
+    for (var i = 0; i < bound_argc; i++) {
+      argv[i] = bindings[i + 2];
+    }
+    for (var j = 0; j < argc; j++) {
+      argv[i++] = %_Arguments(j);
+    }
+    return %Apply(bindings[0], bindings[1], argv, 0, bound_argc + argc);
+  };
 
-      // Combine the args we got from the bind call with the args
-      // given as argument to the invocation.
+  %FunctionRemovePrototype(boundFunction);
+  var new_length = 0;
+  if (%_ClassOf(this) == "Function") {
+    // Function or FunctionProxy.
+    var old_length = this.length;
+    // FunctionProxies might provide a non-UInt32 value. If so, ignore it.
+    if ((typeof old_length === "number") &&
+        ((old_length >>> 0) === old_length)) {
       var argc = %_ArgumentsLength();
-      var args = new InternalArray(argc + argc_bound);
-      // Add bound arguments.
-      for (var i = 0; i < argc_bound; i++) {
-        args[i] = bound_args[i];
-      }
-      // Add arguments from call.
-      for (var i = 0; i < argc; i++) {
-        args[argc_bound + i] = %_Arguments(i);
-      }
-      return %Apply(fn, this_arg, args, 0, argc + argc_bound);
-    };
+      if (argc > 0) argc--;  // Don't count the thisArg as parameter.
+      new_length = old_length - argc;
+      if (new_length < 0) new_length = 0;
+    }
   }
+  // This runtime function finds any remaining arguments on the stack,
+  // so we don't pass the arguments object.
+  var result = %FunctionBindArguments(boundFunction, this, this_arg, new_length);
 
   // We already have caller and arguments properties on functions,
   // which are non-configurable. It therefore makes no sense to
@@ -1571,17 +1574,7 @@
   // that bind should make these throw a TypeError if get or set
   // is called and make them non-enumerable and non-configurable.
   // To be consistent with our normal functions we leave this as it is.
-
-  %FunctionRemovePrototype(result);
-  %FunctionSetBound(result);
-  // Set the correct length. If this is a function proxy, this.length might
-  // throw, or return a bogus result. Leave length alone in that case.
-  // TODO(rossberg): This is underspecified in the current proxy proposal.
-  try {
-    var old_length = ToInteger(this.length);
-    var length = (old_length - argc_bound) > 0 ? old_length - argc_bound : 0;
-    %BoundFunctionSetLength(result, length);
-  } catch(x) {}
+  // TODO(lrn): Do set these to be thrower.
   return result;
 }
 
diff --git a/src/variables.cc b/src/variables.cc
index 076cdc0..d85e1b2 100644
--- a/src/variables.cc
+++ b/src/variables.cc
@@ -41,6 +41,7 @@
   switch (mode) {
     case VAR: return "VAR";
     case CONST: return "CONST";
+    case CONST_HARMONY: return "CONST";
     case LET: return "LET";
     case DYNAMIC: return "DYNAMIC";
     case DYNAMIC_GLOBAL: return "DYNAMIC_GLOBAL";
diff --git a/src/variables.h b/src/variables.h
index 612d8d3..8b2d869 100644
--- a/src/variables.h
+++ b/src/variables.h
@@ -118,6 +118,15 @@
             mode_ == DYNAMIC_GLOBAL ||
             mode_ == DYNAMIC_LOCAL);
   }
+  bool is_const_mode() const {
+    return (mode_ == CONST ||
+            mode_ == CONST_HARMONY);
+  }
+  bool binding_needs_init() const {
+    return (mode_ == LET ||
+            mode_ == CONST ||
+            mode_ == CONST_HARMONY);
+  }
 
   bool is_global() const;
   bool is_this() const { return kind_ == THIS; }
@@ -154,6 +163,10 @@
   Location location_;
   int index_;
 
+  // If this field is set, this variable references the stored locally bound
+  // variable, but it might be shadowed by variable bindings introduced by
+  // non-strict 'eval' calls between the reference scope (inclusive) and the
+  // binding scope (exclusive).
   Variable* local_if_not_shadowed_;
 
   // Valid as a LHS? (const and this are not valid LHS, for example)
diff --git a/src/version.cc b/src/version.cc
index 0b8037c..d34638b 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,8 +34,8 @@
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     7
-#define BUILD_NUMBER      0
-#define PATCH_LEVEL       1
+#define BUILD_NUMBER      1
+#define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0
diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h
index 10f0b88..f7b87ec 100644
--- a/src/x64/assembler-x64-inl.h
+++ b/src/x64/assembler-x64-inl.h
@@ -238,12 +238,12 @@
 }
 
 
-void RelocInfo::set_target_address(Address target) {
+void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
   if (IsCodeTarget(rmode_)) {
     Assembler::set_target_address_at(pc_, target);
     Object* target_code = Code::GetCodeFromTargetAddress(target);
-    if (host() != NULL) {
+    if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
       host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
           host(), this, HeapObject::cast(target_code));
     }
@@ -282,11 +282,13 @@
 }
 
 
-void RelocInfo::set_target_object(Object* target) {
+void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   Memory::Object_at(pc_) = target;
   CPU::FlushICache(pc_, sizeof(Address));
-  if (host() != NULL && target->IsHeapObject()) {
+  if (mode == UPDATE_WRITE_BARRIER &&
+      host() != NULL &&
+      target->IsHeapObject()) {
     host()->GetHeap()->incremental_marking()->RecordWrite(
         host(), &Memory::Object_at(pc_), HeapObject::cast(target));
   }
@@ -310,12 +312,14 @@
 }
 
 
-void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell) {
+void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
+                                WriteBarrierMode mode) {
   ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
   Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
   Memory::Address_at(pc_) = address;
   CPU::FlushICache(pc_, sizeof(Address));
-  if (host() != NULL) {
+  if (mode == UPDATE_WRITE_BARRIER &&
+      host() != NULL) {
     // TODO(1550) We are passing NULL as a slot because cell can never be on
     // evacuation candidate.
     host()->GetHeap()->incremental_marking()->RecordWrite(
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 79ddb13..8baa2f3 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -670,7 +670,7 @@
     __ testq(rax, rax);
     __ j(not_zero, &done);
     __ pop(rbx);
-    __ Push(FACTORY->undefined_value());
+    __ Push(masm->isolate()->factory()->undefined_value());
     __ push(rbx);
     __ incq(rax);
     __ bind(&done);
@@ -993,10 +993,6 @@
 }
 
 
-// Number of empty elements to allocate for an empty array.
-static const int kPreallocatedArrayElements = 4;
-
-
 // Allocate an empty JSArray. The allocated array is put into the result
 // register. If the parameter initial_capacity is larger than zero an elements
 // backing store is allocated with this size and filled with the hole values.
@@ -1007,9 +1003,9 @@
                                  Register scratch1,
                                  Register scratch2,
                                  Register scratch3,
-                                 int initial_capacity,
                                  Label* gc_required) {
-  ASSERT(initial_capacity >= 0);
+  const int initial_capacity = JSArray::kPreallocatedArrayElements;
+  STATIC_ASSERT(initial_capacity >= 0);
 
   // Load the initial map from the array function.
   __ movq(scratch1, FieldOperand(array_function,
@@ -1033,9 +1029,10 @@
   // result: JSObject
   // scratch1: initial map
   // scratch2: start of next object
+  Factory* factory = masm->isolate()->factory();
   __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
   __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
-          FACTORY->empty_fixed_array());
+          factory->empty_fixed_array());
   // Field JSArray::kElementsOffset is initialized later.
   __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));
 
@@ -1043,7 +1040,7 @@
   // fixed array.
   if (initial_capacity == 0) {
     __ Move(FieldOperand(result, JSArray::kElementsOffset),
-            FACTORY->empty_fixed_array());
+            factory->empty_fixed_array());
     return;
   }
 
@@ -1060,15 +1057,14 @@
   // scratch1: elements array
   // scratch2: start of next object
   __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
-          FACTORY->fixed_array_map());
+          factory->fixed_array_map());
   __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
           Smi::FromInt(initial_capacity));
 
   // Fill the FixedArray with the hole value. Inline the code if short.
   // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
   static const int kLoopUnfoldLimit = 4;
-  ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
-  __ Move(scratch3, FACTORY->the_hole_value());
+  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
   if (initial_capacity <= kLoopUnfoldLimit) {
     // Use a scratch register here to have only one reloc info when unfolding
     // the loop.
@@ -1101,38 +1097,25 @@
 // register elements_array is scratched.
 static void AllocateJSArray(MacroAssembler* masm,
                             Register array_function,  // Array function.
-                            Register array_size,  // As a smi.
+                            Register array_size,  // As a smi, cannot be 0.
                             Register result,
                             Register elements_array,
                             Register elements_array_end,
                             Register scratch,
                             bool fill_with_hole,
                             Label* gc_required) {
-  Label not_empty, allocated;
-
   // Load the initial map from the array function.
   __ movq(elements_array,
           FieldOperand(array_function,
                        JSFunction::kPrototypeOrInitialMapOffset));
 
-  // Check whether an empty sized array is requested.
-  __ testq(array_size, array_size);
-  __ j(not_zero, &not_empty);
-
-  // If an empty array is requested allocate a small elements array anyway. This
-  // keeps the code below free of special casing for the empty array.
-  int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
-  __ AllocateInNewSpace(size,
-                        result,
-                        elements_array_end,
-                        scratch,
-                        gc_required,
-                        TAG_OBJECT);
-  __ jmp(&allocated);
+  if (FLAG_debug_code) {  // Assert that array size is not zero.
+    __ testq(array_size, array_size);
+    __ Assert(not_zero, "array size is unexpectedly 0");
+  }
 
   // Allocate the JSArray object together with space for a FixedArray with the
   // requested elements.
-  __ bind(&not_empty);
   SmiIndex index =
       masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
   __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
@@ -1150,9 +1133,9 @@
   // elements_array: initial map
   // elements_array_end: start of next object
   // array_size: size of array (smi)
-  __ bind(&allocated);
+  Factory* factory = masm->isolate()->factory();
   __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
-  __ Move(elements_array, FACTORY->empty_fixed_array());
+  __ Move(elements_array, factory->empty_fixed_array());
   __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
   // Field JSArray::kElementsOffset is initialized later.
   __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);
@@ -1171,16 +1154,7 @@
   // elements_array_end: start of next object
   // array_size: size of array (smi)
   __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
-          FACTORY->fixed_array_map());
-  Label not_empty_2, fill_array;
-  __ SmiTest(array_size);
-  __ j(not_zero, &not_empty_2);
-  // Length of the FixedArray is the number of pre-allocated elements even
-  // though the actual JSArray has length 0.
-  __ Move(FieldOperand(elements_array, FixedArray::kLengthOffset),
-          Smi::FromInt(kPreallocatedArrayElements));
-  __ jmp(&fill_array);
-  __ bind(&not_empty_2);
+          factory->fixed_array_map());
   // For non-empty JSArrays the length of the FixedArray and the JSArray is the
   // same.
   __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);
@@ -1189,10 +1163,9 @@
   // result: JSObject
   // elements_array: elements array
   // elements_array_end: start of next object
-  __ bind(&fill_array);
   if (fill_with_hole) {
     Label loop, entry;
-    __ Move(scratch, FACTORY->the_hole_value());
+    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
     __ lea(elements_array, Operand(elements_array,
                                    FixedArray::kHeaderSize - kHeapObjectTag));
     __ jmp(&entry);
@@ -1222,12 +1195,13 @@
 // a construct call and a normal call.
 static void ArrayNativeCode(MacroAssembler* masm,
                             Label *call_generic_code) {
-  Label argc_one_or_more, argc_two_or_more;
+  Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array;
 
   // Check for array construction with zero arguments.
   __ testq(rax, rax);
   __ j(not_zero, &argc_one_or_more);
 
+  __ bind(&empty_array);
   // Handle construction of an empty array.
   AllocateEmptyJSArray(masm,
                        rdi,
@@ -1235,7 +1209,6 @@
                        rcx,
                        rdx,
                        r8,
-                       kPreallocatedArrayElements,
                        call_generic_code);
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->array_function_native(), 1);
@@ -1248,6 +1221,16 @@
   __ cmpq(rax, Immediate(1));
   __ j(not_equal, &argc_two_or_more);
   __ movq(rdx, Operand(rsp, kPointerSize));  // Get the argument from the stack.
+
+  __ SmiTest(rdx);
+  __ j(not_zero, &not_empty_array);
+  __ pop(r8);  // Adjust stack.
+  __ Drop(1);
+  __ push(r8);
+  __ movq(rax, Immediate(0));  // Treat this as a call with argc of zero.
+  __ jmp(&empty_array);
+
+  __ bind(&not_empty_array);
   __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);
 
   // Handle construction of an empty array of a certain size. Bail out if size
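
The builtins change above routes Array() and Array(0) through the same empty-array path instead of preallocating kPreallocatedArrayElements slots, and only a positive length allocates a hole-filled backing store. A rough, purely illustrative C++ sketch of that dispatch (plain containers, -1 standing in for the hole):

#include <cstdio>
#include <vector>

// Hypothetical sketch of the dispatch in ArrayNativeCode.
static std::vector<int> NativeArray(const std::vector<int>& args) {
  if (args.empty()) return {};           // Array(): empty, no preallocation.
  if (args.size() == 1) {
    int length = args[0];
    if (length == 0) return {};          // New: Array(0) reuses the same path.
    if (length > 0) {
      return std::vector<int>(static_cast<size_t>(length), -1);  // Hole-filled.
    }
    std::puts("RangeError: invalid array length");  // Generic path in V8.
    return {};
  }
  return args;                           // Array(a, b, ...): the arguments.
}
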
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 7d41ffe..3dfebee 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -227,7 +227,12 @@
   // [rsp + (3 * kPointerSize)]: literals array.
 
   // All sizes here are multiples of kPointerSize.
-  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
+  int elements_size = 0;
+  if (length_ > 0) {
+    elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
+        ? FixedDoubleArray::SizeFor(length_)
+        : FixedArray::SizeFor(length_);
+  }
   int size = JSArray::kSize + elements_size;
 
   // Load boilerplate object into rcx and check if we need to create a
@@ -247,6 +252,9 @@
     if (mode_ == CLONE_ELEMENTS) {
       message = "Expected (writable) fixed array";
       expected_map_index = Heap::kFixedArrayMapRootIndex;
+    } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
+      message = "Expected (writable) fixed double array";
+      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
     } else {
       ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
       message = "Expected copy-on-write fixed array";
@@ -280,9 +288,24 @@
     __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
 
     // Copy the elements array.
-    for (int i = 0; i < elements_size; i += kPointerSize) {
-      __ movq(rbx, FieldOperand(rcx, i));
-      __ movq(FieldOperand(rdx, i), rbx);
+    if (mode_ == CLONE_ELEMENTS) {
+      for (int i = 0; i < elements_size; i += kPointerSize) {
+        __ movq(rbx, FieldOperand(rcx, i));
+        __ movq(FieldOperand(rdx, i), rbx);
+      }
+    } else {
+      ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS);
+      int i;
+      for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
+        __ movq(rbx, FieldOperand(rcx, i));
+        __ movq(FieldOperand(rdx, i), rbx);
+      }
+      while (i < elements_size) {
+        __ movsd(xmm0, FieldOperand(rcx, i));
+        __ movsd(FieldOperand(rdx, i), xmm0);
+        i += kDoubleSize;
+      }
+      ASSERT(i == elements_size);
     }
   }
 
@@ -3879,7 +3902,7 @@
     __ bind(&miss);
   }
 
-  __ TryGetFunctionPrototype(rdx, rbx, &slow);
+  __ TryGetFunctionPrototype(rdx, rbx, &slow, true);
 
   // Check that the function prototype is a JS object.
   __ JumpIfSmi(rbx, &slow);
@@ -5438,7 +5461,68 @@
 }
 
 
-MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
+void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
+                                                        Label* miss,
+                                                        Label* done,
+                                                        Register properties,
+                                                        Handle<String> name,
+                                                        Register r0) {
+  // If the names in the slots probed for this hash value (probes 1 to
+  // kProbes - 1) are not equal to the name, and the kProbes-th slot is unused
+  // (its name is the undefined value), the hash table is guaranteed not to
+  // contain the property. This holds even if some slots hold deleted
+  // properties (their names are the null value).
+  for (int i = 0; i < kInlinedProbes; i++) {
+    // r0 points to properties hash.
+    // Compute the masked index: (hash + i + i * i) & mask.
+    Register index = r0;
+    // Capacity is smi 2^n.
+    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
+    __ decl(index);
+    __ and_(index,
+            Immediate(name->Hash() + StringDictionary::GetProbeOffset(i)));
+
+    // Scale the index by multiplying by the entry size.
+    ASSERT(StringDictionary::kEntrySize == 3);
+    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
+
+    Register entity_name = r0;
+    // Having undefined at this place means the name is not contained.
+    ASSERT_EQ(kSmiTagSize, 1);
+    __ movq(entity_name, Operand(properties,
+                                 index,
+                                 times_pointer_size,
+                                 kElementsStartOffset - kHeapObjectTag));
+    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
+    __ j(equal, done);
+
+    // Stop if found the property.
+    __ Cmp(entity_name, Handle<String>(name));
+    __ j(equal, miss);
+
+    // Check if the entry name is not a symbol.
+    __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
+    __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset),
+             Immediate(kIsSymbolMask));
+    __ j(zero, miss);
+  }
+
+  StringDictionaryLookupStub stub(properties,
+                                  r0,
+                                  r0,
+                                  StringDictionaryLookupStub::NEGATIVE_LOOKUP);
+  __ Push(Handle<Object>(name));
+  __ push(Immediate(name->Hash()));
+  __ CallStub(&stub);
+  __ testq(r0, r0);
+  __ j(not_zero, miss);
+  __ jmp(done);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup(
     MacroAssembler* masm,
     Label* miss,
     Label* done,
@@ -5665,6 +5749,15 @@
   { rbx, rdx, rcx, EMIT_REMEMBERED_SET},
   // KeyedStoreStubCompiler::GenerateStoreFastElement.
   { rdi, rdx, rcx, EMIT_REMEMBERED_SET},
+  // ElementsTransitionGenerator::GenerateSmiOnlyToObject
+  // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+  // and ElementsTransitionGenerator::GenerateDoubleToObject
+  { rdx, rbx, rdi, EMIT_REMEMBERED_SET},
+  // ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+  // and ElementsTransitionGenerator::GenerateDoubleToObject
+  { rdx, r11, r15, EMIT_REMEMBERED_SET},
+  // ElementsTransitionGenerator::GenerateDoubleToObject
+  { r11, rax, r15, EMIT_REMEMBERED_SET},
   // Null termination.
   { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
 };
@@ -5912,7 +6005,6 @@
   // Fall through when we need to inform the incremental marker.
 }
 
-
 #undef __
 
 } }  // namespace v8::internal
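
GenerateNegativeLookup above inlines the first probes of the string dictionary using the sequence (hash + i + i*i) & mask with an entry size of 3. A small C++ model of that negative lookup; the types and sentinel strings are stand-ins, not V8's:

#include <cstdint>
#include <string>
#include <vector>

// Simplified model of the probing done by GenerateNegativeLookup.  Each entry
// occupies kEntrySize slots with the key in slot 0; an empty entry is marked
// "undefined" and a deleted one "null", mirroring the layout the stub probes.
struct Dictionary {
  static constexpr int kEntrySize = 3;
  std::vector<std::string> keys;  // capacity * kEntrySize slots, key in #0.
  uint32_t capacity;              // Always a power of two.
};

static uint32_t ProbeOffset(uint32_t i) { return i + i * i; }  // Quadratic.

// Returns true if `name` is provably absent within `probes` probes.
static bool NegativeLookup(const Dictionary& dict, const std::string& name,
                           uint32_t hash, int probes) {
  uint32_t mask = dict.capacity - 1;
  for (int i = 0; i < probes; i++) {
    uint32_t index = (hash + ProbeOffset(static_cast<uint32_t>(i))) & mask;
    const std::string& key = dict.keys[index * Dictionary::kEntrySize];
    if (key == "undefined") return true;  // Free slot: name cannot be present.
    if (key == name) return false;        // Found: the lookup is not negative.
    // Deleted ("null") or a different key: keep probing.
  }
  return false;  // Inconclusive; the stub falls back to the full lookup.
}
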
diff --git a/src/x64/code-stubs-x64.h b/src/x64/code-stubs-x64.h
index 698ba40..ffa3f4d 100644
--- a/src/x64/code-stubs-x64.h
+++ b/src/x64/code-stubs-x64.h
@@ -423,7 +423,16 @@
 
   void Generate(MacroAssembler* masm);
 
-  MUST_USE_RESULT static MaybeObject* GenerateNegativeLookup(
+  static void GenerateNegativeLookup(MacroAssembler* masm,
+                                     Label* miss,
+                                     Label* done,
+                                     Register properties,
+                                     Handle<String> name,
+                                     Register r0);
+
+  // TODO(kmillikin): Eliminate this function when the stub cache is fully
+  // handlified.
+  MUST_USE_RESULT static MaybeObject* TryGenerateNegativeLookup(
       MacroAssembler* masm,
       Label* miss,
       Label* done,
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index f6102c7..4c216e8 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -30,6 +30,7 @@
 #if defined(V8_TARGET_ARCH_X64)
 
 #include "codegen.h"
+#include "macro-assembler.h"
 
 namespace v8 {
 namespace internal {
@@ -143,6 +144,224 @@
 
 #endif
 
+#undef __
+
+// -------------------------------------------------------------------------
+// Code generators
+
+#define __ ACCESS_MASM(masm)
+
+void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- rax    : value
+  //  -- rbx    : target map
+  //  -- rcx    : key
+  //  -- rdx    : receiver
+  //  -- rsp[0] : return address
+  // -----------------------------------
+  // Set transitioned map.
+  __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
+  __ RecordWriteField(rdx,
+                      HeapObject::kMapOffset,
+                      rbx,
+                      rdi,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+}
+
+
+void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+    MacroAssembler* masm, Label* fail) {
+  // ----------- S t a t e -------------
+  //  -- rax    : value
+  //  -- rbx    : target map
+  //  -- rcx    : key
+  //  -- rdx    : receiver
+  //  -- rsp[0] : return address
+  // -----------------------------------
+  // The fail label is only reached when the backing store is COW and a new
+  // backing store must be allocated below.
+  Label allocated, cow_array;
+
+  // Check the backing store for COW-ness.  If it is not a COW array, we do
+  // not have to allocate a new backing store, since FixedArray and
+  // FixedDoubleArray do not differ in size.
+  __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset));
+  __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
+  __ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset),
+                 Heap::kFixedCOWArrayMapRootIndex);
+  __ j(equal, &cow_array);
+  __ movq(r14, r8);  // Destination array equals source array.
+
+  __ bind(&allocated);
+  // r8 : source FixedArray
+  // r9 : elements array length
+  // r14: destination FixedDoubleArray
+  // Set backing store's map
+  __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
+  __ movq(FieldOperand(r14, HeapObject::kMapOffset), rdi);
+
+  // Set transitioned map.
+  __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
+  __ RecordWriteField(rdx,
+                      HeapObject::kMapOffset,
+                      rbx,
+                      rdi,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+
+  // Convert smis to doubles and holes to hole NaNs.  The Array's length
+  // remains unchanged.
+  STATIC_ASSERT(FixedDoubleArray::kLengthOffset == FixedArray::kLengthOffset);
+  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
+
+  Label loop, entry, convert_hole;
+  __ movq(r15, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE);
+  // r15: the-hole NaN
+  __ jmp(&entry);
+
+  // Allocate new array if the source array is a COW array.
+  __ bind(&cow_array);
+  __ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize));
+  __ AllocateInNewSpace(rdi, r14, r11, r15, fail, TAG_OBJECT);
+  // Set receiver's backing store.
+  __ movq(FieldOperand(rdx, JSObject::kElementsOffset), r14);
+  __ movq(r11, r14);
+  __ RecordWriteField(rdx,
+                      JSObject::kElementsOffset,
+                      r11,
+                      r15,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  // Set backing store's length.
+  __ Integer32ToSmi(r11, r9);
+  __ movq(FieldOperand(r14, FixedDoubleArray::kLengthOffset), r11);
+  __ jmp(&allocated);
+
+  // Conversion loop.
+  __ bind(&loop);
+  __ decq(r9);
+  __ movq(rbx,
+          FieldOperand(r8, r9, times_8, FixedArray::kHeaderSize));
+  // r9 : current element's index
+  // rbx: current element (smi-tagged)
+  __ JumpIfNotSmi(rbx, &convert_hole);
+  __ SmiToInteger32(rbx, rbx);
+  __ cvtlsi2sd(xmm0, rbx);
+  __ movsd(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize),
+           xmm0);
+  __ jmp(&entry);
+  __ bind(&convert_hole);
+  __ movq(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), r15);
+  __ bind(&entry);
+  __ testq(r9, r9);
+  __ j(not_zero, &loop);
+}
+
+
+void ElementsTransitionGenerator::GenerateDoubleToObject(
+    MacroAssembler* masm, Label* fail) {
+  // ----------- S t a t e -------------
+  //  -- rax    : value
+  //  -- rbx    : target map
+  //  -- rcx    : key
+  //  -- rdx    : receiver
+  //  -- rsp[0] : return address
+  // -----------------------------------
+  Label loop, entry, convert_hole, gc_required;
+  __ push(rax);
+
+  __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset));
+  __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
+  // r8 : source FixedDoubleArray
+  // r9 : number of elements
+  __ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize));
+  __ AllocateInNewSpace(rdi, r11, r14, r15, &gc_required, TAG_OBJECT);
+  // r11: destination FixedArray
+  __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex);
+  __ movq(FieldOperand(r11, HeapObject::kMapOffset), rdi);
+  __ Integer32ToSmi(r14, r9);
+  __ movq(FieldOperand(r11, FixedArray::kLengthOffset), r14);
+
+  // Prepare for conversion loop.
+  __ movq(rsi, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE);
+  __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex);
+  // rsi: the-hole NaN
+  // rdi: pointer to the-hole
+  __ jmp(&entry);
+
+  // Call into runtime if GC is required.
+  __ bind(&gc_required);
+  __ pop(rax);
+  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+  __ jmp(fail);
+
+  // Box doubles into heap numbers.
+  __ bind(&loop);
+  __ decq(r9);
+  __ movq(r14, FieldOperand(r8,
+                            r9,
+                            times_pointer_size,
+                            FixedDoubleArray::kHeaderSize));
+  // r9 : current element's index
+  // r14: current element
+  __ cmpq(r14, rsi);
+  __ j(equal, &convert_hole);
+
+  // Non-hole double, copy value into a heap number.
+  __ AllocateHeapNumber(rax, r15, &gc_required);
+  // rax: new heap number
+  __ movq(FieldOperand(rax, HeapNumber::kValueOffset), r14);
+  __ movq(FieldOperand(r11,
+                       r9,
+                       times_pointer_size,
+                       FixedArray::kHeaderSize),
+          rax);
+  __ movq(r15, r9);
+  __ RecordWriteArray(r11,
+                      rax,
+                      r15,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  __ jmp(&entry, Label::kNear);
+
+  // Replace the-hole NaN with the-hole pointer.
+  __ bind(&convert_hole);
+  __ movq(FieldOperand(r11,
+                       r9,
+                       times_pointer_size,
+                       FixedArray::kHeaderSize),
+          rdi);
+
+  __ bind(&entry);
+  __ testq(r9, r9);
+  __ j(not_zero, &loop);
+
+  // Set transitioned map.
+  __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
+  __ RecordWriteField(rdx,
+                      HeapObject::kMapOffset,
+                      rbx,
+                      rdi,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  // Replace receiver's backing store with newly created and filled FixedArray.
+  __ movq(FieldOperand(rdx, JSObject::kElementsOffset), r11);
+  __ RecordWriteField(rdx,
+                      JSObject::kElementsOffset,
+                      r11,
+                      r15,
+                      kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  __ pop(rax);
+  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+}
 
 #undef __
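
GenerateSmiOnlyToDouble above converts each smi element to a double and writes a dedicated hole-NaN bit pattern where the slot holds the hole. A plain C++ sketch of that per-element conversion; the NaN constant below is illustrative, not V8's kHoleNanInt64:

#include <cstdint>
#include <cstring>
#include <vector>

// Sketch of the conversion loop: smis become real doubles, holes become a
// reserved NaN bit pattern so the double array can still represent a missing
// element.
static const uint64_t kIllustrativeHoleNan = 0x7FF8000000000001ULL;

struct TaggedElements {
  std::vector<int32_t> values;  // Smi payloads.
  std::vector<bool> is_hole;    // True where the slot holds the hole.
};

static std::vector<uint64_t> ToDoubleElements(const TaggedElements& in) {
  std::vector<uint64_t> out(in.values.size());
  for (size_t i = 0; i < in.values.size(); i++) {
    if (in.is_hole[i]) {
      out[i] = kIllustrativeHoleNan;                 // Store hole-NaN bits.
    } else {
      double d = static_cast<double>(in.values[i]);  // cvtlsi2sd analogue.
      std::memcpy(&out[i], &d, sizeof(d));           // Store raw double bits.
    }
  }
  return out;
}
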
 
diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc
index b7e334e..d0a052b 100644
--- a/src/x64/deoptimizer-x64.cc
+++ b/src/x64/deoptimizer-x64.cc
@@ -258,16 +258,13 @@
   Assembler::set_target_address_at(call_target_address,
                                    replacement_code->entry());
 
-  RelocInfo rinfo(call_target_address,
-                  RelocInfo::CODE_TARGET,
-                  0,
-                  unoptimized_code);
-  unoptimized_code->GetHeap()->incremental_marking()->RecordWriteIntoCode(
-      unoptimized_code, &rinfo, replacement_code);
+  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
+      unoptimized_code, call_target_address, replacement_code);
 }
 
 
-void Deoptimizer::RevertStackCheckCodeAt(Address pc_after,
+void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
+                                         Address pc_after,
                                          Code* check_code,
                                          Code* replacement_code) {
   Address call_target_address = pc_after - kIntSize;
@@ -282,8 +279,9 @@
   *(call_target_address - 2) = 0x07;  // offset
   Assembler::set_target_address_at(call_target_address,
                                    check_code->entry());
-  check_code->GetHeap()->incremental_marking()->
-      RecordCodeTargetPatch(call_target_address, check_code);
+
+  check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
+      unoptimized_code, call_target_address, check_code);
 }
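
RevertStackCheckCodeAt now receives the unoptimized Code object so RecordCodeTargetPatch can attribute the patched call target to its host. The patch itself rewrites the 32-bit operand just before pc_after; a loose C++ sketch (encodings simplified, not x64-accurate):

#include <cstdint>
#include <cstring>
#include <vector>

// Rough model of the patching above: the call's 32-bit operand occupies the
// kIntSize bytes just before pc_after.
static const size_t kIntSize = 4;

static void PatchCallOperand(std::vector<uint8_t>* code, size_t pc_after,
                             int32_t new_operand) {
  size_t operand_pos = pc_after - kIntSize;
  std::memcpy(code->data() + operand_pos, &new_operand, kIntSize);
  // V8 then calls RecordCodeTargetPatch with the host Code object so
  // incremental marking learns about the new embedded target.
}
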
 
 
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index b5c5fc5..bf640db 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -254,7 +254,10 @@
       // constant.
       if (scope()->is_function_scope() && scope()->function() != NULL) {
         int ignored = 0;
-        EmitDeclaration(scope()->function(), CONST, NULL, &ignored);
+        VariableProxy* proxy = scope()->function();
+        ASSERT(proxy->var()->mode() == CONST ||
+               proxy->var()->mode() == CONST_HARMONY);
+        EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
       }
       VisitDeclarations(scope()->declarations());
     }
@@ -684,6 +687,8 @@
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
   Variable* variable = proxy->var();
+  bool binding_needs_init =
+      mode == CONST || mode == CONST_HARMONY || mode == LET;
   switch (variable->location()) {
     case Variable::UNALLOCATED:
       ++(*global_count);
@@ -695,7 +700,7 @@
         Comment cmnt(masm_, "[ Declaration");
         VisitForAccumulatorValue(function);
         __ movq(StackOperand(variable), result_register());
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         Comment cmnt(masm_, "[ Declaration");
         __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
         __ movq(StackOperand(variable), kScratchRegister);
@@ -728,7 +733,7 @@
                                   EMIT_REMEMBERED_SET,
                                   OMIT_SMI_CHECK);
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         Comment cmnt(masm_, "[ Declaration");
         __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
         __ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
@@ -741,9 +746,13 @@
       Comment cmnt(masm_, "[ Declaration");
       __ push(rsi);
       __ Push(variable->name());
-      // Declaration nodes are always introduced in one of three modes.
-      ASSERT(mode == VAR || mode == CONST || mode == LET);
-      PropertyAttributes attr = (mode == CONST) ? READ_ONLY : NONE;
+      // Declaration nodes are always introduced in one of four modes.
+      ASSERT(mode == VAR ||
+             mode == CONST ||
+             mode == CONST_HARMONY ||
+             mode == LET);
+      PropertyAttributes attr =
+          (mode == CONST || mode == CONST_HARMONY) ? READ_ONLY : NONE;
       __ Push(Smi::FromInt(attr));
       // Push initial value, if any.
       // Note: For variables we must not push an initial value (such as
@@ -751,7 +760,7 @@
       // must not destroy the current value.
       if (function != NULL) {
         VisitForStackValue(function);
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         __ PushRoot(Heap::kTheHoleValueRootIndex);
       } else {
         __ Push(Smi::FromInt(0));  // Indicates no initial value.
@@ -890,11 +899,17 @@
   __ bind(&done_convert);
   __ push(rax);
 
+  // Check for proxies.
+  Label call_runtime;
+  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+  __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
+  __ j(below_equal, &call_runtime);
+
   // Check cache validity in generated code. This is a fast case for
   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
   // guarantee cache validity, call the runtime system to check cache
   // validity or get the property names in a fixed array.
-  Label next, call_runtime;
+  Label next;
   Register empty_fixed_array_value = r8;
   __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
   Register empty_descriptor_array_value = r9;
@@ -970,9 +985,17 @@
   __ jmp(&loop);
 
   // We got a fixed array in register rax. Iterate through that.
+  Label non_proxy;
   __ bind(&fixed_array);
-  __ Push(Smi::FromInt(0));  // Map (0) - force slow check.
-  __ push(rax);
+  __ Move(rbx, Smi::FromInt(1));  // Smi indicates slow check
+  __ movq(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object
+  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+  __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
+  __ j(above, &non_proxy);
+  __ Move(rbx, Smi::FromInt(0));  // Zero indicates proxy
+  __ bind(&non_proxy);
+  __ push(rbx);  // Smi
+  __ push(rax);  // Array
   __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
   __ push(rax);  // Fixed array length (as smi).
   __ Push(Smi::FromInt(0));  // Initial index.
@@ -991,17 +1014,22 @@
                             index.scale,
                             FixedArray::kHeaderSize));
 
-  // Get the expected map from the stack or a zero map in the
+  // Get the expected map from the stack or a smi in the
   // permanent slow case into register rdx.
   __ movq(rdx, Operand(rsp, 3 * kPointerSize));
 
   // Check if the expected map still matches that of the enumerable.
-  // If not, we have to filter the key.
+  // If not, we may have to filter the key.
   Label update_each;
   __ movq(rcx, Operand(rsp, 4 * kPointerSize));
   __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
   __ j(equal, &update_each, Label::kNear);
 
+  // For proxies, no filtering is done.
+  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
+  __ Cmp(rdx, Smi::FromInt(0));
+  __ j(equal, &update_each, Label::kNear);
+
   // Convert the entry to a string or null if it isn't a property
   // anymore. If the property has been removed while iterating, we
   // just skip it.
@@ -1055,7 +1083,7 @@
       !pretenure &&
       scope()->is_function_scope() &&
       info->num_literals() == 0) {
-    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
+    FastNewClosureStub stub(info->strict_mode_flag());
     __ Push(info);
     __ CallStub(&stub);
   } else {
@@ -1085,7 +1113,7 @@
   Scope* s = scope();
   while (s != NULL) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
+      if (s->calls_non_strict_eval()) {
         // Check that extension is NULL.
         __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
                 Immediate(0));
@@ -1099,7 +1127,7 @@
     // If no outer scope calls eval, we do not need to check more
     // context extensions.  If we have reached an eval scope, we check
     // all extensions from this point.
-    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
     s = s->outer_scope();
   }
 
@@ -1145,7 +1173,7 @@
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
+      if (s->calls_non_strict_eval()) {
         // Check that extension is NULL.
         __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
                 Immediate(0));
@@ -1182,12 +1210,14 @@
   } else if (var->mode() == DYNAMIC_LOCAL) {
     Variable* local = var->local_if_not_shadowed();
     __ movq(rax, ContextSlotOperandCheckExtensions(local, slow));
-    if (local->mode() == CONST || local->mode() == LET) {
+    if (local->mode() == CONST ||
+        local->mode() == CONST_HARMONY ||
+        local->mode() == LET) {
       __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
       __ j(not_equal, done);
       if (local->mode() == CONST) {
         __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
-      } else {  // LET
+      } else {  // LET || CONST_HARMONY
         __ Push(var->name());
         __ CallRuntime(Runtime::kThrowReferenceError, 1);
       }
@@ -1221,7 +1251,7 @@
     case Variable::LOCAL:
     case Variable::CONTEXT: {
       Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
-      if (var->mode() != LET && var->mode() != CONST) {
+      if (!var->binding_needs_init()) {
         context()->Plug(var);
       } else {
         // Let and const need a read barrier.
@@ -1229,10 +1259,14 @@
         GetVar(rax, var);
         __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
         __ j(not_equal, &done, Label::kNear);
-        if (var->mode() == LET) {
+        if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+          // Throw a reference error when using an uninitialized let/const
+          // binding in harmony mode.
           __ Push(var->name());
           __ CallRuntime(Runtime::kThrowReferenceError, 1);
-        } else {  // CONST
+        } else {
+          // Uninitialized const bindings outside of harmony mode are unholed
+          // and read as undefined.
+          ASSERT(var->mode() == CONST);
           __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
         }
         __ bind(&done);
@@ -1417,12 +1451,18 @@
 
   ZoneList<Expression*>* subexprs = expr->values();
   int length = subexprs->length();
+  Handle<FixedArray> constant_elements = expr->constant_elements();
+  ASSERT_EQ(2, constant_elements->length());
+  ElementsKind constant_elements_kind =
+      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+  Handle<FixedArrayBase> constant_elements_values(
+      FixedArrayBase::cast(constant_elements->get(1)));
 
   __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
   __ Push(Smi::FromInt(expr->literal_index()));
-  __ Push(expr->constant_elements());
-  if (expr->constant_elements()->map() ==
+  __ Push(constant_elements);
+  if (constant_elements_values->map() ==
       isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
@@ -1433,8 +1473,14 @@
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
+    ASSERT(constant_elements_kind == FAST_ELEMENTS ||
+           constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+           FLAG_smi_only_arrays);
+    FastCloneShallowArrayStub::Mode mode =
+        constant_elements_kind == FAST_DOUBLE_ELEMENTS
+        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, length);
     __ CallStub(&stub);
   }
 
@@ -1459,22 +1505,59 @@
 
     // Store the subexpression value in the array's elements.
     __ movq(r8, Operand(rsp, 0));  // Copy of array literal.
+    __ movq(rdi, FieldOperand(r8, JSObject::kMapOffset));
     __ movq(rbx, FieldOperand(r8, JSObject::kElementsOffset));
     int offset = FixedArray::kHeaderSize + (i * kPointerSize);
-    __ movq(FieldOperand(rbx, offset), result_register());
 
-    Label no_map_change;
-    __ JumpIfSmi(result_register(), &no_map_change);
+    Label element_done;
+    Label double_elements;
+    Label smi_element;
+    Label slow_elements;
+    Label fast_elements;
+    __ CheckFastElements(rdi, &double_elements);
+
+    // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+    __ JumpIfSmi(result_register(), &smi_element);
+    __ CheckFastSmiOnlyElements(rdi, &fast_elements);
+
+    // Storing into the array literal requires an elements transition. Call
+    // into the runtime.
+    __ bind(&slow_elements);
+    __ push(r8);  // Copy of array literal.
+    __ Push(Smi::FromInt(i));
+    __ push(result_register());
+    __ Push(Smi::FromInt(NONE));  // PropertyAttributes
+    __ Push(Smi::FromInt(strict_mode_flag()));  // Strict mode.
+    __ CallRuntime(Runtime::kSetProperty, 5);
+    __ jmp(&element_done);
+
+    // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+    __ bind(&double_elements);
+    __ movq(rcx, Immediate(i));
+    __ StoreNumberToDoubleElements(result_register(),
+                                   rbx,
+                                   rcx,
+                                   xmm0,
+                                   &slow_elements);
+    __ jmp(&element_done);
+
+    // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+    __ bind(&fast_elements);
+    __ movq(FieldOperand(rbx, offset), result_register());
     // Update the write barrier for the array store.
     __ RecordWriteField(rbx, offset, result_register(), rcx,
                         kDontSaveFPRegs,
                         EMIT_REMEMBERED_SET,
                         OMIT_SMI_CHECK);
-    __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
-    __ CheckFastSmiOnlyElements(rdi, &no_map_change, Label::kNear);
-    __ push(r8);
-    __ CallRuntime(Runtime::kNonSmiElementStored, 1);
-    __ bind(&no_map_change);
+    __ jmp(&element_done);
+
+    // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
+    // FAST_ELEMENTS, and value is Smi.
+    __ bind(&smi_element);
+    __ movq(FieldOperand(rbx, offset), result_register());
+    // Fall through
+
+    __ bind(&element_done);
 
     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }
@@ -1805,8 +1888,9 @@
       }
     }
 
-  } else if (var->mode() != CONST) {
-    // Assignment to var or initializing assignment to let.
+  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
+    // Assignment to var or initializing assignment to let/const
+    // in harmony mode.
     if (var->IsStackAllocated() || var->IsContextSlot()) {
       MemOperand location = VarOperand(var, rcx);
       if (FLAG_debug_code && op == Token::INIT_LET) {
@@ -2657,9 +2741,12 @@
   // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
   __ PrepareCallCFunction(1);
 #ifdef _WIN64
-  __ LoadAddress(rcx, ExternalReference::isolate_address());
+  __ movq(rcx, ContextOperand(context_register(), Context::GLOBAL_INDEX));
+  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
+
 #else
-  __ LoadAddress(rdi, ExternalReference::isolate_address());
+  __ movq(rdi, ContextOperand(context_register(), Context::GLOBAL_INDEX));
+  __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
 #endif
   __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
 
@@ -3997,33 +4084,25 @@
         case Token::EQ_STRICT:
         case Token::EQ:
           cc = equal;
-          __ pop(rdx);
           break;
         case Token::LT:
           cc = less;
-          __ pop(rdx);
           break;
         case Token::GT:
-          // Reverse left and right sizes to obtain ECMA-262 conversion order.
-          cc = less;
-          __ movq(rdx, result_register());
-          __ pop(rax);
+          cc = greater;
          break;
         case Token::LTE:
-          // Reverse left and right sizes to obtain ECMA-262 conversion order.
-          cc = greater_equal;
-          __ movq(rdx, result_register());
-          __ pop(rax);
+          cc = less_equal;
           break;
         case Token::GTE:
           cc = greater_equal;
-          __ pop(rdx);
           break;
         case Token::IN:
         case Token::INSTANCEOF:
         default:
           UNREACHABLE();
       }
+      __ pop(rdx);
 
       bool inline_smi_code = ShouldInlineSmiCase(op);
       JumpPatchSite patch_site(masm_);
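
The declaration and load changes above initialize let and harmony const bindings to the hole and add a read barrier: reading an uninitialized let or harmony const throws a ReferenceError, while a legacy const reads as undefined. A compact, purely illustrative C++ model of that scheme (0 standing in for undefined):

#include <stdexcept>
#include <string>

enum BindingMode { VAR, CONST, LET, CONST_HARMONY };

struct Binding {
  BindingMode mode;
  bool initialized;  // False until the declaration's initializer runs (the hole).
  int value;
};

static int Load(const Binding& b, const std::string& name) {
  bool needs_init =
      b.mode == CONST || b.mode == CONST_HARMONY || b.mode == LET;
  if (needs_init && !b.initialized) {
    if (b.mode == CONST) return 0;  // Legacy const: reads as undefined.
    throw std::runtime_error("ReferenceError: " + name);  // let / harmony const.
  }
  return b.value;
}
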
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index 27a9667..e8ab06c 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -712,12 +712,11 @@
   // Writing a non-smi, check whether array allows non-smi elements.
   // r9: receiver's map
   __ CheckFastObjectElements(r9, &slow, Label::kNear);
-  __ lea(rcx,
-         FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize));
-  __ movq(Operand(rcx, 0), rax);
-  __ movq(rdx, rax);
-  __ RecordWrite(
-      rbx, rcx, rdx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+  __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
+          rax);
+  __ movq(rdx, rax);  // Preserve the value which is returned.
+  __ RecordWriteArray(
+      rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   __ ret(0);
 
   __ bind(&fast_double_with_map_check);
@@ -736,10 +735,10 @@
 
 // The generated code does not accept smi keys.
 // The generated code falls through if both probes miss.
-static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
-                                          int argc,
-                                          Code::Kind kind,
-                                          Code::ExtraICState extra_ic_state) {
+void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
+                                               int argc,
+                                               Code::Kind kind,
+                                               Code::ExtraICState extra_state) {
   // ----------- S t a t e -------------
   // rcx                      : function name
   // rdx                      : receiver
@@ -749,7 +748,7 @@
   // Probe the stub cache.
   Code::Flags flags = Code::ComputeFlags(kind,
                                          MONOMORPHIC,
-                                         extra_ic_state,
+                                         extra_state,
                                          NORMAL,
                                          argc);
   Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
@@ -822,7 +821,7 @@
 
 
 // The generated code falls through if the call should be handled by runtime.
-static void GenerateCallNormal(MacroAssembler* masm, int argc) {
+void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
   // ----------- S t a t e -------------
   // rcx                    : function name
   // rsp[0]                 : return address
@@ -849,10 +848,10 @@
 }
 
 
-static void GenerateCallMiss(MacroAssembler* masm,
-                             int argc,
-                             IC::UtilityId id,
-                             Code::ExtraICState extra_ic_state) {
+void CallICBase::GenerateMiss(MacroAssembler* masm,
+                              int argc,
+                              IC::UtilityId id,
+                              Code::ExtraICState extra_state) {
   // ----------- S t a t e -------------
   // rcx                      : function name
   // rsp[0]                   : return address
@@ -910,7 +909,7 @@
   }
 
   // Invoke the function.
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   ParameterCount actual(argc);
@@ -942,39 +941,6 @@
 }
 
 
-void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
-  // ----------- S t a t e -------------
-  // rcx                      : function name
-  // rsp[0]                   : return address
-  // rsp[8]                   : argument argc
-  // rsp[16]                  : argument argc - 1
-  // ...
-  // rsp[argc * 8]            : argument 1
-  // rsp[(argc + 1) * 8]      : argument 0 = receiver
-  // -----------------------------------
-
-  GenerateCallNormal(masm, argc);
-  GenerateMiss(masm, argc, Code::kNoExtraICState);
-}
-
-
-void CallIC::GenerateMiss(MacroAssembler* masm,
-                          int argc,
-                          Code::ExtraICState extra_ic_state) {
-  // ----------- S t a t e -------------
-  // rcx                      : function name
-  // rsp[0]                   : return address
-  // rsp[8]                   : argument argc
-  // rsp[16]                  : argument argc - 1
-  // ...
-  // rsp[argc * 8]            : argument 1
-  // rsp[(argc + 1) * 8]      : argument 0 = receiver
-  // -----------------------------------
-
-  GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state);
-}
-
-
 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   // ----------- S t a t e -------------
   // rcx                      : function name
@@ -1102,27 +1068,12 @@
   __ JumpIfSmi(rcx, &miss);
   Condition cond = masm->IsObjectStringType(rcx, rax, rax);
   __ j(NegateCondition(cond), &miss);
-  GenerateCallNormal(masm, argc);
+  CallICBase::GenerateNormal(masm, argc);
   __ bind(&miss);
   GenerateMiss(masm, argc);
 }
 
 
-void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
-  // ----------- S t a t e -------------
-  // rcx                      : function name
-  // rsp[0]                   : return address
-  // rsp[8]                   : argument argc
-  // rsp[16]                  : argument argc - 1
-  // ...
-  // rsp[argc * 8]            : argument 1
-  // rsp[(argc + 1) * 8]      : argument 0 = receiver
-  // -----------------------------------
-
-  GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState);
-}
-
-
 static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                              Register object,
                                              Register key,
@@ -1602,6 +1553,51 @@
 }
 
 
+void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- rbx     : target map
+  //  -- rdx     : receiver
+  //  -- rsp[0]  : return address
+  // -----------------------------------
+  // Must return the modified receiver in rax.
+  if (!FLAG_trace_elements_transitions) {
+    Label fail;
+    ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+    __ movq(rax, rdx);
+    __ Ret();
+    __ bind(&fail);
+  }
+
+  __ pop(rbx);
+  __ push(rdx);
+  __ push(rbx);  // return address
+  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
+}
+
+
+void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- rbx     : target map
+  //  -- rdx     : receiver
+  //  -- rsp[0]  : return address
+  // -----------------------------------
+  // Must return the modified receiver in rax.
+  if (!FLAG_trace_elements_transitions) {
+    Label fail;
+    ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
+    __ movq(rax, rdx);
+    __ Ret();
+    __ bind(&fail);
+  }
+
+  __ pop(rbx);
+  __ push(rdx);
+  __ push(rbx);  // return address
+  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
+}
+
+
 #undef __
 
 
@@ -1613,11 +1609,9 @@
     case Token::LT:
       return less;
     case Token::GT:
-      // Reverse left and right operands to obtain ECMA-262 conversion order.
-      return less;
+      return greater;
     case Token::LTE:
-      // Reverse left and right operands to obtain ECMA-262 conversion order.
-      return greater_equal;
+      return less_equal;
     case Token::GTE:
       return greater_equal;
     default:
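
The TokenToCondition change above stops swapping operands for > and <=, keeping ES5 left-to-right evaluation order and using the greater/less_equal conditions directly. The mapping, restated as a trivial C++ sketch with illustrative enum names:

#include <cassert>

enum Token { EQ, LT, GT, LTE, GTE };
enum Condition { kEqual, kLess, kGreater, kLessEqual, kGreaterEqual };

static Condition TokenToCondition(Token op) {
  switch (op) {
    case EQ:  return kEqual;
    case LT:  return kLess;
    case GT:  return kGreater;    // Previously kLess with swapped operands.
    case LTE: return kLessEqual;  // Previously kGreaterEqual, swapped.
    case GTE: return kGreaterEqual;
  }
  assert(false);
  return kEqual;
}
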
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 45aaad7..38a8c18 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -374,6 +374,12 @@
 }
 
 
+double LCodeGen::ToDouble(LConstantOperand* op) const {
+  Handle<Object> value = chunk_->LookupLiteral(op);
+  return value->Number();
+}
+
+
 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
   Handle<Object> literal = chunk_->LookupLiteral(op);
   ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
@@ -1526,39 +1532,51 @@
 }
 
 
-void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
-  if (right->IsConstantOperand()) {
-    int32_t value = ToInteger32(LConstantOperand::cast(right));
-    if (left->IsRegister()) {
-      __ cmpl(ToRegister(left), Immediate(value));
-    } else {
-      __ cmpl(ToOperand(left), Immediate(value));
-    }
-  } else if (right->IsRegister()) {
-    __ cmpl(ToRegister(left), ToRegister(right));
-  } else {
-    __ cmpl(ToRegister(left), ToOperand(right));
-  }
-}
-
-
 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
   LOperand* left = instr->InputAt(0);
   LOperand* right = instr->InputAt(1);
   int false_block = chunk_->LookupDestination(instr->false_block_id());
   int true_block = chunk_->LookupDestination(instr->true_block_id());
-
-  if (instr->is_double()) {
-    // Don't base result on EFLAGS when a NaN is involved. Instead
-    // jump to the false block.
-    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
-    __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
-  } else {
-    EmitCmpI(left, right);
-  }
-
   Condition cc = TokenToCondition(instr->op(), instr->is_double());
-  EmitBranch(true_block, false_block, cc);
+
+  if (left->IsConstantOperand() && right->IsConstantOperand()) {
+    // We can statically evaluate the comparison.
+    double left_val = ToDouble(LConstantOperand::cast(left));
+    double right_val = ToDouble(LConstantOperand::cast(right));
+    int next_block =
+      EvalComparison(instr->op(), left_val, right_val) ? true_block
+                                                       : false_block;
+    EmitGoto(next_block);
+  } else {
+    if (instr->is_double()) {
+      // Don't base result on EFLAGS when a NaN is involved. Instead
+      // jump to the false block.
+      __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
+      __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
+    } else {
+      int32_t value;
+      if (right->IsConstantOperand()) {
+        value = ToInteger32(LConstantOperand::cast(right));
+        __ cmpl(ToRegister(left), Immediate(value));
+      } else if (left->IsConstantOperand()) {
+        value = ToInteger32(LConstantOperand::cast(left));
+        if (right->IsRegister()) {
+          __ cmpl(ToRegister(right), Immediate(value));
+        } else {
+          __ cmpl(ToOperand(right), Immediate(value));
+        }
+        // We transposed the operands. Reverse the condition.
+        cc = ReverseCondition(cc);
+      } else {
+        if (right->IsRegister()) {
+          __ cmpl(ToRegister(left), ToRegister(right));
+        } else {
+          __ cmpl(ToRegister(left), ToOperand(right));
+        }
+      }
+    }
+    EmitBranch(true_block, false_block, cc);
+  }
 }
 
 
@@ -1979,9 +1997,6 @@
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 
   Condition condition = TokenToCondition(op, false);
-  if (op == Token::GT || op == Token::LTE) {
-    condition = ReverseCondition(condition);
-  }
   Label true_value, done;
   __ testq(rax, rax);
   __ j(condition, &true_value, Label::kNear);
@@ -2055,19 +2070,24 @@
   // Store the value.
   __ movq(Operand(address, 0), value);
 
-  Label smi_store;
-  __ JumpIfSmi(value, &smi_store, Label::kNear);
+  if (instr->hydrogen()->NeedsWriteBarrier()) {
+    Label smi_store;
+    HType type = instr->hydrogen()->value()->type();
+    if (!type.IsHeapNumber() && !type.IsString() && !type.IsNonPrimitive()) {
+      __ JumpIfSmi(value, &smi_store, Label::kNear);
+    }
 
-  int offset = JSGlobalPropertyCell::kValueOffset - kHeapObjectTag;
-  __ lea(object, Operand(address, -offset));
-  // Cells are always in the remembered set.
-  __ RecordWrite(object,
-                 address,
-                 value,
-                 kSaveFPRegs,
-                 OMIT_REMEMBERED_SET,
-                 OMIT_SMI_CHECK);
-  __ bind(&smi_store);
+    int offset = JSGlobalPropertyCell::kValueOffset - kHeapObjectTag;
+    __ lea(object, Operand(address, -offset));
+    // Cells are always in the remembered set.
+    __ RecordWrite(object,
+                   address,
+                   value,
+                   kSaveFPRegs,
+                   OMIT_REMEMBERED_SET,
+                   OMIT_SMI_CHECK);
+    __ bind(&smi_store);
+  }
 }
 
 
@@ -2094,10 +2114,19 @@
   Register context = ToRegister(instr->context());
   Register value = ToRegister(instr->value());
   __ movq(ContextOperand(context, instr->slot_index()), value);
-  if (instr->needs_write_barrier()) {
+  if (instr->hydrogen()->NeedsWriteBarrier()) {
+    HType type = instr->hydrogen()->value()->type();
+    SmiCheck check_needed =
+        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
     int offset = Context::SlotOffset(instr->slot_index());
     Register scratch = ToRegister(instr->TempAt(0));
-    __ RecordWriteContextSlot(context, offset, value, scratch, kSaveFPRegs);
+    __ RecordWriteContextSlot(context,
+                              offset,
+                              value,
+                              scratch,
+                              kSaveFPRegs,
+                              EMIT_REMEMBERED_SET,
+                              check_needed);
   }
 }
 
@@ -2118,7 +2147,7 @@
                                                Register object,
                                                Handle<Map> type,
                                                Handle<String> name) {
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   type->LookupInDescriptors(NULL, *name, &lookup);
   ASSERT(lookup.IsProperty() &&
          (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
@@ -2561,7 +2590,7 @@
 
 void LCodeGen::DoThisFunction(LThisFunction* instr) {
   Register result = ToRegister(instr->result());
-  __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+  LoadHeapObject(result, instr->hydrogen()->closure());
 }
 
 
@@ -3061,21 +3090,36 @@
   }
 
   // Do the store.
+  HType type = instr->hydrogen()->value()->type();
+  SmiCheck check_needed =
+      type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
   if (instr->is_in_object()) {
     __ movq(FieldOperand(object, offset), value);
-    if (instr->needs_write_barrier()) {
+    if (instr->hydrogen()->NeedsWriteBarrier()) {
       Register temp = ToRegister(instr->TempAt(0));
       // Update the write barrier for the object for in-object properties.
-      __ RecordWriteField(object, offset, value, temp, kSaveFPRegs);
+      __ RecordWriteField(object,
+                          offset,
+                          value,
+                          temp,
+                          kSaveFPRegs,
+                          EMIT_REMEMBERED_SET,
+                          check_needed);
     }
   } else {
     Register temp = ToRegister(instr->TempAt(0));
     __ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset));
     __ movq(FieldOperand(temp, offset), value);
-    if (instr->needs_write_barrier()) {
+    if (instr->hydrogen()->NeedsWriteBarrier()) {
       // Update the write barrier for the properties array.
       // object is used as a scratch register.
-      __ RecordWriteField(temp, offset, value, object, kSaveFPRegs);
+      __ RecordWriteField(temp,
+                          offset,
+                          value,
+                          object,
+                          kSaveFPRegs,
+                          EMIT_REMEMBERED_SET,
+                          check_needed);
     }
   }
 }
@@ -3182,12 +3226,20 @@
   }
 
   if (instr->hydrogen()->NeedsWriteBarrier()) {
+    HType type = instr->hydrogen()->value()->type();
+    SmiCheck check_needed =
+        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
     // Compute address of modified element and store it into key register.
     __ lea(key, FieldOperand(elements,
                              key,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
-    __ RecordWrite(elements, key, value, kSaveFPRegs);
+    __ RecordWrite(elements,
+                   key,
+                   value,
+                   kSaveFPRegs,
+                   EMIT_REMEMBERED_SET,
+                   check_needed);
   }
 }
 
@@ -3223,6 +3275,47 @@
 }
 
 
+void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
+  Register object_reg = ToRegister(instr->object());
+  Register new_map_reg = ToRegister(instr->new_map_reg());
+
+  Handle<Map> from_map = instr->original_map();
+  Handle<Map> to_map = instr->transitioned_map();
+  ElementsKind from_kind = from_map->elements_kind();
+  ElementsKind to_kind = to_map->elements_kind();
+
+  Label not_applicable;
+  __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
+  __ j(not_equal, &not_applicable);
+  __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
+  if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+    __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
+    // Write barrier.
+    ASSERT_NE(instr->temp_reg(), NULL);
+    __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
+                        ToRegister(instr->temp_reg()), kDontSaveFPRegs);
+  } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
+      to_kind == FAST_DOUBLE_ELEMENTS) {
+    Register fixed_object_reg = ToRegister(instr->temp_reg());
+    ASSERT(fixed_object_reg.is(rdx));
+    ASSERT(new_map_reg.is(rbx));
+    __ movq(fixed_object_reg, object_reg);
+    CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
+             RelocInfo::CODE_TARGET, instr);
+  } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+    Register fixed_object_reg = ToRegister(instr->temp_reg());
+    ASSERT(fixed_object_reg.is(rdx));
+    ASSERT(new_map_reg.is(rbx));
+    __ movq(fixed_object_reg, object_reg);
+    CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
+             RelocInfo::CODE_TARGET, instr);
+  } else {
+    UNREACHABLE();
+  }
+  __ bind(&not_applicable);
+}
+
+
 void LCodeGen::DoStringAdd(LStringAdd* instr) {
   EmitPushTaggedOperand(instr->left());
   EmitPushTaggedOperand(instr->right());
@@ -3825,6 +3918,11 @@
 
 
 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
+  Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements();
+  ASSERT_EQ(2, constant_elements->length());
+  ElementsKind constant_elements_kind =
+      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+
   // Set up the parameters to the stub/runtime call.
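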
   __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
@@ -3845,7 +3943,9 @@
     CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
   } else {
     FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ELEMENTS;
+        constant_elements_kind == FAST_DOUBLE_ELEMENTS
+        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, length);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   }
@@ -3934,8 +4034,7 @@
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
   if (!pretenure && shared_info->num_literals() == 0) {
-    FastNewClosureStub stub(
-        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
+    FastNewClosureStub stub(shared_info->strict_mode_flag());
     __ Push(shared_info);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   } else {
@@ -3975,12 +4074,11 @@
   Label* true_label = chunk_->GetAssemblyLabel(true_block);
   Label* false_label = chunk_->GetAssemblyLabel(false_block);
 
-  Condition final_branch_condition = EmitTypeofIs(true_label,
-                                                  false_label,
-                                                  input,
-                                                  instr->type_literal());
-
-  EmitBranch(true_block, false_block, final_branch_condition);
+  Condition final_branch_condition =
+      EmitTypeofIs(true_label, false_label, input, instr->type_literal());
+  if (final_branch_condition != no_condition) {
+    EmitBranch(true_block, false_block, final_branch_condition);
+  }
 }
 
 
@@ -4048,7 +4146,6 @@
     final_branch_condition = zero;
 
   } else {
-    final_branch_condition = never;
     __ jmp(false_label);
   }
 
diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h
index 106d7bb..f3cb667 100644
--- a/src/x64/lithium-codegen-x64.h
+++ b/src/x64/lithium-codegen-x64.h
@@ -77,6 +77,7 @@
   XMMRegister ToDoubleRegister(LOperand* op) const;
   bool IsInteger32Constant(LConstantOperand* op) const;
   int ToInteger32(LConstantOperand* op) const;
+  double ToDouble(LConstantOperand* op) const;
   bool IsTaggedConstant(LConstantOperand* op) const;
   Handle<Object> ToHandle(LConstantOperand* op) const;
   Operand ToOperand(LOperand* op) const;
@@ -125,8 +126,8 @@
   bool is_done() const { return status_ == DONE; }
   bool is_aborted() const { return status_ == ABORTED; }
 
-  int strict_mode_flag() const {
-    return info()->is_strict_mode() ? kStrictMode : kNonStrictMode;
+  StrictModeFlag strict_mode_flag() const {
+    return info()->strict_mode_flag();
   }
 
   LChunk* chunk() const { return chunk_; }
@@ -190,9 +191,8 @@
                                int argc,
                                LInstruction* instr);
 
-
   // Generate a direct call to a known function.  Expects the function
-  // to be in edi.
+  // to be in rdi.
   void CallKnownFunction(Handle<JSFunction> function,
                          int arity,
                          LInstruction* instr,
@@ -251,7 +251,6 @@
   static Condition TokenToCondition(Token::Value op, bool is_unsigned);
   void EmitGoto(int block);
   void EmitBranch(int left_block, int right_block, Condition cc);
-  void EmitCmpI(LOperand* left, LOperand* right);
   void EmitNumberUntagD(Register input,
                         XMMRegister result,
                         bool deoptimize_on_undefined,
@@ -260,8 +259,10 @@
   // Emits optimized code for typeof x == "y".  Modifies input register.
   // Returns the condition on which a final split to
   // true and false label should be made, to optimize fallthrough.
-  Condition EmitTypeofIs(Label* true_label, Label* false_label,
-                         Register input, Handle<String> type_name);
+  Condition EmitTypeofIs(Label* true_label,
+                         Label* false_label,
+                         Register input,
+                         Handle<String> type_name);
 
   // Emits optimized code for %_IsObject(x).  Preserves input register.
   // Returns the condition on which a final split to
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index a67a593..0af2ce4 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -447,6 +447,12 @@
 }
 
 
+void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
+  object()->PrintTo(stream);
+  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
+}
+
+
 void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
   LInstructionGap* gap = new LInstructionGap(block);
   int index = -1;
@@ -1396,12 +1402,10 @@
 
 
 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
-  Token::Value op = instr->token();
   ASSERT(instr->left()->representation().IsTagged());
   ASSERT(instr->right()->representation().IsTagged());
-  bool reversed = (op == Token::GT || op == Token::LTE);
-  LOperand* left = UseFixed(instr->left(), reversed ? rax : rdx);
-  LOperand* right = UseFixed(instr->right(), reversed ? rdx : rax);
+  LOperand* left = UseFixed(instr->left(), rdx);
+  LOperand* right = UseFixed(instr->right(), rax);
   LCmpT* result = new LCmpT(left, right);
   return MarkAsCall(DefineFixed(result, rax), instr);
 }
@@ -1413,15 +1417,22 @@
   if (r.IsInteger32()) {
     ASSERT(instr->left()->representation().IsInteger32());
     ASSERT(instr->right()->representation().IsInteger32());
-    LOperand* left = UseRegisterAtStart(instr->left());
+    LOperand* left = UseRegisterOrConstantAtStart(instr->left());
     LOperand* right = UseOrConstantAtStart(instr->right());
     return new LCmpIDAndBranch(left, right);
   } else {
     ASSERT(r.IsDouble());
     ASSERT(instr->left()->representation().IsDouble());
     ASSERT(instr->right()->representation().IsDouble());
-    LOperand* left = UseRegisterAtStart(instr->left());
-    LOperand* right = UseRegisterAtStart(instr->right());
+    LOperand* left;
+    LOperand* right;
+    if (instr->left()->IsConstant() && instr->right()->IsConstant()) {
+      left = UseRegisterOrConstantAtStart(instr->left());
+      right = UseRegisterOrConstantAtStart(instr->right());
+    } else {
+      left = UseRegisterAtStart(instr->left());
+      right = UseRegisterAtStart(instr->right());
+    }
     return new LCmpIDAndBranch(left, right);
   }
 }
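 // When both operands are constant the comparison can presumably be folded at
 // code-generation time (note the ToDouble(LConstantOperand*) accessor added to
 // LCodeGen in this patch), so neither value needs to be forced into a
 // register; non-constant double inputs still require registers.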
@@ -1956,6 +1967,27 @@
 }
 
 
+LInstruction* LChunkBuilder::DoTransitionElementsKind(
+    HTransitionElementsKind* instr) {
+  if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
+      instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+    LOperand* object = UseRegister(instr->object());
+    LOperand* new_map_reg = TempRegister();
+    LOperand* temp_reg = TempRegister();
+    LTransitionElementsKind* result =
+        new LTransitionElementsKind(object, new_map_reg, temp_reg);
+    return DefineSameAsFirst(result);
+  } else {
+    LOperand* object = UseFixed(instr->object(), rax);
+    LOperand* fixed_object_reg = FixedTemp(rdx);
+    LOperand* new_map_reg = FixedTemp(rbx);
+    LTransitionElementsKind* result =
+        new LTransitionElementsKind(object, new_map_reg, fixed_object_reg);
+    return MarkAsCall(DefineFixed(result, rax), instr);
+  }
+}
+
+
 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
   bool needs_write_barrier = instr->NeedsWriteBarrier();
 
diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h
index d43a86a..20a6937 100644
--- a/src/x64/lithium-x64.h
+++ b/src/x64/lithium-x64.h
@@ -162,6 +162,7 @@
   V(ThisFunction)                               \
   V(Throw)                                      \
   V(ToFastProperties)                           \
+  V(TransitionElementsKind)                     \
   V(Typeof)                                     \
   V(TypeofIsAndBranch)                          \
   V(UnaryMathOperation)                         \
@@ -1260,7 +1261,6 @@
   LOperand* context() { return InputAt(0); }
   LOperand* value() { return InputAt(1); }
   int slot_index() { return hydrogen()->slot_index(); }
-  int needs_write_barrier() { return hydrogen()->NeedsWriteBarrier(); }
 
   virtual void PrintDataTo(StringStream* stream);
 };
@@ -1277,7 +1277,9 @@
 
 
 class LThisFunction: public LTemplateInstruction<1, 0, 0> {
+ public:
   DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function")
+  DECLARE_HYDROGEN_ACCESSOR(ThisFunction)
 };
 
 
@@ -1551,7 +1553,6 @@
   Handle<Object> name() const { return hydrogen()->name(); }
   bool is_in_object() { return hydrogen()->is_in_object(); }
   int offset() { return hydrogen()->offset(); }
-  bool needs_write_barrier() { return hydrogen()->NeedsWriteBarrier(); }
   Handle<Map> transition() const { return hydrogen()->transition(); }
 };
 
@@ -1571,7 +1572,8 @@
   LOperand* object() { return inputs_[0]; }
   LOperand* value() { return inputs_[1]; }
   Handle<Object> name() const { return hydrogen()->name(); }
-  bool strict_mode() { return hydrogen()->strict_mode(); }
+  StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
+  bool strict_mode() { return strict_mode_flag() == kStrictMode; }
 };
 
 
@@ -1660,6 +1662,30 @@
 };
 
 
+class LTransitionElementsKind: public LTemplateInstruction<1, 1, 2> {
+ public:
+  LTransitionElementsKind(LOperand* object,
+                          LOperand* new_map_temp,
+                          LOperand* temp_reg) {
+    inputs_[0] = object;
+    temps_[0] = new_map_temp;
+    temps_[1] = temp_reg;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind,
+                               "transition-elements-kind")
+  DECLARE_HYDROGEN_ACCESSOR(TransitionElementsKind)
+
+  virtual void PrintDataTo(StringStream* stream);
+
+  LOperand* object() { return inputs_[0]; }
+  LOperand* new_map_reg() { return temps_[0]; }
+  LOperand* temp_reg() { return temps_[1]; }
+  Handle<Map> original_map() { return hydrogen()->original_map(); }
+  Handle<Map> transitioned_map() { return hydrogen()->transitioned_map(); }
+};
+
+
 class LStringAdd: public LTemplateInstruction<1, 2, 0> {
  public:
   LStringAdd(LOperand* left, LOperand* right) {
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 7fe6d58..e3d4634 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -55,7 +55,7 @@
 
 static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
   Address roots_register_value = kRootRegisterBias +
-      reinterpret_cast<Address>(isolate->heap()->roots_address());
+      reinterpret_cast<Address>(isolate->heap()->roots_array_start());
   intptr_t delta = other.address() - roots_register_value;
   return delta;
 }
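 // A minimal sketch of how this delta is typically used, assuming it fits in a
 // 32-bit displacement:
 //
 //   kRootRegister == roots_array_start + kRootRegisterBias
 //   operand for |other| == Operand(kRootRegister, static_cast<int32_t>(delta))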
@@ -326,6 +326,40 @@
 }
 
 
+void MacroAssembler::RecordWriteArray(Register object,
+                                      Register value,
+                                      Register index,
+                                      SaveFPRegsMode save_fp,
+                                      RememberedSetAction remembered_set_action,
+                                      SmiCheck smi_check) {
+  // First, check if a write barrier is even needed.  The test below
+  // catches stores of smis.
+  Label done;
+
+  // Skip barrier if writing a smi.
+  if (smi_check == INLINE_SMI_CHECK) {
+    JumpIfSmi(value, &done);
+  }
+
+  // Array access: calculate the destination address. Index is not a smi.
+  Register dst = index;
+  lea(dst, Operand(object, index, times_pointer_size,
+                   FixedArray::kHeaderSize - kHeapObjectTag));
+
+  RecordWrite(
+      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
+
+  bind(&done);
+
+  // When the debug-code flag is on, zap the clobbered input registers to
+  // provoke errors if they are used afterwards.
+  if (emit_debug_code()) {
+    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
+    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
+  }
+}
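+// The lea in RecordWriteArray computes the untagged slot address:
+//
+//   dst = object + index * kPointerSize + FixedArray::kHeaderSize - kHeapObjectTag
+//
+// e.g. assuming kPointerSize == 8, a 16-byte FixedArray header and
+// kHeapObjectTag == 1, element 3 lives at object + 3 * 8 + 16 - 1 = object + 39.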
+
+
 void MacroAssembler::RecordWrite(Register object,
                                  Register address,
                                  Register value,
@@ -2317,6 +2351,13 @@
 }
 
 
+void MacroAssembler::TestBit(const Operand& src, int bits) {
+  int byte_offset = bits / kBitsPerByte;
+  int bit_in_byte = bits & (kBitsPerByte - 1);
+  testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
+}
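+// A worked example of the arithmetic above, assuming kBitsPerByte == 8:
+// bits == 11 gives byte_offset == 1 and bit_in_byte == 3, so the emitted
+// instruction is testb(Operand(src, 1), Immediate(1 << 3)).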
+
+
 void MacroAssembler::Jump(ExternalReference ext) {
   LoadAddress(kScratchRegister, ext);
   jmp(kScratchRegister);
@@ -2683,7 +2724,7 @@
 void MacroAssembler::StoreNumberToDoubleElements(
     Register maybe_number,
     Register elements,
-    Register key,
+    Register index,
     XMMRegister xmm_scratch,
     Label* fail) {
   Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;
@@ -2704,7 +2745,7 @@
   bind(&not_nan);
   movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
   bind(&have_double_value);
-  movsd(FieldOperand(elements, key, times_8, FixedDoubleArray::kHeaderSize),
+  movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
         xmm_scratch);
   jmp(&done);
 
@@ -2727,7 +2768,7 @@
   // Preserve original value.
   SmiToInteger32(kScratchRegister, maybe_number);
   cvtlsi2sd(xmm_scratch, kScratchRegister);
-  movsd(FieldOperand(elements, key, times_8, FixedDoubleArray::kHeaderSize),
+  movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
         xmm_scratch);
   bind(&done);
 }
@@ -2866,7 +2907,8 @@
 
 void MacroAssembler::TryGetFunctionPrototype(Register function,
                                              Register result,
-                                             Label* miss) {
+                                             Label* miss,
+                                             bool miss_on_bound_function) {
   // Check that the receiver isn't a smi.
   testl(function, Immediate(kSmiTagMask));
   j(zero, miss);
@@ -2875,6 +2917,17 @@
   CmpObjectType(function, JS_FUNCTION_TYPE, result);
   j(not_equal, miss);
 
+  if (miss_on_bound_function) {
+    movq(kScratchRegister,
+         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
+    // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte
+    // field).
+    TestBit(FieldOperand(kScratchRegister,
+                         SharedFunctionInfo::kCompilerHintsOffset),
+            SharedFunctionInfo::kBoundFunction);
+    j(not_zero, miss);
+  }
+
   // Make sure that the function has an instance prototype.
   Label non_instance;
   testb(FieldOperand(result, Map::kBitFieldOffset),
@@ -3067,29 +3120,16 @@
   // You can't call a function without a valid frame.
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
-  ASSERT(function->is_compiled());
   // Get the function and setup the context.
   Move(rdi, Handle<JSFunction>(function));
   movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
 
-  if (V8::UseCrankshaft()) {
-    // Since Crankshaft can recompile a function, we need to load
-    // the Code object every time we call the function.
-    movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
-    ParameterCount expected(function->shared()->formal_parameter_count());
-    InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
-  } else {
-    // Invoke the cached code.
-    Handle<Code> code(function->code());
-    ParameterCount expected(function->shared()->formal_parameter_count());
-    InvokeCode(code,
-               expected,
-               actual,
-               RelocInfo::CODE_TARGET,
-               flag,
-               call_wrapper,
-               call_kind);
-  }
+  // We call indirectly through the code field in the function to
+  // allow recompilation to take effect without changing any of the
+  // call sites.
+  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
+  ParameterCount expected(function->shared()->formal_parameter_count());
+  InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
 }
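 // JSFunction::kCodeEntryOffset holds the entry address of the function's
 // current code, so reloading it on every call picks up lazily compiled or
 // optimized code automatically, at the cost of one extra load per call.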
 
 
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 7e0ba00..f5f81b1 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -256,8 +256,8 @@
 
   // Notify the garbage collector that we wrote a pointer into a fixed array.
   // |array| is the array being stored into, |value| is the
-  // object being stored.  |index| is the array index represented as a
-  // Smi. All registers are clobbered by the operation RecordWriteArray
+  // object being stored.  |index| is the array index represented as a non-smi.
+  // All registers are clobbered by the operation.  RecordWriteArray
   // filters out smis so it does not update the write barrier if the
   // value is a smi.
   void RecordWriteArray(
@@ -319,9 +319,9 @@
   void LoadFromSafepointRegisterSlot(Register dst, Register src);
 
   void InitializeRootRegister() {
-    ExternalReference roots_address =
-        ExternalReference::roots_address(isolate());
-    movq(kRootRegister, roots_address);
+    ExternalReference roots_array_start =
+        ExternalReference::roots_array_start(isolate());
+    movq(kRootRegister, roots_array_start);
     addq(kRootRegister, Immediate(kRootRegisterBias));
   }
 
@@ -726,6 +726,7 @@
   void Push(Smi* smi);
   void Test(const Operand& dst, Smi* source);
 
+
   // ---------------------------------------------------------------------------
   // String macros.
 
@@ -771,6 +772,9 @@
   // Move if the registers are not identical.
   void Move(Register target, Register source);
 
+  // Bit-field support.
+  void TestBit(const Operand& dst, int bit_index);
+
   // Handle support
   void Move(Register dst, Handle<Object> source);
   void Move(const Operand& dst, Handle<Object> source);
@@ -860,12 +864,12 @@
                                 Label::Distance distance = Label::kFar);
 
   // Check to see if maybe_number can be stored as a double in
-  // FastDoubleElements. If it can, store it at the index specified by key in
-  // the FastDoubleElements array elements, otherwise jump to fail.
-  // Note that key must not be smi-tagged.
+  // FastDoubleElements.  If it can, store it at the given index in the
+  // FastDoubleElements array |elements|; otherwise jump to fail.  Note that
+  // |index| must not be smi-tagged.
   void StoreNumberToDoubleElements(Register maybe_number,
                                    Register elements,
-                                   Register key,
+                                   Register index,
                                    XMMRegister xmm_scratch,
                                    Label* fail);
 
@@ -1074,7 +1078,8 @@
   // clobbered.
   void TryGetFunctionPrototype(Register function,
                                Register result,
-                               Label* miss);
+                               Label* miss,
+                               bool miss_on_bound_function = false);
 
   // Generates code for reporting that an illegal operation has
   // occurred.
diff --git a/src/x64/regexp-macro-assembler-x64.cc b/src/x64/regexp-macro-assembler-x64.cc
index 55fabc0..1e0cd6a 100644
--- a/src/x64/regexp-macro-assembler-x64.cc
+++ b/src/x64/regexp-macro-assembler-x64.cc
@@ -1248,6 +1248,11 @@
     frame_entry<const String*>(re_frame, kInputString) = *subject;
     frame_entry<const byte*>(re_frame, kInputStart) = new_address;
     frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
+  } else if (frame_entry<const String*>(re_frame, kInputString) != *subject) {
+    // The subject string might have been a ConsString that underwent
+    // short-circuiting during GC.  That does not change start_address, but it
+    // does change the pointer inside the subject handle.
+    frame_entry<const String*>(re_frame, kInputString) = *subject;
   }
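   // ConsString short-circuiting: when the second component of a ConsString is
   // the empty string, the GC may replace references to the cons cell with its
   // first component; the flattened character data (and hence start_address)
   // stays the same, but the String* seen through the subject handle does not.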
 
   return 0;
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index c4b2672..8af1bf2 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -82,7 +82,55 @@
 // must always call a backup property check that is complete.
 // This function is safe to call if the receiver has fast properties.
 // Name must be a symbol and receiver must be a heap object.
-MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup(
+static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
+                                             Label* miss_label,
+                                             Register receiver,
+                                             Handle<String> name,
+                                             Register r0,
+                                             Register r1) {
+  ASSERT(name->IsSymbol());
+  Counters* counters = masm->isolate()->counters();
+  __ IncrementCounter(counters->negative_lookups(), 1);
+  __ IncrementCounter(counters->negative_lookups_miss(), 1);
+
+  __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));
+
+  const int kInterceptorOrAccessCheckNeededMask =
+      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
+
+  // Bail out if the receiver has a named interceptor or requires access checks.
+  __ testb(FieldOperand(r0, Map::kBitFieldOffset),
+           Immediate(kInterceptorOrAccessCheckNeededMask));
+  __ j(not_zero, miss_label);
+
+  // Check that receiver is a JSObject.
+  __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE);
+  __ j(below, miss_label);
+
+  // Load properties array.
+  Register properties = r0;
+  __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
+
+  // Check that the properties array is a dictionary.
+  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
+                 Heap::kHashTableMapRootIndex);
+  __ j(not_equal, miss_label);
+
+  Label done;
+  StringDictionaryLookupStub::GenerateNegativeLookup(masm,
+                                                     miss_label,
+                                                     &done,
+                                                     properties,
+                                                     name,
+                                                     r1);
+  __ bind(&done);
+  __ DecrementCounter(counters->negative_lookups_miss(), 1);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateDictionaryNegativeLookup(
     MacroAssembler* masm,
     Label* miss_label,
     Register receiver,
@@ -118,7 +166,7 @@
   __ j(not_equal, miss_label);
 
   Label done;
-  MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup(
+  MaybeObject* result = StringDictionaryLookupStub::TryGenerateNegativeLookup(
       masm,
       miss_label,
       &done,
@@ -312,8 +360,10 @@
 // are loaded directly otherwise the property is loaded from the properties
 // fixed array.
 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
-                                            Register dst, Register src,
-                                            JSObject* holder, int index) {
+                                            Register dst,
+                                            Register src,
+                                            Handle<JSObject> holder,
+                                            int index) {
   // Adjust for the number of properties stored in the holder.
   index -= holder->map()->inobject_properties();
   if (index < 0) {
@@ -700,15 +750,10 @@
 
 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
   ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
-  Code* code = NULL;
-  if (kind == Code::LOAD_IC) {
-    code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
-  } else {
-    code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
-  }
-
-  Handle<Code> ic(code);
-  __ Jump(ic, RelocInfo::CODE_TARGET);
+  Handle<Code> code = (kind == Code::LOAD_IC)
+      ? masm->isolate()->builtins()->LoadIC_Miss()
+      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
+  __ Jump(code, RelocInfo::CODE_TARGET);
 }
 
 
@@ -723,9 +768,9 @@
 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
 // but may be destroyed if store is successful.
 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
-                                      JSObject* object,
+                                      Handle<JSObject> object,
                                       int index,
-                                      Map* transition,
+                                      Handle<Map> transition,
                                       Register receiver_reg,
                                       Register name_reg,
                                       Register scratch,
@@ -748,12 +793,12 @@
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
 
   // Perform map transition for the receiver if necessary.
-  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
+  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
     // The properties must be extended before we can store the value.
     // We jump to a runtime call that extends the properties array.
     __ pop(scratch);  // Return address.
     __ push(receiver_reg);
-    __ Push(Handle<Map>(transition));
+    __ Push(transition);
     __ push(rax);
     __ push(scratch);
     __ TailCallExternalReference(
@@ -764,11 +809,10 @@
     return;
   }
 
-  if (transition != NULL) {
+  if (!transition.is_null()) {
     // Update the map of the object; no write barrier updating is
     // needed because the map is never in new space.
-    __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
-            Handle<Map>(transition));
+    __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset), transition);
   }
 
   // Adjust for the number of properties stored in the object. Even in the
@@ -808,7 +852,24 @@
 // Generate code to check that a global property cell is empty. Create
 // the property cell at compilation time if no cell exists for the
 // property.
-MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
+static void GenerateCheckPropertyCell(MacroAssembler* masm,
+                                      Handle<GlobalObject> global,
+                                      Handle<String> name,
+                                      Register scratch,
+                                      Label* miss) {
+  Handle<JSGlobalPropertyCell> cell =
+      GlobalObject::EnsurePropertyCell(global, name);
+  ASSERT(cell->value()->IsTheHole());
+  __ Move(scratch, cell);
+  __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
+         masm->isolate()->factory()->the_hole_value());
+  __ j(not_equal, miss);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell(
     MacroAssembler* masm,
     GlobalObject* global,
     String* name,
@@ -828,10 +889,172 @@
 }
 
 
+// Calls GenerateCheckPropertyCell for each global object in the prototype chain
+// from object to (but not including) holder.
+static void GenerateCheckPropertyCells(MacroAssembler* masm,
+                                       Handle<JSObject> object,
+                                       Handle<JSObject> holder,
+                                       Handle<String> name,
+                                       Register scratch,
+                                       Label* miss) {
+  Handle<JSObject> current = object;
+  while (!current.is_identical_to(holder)) {
+    if (current->IsGlobalObject()) {
+      GenerateCheckPropertyCell(masm,
+                                Handle<GlobalObject>::cast(current),
+                                name,
+                                scratch,
+                                miss);
+    }
+    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
+  }
+}
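+// Why a map check alone is not enough for skipped global objects: adding a
+// property to a global object stores it in a new JSGlobalPropertyCell without
+// changing the object's map, so the invariant verified above is on the cell
+// itself, roughly:
+//
+//   cell->value() == the_hole  =>  |name| has not appeared on this global
+//                                  object since the stub was compiled.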
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells(
+    MacroAssembler* masm,
+    JSObject* object,
+    JSObject* holder,
+    String* name,
+    Register scratch,
+    Label* miss) {
+  JSObject* current = object;
+  while (current != holder) {
+    if (current->IsGlobalObject()) {
+      // Returns a cell or a failure.
+      MaybeObject* result = TryGenerateCheckPropertyCell(
+          masm,
+          GlobalObject::cast(current),
+          name,
+          scratch,
+          miss);
+      if (result->IsFailure()) return result;
+    }
+    ASSERT(current->IsJSObject());
+    current = JSObject::cast(current->GetPrototype());
+  }
+  return NULL;
+}
+
+
 #undef __
 #define __ ACCESS_MASM((masm()))
 
 
+Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
+                                       Register object_reg,
+                                       Handle<JSObject> holder,
+                                       Register holder_reg,
+                                       Register scratch1,
+                                       Register scratch2,
+                                       Handle<String> name,
+                                       int save_at_depth,
+                                       Label* miss) {
+  // Make sure there's no overlap between holder and object registers.
+  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
+  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
+         && !scratch2.is(scratch1));
+
+  // Keep track of the current object in register reg.  On the first
+  // iteration, reg is an alias for object_reg; on later iterations,
+  // it is an alias for holder_reg.
+  Register reg = object_reg;
+  int depth = 0;
+
+  if (save_at_depth == depth) {
+    __ movq(Operand(rsp, kPointerSize), object_reg);
+  }
+
+  // Traverse the prototype chain from the object, checking the map of each
+  // object along the way.
+  Handle<JSObject> current = object;
+  while (!current.is_identical_to(holder)) {
+    ++depth;
+
+    // Only global objects and objects that do not require access
+    // checks are allowed in stubs.
+    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
+
+    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
+    if (!current->HasFastProperties() &&
+        !current->IsJSGlobalObject() &&
+        !current->IsJSGlobalProxy()) {
+      if (!name->IsSymbol()) {
+        name = factory()->LookupSymbol(name);
+      }
+      ASSERT(current->property_dictionary()->FindEntry(*name) ==
+             StringDictionary::kNotFound);
+
+      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
+                                       scratch1, scratch2);
+
+      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
+      reg = holder_reg;  // From now on the object will be in holder_reg.
+      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
+    } else {
+      bool in_new_space = heap()->InNewSpace(*prototype);
+      Handle<Map> current_map(current->map());
+      if (in_new_space) {
+        // Save the map in scratch1 for later.
+        __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
+        __ Cmp(scratch1, current_map);
+      } else {
+        __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), current_map);
+      }
+      // Branch on the result of the map check.
+      __ j(not_equal, miss);
+      // Check access rights to the global object.  This has to happen after
+      // the map check so that we know that the object is actually a global
+      // object.
+      if (current->IsJSGlobalProxy()) {
+        __ CheckAccessGlobalProxy(reg, scratch2, miss);
+      }
+      reg = holder_reg;  // From now on the object will be in holder_reg.
+
+      if (in_new_space) {
+        // The prototype is in new space; we cannot store a reference to it
+        // in the code.  Load it from the map.
+        __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
+      } else {
+        // The prototype is in old space; load it directly.
+        __ Move(reg, prototype);
+      }
+    }
+
+    if (save_at_depth == depth) {
+      __ movq(Operand(rsp, kPointerSize), reg);
+    }
+
+    // Go to the next object in the prototype chain.
+    current = prototype;
+  }
+  ASSERT(current.is_identical_to(holder));
+
+  // Log the check depth.
+  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
+
+  // Check the holder map.
+  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
+  __ j(not_equal, miss);
+
+  // Perform security check for access to the global object.
+  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
+  if (current->IsJSGlobalProxy()) {
+    __ CheckAccessGlobalProxy(reg, scratch1, miss);
+  }
+
+  // If we've skipped any global objects, it's not enough to verify that
+  // their maps haven't changed.  We also need to check that the property
+  // cell for the property is still empty.
+  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
+
+  // Return the register containing the holder.
+  return reg;
+}
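+// Note on the two branches above: for prototypes with fast properties a map
+// check suffices, because adding a property triggers a map transition.  For
+// normal (dictionary-mode) prototypes the map does not change when properties
+// are added, so the stub instead emits a negative dictionary lookup proving
+// that |name| is still absent.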
+
+
 Register StubCompiler::CheckPrototypes(JSObject* object,
                                        Register object_reg,
                                        JSObject* holder,
@@ -882,12 +1105,13 @@
       ASSERT(current->property_dictionary()->FindEntry(name) ==
              StringDictionary::kNotFound);
 
-      MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
-                                                                      miss,
-                                                                      reg,
-                                                                      name,
-                                                                      scratch1,
-                                                                      scratch2);
+      MaybeObject* negative_lookup =
+          TryGenerateDictionaryNegativeLookup(masm(),
+                                              miss,
+                                              reg,
+                                              name,
+                                              scratch1,
+                                              scratch2);
       if (negative_lookup->IsFailure()) {
         set_failure(Failure::cast(negative_lookup));
         return reg;
@@ -960,43 +1184,34 @@
   // If we've skipped any global objects, it's not enough to verify
   // that their maps haven't changed.  We also need to check that the
   // property cell for the property is still empty.
-  current = object;
-  while (current != holder) {
-    if (current->IsGlobalObject()) {
-      MaybeObject* cell = GenerateCheckPropertyCell(masm(),
-                                                    GlobalObject::cast(current),
-                                                    name,
-                                                    scratch1,
-                                                    miss);
-      if (cell->IsFailure()) {
-        set_failure(Failure::cast(cell));
-        return reg;
-      }
-    }
-    current = JSObject::cast(current->GetPrototype());
-  }
+  MaybeObject* result = TryGenerateCheckPropertyCells(masm(),
+                                                      object,
+                                                      holder,
+                                                      name,
+                                                      scratch1,
+                                                      miss);
+  if (result->IsFailure()) set_failure(Failure::cast(result));
 
   // Return the register containing the holder.
   return reg;
 }
 
 
-void StubCompiler::GenerateLoadField(JSObject* object,
-                                     JSObject* holder,
+void StubCompiler::GenerateLoadField(Handle<JSObject> object,
+                                     Handle<JSObject> holder,
                                      Register receiver,
                                      Register scratch1,
                                      Register scratch2,
                                      Register scratch3,
                                      int index,
-                                     String* name,
+                                     Handle<String> name,
                                      Label* miss) {
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, miss);
 
   // Check the prototype chain.
-  Register reg =
-      CheckPrototypes(object, receiver, holder,
-                      scratch1, scratch2, scratch3, name, miss);
+  Register reg = CheckPrototypes(
+      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
 
   // Get the value from the properties.
   GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
@@ -1081,24 +1296,24 @@
 }
 
 
-void StubCompiler::GenerateLoadConstant(JSObject* object,
-                                        JSObject* holder,
+void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
+                                        Handle<JSObject> holder,
                                         Register receiver,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
-                                        Object* value,
-                                        String* name,
+                                        Handle<Object> value,
+                                        Handle<String> name,
                                         Label* miss) {
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, miss);
 
   // Check that the maps haven't changed.
-  CheckPrototypes(object, receiver, holder,
-                  scratch1, scratch2, scratch3, name, miss);
+  CheckPrototypes(
+      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
 
   // Return the constant value.
-  __ Move(rax, Handle<Object>(value));
+  __ Move(rax, value);
   __ ret(0);
 }
 
@@ -1198,7 +1413,8 @@
       // We found FIELD property in prototype chain of interceptor's holder.
       // Retrieve a field from field's holder.
       GenerateFastPropertyLoad(masm(), rax, holder_reg,
-                               lookup->holder(), lookup->GetFieldIndex());
+                               Handle<JSObject>(lookup->holder()),
+                               lookup->GetFieldIndex());
       __ ret(0);
     } else {
       // We found CALLBACKS property in prototype chain of interceptor's
@@ -1244,9 +1460,9 @@
 }
 
 
-void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
+void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
-    __ Cmp(rcx, Handle<String>(name));
+    __ Cmp(rcx, name);
     __ j(not_equal, miss);
   }
 }
@@ -1305,11 +1521,22 @@
 }
 
 
-MaybeObject* CallStubCompiler::GenerateMissBranch() {
-  MaybeObject* maybe_obj =
+void CallStubCompiler::GenerateMissBranch() {
+  Handle<Code> code =
       isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                                kind_,
-                                               extra_ic_state_);
+                                               extra_state_);
+  __ Jump(code, RelocInfo::CODE_TARGET);
+}
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* CallStubCompiler::TryGenerateMissBranch() {
+  MaybeObject* maybe_obj =
+      isolate()->stub_cache()->TryComputeCallMiss(arguments().immediate(),
+                                                  kind_,
+                                                  extra_state_);
   Object* obj;
   if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
@@ -1317,10 +1544,10 @@
 }
 
 
-MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
-                                                JSObject* holder,
+Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
+                                                Handle<JSObject> holder,
                                                 int index,
-                                                String* name) {
+                                                Handle<String> name) {
   // ----------- S t a t e -------------
   // rcx                 : function name
   // rsp[0]              : return address
@@ -1360,7 +1587,7 @@
   }
 
   // Invoke the function.
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
@@ -1368,8 +1595,7 @@
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
-  if (maybe_result->IsFailure()) return maybe_result;
+  GenerateMissBranch();
 
   // Return the generated code.
   return GetCode(FIELD, name);
@@ -1394,7 +1620,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -1488,8 +1714,8 @@
       // the new element is non-Smi. For now, delegate to the builtin.
       Label no_fast_elements_check;
       __ JumpIfSmi(rdi, &no_fast_elements_check);
-      __ movq(rsi, FieldOperand(rdx, HeapObject::kMapOffset));
-      __ CheckFastObjectElements(rsi, &call_builtin, Label::kFar);
+      __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
+      __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
       __ bind(&no_fast_elements_check);
 
       ExternalReference new_space_allocation_top =
@@ -1553,11 +1779,11 @@
   }
 
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1579,7 +1805,7 @@
 
   Label miss, return_undefined, call_builtin;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -1636,11 +1862,11 @@
       1);
 
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1669,12 +1895,12 @@
   Label* index_out_of_range_label = &index_out_of_range;
 
   if (kind_ == Code::CALL_IC &&
-      (CallICBase::StringStubState::decode(extra_ic_state_) ==
+      (CallICBase::StringStubState::decode(extra_state_) ==
        DEFAULT_STRING_STUB)) {
     index_out_of_range_label = &miss;
   }
 
-  GenerateNameCheck(name, &name_miss);
+  GenerateNameCheck(Handle<String>(name), &name_miss);
 
   // Check that the maps starting from the prototype haven't changed.
   GenerateDirectLoadGlobalFunctionPrototype(masm(),
@@ -1720,11 +1946,11 @@
   // Restore function name in rcx.
   __ Move(rcx, Handle<String>(name));
   __ bind(&name_miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1753,12 +1979,12 @@
   Label* index_out_of_range_label = &index_out_of_range;
 
   if (kind_ == Code::CALL_IC &&
-      (CallICBase::StringStubState::decode(extra_ic_state_) ==
+      (CallICBase::StringStubState::decode(extra_state_) ==
        DEFAULT_STRING_STUB)) {
     index_out_of_range_label = &miss;
   }
 
-  GenerateNameCheck(name, &name_miss);
+  GenerateNameCheck(Handle<String>(name), &name_miss);
 
   // Check that the maps starting from the prototype haven't changed.
   GenerateDirectLoadGlobalFunctionPrototype(masm(),
@@ -1806,11 +2032,11 @@
   // Restore function name in rcx.
   __ Move(rcx, Handle<String>(name));
   __ bind(&name_miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -1835,7 +2061,7 @@
   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
 
   Label miss;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ movq(rdx, Operand(rsp, 2 * kPointerSize));
@@ -1871,7 +2097,7 @@
   // Tail call the full function. We do not have to patch the receiver
   // because the function makes no use of it.
   __ bind(&slow);
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
@@ -1879,11 +2105,11 @@
 
   __ bind(&miss);
   // rcx: function name.
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -1917,7 +2143,7 @@
   if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
 
   Label miss;
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   if (cell == NULL) {
     __ movq(rdx, Operand(rsp, 2 * kPointerSize));
@@ -1988,7 +2214,7 @@
   // Tail call the full function. We do not have to patch the receiver
   // because the function makes no use of it.
   __ bind(&slow);
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
@@ -1996,11 +2222,11 @@
 
   __ bind(&miss);
   // rcx: function name.
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+  return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name);
 }
 
 
@@ -2023,7 +2249,7 @@
 
   Label miss, miss_before_stack_reserved;
 
-  GenerateNameCheck(name, &miss_before_stack_reserved);
+  GenerateNameCheck(Handle<String>(name), &miss_before_stack_reserved);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -2055,11 +2281,11 @@
   __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
 
   __ bind(&miss_before_stack_reserved);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -2089,7 +2315,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
@@ -2186,7 +2412,7 @@
       UNREACHABLE();
   }
 
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
@@ -2194,11 +2420,11 @@
 
   // Handle call cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(function);
+  return TryGetCode(function);
 }
 
 
@@ -2216,18 +2442,18 @@
   // -----------------------------------
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the number of arguments.
   const int argc = arguments().immediate();
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
 
   // Get the receiver from the stack.
   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
-  CallInterceptorCompiler compiler(this, arguments(), rcx, extra_ic_state_);
+  CallInterceptorCompiler compiler(this, arguments(), rcx, extra_state_);
   MaybeObject* result = compiler.Compile(masm(),
                                          object,
                                          holder,
@@ -2257,7 +2483,7 @@
 
   // Invoke the function.
   __ movq(rdi, rax);
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
@@ -2265,11 +2491,11 @@
 
   // Handle load cache miss.
   __ bind(&miss);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
@@ -2299,7 +2525,7 @@
 
   Label miss;
 
-  GenerateNameCheck(name, &miss);
+  GenerateNameCheck(Handle<String>(name), &miss);
 
   // Get the number of arguments.
   const int argc = arguments().immediate();
@@ -2320,39 +2546,32 @@
   // Jump to the cached code (tail call).
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->call_global_inline(), 1);
-  ASSERT(function->is_compiled());
   ParameterCount expected(function->shared()->formal_parameter_count());
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
-  if (V8::UseCrankshaft()) {
-    // TODO(kasperl): For now, we always call indirectly through the
-    // code field in the function to allow recompilation to take effect
-    // without changing any of the call sites.
-    __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
-    __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION,
-                  NullCallWrapper(), call_kind);
-  } else {
-    Handle<Code> code(function->code());
-    __ InvokeCode(code, expected, arguments(),
-                  RelocInfo::CODE_TARGET, JUMP_FUNCTION,
-                  NullCallWrapper(), call_kind);
-  }
+  // We call indirectly through the code field in the function to
+  // allow recompilation to take effect without changing any of the
+  // call sites.
+  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
+  __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION,
+                NullCallWrapper(), call_kind);
+
   // Handle call cache miss.
   __ bind(&miss);
   __ IncrementCounter(counters->call_global_inline_miss(), 1);
-  MaybeObject* maybe_result = GenerateMissBranch();
+  MaybeObject* maybe_result = TryGenerateMissBranch();
   if (maybe_result->IsFailure()) return maybe_result;
 
   // Return the generated code.
-  return GetCode(NORMAL, name);
+  return TryGetCode(NORMAL, name);
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
+Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                   int index,
-                                                  Map* transition,
-                                                  String* name) {
+                                                  Handle<Map> transition,
+                                                  Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- rax    : value
   //  -- rcx    : name
@@ -2362,12 +2581,7 @@
   Label miss;
 
   // Generate store field code.  Preserves receiver and name on jump to miss.
-  GenerateStoreField(masm(),
-                     object,
-                     index,
-                     transition,
-                     rdx, rcx, rbx,
-                     &miss);
+  GenerateStoreField(masm(), object, index, transition, rdx, rcx, rbx, &miss);
 
   // Handle store cache miss.
   __ bind(&miss);
@@ -2375,13 +2589,14 @@
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
+  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
-                                                     AccessorInfo* callback,
-                                                     String* name) {
+Handle<Code> StoreStubCompiler::CompileStoreCallback(
+    Handle<JSObject> object,
+    Handle<AccessorInfo> callback,
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- rax    : value
   //  -- rcx    : name
@@ -2409,7 +2624,7 @@
 
   __ pop(rbx);  // remove the return address
   __ push(rdx);  // receiver
-  __ Push(Handle<AccessorInfo>(callback));  // callback info
+  __ Push(callback);  // callback info
   __ push(rcx);  // name
   __ push(rax);  // value
   __ push(rbx);  // restore return address
@@ -2429,8 +2644,9 @@
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
-                                                        String* name) {
+Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
+    Handle<JSObject> receiver,
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- rax    : value
   //  -- rcx    : name
@@ -2478,9 +2694,10 @@
 }
 
 
-MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
-                                                   JSGlobalPropertyCell* cell,
-                                                   String* name) {
+Handle<Code> StoreStubCompiler::CompileStoreGlobal(
+    Handle<GlobalObject> object,
+    Handle<JSGlobalPropertyCell> cell,
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- rax    : value
   //  -- rcx    : name
@@ -2495,7 +2712,7 @@
   __ j(not_equal, &miss);
 
   // Compute the cell operand to use.
-  __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
+  __ Move(rbx, cell);
   Operand cell_operand = FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset);
 
   // Check that the value in the cell is not the hole. If it is, this
@@ -2539,10 +2756,10 @@
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
+Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                        int index,
-                                                       Map* transition,
-                                                       String* name) {
+                                                       Handle<Map> transition,
+                                                       Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- rax     : value
   //  -- rcx     : key
@@ -2555,16 +2772,11 @@
   __ IncrementCounter(counters->keyed_store_field(), 1);
 
   // Check that the name has not changed.
-  __ Cmp(rcx, Handle<String>(name));
+  __ Cmp(rcx, name);
   __ j(not_equal, &miss);
 
   // Generate store field code.  Preserves receiver and name on jump to miss.
-  GenerateStoreField(masm(),
-                     object,
-                     index,
-                     transition,
-                     rdx, rcx, rbx,
-                     &miss);
+  GenerateStoreField(masm(), object, index, transition, rdx, rcx, rbx, &miss);
 
   // Handle store cache miss.
   __ bind(&miss);
@@ -2573,40 +2785,38 @@
   __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
+  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
+Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
+    Handle<Map> receiver_map) {
   // ----------- S t a t e -------------
   //  -- rax    : value
   //  -- rcx    : key
   //  -- rdx    : receiver
   //  -- rsp[0] : return address
   // -----------------------------------
-  Code* stub;
+
   ElementsKind elements_kind = receiver_map->elements_kind();
   bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
-  MaybeObject* maybe_stub =
-      KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode();
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  __ DispatchMap(rdx,
-                 Handle<Map>(receiver_map),
-                 Handle<Code>(stub),
-                 DO_SMI_CHECK);
+  Handle<Code> stub =
+      KeyedStoreElementStub(is_js_array, elements_kind).GetCode();
+
+  __ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);
 
   Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL);
+  return GetCode(NORMAL, factory()->empty_string());
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStorePolymorphic(
-    MapList* receiver_maps,
-    CodeList* handler_stubs,
-    MapList* transitioned_maps) {
+Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
+    MapHandleList* receiver_maps,
+    CodeHandleList* handler_stubs,
+    MapHandleList* transitioned_maps) {
   // ----------- S t a t e -------------
   //  -- rax    : value
   //  -- rcx    : key
@@ -2620,17 +2830,14 @@
   int receiver_count = receiver_maps->length();
   for (int i = 0; i < receiver_count; ++i) {
     // Check map and tail call if there's a match
-    Handle<Map> map(receiver_maps->at(i));
-    __ Cmp(rdi, map);
-    if (transitioned_maps->at(i) == NULL) {
-      __ j(equal, Handle<Code>(handler_stubs->at(i)), RelocInfo::CODE_TARGET);
+    __ Cmp(rdi, receiver_maps->at(i));
+    if (transitioned_maps->at(i).is_null()) {
+      __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
     } else {
       Label next_map;
       __ j(not_equal, &next_map, Label::kNear);
-      __ movq(rbx,
-              Handle<Map>(transitioned_maps->at(i)),
-              RelocInfo::EMBEDDED_OBJECT);
-      __ jmp(Handle<Code>(handler_stubs->at(i)), RelocInfo::CODE_TARGET);
+      __ movq(rbx, transitioned_maps->at(i), RelocInfo::EMBEDDED_OBJECT);
+      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
       __ bind(&next_map);
     }
   }
@@ -2640,13 +2847,13 @@
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL, MEGAMORPHIC);
+  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
-                                                      JSObject* object,
-                                                      JSObject* last) {
+Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
+                                                      Handle<JSObject> object,
+                                                      Handle<JSObject> last) {
   // ----------- S t a t e -------------
   //  -- rax    : receiver
   //  -- rcx    : name
@@ -2665,15 +2872,8 @@
   // If the last object in the prototype chain is a global object,
   // check that the global property cell is empty.
   if (last->IsGlobalObject()) {
-    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
-                                                  GlobalObject::cast(last),
-                                                  name,
-                                                  rdx,
-                                                  &miss);
-    if (cell->IsFailure()) {
-      miss.Unuse();
-      return cell;
-    }
+    GenerateCheckPropertyCell(
+        masm(), Handle<GlobalObject>::cast(last), name, rdx, &miss);
   }
 
   // Return undefined if maps of the full prototype chain are still the
@@ -2685,14 +2885,14 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NONEXISTENT, heap()->empty_string());
+  return GetCode(NONEXISTENT, factory()->empty_string());
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
-                                                JSObject* holder,
+Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
+                                                Handle<JSObject> holder,
                                                 int index,
-                                                String* name) {
+                                                Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- rax    : receiver
   //  -- rcx    : name
@@ -2731,14 +2931,14 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(CALLBACKS, name);
+  return TryGetCode(CALLBACKS, name);
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
-                                                   JSObject* holder,
-                                                   Object* value,
-                                                   String* name) {
+Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
+                                                   Handle<JSObject> holder,
+                                                   Handle<Object> value,
+                                                   Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- rax    : receiver
   //  -- rcx    : name
@@ -2765,7 +2965,7 @@
   // -----------------------------------
   Label miss;
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
 
   // TODO(368): Compile in the whole chain: all the interceptors in
@@ -2785,15 +2985,16 @@
   GenerateLoadMiss(masm(), Code::LOAD_IC);
 
   // Return the generated code.
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
-MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
-                                                 GlobalObject* holder,
-                                                 JSGlobalPropertyCell* cell,
-                                                 String* name,
-                                                 bool is_dont_delete) {
+Handle<Code> LoadStubCompiler::CompileLoadGlobal(
+    Handle<JSObject> object,
+    Handle<GlobalObject> holder,
+    Handle<JSGlobalPropertyCell> cell,
+    Handle<String> name,
+    bool is_dont_delete) {
   // ----------- S t a t e -------------
   //  -- rax    : receiver
   //  -- rcx    : name
@@ -2804,7 +3005,7 @@
   // If the object is the holder then we know that it's a global
   // object which can only happen for contextual loads. In this case,
   // the receiver cannot be a smi.
-  if (object != holder) {
+  if (!object.is_identical_to(holder)) {
     __ JumpIfSmi(rax, &miss);
   }
 
@@ -2812,7 +3013,7 @@
   CheckPrototypes(object, rax, holder, rbx, rdx, rdi, name, &miss);
 
   // Get the value from the cell.
-  __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
+  __ Move(rbx, cell);
   __ movq(rbx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset));
 
   // Check for deleted property if property can actually be deleted.
@@ -2838,9 +3039,9 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
-                                                     JSObject* receiver,
-                                                     JSObject* holder,
+Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
+                                                     Handle<JSObject> receiver,
+                                                     Handle<JSObject> holder,
                                                      int index) {
   // ----------- S t a t e -------------
   //  -- rax     : key
@@ -2853,7 +3054,7 @@
   __ IncrementCounter(counters->keyed_load_field(), 1);
 
   // Check that the name has not changed.
-  __ Cmp(rax, Handle<String>(name));
+  __ Cmp(rax, name);
   __ j(not_equal, &miss);
 
   GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
@@ -2899,14 +3100,15 @@
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
-  return GetCode(CALLBACKS, name);
+  return TryGetCode(CALLBACKS, name);
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
-                                                        JSObject* receiver,
-                                                        JSObject* holder,
-                                                        Object* value) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
+    Handle<String> name,
+    Handle<JSObject> receiver,
+    Handle<JSObject> holder,
+    Handle<Object> value) {
   // ----------- S t a t e -------------
   //  -- rax    : key
   //  -- rdx    : receiver
@@ -2949,7 +3151,7 @@
   __ Cmp(rax, Handle<String>(name));
   __ j(not_equal, &miss);
 
-  LookupResult lookup;
+  LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
   GenerateLoadInterceptor(receiver,
                           holder,
@@ -2966,11 +3168,12 @@
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
-  return GetCode(INTERCEPTOR, name);
+  return TryGetCode(INTERCEPTOR, name);
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- rax    : key
   //  -- rdx    : receiver
@@ -2982,7 +3185,7 @@
   __ IncrementCounter(counters->keyed_load_array_length(), 1);
 
   // Check that the name has not changed.
-  __ Cmp(rax, Handle<String>(name));
+  __ Cmp(rax, name);
   __ j(not_equal, &miss);
 
   GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
@@ -2995,7 +3198,8 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- rax    : key
   //  -- rdx    : receiver
@@ -3007,7 +3211,7 @@
   __ IncrementCounter(counters->keyed_load_string_length(), 1);
 
   // Check that the name has not changed.
-  __ Cmp(rax, Handle<String>(name));
+  __ Cmp(rax, name);
   __ j(not_equal, &miss);
 
   GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true);
@@ -3020,7 +3224,8 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
+    Handle<String> name) {
   // ----------- S t a t e -------------
   //  -- rax    : key
   //  -- rdx    : receiver
@@ -3032,7 +3237,7 @@
   __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
 
   // Check that the name has not changed.
-  __ Cmp(rax, Handle<String>(name));
+  __ Cmp(rax, name);
   __ j(not_equal, &miss);
 
   GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
@@ -3045,32 +3250,29 @@
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
+    Handle<Map> receiver_map) {
   // ----------- S t a t e -------------
   //  -- rax    : key
   //  -- rdx    : receiver
   //  -- rsp[0] : return address
   // -----------------------------------
-  Code* stub;
   ElementsKind elements_kind = receiver_map->elements_kind();
-  MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode();
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  __ DispatchMap(rdx,
-                 Handle<Map>(receiver_map),
-                 Handle<Code>(stub),
-                 DO_SMI_CHECK);
+  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
+
+  __ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);
 
   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL);
+  return GetCode(NORMAL, factory()->empty_string());
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadPolymorphic(
-    MapList* receiver_maps,
-    CodeList* handler_ics) {
+Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
+    MapHandleList* receiver_maps,
+    CodeHandleList* handler_ics) {
   // ----------- S t a t e -------------
   //  -- rax    : key
   //  -- rdx    : receiver
@@ -3084,18 +3286,15 @@
   int receiver_count = receiver_maps->length();
   for (int current = 0; current < receiver_count; ++current) {
     // Check map and tail call if there's a match
-    Handle<Map> map(receiver_maps->at(current));
-    __ Cmp(map_reg, map);
-    __ j(equal,
-         Handle<Code>(handler_ics->at(current)),
-         RelocInfo::CODE_TARGET);
+    __ Cmp(map_reg, receiver_maps->at(current));
+    __ j(equal, handler_ics->at(current), RelocInfo::CODE_TARGET);
   }
 
   __  bind(&miss);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL, MEGAMORPHIC);
+  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
 }
 
 
diff --git a/test/cctest/cctest.status b/test/cctest/cctest.status
index 759f69f..7161345 100644
--- a/test/cctest/cctest.status
+++ b/test/cctest/cctest.status
@@ -52,9 +52,6 @@
 # We do not yet shrink weak maps after they have been emptied by the GC
 test-weakmaps/Shrinking: FAIL
 
-# NewGC: BUG(1717)
-test-api/OutOfMemoryNested: PASS || TIMEOUT
-
 ##############################################################################
 [ $arch == arm ]
 
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index 167c4cd..5081a64 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -5438,67 +5438,109 @@
 
 
 THREADED_TEST(StringWrite) {
+  LocalContext context;
   v8::HandleScope scope;
   v8::Handle<String> str = v8_str("abcde");
   // abc<Icelandic eth><Unicode snowman>.
   v8::Handle<String> str2 = v8_str("abc\303\260\342\230\203");
+  const int kStride = 4;  // Must match stride in for loops in JS below.
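+  // Build two deep cons strings: 'left' grows by appending characters on the
+  // right (a left-leaning tree), 'right' by prepending them (a right-leaning
+  // tree), so WriteUtf8 is exercised on both tree shapes.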
+  CompileRun(
+      "var left = '';"
+      "for (var i = 0; i < 0xd800; i += 4) {"
+      "  left = left + String.fromCharCode(i);"
+      "}");
+  CompileRun(
+      "var right = '';"
+      "for (var i = 0; i < 0xd800; i += 4) {"
+      "  right = String.fromCharCode(i) + right;"
+      "}");
+  v8::Handle<v8::Object> global = Context::GetCurrent()->Global();
+  Handle<String> left_tree = global->Get(v8_str("left")).As<String>();
+  Handle<String> right_tree = global->Get(v8_str("right")).As<String>();
 
   CHECK_EQ(5, str2->Length());
+  CHECK_EQ(0xd800 / kStride, left_tree->Length());
+  CHECK_EQ(0xd800 / kStride, right_tree->Length());
 
   char buf[100];
-  char utf8buf[100];
+  char utf8buf[0xd800 * 3];
   uint16_t wbuf[100];
   int len;
   int charlen;
 
-  memset(utf8buf, 0x1, sizeof(utf8buf));
+  memset(utf8buf, 0x1, 1000);
   len = str2->WriteUtf8(utf8buf, sizeof(utf8buf), &charlen);
   CHECK_EQ(9, len);
   CHECK_EQ(5, charlen);
   CHECK_EQ(0, strcmp(utf8buf, "abc\303\260\342\230\203"));
 
-  memset(utf8buf, 0x1, sizeof(utf8buf));
+  memset(utf8buf, 0x1, 1000);
   len = str2->WriteUtf8(utf8buf, 8, &charlen);
   CHECK_EQ(8, len);
   CHECK_EQ(5, charlen);
   CHECK_EQ(0, strncmp(utf8buf, "abc\303\260\342\230\203\1", 9));
 
-  memset(utf8buf, 0x1, sizeof(utf8buf));
+  memset(utf8buf, 0x1, 1000);
   len = str2->WriteUtf8(utf8buf, 7, &charlen);
   CHECK_EQ(5, len);
   CHECK_EQ(4, charlen);
   CHECK_EQ(0, strncmp(utf8buf, "abc\303\260\1", 5));
 
-  memset(utf8buf, 0x1, sizeof(utf8buf));
+  memset(utf8buf, 0x1, 1000);
   len = str2->WriteUtf8(utf8buf, 6, &charlen);
   CHECK_EQ(5, len);
   CHECK_EQ(4, charlen);
   CHECK_EQ(0, strncmp(utf8buf, "abc\303\260\1", 5));
 
-  memset(utf8buf, 0x1, sizeof(utf8buf));
+  memset(utf8buf, 0x1, 1000);
   len = str2->WriteUtf8(utf8buf, 5, &charlen);
   CHECK_EQ(5, len);
   CHECK_EQ(4, charlen);
   CHECK_EQ(0, strncmp(utf8buf, "abc\303\260\1", 5));
 
-  memset(utf8buf, 0x1, sizeof(utf8buf));
+  memset(utf8buf, 0x1, 1000);
   len = str2->WriteUtf8(utf8buf, 4, &charlen);
   CHECK_EQ(3, len);
   CHECK_EQ(3, charlen);
   CHECK_EQ(0, strncmp(utf8buf, "abc\1", 4));
 
-  memset(utf8buf, 0x1, sizeof(utf8buf));
+  memset(utf8buf, 0x1, 1000);
   len = str2->WriteUtf8(utf8buf, 3, &charlen);
   CHECK_EQ(3, len);
   CHECK_EQ(3, charlen);
   CHECK_EQ(0, strncmp(utf8buf, "abc\1", 4));
 
-  memset(utf8buf, 0x1, sizeof(utf8buf));
+  memset(utf8buf, 0x1, 1000);
   len = str2->WriteUtf8(utf8buf, 2, &charlen);
   CHECK_EQ(2, len);
   CHECK_EQ(2, charlen);
   CHECK_EQ(0, strncmp(utf8buf, "ab\1", 3));
 
+  memset(utf8buf, 0x1, sizeof(utf8buf));
+  len = left_tree->Utf8Length();
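+  // Expected UTF-8 size: 1 byte per code point below 0x80, 2 bytes up to
+  // 0x7ff and 3 bytes up to 0xd7ff; only every kStride-th code point is in
+  // the string, hence the division.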
+  int utf8_expected =
+      (0x80 + (0x800 - 0x80) * 2 + (0xd800 - 0x800) * 3) / kStride;
+  CHECK_EQ(utf8_expected, len);
+  len = left_tree->WriteUtf8(utf8buf, utf8_expected, &charlen);
+  CHECK_EQ(utf8_expected, len);
+  CHECK_EQ(0xd800 / kStride, charlen);
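+  // The string ends with code point 0xd800 - kStride, whose UTF-8 encoding
+  // is 0xED 0x9F (0xC0 - kStride).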
+  CHECK_EQ(0xed, static_cast<unsigned char>(utf8buf[utf8_expected - 3]));
+  CHECK_EQ(0x9f, static_cast<unsigned char>(utf8buf[utf8_expected - 2]));
+  CHECK_EQ(0xc0 - kStride,
+           static_cast<unsigned char>(utf8buf[utf8_expected - 1]));
+  CHECK_EQ(1, utf8buf[utf8_expected]);
+
+  memset(utf8buf, 0x1, sizeof(utf8buf));
+  len = right_tree->Utf8Length();
+  CHECK_EQ(utf8_expected, len);
+  len = right_tree->WriteUtf8(utf8buf, utf8_expected, &charlen);
+  CHECK_EQ(utf8_expected, len);
+  CHECK_EQ(0xd800 / kStride, charlen);
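+  // right_tree was built by prepending, so it starts with the highest code
+  // point and the same three bytes appear at the front of the buffer.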
+  CHECK_EQ(0xed, static_cast<unsigned char>(utf8buf[0]));
+  CHECK_EQ(0x9f, static_cast<unsigned char>(utf8buf[1]));
+  CHECK_EQ(0xc0 - kStride, static_cast<unsigned char>(utf8buf[2]));
+  CHECK_EQ(1, utf8buf[utf8_expected]);
+
   memset(buf, 0x1, sizeof(buf));
   memset(wbuf, 0x1, sizeof(wbuf));
   len = str->WriteAscii(buf);
@@ -11440,6 +11482,7 @@
 // Test that we can still flatten a string if the components it is built up
 // from have been turned into 16 bit strings in the mean time.
 THREADED_TEST(MorphCompositeStringTest) {
+  char utf_buffer[129];
   const char* c_string = "Now is the time for all good men"
                          " to come to the aid of the party";
   uint16_t* two_byte_string = AsciiToTwoByteString(c_string);
@@ -11468,6 +11511,17 @@
     MorphAString(*v8::Utils::OpenHandle(*lhs), &ascii_resource, &uc16_resource);
     MorphAString(*v8::Utils::OpenHandle(*rhs), &ascii_resource, &uc16_resource);
 
+    // This should convert to UTF-8 without flattening, since it is all ASCII.
+    Handle<String> cons = v8_compile("cons")->Run().As<String>();
+    CHECK_EQ(128, cons->Utf8Length());
+    int nchars = -1;
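+    // A return value of 129 includes the terminating NUL written after the
+    // 128 one-byte characters.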
+    CHECK_EQ(129, cons->WriteUtf8(utf_buffer, -1, &nchars));
+    CHECK_EQ(128, nchars);
+    CHECK_EQ(0, strcmp(
+        utf_buffer,
+        "Now is the time for all good men to come to the aid of the party"
+        "Now is the time for all good men to come to the aid of the party"));
+
     // Now do some stuff to make sure the strings are flattened, etc.
     CompileRun(
         "/[^a-z]/.test(cons);"
diff --git a/test/cctest/test-debug.cc b/test/cctest/test-debug.cc
index de60d49..cf723ba 100644
--- a/test/cctest/test-debug.cc
+++ b/test/cctest/test-debug.cc
@@ -409,11 +409,8 @@
 
 
 static Handle<Code> ComputeCallDebugBreak(int argc) {
-  CALL_HEAP_FUNCTION(
-      v8::internal::Isolate::Current(),
-      v8::internal::Isolate::Current()->stub_cache()->ComputeCallDebugBreak(
-          argc, Code::CALL_IC),
-      Code);
+  return Isolate::Current()->stub_cache()->ComputeCallDebugBreak(argc,
+                                                                 Code::CALL_IC);
 }
 
 
@@ -425,8 +422,8 @@
   CHECK_EQ(NULL, Isolate::Current()->debug()->debug_info_list_);
 
   // Collect garbage to ensure weak handles are cleared.
-  HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
-  HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
 
   // Iterate the heap and check that there are no debugger related objects left.
   HeapIterator iterator;
diff --git a/test/cctest/test-dictionary.cc b/test/cctest/test-dictionary.cc
index 15a854b..793e228 100644
--- a/test/cctest/test-dictionary.cc
+++ b/test/cctest/test-dictionary.cc
@@ -38,6 +38,7 @@
 
 using namespace v8::internal;
 
+
 TEST(ObjectHashTable) {
   v8::HandleScope scope;
   LocalContext context;
@@ -66,7 +67,8 @@
   CHECK_EQ(table->NumberOfDeletedElements(), 1);
   CHECK_EQ(table->Lookup(*a), HEAP->undefined_value());
 
-  // Keys should map back to their respective values.
+  // Keys should map back to their respective values and also should get
+  // an identity hash code generated.
   for (int i = 0; i < 100; i++) {
     Handle<JSObject> key = FACTORY->NewJSArray(7);
     Handle<JSObject> value = FACTORY->NewJSArray(11);
@@ -74,12 +76,67 @@
     CHECK_EQ(table->NumberOfElements(), i + 1);
     CHECK_NE(table->FindEntry(*key), ObjectHashTable::kNotFound);
     CHECK_EQ(table->Lookup(*key), *value);
+    CHECK(key->GetIdentityHash(OMIT_CREATION)->ToObjectChecked()->IsSmi());
   }
 
-  // Keys never added to the map should not be found.
-  for (int i = 0; i < 1000; i++) {
-    Handle<JSObject> o = FACTORY->NewJSArray(100);
-    CHECK_EQ(table->FindEntry(*o), ObjectHashTable::kNotFound);
-    CHECK_EQ(table->Lookup(*o), HEAP->undefined_value());
+  // Keys that already have an identity hash code but were never added to the
+  // map should not be found.
+  for (int i = 0; i < 100; i++) {
+    Handle<JSObject> key = FACTORY->NewJSArray(7);
+    CHECK(key->GetIdentityHash(ALLOW_CREATION)->ToObjectChecked()->IsSmi());
+    CHECK_EQ(table->FindEntry(*key), ObjectHashTable::kNotFound);
+    CHECK_EQ(table->Lookup(*key), HEAP->undefined_value());
+    CHECK(key->GetIdentityHash(OMIT_CREATION)->ToObjectChecked()->IsSmi());
+  }
+
+  // Keys that don't have an identity hash should not be found and also
+  // should not get an identity hash code generated.
+  for (int i = 0; i < 100; i++) {
+    Handle<JSObject> key = FACTORY->NewJSArray(7);
+    CHECK_EQ(table->Lookup(*key), HEAP->undefined_value());
+    CHECK_EQ(key->GetIdentityHash(OMIT_CREATION), HEAP->undefined_value());
   }
 }
+
+
+#ifdef DEBUG
+TEST(ObjectHashSetCausesGC) {
+  v8::HandleScope scope;
+  LocalContext context;
+  Handle<ObjectHashSet> table = FACTORY->NewObjectHashSet(1);
+  Handle<JSObject> key = FACTORY->NewJSArray(0);
+
+  // Simulate a full heap so that generating an identity hash code
+  // in subsequent calls will request GC.
+  FLAG_gc_interval = 0;
+
+  // Calling Contains() should not cause GC ever.
+  CHECK(!table->Contains(*key));
+
+  // Calling Remove() should not cause GC ever.
+  CHECK(!table->Remove(*key)->IsFailure());
+
+  // Calling Add() should request GC by returning a failure.
+  CHECK(table->Add(*key)->IsRetryAfterGC());
+}
+#endif
+
+
+#ifdef DEBUG
+TEST(ObjectHashTableCausesGC) {
+  v8::HandleScope scope;
+  LocalContext context;
+  Handle<ObjectHashTable> table = FACTORY->NewObjectHashTable(1);
+  Handle<JSObject> key = FACTORY->NewJSArray(0);
+
+  // Simulate a full heap so that generating an identity hash code
+  // in subsequent calls will request GC.
+  FLAG_gc_interval = 0;
+
+  // Calling Lookup() should not cause GC ever.
+  CHECK(table->Lookup(*key)->IsUndefined());
+
+  // Calling Put() should request GC by returning a failure.
+  CHECK(table->Put(*key, *key)->IsRetryAfterGC());
+}
+#endif
diff --git a/test/cctest/test-heap-profiler.cc b/test/cctest/test-heap-profiler.cc
index d695d74..87e7a7d 100644
--- a/test/cctest/test-heap-profiler.cc
+++ b/test/cctest/test-heap-profiler.cc
@@ -252,6 +252,28 @@
   CHECK_EQ(v8::HeapGraphNode::kHeapNumber, b->GetType());
 }
 
+TEST(HeapSnapshotSlicedString) {
+  v8::HandleScope scope;
+  LocalContext env;
+  CompileRun(
+      "parent_string = \"123456789.123456789.123456789.123456789.123456789."
+      "123456789.123456789.123456789.123456789.123456789."
+      "123456789.123456789.123456789.123456789.123456789."
+      "123456789.123456789.123456789.123456789.123456789.\";"
+      "child_string = parent_string.slice(100);");
+  const v8::HeapSnapshot* snapshot =
+      v8::HeapProfiler::TakeSnapshot(v8_str("strings"));
+  const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
+  const v8::HeapGraphNode* parent_string =
+      GetProperty(global, v8::HeapGraphEdge::kShortcut, "parent_string");
+  CHECK_NE(NULL, parent_string);
+  const v8::HeapGraphNode* child_string =
+      GetProperty(global, v8::HeapGraphEdge::kShortcut, "child_string");
+  CHECK_NE(NULL, child_string);
+  const v8::HeapGraphNode* parent =
+      GetProperty(child_string, v8::HeapGraphEdge::kInternal, "parent");
+  CHECK_EQ(parent_string, parent);
+}
 
 TEST(HeapSnapshotInternalReferences) {
   v8::HandleScope scope;
diff --git a/test/cctest/test-parsing.cc b/test/cctest/test-parsing.cc
index 8f217e6..8cfd5f7 100755
--- a/test/cctest/test-parsing.cc
+++ b/test/cctest/test-parsing.cc
@@ -260,10 +260,11 @@
     i::JavaScriptScanner scanner(i::Isolate::Current()->unicode_cache());
     scanner.Initialize(&stream);
 
+    int flags = i::kAllowLazy | i::kAllowNativesSyntax;
     v8::preparser::PreParser::PreParseResult result =
         v8::preparser::PreParser::PreParseProgram(&scanner,
                                                   &log,
-                                                  true,
+                                                  flags,
                                                   stack_limit);
     CHECK_EQ(v8::preparser::PreParser::kPreParseSuccess, result);
     i::ScriptDataImpl data(log.ExtractData());
@@ -272,6 +273,43 @@
 }
 
 
+TEST(StandAlonePreParserNoNatives) {
+  v8::V8::Initialize();
+
+  int marker;
+  i::Isolate::Current()->stack_guard()->SetStackLimit(
+      reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
+
+  const char* programs[] = {
+      "%ArgleBargle(glop);",
+      "var x = %_IsSmi(42);",
+      NULL
+  };
+
+  uintptr_t stack_limit = i::Isolate::Current()->stack_guard()->real_climit();
+  for (int i = 0; programs[i]; i++) {
+    const char* program = programs[i];
+    i::Utf8ToUC16CharacterStream stream(
+        reinterpret_cast<const i::byte*>(program),
+        static_cast<unsigned>(strlen(program)));
+    i::CompleteParserRecorder log;
+    i::JavaScriptScanner scanner(i::Isolate::Current()->unicode_cache());
+    scanner.Initialize(&stream);
+
+    // Flags don't allow natives syntax.
+    v8::preparser::PreParser::PreParseResult result =
+        v8::preparser::PreParser::PreParseProgram(&scanner,
+                                                  &log,
+                                                  i::kAllowLazy,
+                                                  stack_limit);
+    CHECK_EQ(v8::preparser::PreParser::kPreParseSuccess, result);
+    i::ScriptDataImpl data(log.ExtractData());
+    // Data contains syntax error.
+    CHECK(data.has_error());
+  }
+}
+
+
 TEST(RegressChromium62639) {
   v8::V8::Initialize();
 
@@ -706,3 +744,135 @@
   TestScanRegExp("/=/", "=");
   TestScanRegExp("/=?/", "=?");
 }
+
+
+TEST(ScopePositions) {
+  // Test the parser for correctly setting the start and end positions
+  // of a scope. We check the scope positions of exactly one scope
+  // nested in the global scope of a program. 'inner_source' is the
+  // source code that determines the part of the source belonging
+  // to the nested scope. 'outer_prefix' and 'outer_suffix' are
+  // parts of the source that belong to the global scope.
+  struct SourceData {
+    const char* outer_prefix;
+    const char* inner_source;
+    const char* outer_suffix;
+    i::ScopeType scope_type;
+  };
+
+  const SourceData source_data[] = {
+    { "  with ({}) ", "{ block; }", " more;", i::WITH_SCOPE },
+    { "  with ({}) ", "{ block; }", "; more;", i::WITH_SCOPE },
+    { "  with ({}) ", "{\n"
+      "    block;\n"
+      "  }", "\n"
+      "  more;", i::WITH_SCOPE },
+    { "  with ({}) ", "statement;", " more;", i::WITH_SCOPE },
+    { "  with ({}) ", "statement", "\n"
+      "  more;", i::WITH_SCOPE },
+    { "  with ({})\n"
+      "    ", "statement;", "\n"
+      "  more;", i::WITH_SCOPE },
+    { "  try {} catch ", "(e) { block; }", " more;", i::CATCH_SCOPE },
+    { "  try {} catch ", "(e) { block; }", "; more;", i::CATCH_SCOPE },
+    { "  try {} catch ", "(e) {\n"
+      "    block;\n"
+      "  }", "\n"
+      "  more;", i::CATCH_SCOPE },
+    { "  try {} catch ", "(e) { block; }", " finally { block; } more;",
+      i::CATCH_SCOPE },
+    { "  start;\n"
+      "  ", "{ let block; }", " more;", i::BLOCK_SCOPE },
+    { "  start;\n"
+      "  ", "{ let block; }", "; more;", i::BLOCK_SCOPE },
+    { "  start;\n"
+      "  ", "{\n"
+      "    let block;\n"
+      "  }", "\n"
+      "  more;", i::BLOCK_SCOPE },
+    { "  start;\n"
+      "  function fun", "(a,b) { infunction; }", " more;",
+      i::FUNCTION_SCOPE },
+    { "  start;\n"
+      "  function fun", "(a,b) {\n"
+      "    infunction;\n"
+      "  }", "\n"
+      "  more;", i::FUNCTION_SCOPE },
+    { "  (function fun", "(a,b) { infunction; }", ")();",
+      i::FUNCTION_SCOPE },
+    { "  for ", "(let x = 1 ; x < 10; ++ x) { block; }", " more;",
+      i::BLOCK_SCOPE },
+    { "  for ", "(let x = 1 ; x < 10; ++ x) { block; }", "; more;",
+      i::BLOCK_SCOPE },
+    { "  for ", "(let x = 1 ; x < 10; ++ x) {\n"
+      "    block;\n"
+      "  }", "\n"
+      "  more;", i::BLOCK_SCOPE },
+    { "  for ", "(let x = 1 ; x < 10; ++ x) statement;", " more;",
+      i::BLOCK_SCOPE },
+    { "  for ", "(let x = 1 ; x < 10; ++ x) statement", "\n"
+      "  more;", i::BLOCK_SCOPE },
+    { "  for ", "(let x = 1 ; x < 10; ++ x)\n"
+      "    statement;", "\n"
+      "  more;", i::BLOCK_SCOPE },
+    { "  for ", "(let x in {}) { block; }", " more;", i::BLOCK_SCOPE },
+    { "  for ", "(let x in {}) { block; }", "; more;", i::BLOCK_SCOPE },
+    { "  for ", "(let x in {}) {\n"
+      "    block;\n"
+      "  }", "\n"
+      "  more;", i::BLOCK_SCOPE },
+    { "  for ", "(let x in {}) statement;", " more;", i::BLOCK_SCOPE },
+    { "  for ", "(let x in {}) statement", "\n"
+      "  more;", i::BLOCK_SCOPE },
+    { "  for ", "(let x in {})\n"
+      "    statement;", "\n"
+      "  more;", i::BLOCK_SCOPE },
+    { NULL, NULL, NULL, i::EVAL_SCOPE }
+  };
+
+  v8::HandleScope handles;
+  v8::Persistent<v8::Context> context = v8::Context::New();
+  v8::Context::Scope context_scope(context);
+
+  int marker;
+  i::Isolate::Current()->stack_guard()->SetStackLimit(
+      reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
+
+  for (int i = 0; source_data[i].outer_prefix; i++) {
+    int kPrefixLen = i::StrLength(source_data[i].outer_prefix);
+    int kInnerLen = i::StrLength(source_data[i].inner_source);
+    int kSuffixLen = i::StrLength(source_data[i].outer_suffix);
+    int kProgramSize = kPrefixLen + kInnerLen + kSuffixLen;
+    i::Vector<char> program = i::Vector<char>::New(kProgramSize + 1);
+    int length;
+    length = i::OS::SNPrintF(program, "%s%s%s",
+                             source_data[i].outer_prefix,
+                             source_data[i].inner_source,
+                             source_data[i].outer_suffix);
+    ASSERT(length == kProgramSize);
+
+    // Parse program source.
+    i::Handle<i::String> source(
+        FACTORY->NewStringFromAscii(i::CStrVector(program.start())));
+    i::Handle<i::Script> script = FACTORY->NewScript(source);
+    i::Parser parser(script, false, NULL, NULL);
+    parser.SetHarmonyScoping(true);
+    i::FunctionLiteral* function =
+        parser.ParseProgram(source, true, i::kNonStrictMode);
+    ASSERT(function != NULL);
+
+    // Check scope types and positions.
+    i::Scope* scope = function->scope();
+    CHECK(scope->is_global_scope());
+    CHECK_EQ(scope->start_position(), 0);
+    CHECK_EQ(scope->end_position(), kProgramSize);
+    CHECK_EQ(scope->inner_scopes()->length(), 1);
+
+    i::Scope* inner_scope = scope->inner_scopes()->at(0);
+    CHECK_EQ(inner_scope->type(), source_data[i].scope_type);
+    CHECK_EQ(inner_scope->start_position(), kPrefixLen);
+    // The end position of a token is one position after the last
+    // character belonging to that token.
+    CHECK_EQ(inner_scope->end_position(), kPrefixLen + kInnerLen);
+  }
+}
diff --git a/test/cctest/test-serialize.cc b/test/cctest/test-serialize.cc
index cccd2ee..b5c1a09 100644
--- a/test/cctest/test-serialize.cc
+++ b/test/cctest/test-serialize.cc
@@ -130,7 +130,8 @@
            encoder.Encode(
                ExternalReference::new_space_start(isolate).address()));
   CHECK_EQ(make_code(UNCLASSIFIED, 3),
-           encoder.Encode(ExternalReference::roots_address(isolate).address()));
+           encoder.Encode(
+               ExternalReference::roots_array_start(isolate).address()));
 }
 
 
diff --git a/test/mjsunit/apply.js b/test/mjsunit/apply.js
index c166110..413ee93 100644
--- a/test/mjsunit/apply.js
+++ b/test/mjsunit/apply.js
@@ -190,3 +190,10 @@
     "moreseper-prime");
 
 delete(Array.prototype["1"]);
+
+// Check correct handling of non-array argument lists.
+assertSame(this, f0.apply(this, {}), "non-array-1");
+assertSame(this, f0.apply(this, { length:1 }), "non-array-2");
+assertEquals(void 0, f1.apply(this, { length:1 }), "non-array-3");
+assertEquals(void 0, f1.apply(this, { 0:"foo" }), "non-array-4");
+assertEquals("foo", f1.apply(this, { length:1, 0:"foo" }), "non-array-5");
diff --git a/test/mjsunit/array-literal-transitions.js b/test/mjsunit/array-literal-transitions.js
new file mode 100644
index 0000000..321340c
--- /dev/null
+++ b/test/mjsunit/array-literal-transitions.js
@@ -0,0 +1,125 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --smi-only-arrays --expose-gc
+// Test element kind of objects.
+// Since --smi-only-arrays affects builtins, its default setting at compile
+// time sticks if built with snapshot.  If --smi-only-arrays is deactivated
+// by default, only a no-snapshot build actually has smi-only arrays enabled
+// in this test case.  Depending on whether smi-only arrays are actually
+// enabled, this test takes the appropriate code path to check smi-only arrays.
+
+support_smi_only_arrays = %HasFastSmiOnlyElements(new Array());
+
+// IC and Crankshaft support for smi-only elements in dynamic array literals.
+function get(foo) { return foo; }  // Used to generate dynamic values.
+
+function array_literal_test() {
+  var a0 = [1, 2, 3];
+  assertTrue(%HasFastSmiOnlyElements(a0));
+  var a1 = [get(1), get(2), get(3)];
+  assertTrue(%HasFastSmiOnlyElements(a1));
+
+  var b0 = [1, 2, get("three")];
+  assertTrue(%HasFastElements(b0));
+  var b1 = [get(1), get(2), get("three")];
+  assertTrue(%HasFastElements(b1));
+
+  var c0 = [1, 2, get(3.5)];
+  assertTrue(%HasFastDoubleElements(c0));
+  assertEquals(3.5, c0[2]);
+  assertEquals(2, c0[1]);
+  assertEquals(1, c0[0]);
+
+  var c1 = [1, 2, 3.5];
+  assertTrue(%HasFastDoubleElements(c1));
+  assertEquals(3.5, c1[2]);
+  assertEquals(2, c1[1]);
+  assertEquals(1, c1[0]);
+
+  var c2 = [get(1), get(2), get(3.5)];
+  assertTrue(%HasFastDoubleElements(c2));
+  assertEquals(3.5, c2[2]);
+  assertEquals(2, c2[1]);
+  assertEquals(1, c2[0]);
+
+  var object = new Object();
+  var d0 = [1, 2, object];
+  assertTrue(%HasFastElements(d0));
+  assertEquals(object, d0[2]);
+  assertEquals(2, d0[1]);
+  assertEquals(1, d0[0]);
+
+  var e0 = [1, 2, 3.5];
+  assertTrue(%HasFastDoubleElements(e0));
+  assertEquals(3.5, e0[2]);
+  assertEquals(2, e0[1]);
+  assertEquals(1, e0[0]);
+
+  var f0 = [1, 2, [1, 2]];
+  assertTrue(%HasFastElements(f0));
+  assertEquals([1,2], f0[2]);
+  assertEquals(2, f0[1]);
+  assertEquals(1, f0[0]);
+}
+
+if (support_smi_only_arrays) {
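+  // Run a few times to gather type feedback, then optimize and re-run to
+  // check that the literals get the same elements kinds in optimized code.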
+  for (var i = 0; i < 3; i++) {
+    array_literal_test();
+  }
+  %OptimizeFunctionOnNextCall(array_literal_test);
+  array_literal_test();
+
+  function test_large_literal() {
+
+    function d() {
+      gc();
+      return 2.5;
+    }
+
+    function o() {
+      gc();
+      return new Object();
+    }
+
+    large =
+        [ 0, 1, 2, 3, 4, 5, d(), d(), d(), d(), d(), d(), o(), o(), o(), o() ];
+    assertFalse(%HasDictionaryElements(large));
+    assertFalse(%HasFastSmiOnlyElements(large));
+    assertFalse(%HasFastDoubleElements(large));
+    assertTrue(%HasFastElements(large));
+    assertEquals(large,
+                 [0, 1, 2, 3, 4, 5, 2.5, 2.5, 2.5, 2.5, 2.5, 2.5,
+                  new Object(), new Object(), new Object(), new Object()]);
+  }
+
+  for (var i = 0; i < 3; i++) {
+    test_large_literal();
+  }
+  %OptimizeFunctionOnNextCall(test_large_literal);
+  test_large_literal();
+}
diff --git a/test/mjsunit/compiler/compare.js b/test/mjsunit/compiler/compare.js
index 3f96087..460b0ab 100644
--- a/test/mjsunit/compiler/compare.js
+++ b/test/mjsunit/compiler/compare.js
@@ -83,9 +83,9 @@
 }
 
 TestNonPrimitive("xy", MaxLT);
-TestNonPrimitive("yx", MaxLE);
+TestNonPrimitive("xy", MaxLE);
 TestNonPrimitive("xy", MaxGE);
-TestNonPrimitive("yx", MaxGT);
+TestNonPrimitive("xy", MaxGT);
 
 // Test compare in case of aliased registers.
 function CmpX(x) { if (x == x) return 42; }
diff --git a/test/mjsunit/cyclic-error-to-string.js b/test/mjsunit/compiler/regress-deopt-call-as-function.js
similarity index 66%
copy from test/mjsunit/cyclic-error-to-string.js
copy to test/mjsunit/compiler/regress-deopt-call-as-function.js
index 2502b53..d82c690 100644
--- a/test/mjsunit/cyclic-error-to-string.js
+++ b/test/mjsunit/compiler/regress-deopt-call-as-function.js
@@ -25,22 +25,38 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Test printing of cyclic errors which return the empty string for
-// compatibility with Safari and Firefox.
+// Test deoptimization after an inlined call.
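+// Each function below uses the result of the inlined call in a different
+// expression context (test, value, and effect context).  The loop counter is
+// driven past the 32-bit integer range, presumably to provoke the
+// deoptimization this regression test exercises.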
 
-var e = new Error();
-assertEquals('Error', e + '');
+function bar(a, b) {try { return a; } finally { } }
 
-e = new Error();
-e.name = e;
-e.message = e;
-e.stack = e;
-e.arguments = e;
-assertEquals(': ', e + '');
+function test_context() {
+  function foo(x) { return 42; }
+  var s, t;
+  for (var i = 0x7ff00000; i < 0x80000000; i++) {
+    bar(t = foo(i) ? bar(42 + i - i) : bar(0), s = i + t);
+  }
+  return s;
+}
+assertEquals(0x7fffffff + 42, test_context());
 
-e = new Error();
-e.name = [ e ];
-e.message = [ e ];
-e.stack = [ e ];
-e.arguments = [ e ];
-assertEquals(': ', e + '');
+
+function value_context() {
+  function foo(x) { return 42; }
+  var s, t;
+  for (var i = 0x7ff00000; i < 0x80000000; i++) {
+    bar(t = foo(i), s = i + t);
+  }
+  return s;
+}
+assertEquals(0x7fffffff + 42, value_context());
+
+
+function effect_context() {
+  function foo(x) { return 42; }
+  var s, t;
+  for (var i = 0x7ff00000; i < 0x80000000; i++) {
+    bar(foo(i), s = i + 42);
+  }
+  return s;
+}
+assertEquals(0x7fffffff + 42, effect_context());
diff --git a/test/mjsunit/cyclic-error-to-string.js b/test/mjsunit/compiler/regress-inline-callfunctionstub.js
similarity index 80%
rename from test/mjsunit/cyclic-error-to-string.js
rename to test/mjsunit/compiler/regress-inline-callfunctionstub.js
index 2502b53..a39d26d 100644
--- a/test/mjsunit/cyclic-error-to-string.js
+++ b/test/mjsunit/compiler/regress-inline-callfunctionstub.js
@@ -25,22 +25,22 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Test printing of cyclic errors which return the empty string for
-// compatibility with Safari and Firefox.
+// Flags: --allow-natives-syntax
 
-var e = new Error();
-assertEquals('Error', e + '');
+// Test inlining of calls-as-function two levels deep.
+function f() { return 42; }
 
-e = new Error();
-e.name = e;
-e.message = e;
-e.stack = e;
-e.arguments = e;
-assertEquals(': ', e + '');
+var o = { g: function () { return f(); } };
+function main(func) {
+  var v=0;
+  for (var i=0; i<1; i++) {
+    if (func()) v = 42;
+  }
+}
 
-e = new Error();
-e.name = [ e ];
-e.message = [ e ];
-e.stack = [ e ];
-e.arguments = [ e ];
-assertEquals(': ', e + '');
+main(o.g);
+main(o.g);
+main(o.g);
+%OptimizeFunctionOnNextCall(main);
+main(o.g);
+
diff --git a/test/mjsunit/cyclic-error-to-string.js b/test/mjsunit/compiler/strict-recompile.js
similarity index 77%
copy from test/mjsunit/cyclic-error-to-string.js
copy to test/mjsunit/compiler/strict-recompile.js
index 2502b53..96e8bca 100644
--- a/test/mjsunit/cyclic-error-to-string.js
+++ b/test/mjsunit/compiler/strict-recompile.js
@@ -25,22 +25,27 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Test printing of cyclic errors which return the empty string for
-// compatibility with Safari and Firefox.
+// Flags: --allow-natives-syntax
 
-var e = new Error();
-assertEquals('Error', e + '');
+function foo() {
+  try {
+    var o = {};
+    Object.defineProperty(o, 'x', {value: 12, writable: false});
+    o.x = 13;
+  } catch(e) {
+    return true;
+  }
+  return false;
+}
 
-e = new Error();
-e.name = e;
-e.message = e;
-e.stack = e;
-e.arguments = e;
-assertEquals(': ', e + '');
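+// In sloppy mode the write to the non-writable property fails silently, so
+// foo() returns false.  The copy compiled inside strict eval throws instead,
+// both before and after optimization.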
+assertFalse(foo());
 
-e = new Error();
-e.name = [ e ];
-e.message = [ e ];
-e.stack = [ e ];
-e.arguments = [ e ];
-assertEquals(': ', e + '');
+function do_eval(str) {
+  "use strict";
+  return eval(str);
+}
+
+var eval_foo = do_eval('(' + foo + ')');
+for (var i = 0; i < 5; i++) assertTrue(eval_foo());
+%OptimizeFunctionOnNextCall(eval_foo);
+assertTrue(eval_foo());
diff --git a/test/mjsunit/debug-scopes.js b/test/mjsunit/debug-scopes.js
index 1c23b0b..0788a55 100644
--- a/test/mjsunit/debug-scopes.js
+++ b/test/mjsunit/debug-scopes.js
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -25,7 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Flags: --expose-debug-as debug
+// Flags: --expose-debug-as debug --allow-natives-syntax
 // The functions used for testing backtraces. They are at the top to make the
 // testing of source line/column easier.
 
@@ -439,6 +439,26 @@
 EndTest();
 
 
+// With block in function that is marked for optimization while being executed.
+BeginTest("With 7");
+
+function with_7() {
+  with({}) {
+    %OptimizeFunctionOnNextCall(with_7);
+    debugger;
+  }
+}
+
+listener_delegate = function(exec_state) {
+  CheckScopeChain([debug.ScopeType.With,
+                   debug.ScopeType.Local,
+                   debug.ScopeType.Global], exec_state);
+  CheckScopeContent({}, 0, exec_state);
+};
+with_7();
+EndTest();
+
+
 // Simple closure formed by returning an inner function referring to the
 // outer function's arguments.
 BeginTest("Closure 1");
@@ -950,6 +970,28 @@
 EndTest();
 
 
+// Catch block in function that is marked for optimization while being executed.
+BeginTest("Catch block 7");
+function catch_block_7() {
+  %OptimizeFunctionOnNextCall(catch_block_7);
+  try {
+    throw 'Exception';
+  } catch (e) {
+    debugger;
+  }
+};
+
+
+listener_delegate = function(exec_state) {
+  CheckScopeChain([debug.ScopeType.Catch,
+                   debug.ScopeType.Local,
+                   debug.ScopeType.Global], exec_state);
+  CheckScopeContent({e:'Exception'}, 0, exec_state);
+};
+catch_block_7();
+EndTest();
+
+
 assertEquals(begin_test_count, break_count,
              'one or more tests did not enter the debugger');
 assertEquals(begin_test_count, end_test_count,
diff --git a/test/mjsunit/debug-step-3.js b/test/mjsunit/debug-step-3.js
new file mode 100644
index 0000000..ad03667
--- /dev/null
+++ b/test/mjsunit/debug-step-3.js
@@ -0,0 +1,95 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// This test checks that full code compiled without debug break slots
+// is recompiled with debug break slots when debugging is started.
+
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+var bp;
+var done = false;
+var step_count = 0;
+var set_bp = false;
+
+// Debug event listener which steps until the global variable done is true.
+function listener(event, exec_state, event_data, data) {
+  if (event == Debug.DebugEvent.Break) {
+    if (!done) exec_state.prepareStep(Debug.StepAction.StepNext);
+    step_count++;
+  }
+};
+
+// Set the global variable state to prepare the stepping test.
+function prepare_step_test() {
+  done = false;
+  step_count = 0;
+}
+
+// Test function to step through.
+function f() {
+  var a = 0;
+  if (set_bp) { bp = Debug.setBreakPoint(f, 3); }
+  var i = 1;
+  var j = 2;
+  done = true;
+};
+
+prepare_step_test();
+f();
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+// Make f set a breakpoint with an activation on the stack.
+prepare_step_test();
+set_bp = true;
+f();
+// TODO(1782): Fix issue to bring back this assert.
+//assertEquals(4, step_count);
+Debug.clearBreakPoint(bp);
+
+// Set a breakpoint on the first var statement (line 1).
+set_bp = false;
+bp = Debug.setBreakPoint(f, 3);
+
+// Step through the function ensuring that the var statements are hit as well.
+prepare_step_test();
+f();
+// TODO(1782): Fix issue to bring back this assert.
+//assertEquals(4, step_count);
+
+// Clear the breakpoint and check that no stepping happens.
+Debug.clearBreakPoint(bp);
+prepare_step_test();
+f();
+assertEquals(0, step_count);
+
+// Get rid of the debug event listener.
+Debug.setListener(null);
diff --git a/test/mjsunit/element-kind.js b/test/mjsunit/element-kind.js
deleted file mode 100644
index 46fd8f5..0000000
--- a/test/mjsunit/element-kind.js
+++ /dev/null
@@ -1,261 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --allow-natives-syntax --smi-only-arrays
-// Test element kind of objects.
-// Since --smi-only-arrays affects builtins, its default setting at compile
-// time sticks if built with snapshot.  If --smi-only-arrays is deactivated
-// by default, only a no-snapshot build actually has smi-only arrays enabled
-// in this test case.  Depending on whether smi-only arrays are actually
-// enabled, this test takes the appropriate code path to check smi-only arrays.
-
-
-support_smi_only_arrays = %HasFastSmiOnlyElements([]);
-
-if (support_smi_only_arrays) {
-  print("Tests include smi-only arrays.");
-} else {
-  print("Tests do NOT include smi-only arrays.");
-}
-
-var element_kind = {
-  fast_smi_only_elements            :  0,
-  fast_elements                     :  1,
-  fast_double_elements              :  2,
-  dictionary_elements               :  3,
-  external_byte_elements            :  4,
-  external_unsigned_byte_elements   :  5,
-  external_short_elements           :  6,
-  external_unsigned_short_elements  :  7,
-  external_int_elements             :  8,
-  external_unsigned_int_elements    :  9,
-  external_float_elements           : 10,
-  external_double_elements          : 11,
-  external_pixel_elements           : 12
-}
-
-// We expect an object to only be of one element kind.
-function assertKind(expected, obj) {
-  if (support_smi_only_arrays) {
-    assertEquals(expected == element_kind.fast_smi_only_elements,
-                 %HasFastSmiOnlyElements(obj));
-    assertEquals(expected == element_kind.fast_elements,
-                 %HasFastElements(obj));
-  } else {
-    assertEquals(expected == element_kind.fast_elements ||
-                 expected == element_kind.fast_smi_only_elements,
-                 %HasFastElements(obj));
-  }
-  assertEquals(expected == element_kind.fast_double_elements,
-               %HasFastDoubleElements(obj));
-  assertEquals(expected == element_kind.dictionary_elements,
-               %HasDictionaryElements(obj));
-  assertEquals(expected == element_kind.external_byte_elements,
-               %HasExternalByteElements(obj));
-  assertEquals(expected == element_kind.external_unsigned_byte_elements,
-               %HasExternalUnsignedByteElements(obj));
-  assertEquals(expected == element_kind.external_short_elements,
-               %HasExternalShortElements(obj));
-  assertEquals(expected == element_kind.external_unsigned_short_elements,
-               %HasExternalUnsignedShortElements(obj));
-  assertEquals(expected == element_kind.external_int_elements,
-               %HasExternalIntElements(obj));
-  assertEquals(expected == element_kind.external_unsigned_int_elements,
-               %HasExternalUnsignedIntElements(obj));
-  assertEquals(expected == element_kind.external_float_elements,
-               %HasExternalFloatElements(obj));
-  assertEquals(expected == element_kind.external_double_elements,
-               %HasExternalDoubleElements(obj));
-  assertEquals(expected == element_kind.external_pixel_elements,
-               %HasExternalPixelElements(obj));
-  // every external kind is also an external array
-  assertEquals(expected >= element_kind.external_byte_elements,
-               %HasExternalArrayElements(obj));
-}
-
-var me = {};
-assertKind(element_kind.fast_elements, me);
-me.dance = 0xD15C0;
-me.drink = 0xC0C0A;
-assertKind(element_kind.fast_elements, me);
-
-var too = [1,2,3];
-assertKind(element_kind.fast_smi_only_elements, too);
-too.dance = 0xD15C0;
-too.drink = 0xC0C0A;
-assertKind(element_kind.fast_smi_only_elements, too);
-
-// Make sure the element kind transitions from smionly when a non-smi is stored.
-var you = new Array();
-assertKind(element_kind.fast_smi_only_elements, you);
-for (var i = 0; i < 1337; i++) {
-  var val = i;
-  if (i == 1336) {
-    assertKind(element_kind.fast_smi_only_elements, you);
-    val = new Object();
-  }
-  you[i] = val;
-}
-assertKind(element_kind.fast_elements, you);
-
-assertKind(element_kind.dictionary_elements, new Array(0xDECAF));
-
-var fast_double_array = new Array(0xDECAF);
-for (var i = 0; i < 0xDECAF; i++) fast_double_array[i] = i / 2;
-assertKind(element_kind.fast_double_elements, fast_double_array);
-
-assertKind(element_kind.external_byte_elements,           new Int8Array(9001));
-assertKind(element_kind.external_unsigned_byte_elements,  new Uint8Array(007));
-assertKind(element_kind.external_short_elements,          new Int16Array(666));
-assertKind(element_kind.external_unsigned_short_elements, new Uint16Array(42));
-assertKind(element_kind.external_int_elements,            new Int32Array(0xF));
-assertKind(element_kind.external_unsigned_int_elements,   new Uint32Array(23));
-assertKind(element_kind.external_float_elements,          new Float32Array(7));
-assertKind(element_kind.external_double_elements,         new Float64Array(0));
-assertKind(element_kind.external_pixel_elements,          new PixelArray(512));
-
-// Crankshaft support for smi-only array elements.
-function monomorphic(array) {
-  for (var i = 0; i < 3; i++) {
-    array[i] = i + 10;
-  }
-  assertKind(element_kind.fast_smi_only_elements, array);
-  for (var i = 0; i < 3; i++) {
-    var a = array[i];
-    assertEquals(i + 10, a);
-  }
-}
-var smi_only = [1, 2, 3];
-for (var i = 0; i < 3; i++) monomorphic(smi_only);
-%OptimizeFunctionOnNextCall(monomorphic);
-monomorphic(smi_only);
-function polymorphic(array, expected_kind) {
-  array[1] = 42;
-  assertKind(expected_kind, array);
-  var a = array[1];
-  assertEquals(42, a);
-}
-var smis = [1, 2, 3];
-var strings = ["one", "two", "three"];
-var doubles = [0, 0, 0]; doubles[0] = 1.5; doubles[1] = 2.5; doubles[2] = 3.5;
-assertKind(support_smi_only_arrays
-               ? element_kind.fast_double_elements
-               : element_kind.fast_elements,
-           doubles);
-for (var i = 0; i < 3; i++) {
-  polymorphic(smis, element_kind.fast_smi_only_elements);
-  polymorphic(strings, element_kind.fast_elements);
-  polymorphic(doubles, support_smi_only_arrays
-                           ? element_kind.fast_double_elements
-                           : element_kind.fast_elements);
-}
-%OptimizeFunctionOnNextCall(polymorphic);
-polymorphic(smis, element_kind.fast_smi_only_elements);
-polymorphic(strings, element_kind.fast_elements);
-polymorphic(doubles, support_smi_only_arrays
-    ? element_kind.fast_double_elements
-    : element_kind.fast_elements);
-
-// Crankshaft support for smi-only elements in dynamic array literals.
-function get(foo) { return foo; }  // Used to generate dynamic values.
-
-function crankshaft_test() {
-  var a = [get(1), get(2), get(3)];
-  assertKind(element_kind.fast_smi_only_elements, a);
-  var b = [get(1), get(2), get("three")];
-  assertKind(element_kind.fast_elements, b);
-  var c = [get(1), get(2), get(3.5)];
-  // The full code generator doesn't support conversion to fast_double_elements
-  // yet. Crankshaft does, but only with --smi-only-arrays support.
-  if ((%GetOptimizationStatus(crankshaft_test) & 1) &&
-      support_smi_only_arrays) {
-    assertKind(element_kind.fast_double_elements, c);
-  } else {
-    assertKind(element_kind.fast_elements, c);
-  }
-}
-for (var i = 0; i < 3; i++) {
-  crankshaft_test();
-}
-%OptimizeFunctionOnNextCall(crankshaft_test);
-crankshaft_test();
-
-// Elements_kind transitions for arrays.
-
-// A map can have three different elements_kind transitions: SMI->DOUBLE,
-// DOUBLE->OBJECT, and SMI->OBJECT. No matter in which order these three are
-// created, they must always end up with the same FAST map.
-
-// This test is meaningless without FAST_SMI_ONLY_ELEMENTS.
-if (support_smi_only_arrays) {
-  // Preparation: create one pair of identical objects for each case.
-  var a = [1, 2, 3];
-  var b = [1, 2, 3];
-  assertTrue(%HaveSameMap(a, b));
-  assertKind(element_kind.fast_smi_only_elements, a);
-  var c = [1, 2, 3];
-  c["case2"] = true;
-  var d = [1, 2, 3];
-  d["case2"] = true;
-  assertTrue(%HaveSameMap(c, d));
-  assertFalse(%HaveSameMap(a, c));
-  assertKind(element_kind.fast_smi_only_elements, c);
-  var e = [1, 2, 3];
-  e["case3"] = true;
-  var f = [1, 2, 3];
-  f["case3"] = true;
-  assertTrue(%HaveSameMap(e, f));
-  assertFalse(%HaveSameMap(a, e));
-  assertFalse(%HaveSameMap(c, e));
-  assertKind(element_kind.fast_smi_only_elements, e);
-  // Case 1: SMI->DOUBLE, DOUBLE->OBJECT, SMI->OBJECT.
-  a[0] = 1.5;
-  assertKind(element_kind.fast_double_elements, a);
-  a[0] = "foo";
-  assertKind(element_kind.fast_elements, a);
-  b[0] = "bar";
-  assertTrue(%HaveSameMap(a, b));
-  // Case 2: SMI->DOUBLE, SMI->OBJECT, DOUBLE->OBJECT.
-  c[0] = 1.5;
-  assertKind(element_kind.fast_double_elements, c);
-  assertFalse(%HaveSameMap(c, d));
-  d[0] = "foo";
-  assertKind(element_kind.fast_elements, d);
-  assertFalse(%HaveSameMap(c, d));
-  c[0] = "bar";
-  assertTrue(%HaveSameMap(c, d));
-  // Case 3: SMI->OBJECT, SMI->DOUBLE, DOUBLE->OBJECT.
-  e[0] = "foo";
-  assertKind(element_kind.fast_elements, e);
-  assertFalse(%HaveSameMap(e, f));
-  f[0] = 1.5;
-  assertKind(element_kind.fast_double_elements, f);
-  assertFalse(%HaveSameMap(e, f));
-  f[0] = "bar";
-  assertKind(element_kind.fast_elements, f);
-  assertTrue(%HaveSameMap(e, f));
-}
diff --git a/test/mjsunit/elements-kind.js b/test/mjsunit/elements-kind.js
new file mode 100644
index 0000000..cfd47c7
--- /dev/null
+++ b/test/mjsunit/elements-kind.js
@@ -0,0 +1,309 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --smi-only-arrays --expose-gc
+
+// Test element kind of objects.
+// Since --smi-only-arrays affects builtins, its compile-time default is baked
+// into the snapshot, so a snapshot build ignores the runtime flag.  If
+// --smi-only-arrays is off by default, only a no-snapshot build actually has
+// smi-only arrays enabled here.  The test probes which case applies and takes
+// the corresponding code path.
+
+support_smi_only_arrays = %HasFastSmiOnlyElements([]);
+
+if (support_smi_only_arrays) {
+  print("Tests include smi-only arrays.");
+} else {
+  print("Tests do NOT include smi-only arrays.");
+}
+
+var elements_kind = {
+  fast_smi_only            :  'fast smi only elements',
+  fast                     :  'fast elements',
+  fast_double              :  'fast double elements',
+  dictionary               :  'dictionary elements',
+  external_byte            :  'external byte elements',
+  external_unsigned_byte   :  'external unsigned byte elements',
+  external_short           :  'external short elements',
+  external_unsigned_short  :  'external unsigned short elements',
+  external_int             :  'external int elements',
+  external_unsigned_int    :  'external unsigned int elements',
+  external_float           :  'external float elements',
+  external_double          :  'external double elements',
+  external_pixel           :  'external pixel elements'
+}
+
+function getKind(obj) {
+  if (%HasFastSmiOnlyElements(obj)) return elements_kind.fast_smi_only;
+  if (%HasFastElements(obj)) return elements_kind.fast;
+  if (%HasFastDoubleElements(obj)) return elements_kind.fast_double;
+  if (%HasDictionaryElements(obj)) return elements_kind.dictionary;
+  // Every external kind is also an external array.
+  assertTrue(%HasExternalArrayElements(obj));
+  if (%HasExternalByteElements(obj)) {
+    return elements_kind.external_byte;
+  }
+  if (%HasExternalUnsignedByteElements(obj)) {
+    return elements_kind.external_unsigned_byte;
+  }
+  if (%HasExternalShortElements(obj)) {
+    return elements_kind.external_short;
+  }
+  if (%HasExternalUnsignedShortElements(obj)) {
+    return elements_kind.external_unsigned_short;
+  }
+  if (%HasExternalIntElements(obj)) {
+    return elements_kind.external_int;
+  }
+  if (%HasExternalUnsignedIntElements(obj)) {
+    return elements_kind.external_unsigned_int;
+  }
+  if (%HasExternalFloatElements(obj)) {
+    return elements_kind.external_float;
+  }
+  if (%HasExternalDoubleElements(obj)) {
+    return elements_kind.external_double;
+  }
+  if (%HasExternalPixelElements(obj)) {
+    return elements_kind.external_pixel;
+  }
+}
+
+function assertKind(expected, obj, name_opt) {
+  if (!support_smi_only_arrays &&
+      expected == elements_kind.fast_smi_only) {
+    expected = elements_kind.fast;
+  }
+  assertEquals(expected, getKind(obj), name_opt);
+}
+
+var me = {};
+assertKind(elements_kind.fast, me);
+me.dance = 0xD15C0;
+me.drink = 0xC0C0A;
+assertKind(elements_kind.fast, me);
+
+var too = [1,2,3];
+assertKind(elements_kind.fast_smi_only, too);
+too.dance = 0xD15C0;
+too.drink = 0xC0C0A;
+assertKind(elements_kind.fast_smi_only, too);
+
+// Make sure the element kind transitions from smi-only when a non-smi is stored.
+var you = new Array();
+assertKind(elements_kind.fast_smi_only, you);
+for (var i = 0; i < 1337; i++) {
+  var val = i;
+  if (i == 1336) {
+    assertKind(elements_kind.fast_smi_only, you);
+    val = new Object();
+  }
+  you[i] = val;
+}
+assertKind(elements_kind.fast, you);
+
+assertKind(elements_kind.dictionary, new Array(0xDECAF));
+
+var fast_double_array = new Array(0xDECAF);
+for (var i = 0; i < 0xDECAF; i++) fast_double_array[i] = i / 2;
+assertKind(elements_kind.fast_double, fast_double_array);
+
+assertKind(elements_kind.external_byte,           new Int8Array(9001));
+assertKind(elements_kind.external_unsigned_byte,  new Uint8Array(007));
+assertKind(elements_kind.external_short,          new Int16Array(666));
+assertKind(elements_kind.external_unsigned_short, new Uint16Array(42));
+assertKind(elements_kind.external_int,            new Int32Array(0xF));
+assertKind(elements_kind.external_unsigned_int,   new Uint32Array(23));
+assertKind(elements_kind.external_float,          new Float32Array(7));
+assertKind(elements_kind.external_double,         new Float64Array(0));
+assertKind(elements_kind.external_pixel,          new PixelArray(512));
+
+// Crankshaft support for smi-only array elements.
+function monomorphic(array) {
+  for (var i = 0; i < 3; i++) {
+    array[i] = i + 10;
+  }
+  assertKind(elements_kind.fast_smi_only, array);
+  for (var i = 0; i < 3; i++) {
+    var a = array[i];
+    assertEquals(i + 10, a);
+  }
+}
+var smi_only = [1, 2, 3];
+for (var i = 0; i < 3; i++) monomorphic(smi_only);
+%OptimizeFunctionOnNextCall(monomorphic);
+monomorphic(smi_only);
+
+if (support_smi_only_arrays) {
+  function construct_smis() {
+    var a = [0, 0, 0];
+    a[0] = 0;  // Send the COW array map to the steak house.
+    assertKind(elements_kind.fast_smi_only, a);
+    return a;
+  }
+  function construct_doubles() {
+    var a = construct_smis();
+    a[0] = 1.5;
+    assertKind(elements_kind.fast_double, a);
+    return a;
+  }
+  function construct_objects() {
+    var a = construct_smis();
+    a[0] = "one";
+    assertKind(elements_kind.fast, a);
+    return a;
+  }
+
+  // Test crankshafted transition SMI->DOUBLE.
+  function convert_to_double(array) {
+    array[1] = 2.5;
+    assertKind(elements_kind.fast_double, array);
+    assertEquals(2.5, array[1]);
+  }
+  var smis = construct_smis();
+  for (var i = 0; i < 3; i++) convert_to_double(smis);
+  %OptimizeFunctionOnNextCall(convert_to_double);
+  smis = construct_smis();
+  convert_to_double(smis);
+  // Test crankshafted transitions SMI->FAST and DOUBLE->FAST.
+  function convert_to_fast(array) {
+    array[1] = "two";
+    assertKind(elements_kind.fast, array);
+    assertEquals("two", array[1]);
+  }
+  smis = construct_smis();
+  for (var i = 0; i < 3; i++) convert_to_fast(smis);
+  var doubles = construct_doubles();
+  for (var i = 0; i < 3; i++) convert_to_fast(doubles);
+  smis = construct_smis();
+  doubles = construct_doubles();
+  %OptimizeFunctionOnNextCall(convert_to_fast);
+  convert_to_fast(smis);
+  convert_to_fast(doubles);
+  // Test transition chain SMI->DOUBLE->FAST (crankshafted function will
+  // transition to FAST directly).
+  function convert_mixed(array, value, kind) {
+    array[1] = value;
+    assertKind(kind, array);
+    assertEquals(value, array[1]);
+  }
+  smis = construct_smis();
+  for (var i = 0; i < 3; i++) {
+    convert_mixed(smis, 1.5, elements_kind.fast_double);
+  }
+  doubles = construct_doubles();
+  for (var i = 0; i < 3; i++) {
+    convert_mixed(doubles, "three", elements_kind.fast);
+  }
+  smis = construct_smis();
+  doubles = construct_doubles();
+  %OptimizeFunctionOnNextCall(convert_mixed);
+  convert_mixed(smis, 1, elements_kind.fast);
+  convert_mixed(doubles, 1, elements_kind.fast);
+  assertTrue(%HaveSameMap(smis, doubles));
+}
+
+// Crankshaft support for smi-only elements in dynamic array literals.
+function get(foo) { return foo; }  // Used to generate dynamic values.
+
+function crankshaft_test() {
+  var a = [get(1), get(2), get(3)];
+  assertKind(elements_kind.fast_smi_only, a);
+  var b = [get(1), get(2), get("three")];
+  assertKind(elements_kind.fast, b);
+  var c = [get(1), get(2), get(3.5)];
+  if (support_smi_only_arrays) {
+    assertKind(elements_kind.fast_double, c);
+  } else {
+    assertKind(elements_kind.fast, c);
+  }
+}
+for (var i = 0; i < 3; i++) {
+  crankshaft_test();
+}
+%OptimizeFunctionOnNextCall(crankshaft_test);
+crankshaft_test();
+
+// Elements_kind transitions for arrays.
+
+// A map can have three different elements_kind transitions: SMI->DOUBLE,
+// DOUBLE->OBJECT, and SMI->OBJECT. No matter in which order these three are
+// created, they must always end up with the same FAST map.
+
+// This test is meaningless without FAST_SMI_ONLY_ELEMENTS.
+if (support_smi_only_arrays) {
+  // Preparation: create one pair of identical objects for each case.
+  var a = [1, 2, 3];
+  var b = [1, 2, 3];
+  assertTrue(%HaveSameMap(a, b));
+  assertKind(elements_kind.fast_smi_only, a);
+  var c = [1, 2, 3];
+  c["case2"] = true;
+  var d = [1, 2, 3];
+  d["case2"] = true;
+  assertTrue(%HaveSameMap(c, d));
+  assertFalse(%HaveSameMap(a, c));
+  assertKind(elements_kind.fast_smi_only, c);
+  var e = [1, 2, 3];
+  e["case3"] = true;
+  var f = [1, 2, 3];
+  f["case3"] = true;
+  assertTrue(%HaveSameMap(e, f));
+  assertFalse(%HaveSameMap(a, e));
+  assertFalse(%HaveSameMap(c, e));
+  assertKind(elements_kind.fast_smi_only, e);
+  // Case 1: SMI->DOUBLE, DOUBLE->OBJECT, SMI->OBJECT.
+  a[0] = 1.5;
+  assertKind(elements_kind.fast_double, a);
+  a[0] = "foo";
+  assertKind(elements_kind.fast, a);
+  b[0] = "bar";
+  assertTrue(%HaveSameMap(a, b));
+  // Case 2: SMI->DOUBLE, SMI->OBJECT, DOUBLE->OBJECT.
+  c[0] = 1.5;
+  assertKind(elements_kind.fast_double, c);
+  assertFalse(%HaveSameMap(c, d));
+  d[0] = "foo";
+  assertKind(elements_kind.fast, d);
+  assertFalse(%HaveSameMap(c, d));
+  c[0] = "bar";
+  assertTrue(%HaveSameMap(c, d));
+  // Case 3: SMI->OBJECT, SMI->DOUBLE, DOUBLE->OBJECT.
+  e[0] = "foo";
+  assertKind(elements_kind.fast, e);
+  assertFalse(%HaveSameMap(e, f));
+  f[0] = 1.5;
+  assertKind(elements_kind.fast_double, f);
+  assertFalse(%HaveSameMap(e, f));
+  f[0] = "bar";
+  assertKind(elements_kind.fast, f);
+  assertTrue(%HaveSameMap(e, f));
+}
+
+// Throw away type information in the ICs for next stress run.
+gc();
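
Note on the probes used above: %HasFastSmiOnlyElements, %HasFastDoubleElements and
friends are V8 runtime functions, only available when d8 runs with
--allow-natives-syntax.  A minimal standalone sketch of the transition the test
exercises (illustrative only; whether the first kind is smi-only depends on the
support_smi_only_arrays probe above):

    // Run with: d8 --allow-natives-syntax --smi-only-arrays sketch.js
    var a = [1, 2, 3];                    // smi-only elements, when enabled
    print(%HasFastSmiOnlyElements(a));    // true on a build with smi-only arrays
    a[0] = 1.5;                           // SMI -> DOUBLE transition
    print(%HasFastDoubleElements(a));     // true
    a[0] = "foo";                         // DOUBLE -> OBJECT transition
    print(%HasFastElements(a));           // true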
diff --git a/test/mjsunit/elements-transition.js b/test/mjsunit/elements-transition.js
new file mode 100644
index 0000000..5f6cc4f
--- /dev/null
+++ b/test/mjsunit/elements-transition.js
@@ -0,0 +1,107 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --smi-only-arrays
+
+support_smi_only_arrays = %HasFastSmiOnlyElements([]);
+
+if (support_smi_only_arrays) {
+  function test(test_double, test_object, set, length) {
+    // We apply the same operations to two identical arrays.  The first array
+    // triggers an IC miss, upon which the conversion stub is generated, but the
+    // actual conversion is done in the runtime.  The second array, arriving at
+    // the previously patched IC, is then converted using the conversion stub.
+    var array_1 = new Array(length);
+    var array_2 = new Array(length);
+
+    assertTrue(%HasFastSmiOnlyElements(array_1));
+    assertTrue(%HasFastSmiOnlyElements(array_2));
+    for (var i = 0; i < length; i++) {
+      if (i == length - 5 && test_double) {
+        // Trigger conversion to fast double elements at length-5.
+        set(array_1, i, 0.5);
+        set(array_2, i, 0.5);
+        assertTrue(%HasFastDoubleElements(array_1));
+        assertTrue(%HasFastDoubleElements(array_2));
+      } else if (i == length - 3 && test_object) {
+        // Trigger conversion to fast object elements at length-3.
+        set(array_1, i, 'object');
+        set(array_2, i, 'object');
+        assertTrue(%HasFastElements(array_1));
+        assertTrue(%HasFastElements(array_2));
+      } else if (i != length - 7) {
+        // Set the element to an integer but leave a hole at length-7.
+        set(array_1, i, 2*i+1);
+        set(array_2, i, 2*i+1);
+      }
+    }
+
+    for (var i = 0; i < length; i++) {
+      if (i == length - 5 && test_double) {
+        assertEquals(0.5, array_1[i]);
+        assertEquals(0.5, array_2[i]);
+      } else if (i == length - 3 && test_object) {
+        assertEquals('object', array_1[i]);
+        assertEquals('object', array_2[i]);
+      } else if (i != length - 7) {
+        assertEquals(2*i+1, array_1[i]);
+        assertEquals(2*i+1, array_2[i]);
+      } else {
+        assertEquals(undefined, array_1[i]);
+        assertEquals(undefined, array_2[i]);
+      }
+    }
+
+    assertEquals(length, array_1.length);
+    assertEquals(length, array_2.length);
+  }
+
+  test(false, false, function(a,i,v){ a[i] = v; }, 20);
+  test(true,  false, function(a,i,v){ a[i] = v; }, 20);
+  test(false, true,  function(a,i,v){ a[i] = v; }, 20);
+  test(true,  true,  function(a,i,v){ a[i] = v; }, 20);
+
+  test(false, false, function(a,i,v){ a[i] = v; }, 10000);
+  test(true,  false, function(a,i,v){ a[i] = v; }, 10000);
+  test(false, true,  function(a,i,v){ a[i] = v; }, 10000);
+  test(true,  true,  function(a,i,v){ a[i] = v; }, 10000);
+
+  // Check COW arrays
+  function get_cow() { return [1, 2, 3]; }
+
+  function transition(x) { x[0] = 1.5; }
+
+  var ignore = get_cow();
+  transition(ignore);  // Handled by runtime.
+  var a = get_cow();
+  var b = get_cow();
+  transition(a);  // Handled by IC.
+  assertEquals(1.5, a[0]);
+  assertEquals(1, b[0]);
+} else {
+  print("Test skipped because smi only arrays are not supported.");
+}
\ No newline at end of file
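
The COW check at the end guards against a transition mutating the shared
copy-on-write backing store of array literals.  The essence of the property
being asserted, condensed from the test above (same flags assumed):

    function get_cow() { return [1, 2, 3]; }  // literals share a COW elements store
    var a = get_cow();
    var b = get_cow();
    a[0] = 1.5;     // must copy the elements before transitioning to doubles
    // a[0] is 1.5, while b[0] must still be the original smi 1.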
diff --git a/test/mjsunit/error-tostring.js b/test/mjsunit/error-tostring.js
new file mode 100644
index 0000000..a285641
--- /dev/null
+++ b/test/mjsunit/error-tostring.js
@@ -0,0 +1,85 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// Test default string representation of an Error object.
+
+var e = new Error();
+assertEquals('Error', e.toString());
+
+
+// Test printing of cyclic errors which return the empty string for
+// compatibility with Safari and Firefox.
+
+e = new Error();
+e.name = e;
+e.message = e;
+e.stack = "Does not occur in output";
+e.arguments = "Does not occur in output";
+e.type = "Does not occur in output";
+assertEquals('', e.toString());
+
+e = new Error();
+e.name = [ e ];
+e.message = [ e ];
+e.stack = "Does not occur in output";
+e.arguments = "Does not occur in output";
+e.type = "Does not occur in output";
+assertEquals('', e.toString());
+
+
+// Test the sequence in which getters and toString operations are called
+// on a given Error object.  Verify the produced string representation.
+
+function testErrorToString(nameValue, messageValue) {
+  var seq = [];
+  var e = {
+    get name() {
+      seq.push(1);
+      return (nameValue === undefined) ? nameValue : {
+        toString: function() { seq.push(2); return nameValue; }
+      };
+    },
+    get message() {
+      seq.push(3);
+      return (messageValue === undefined) ? messageValue : {
+        toString: function() { seq.push(4); return messageValue; }
+      };
+    }
+  };
+  var string = Error.prototype.toString.call(e);
+  return [string,seq];
+}
+
+assertEquals(["Error",[1,3]], testErrorToString(undefined, undefined));
+assertEquals(["e1",[1,2,3]], testErrorToString("e1", undefined));
+assertEquals(["e1: null",[1,2,3,4]], testErrorToString("e1", null));
+assertEquals(["e1",[1,2,3,4]], testErrorToString("e1", ""));
+assertEquals(["Error: e2",[1,3,4]], testErrorToString(undefined, "e2"));
+assertEquals(["null: e2",[1,2,3,4]], testErrorToString(null, "e2"));
+assertEquals(["e2",[1,2,3,4]], testErrorToString("", "e2"));
+assertEquals(["e1: e2",[1,2,3,4]], testErrorToString("e1", "e2"));
diff --git a/test/mjsunit/function-bind.js b/test/mjsunit/function-bind.js
index e9d0221..4a8f2d2 100644
--- a/test/mjsunit/function-bind.js
+++ b/test/mjsunit/function-bind.js
@@ -29,29 +29,31 @@
 
 // Simple tests.
 function foo(x, y, z) {
-  return x + y + z;
+  return [this, arguments.length, x];
 }
 
+assertEquals(3, foo.length);
+
 var f = foo.bind(foo);
-assertEquals(3, f(1, 1, 1));
+assertEquals([foo, 3, 1], f(1, 2, 3));
 assertEquals(3, f.length);
 
-f = foo.bind(foo, 2);
-assertEquals(4, f(1, 1));
+f = foo.bind(foo, 1);
+assertEquals([foo, 3, 1], f(2, 3));
 assertEquals(2, f.length);
 
-f = foo.bind(foo, 2, 2);
-assertEquals(5, f(1));
+f = foo.bind(foo, 1, 2);
+assertEquals([foo, 3, 1], f(3));
 assertEquals(1, f.length);
 
-f = foo.bind(foo, 2, 2, 2);
-assertEquals(6, f());
+f = foo.bind(foo, 1, 2, 3);
+assertEquals([foo, 3, 1], f());
 assertEquals(0, f.length);
 
 // Test that length works correctly even if more than the actual number
 // of arguments are given when binding.
 f = foo.bind(foo, 1, 2, 3, 4, 5, 6, 7, 8, 9);
-assertEquals(6, f());
+assertEquals([foo, 9, 1], f());
 assertEquals(0, f.length);
 
 // Use a different bound object.
@@ -78,65 +80,98 @@
 // When only giving the thisArg, any number of binds should have
 // the same effect.
 f = foo.bind(foo);
-assertEquals(3, f(1, 1, 1));
-f = foo.bind(foo).bind(foo).bind(foo).bind(foo);
-assertEquals(3, f(1, 1, 1));
+assertEquals([foo, 3, 1], f(1, 2, 3));
+
+var not_foo = {};
+f = foo.bind(foo).bind(not_foo).bind(not_foo).bind(not_foo);
+assertEquals([foo, 3, 1], f(1, 2, 3));
 assertEquals(3, f.length);
 
 // Giving bound parameters should work at any place in the chain.
-f = foo.bind(foo, 1).bind(foo).bind(foo).bind(foo);
-assertEquals(3, f(1, 1));
+f = foo.bind(foo, 1).bind(not_foo).bind(not_foo).bind(not_foo);
+assertEquals([foo, 3, 1], f(2, 3));
 assertEquals(2, f.length);
 
-f = foo.bind(foo).bind(foo, 1).bind(foo).bind(foo);
-assertEquals(3, f(1, 1));
+f = foo.bind(foo).bind(not_foo, 1).bind(not_foo).bind(not_foo);
+assertEquals([foo, 3, 1], f(2, 3));
 assertEquals(2, f.length);
 
-f = foo.bind(foo).bind(foo).bind(foo,1 ).bind(foo);
-assertEquals(3, f(1, 1));
+f = foo.bind(foo).bind(not_foo).bind(not_foo, 1).bind(not_foo);
+assertEquals([foo, 3, 1], f(2, 3));
 assertEquals(2, f.length);
 
-f = foo.bind(foo).bind(foo).bind(foo).bind(foo, 1);
-assertEquals(3, f(1, 1));
+f = foo.bind(foo).bind(not_foo).bind(not_foo).bind(not_foo, 1);
+assertEquals([foo, 3, 1], f(2, 3));
 assertEquals(2, f.length);
 
-// Several parameters can be given, and given in different bind invokations.
-f = foo.bind(foo, 1, 1).bind(foo).bind(foo).bind(foo);
-assertEquals(3, f(1));
+// Several parameters can be given, and given in different bind invocations.
+f = foo.bind(foo, 1, 2).bind(not_foo).bind(not_foo).bind(not_foo);
+assertEquals([foo, 3, 1], f(3));
 assertEquals(1, f.length);
 
-f = foo.bind(foo).bind(foo, 1, 1).bind(foo).bind(foo);
-assertEquals(3, f(1));
+f = foo.bind(foo).bind(not_foo, 1, 2).bind(not_foo).bind(not_foo);
+assertEquals([foo, 3, 1], f(1));
 assertEquals(1, f.length);
 
-f = foo.bind(foo).bind(foo, 1, 1).bind(foo).bind(foo);
-assertEquals(3, f(1));
+f = foo.bind(foo).bind(not_foo, 1, 2).bind(not_foo).bind(not_foo);
+assertEquals([foo, 3, 1], f(3));
 assertEquals(1, f.length);
 
-f = foo.bind(foo).bind(foo).bind(foo, 1, 1).bind(foo);
-assertEquals(3, f(1));
+f = foo.bind(foo).bind(not_foo).bind(not_foo, 1, 2).bind(not_foo);
+assertEquals([foo, 3, 1], f(1));
 assertEquals(1, f.length);
 
-f = foo.bind(foo).bind(foo).bind(foo).bind(foo, 1, 1);
-assertEquals(3, f(1));
+f = foo.bind(foo).bind(not_foo).bind(not_foo).bind(not_foo, 1, 2);
+assertEquals([foo, 3, 1], f(3));
 assertEquals(1, f.length);
 
-f = foo.bind(foo, 1).bind(foo, 1).bind(foo).bind(foo);
-assertEquals(3, f(1));
+f = foo.bind(foo, 1).bind(not_foo, 2).bind(not_foo).bind(not_foo);
+assertEquals([foo, 3, 1], f(3));
 assertEquals(1, f.length);
 
-f = foo.bind(foo, 1).bind(foo).bind(foo, 1).bind(foo);
-assertEquals(3, f(1));
+f = foo.bind(foo, 1).bind(not_foo).bind(not_foo, 2).bind(not_foo);
+assertEquals([foo, 3, 1], f(3));
 assertEquals(1, f.length);
 
-f = foo.bind(foo, 1).bind(foo).bind(foo).bind(foo, 1);
-assertEquals(3, f(1));
+f = foo.bind(foo, 1).bind(not_foo).bind(not_foo).bind(not_foo, 2);
+assertEquals([foo, 3, 1], f(3));
 assertEquals(1, f.length);
 
-f = foo.bind(foo).bind(foo, 1).bind(foo).bind(foo, 1);
-assertEquals(3, f(1));
+f = foo.bind(foo).bind(not_foo, 1).bind(not_foo).bind(not_foo, 2);
+assertEquals([foo, 3, 1], f(3));
 assertEquals(1, f.length);
 
+// The wrong number of arguments can be given to bound functions too.
+f = foo.bind(foo);
+assertEquals(3, f.length);
+assertEquals([foo, 0, undefined], f());
+assertEquals([foo, 1, 1], f(1));
+assertEquals([foo, 2, 1], f(1, 2));
+assertEquals([foo, 3, 1], f(1, 2, 3));
+assertEquals([foo, 4, 1], f(1, 2, 3, 4));
+
+f = foo.bind(foo, 1);
+assertEquals(2, f.length);
+assertEquals([foo, 1, 1], f());
+assertEquals([foo, 2, 1], f(2));
+assertEquals([foo, 3, 1], f(2, 3));
+assertEquals([foo, 4, 1], f(2, 3, 4));
+
+f = foo.bind(foo, 1, 2);
+assertEquals(1, f.length);
+assertEquals([foo, 2, 1], f());
+assertEquals([foo, 3, 1], f(3));
+assertEquals([foo, 4, 1], f(3, 4));
+
+f = foo.bind(foo, 1, 2, 3);
+assertEquals(0, f.length);
+assertEquals([foo, 3, 1], f());
+assertEquals([foo, 4, 1], f(4));
+
+f = foo.bind(foo, 1, 2, 3, 4);
+assertEquals(0, f.length);
+assertEquals([foo, 4, 1], f());
+
 // Test constructor calls.
 
 function bar(x, y, z) {
@@ -171,13 +206,91 @@
 
 
 // Test bind chains when used as a constructor.
-
 f = bar.bind(bar, 1).bind(bar, 2).bind(bar, 3);
 obj2 = new f();
 assertEquals(1, obj2.x);
 assertEquals(2, obj2.y);
 assertEquals(3, obj2.z);
 
-// Test instanceof obj2 is bar, not f.
+// Test obj2 is instanceof both bar and f.
 assertTrue(obj2 instanceof bar);
-assertFalse(obj2 instanceof f);
+assertTrue(obj2 instanceof f);
+
+// Bound this-arguments are not relevant to instanceof.
+f = bar.bind(foo.prototype, 1).
+    bind(String.prototype, 2).
+    bind(Function.prototype, 3);
+var obj3 = new f();
+assertTrue(obj3 instanceof bar);
+assertTrue(obj3 instanceof f);
+assertFalse(obj3 instanceof foo);
+assertFalse(obj3 instanceof Function);
+assertFalse(obj3 instanceof String);
+
+// thisArg is converted to object.
+f = foo.bind(undefined);
+assertEquals([this, 0, undefined], f());
+
+f = foo.bind(null);
+assertEquals([this, 0, undefined], f());
+
+f = foo.bind(42);
+assertEquals([Object(42), 0, undefined], f());
+
+f = foo.bind("foo");
+assertEquals([Object("foo"), 0, undefined], f());
+
+f = foo.bind(true);
+assertEquals([Object(true), 0, undefined], f());
+
+// Strict functions don't convert thisArg.
+function soo(x, y, z) {
+  "use strict";
+  return [this, arguments.length, x];
+}
+
+var s = soo.bind(undefined);
+assertEquals([undefined, 0, undefined], s());
+
+s = soo.bind(null);
+assertEquals([null, 0, undefined], s());
+
+s = soo.bind(42);
+assertEquals([42, 0, undefined], s());
+
+s = soo.bind("foo");
+assertEquals(["foo", 0, undefined], s());
+
+s = soo.bind(true);
+assertEquals([true, 0, undefined], s());
+
+// Test that .arguments and .caller are poisoned according to the ES5 spec.
+
+// Check that property descriptors are correct (unconfigurable, unenumerable,
+// and both get and set are the ThrowTypeError function).
+var cdesc = Object.getOwnPropertyDescriptor(f, "caller");
+var adesc = Object.getOwnPropertyDescriptor(f, "arguments");
+
+assertFalse(cdesc.enumerable);
+assertFalse(cdesc.configurable);
+
+assertFalse(adesc.enumerable);
+assertFalse(adesc.configurable);
+
+assertSame(cdesc.get, cdesc.set);
+assertSame(cdesc.get, adesc.get);
+assertSame(cdesc.get, adesc.set);
+
+assertTrue(cdesc.get instanceof Function);
+assertEquals(0, cdesc.get.length);
+assertThrows(cdesc.get, TypeError);
+
+assertThrows(function() { return f.caller; }, TypeError);
+assertThrows(function() { f.caller = 42; }, TypeError);
+assertThrows(function() { return f.arguments; }, TypeError);
+assertThrows(function() { f.arguments = 42; }, TypeError);
+
+// Shouldn't throw. Accessing the function's caller must throw if
+// the caller is strict and the callee isn't. A bound function is built-in,
+// but not considered strict.
+(function foo() { return foo.caller; }).bind()();
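
Most of the new assertions follow from the ES5.1 15.3.4.5 definition of bind:
the bound function carries the target, a fixed thisArg and pre-bound arguments,
its length is max(0, target.length - boundArgs.length), and construction and
instanceof delegate to the target, which is why obj2 is now an instance of both
bar and f.  A rough polyfill-style sketch of the call/construct behaviour
(illustrative only; it neither sets length nor poisons caller/arguments, and a
real bound function has no own prototype):

    function simpleBind(target, thisArg /*, ...boundArgs */) {
      var boundArgs = Array.prototype.slice.call(arguments, 2);
      function bound() {
        var args = boundArgs.concat(Array.prototype.slice.call(arguments));
        if (this instanceof bound) {
          // Construct: ignore thisArg and delegate to the target.
          var obj = Object.create(target.prototype);
          var result = target.apply(obj, args);
          return (result instanceof Object) ? result : obj;
        }
        return target.apply(thisArg, args);
      }
      bound.prototype = target.prototype;  // crude stand-in for [[HasInstance]]
      return bound;
    }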
diff --git a/test/mjsunit/harmony/block-conflicts.js b/test/mjsunit/harmony/block-conflicts.js
index 8b171f1..e27d6a1 100644
--- a/test/mjsunit/harmony/block-conflicts.js
+++ b/test/mjsunit/harmony/block-conflicts.js
@@ -80,6 +80,11 @@
                  "let x = function() {}",
                  "let x, y",
                  "let y, x",
+                 "const x = 0",
+                 "const x = undefined",
+                 "const x = function() {}",
+                 "const x = 2, y = 3",
+                 "const y = 4, x = 5",
                  ];
 var varbinds = [ "var x",
                  "var x = 0",
diff --git a/test/mjsunit/harmony/block-for.js b/test/mjsunit/harmony/block-for.js
new file mode 100644
index 0000000..1f68037
--- /dev/null
+++ b/test/mjsunit/harmony/block-for.js
@@ -0,0 +1,142 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-scoping
+
+function props(x) {
+  var array = [];
+  for (let p in x) array.push(p);
+  return array.sort();
+}
+
+assertEquals(0, props({}).length);
+assertEquals(1, props({x:1}).length);
+assertEquals(2, props({x:1, y:2}).length);
+
+assertArrayEquals(["x"], props({x:1}));
+assertArrayEquals(["x", "y"], props({x:1, y:2}));
+assertArrayEquals(["x", "y", "zoom"], props({x:1, y:2, zoom:3}));
+
+assertEquals(0, props([]).length);
+assertEquals(1, props([1]).length);
+assertEquals(2, props([1,2]).length);
+
+assertArrayEquals(["0"], props([1]));
+assertArrayEquals(["0", "1"], props([1,2]));
+assertArrayEquals(["0", "1", "2"], props([1,2,3]));
+
+var o = {};
+var a = [];
+let i = "outer_i";
+let s = "outer_s";
+for (let i = 0x0020; i < 0x01ff; i+=2) {
+  let s = 'char:' + String.fromCharCode(i);
+  a.push(s);
+  o[s] = i;
+}
+assertArrayEquals(a, props(o));
+assertEquals(i, "outer_i");
+assertEquals(s, "outer_s");
+
+var a = [];
+assertEquals(0, props(a).length);
+a[Math.pow(2,30)-1] = 0;
+assertEquals(1, props(a).length);
+a[Math.pow(2,31)-1] = 0;
+assertEquals(2, props(a).length);
+a[1] = 0;
+assertEquals(3, props(a).length);
+
+var result = '';
+for (let p in {a : [0], b : 1}) { result += p; }
+assertEquals('ab', result);
+
+var result = '';
+for (let p in {a : {v:1}, b : 1}) { result += p; }
+assertEquals('ab', result);
+
+var result = '';
+for (let p in { get a() {}, b : 1}) { result += p; }
+assertEquals('ab', result);
+
+var result = '';
+for (let p in { get a() {}, set a(x) {}, b : 1}) { result += p; }
+assertEquals('ab', result);
+
+
+// Check that there is exactly one variable without initializer
+// in a for-in statement with let variables.
+assertThrows("function foo() { for (let in {}) { } }", SyntaxError);
+assertThrows("function foo() { for (let x = 3 in {}) { } }", SyntaxError);
+assertThrows("function foo() { for (let x, y in {}) { } }", SyntaxError);
+assertThrows("function foo() { for (let x = 3, y in {}) { } }", SyntaxError);
+assertThrows("function foo() { for (let x, y = 4 in {}) { } }", SyntaxError);
+assertThrows("function foo() { for (let x = 3, y = 4 in {}) { } }", SyntaxError);
+
+
+// In a normal for statement the iteration variable is not
+// freshly allocated for each iteration.
+function closures1() {
+  let a = [];
+  for (let i = 0; i < 5; ++i) {
+    a.push(function () { return i; });
+  }
+  for (let j = 0; j < 5; ++j) {
+    assertEquals(5, a[j]());
+  }
+}
+closures1();
+
+
+function closures2() {
+  let a = [], b = [];
+  for (let i = 0, j = 10; i < 5; ++i, ++j) {
+    a.push(function () { return i; });
+    b.push(function () { return j; });
+  }
+  for (let k = 0; k < 5; ++k) {
+    assertEquals(5, a[k]());
+    assertEquals(15, b[k]());
+  }
+}
+closures2();
+
+
+// In a for-in statement the iteration variable is fresh
+// for each iteration.
+function closures3(x) {
+  let a = [];
+  for (let p in x) {
+    a.push(function () { return p; });
+  }
+  let k = 0;
+  for (let q in x) {
+    assertEquals(q, a[k]());
+    ++k;
+  }
+}
+closures3({a : [0], b : 1, c : {v : 1}, get d() {}, set e(x) {}});
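
The closures1/closures3 pair pins down a subtle difference in these scoping
rules: a let declared in a C-style for head is, under the semantics tested here,
a single binding shared by all iterations, while a let in a for-in head is a
fresh binding per iteration, so each closure captures its own key.  A condensed
illustration under the same --harmony-scoping assumption:

    var fs = [];
    for (let i = 0; i < 3; i++) fs.push(function () { return i; });
    // fs[0](), fs[1]() and fs[2]() all return 3 under these semantics.

    var gs = [];
    for (let p in {a: 0, b: 0}) gs.push(function () { return p; });
    // gs[0]() returns 'a' and gs[1]() returns 'b'.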
diff --git a/test/mjsunit/harmony/block-let-declaration.js b/test/mjsunit/harmony/block-let-declaration.js
index 7f3264f..a1acc28 100644
--- a/test/mjsunit/harmony/block-let-declaration.js
+++ b/test/mjsunit/harmony/block-let-declaration.js
@@ -32,15 +32,18 @@
 // Global
 let x;
 let y = 2;
+const z = 4;
 
 // Block local
 {
   let y;
   let x = 3;
+  const z = 5;
 }
 
 assertEquals(undefined, x);
 assertEquals(2,y);
+assertEquals(4,z);
 
 if (true) {
   let y;
@@ -58,7 +61,7 @@
   assertDoesNotThrow("(function(){" + str + "})()");
 }
 
-// Test let declarations statement positions.
+// Test let declarations in statement positions.
 TestLocalThrows("if (true) let x;", SyntaxError);
 TestLocalThrows("if (true) {} else let x;", SyntaxError);
 TestLocalThrows("do let x; while (false)", SyntaxError);
@@ -68,7 +71,32 @@
 TestLocalThrows("switch (true) { case true: let x; }", SyntaxError);
 TestLocalThrows("switch (true) { default: let x; }", SyntaxError);
 
-// Test var declarations statement positions.
+// Test const declarations with initialisers in statement positions.
+TestLocalThrows("if (true) const x = 1;", SyntaxError);
+TestLocalThrows("if (true) {} else const x = 1;", SyntaxError);
+TestLocalThrows("do const x = 1; while (false)", SyntaxError);
+TestLocalThrows("while (false) const x = 1;", SyntaxError);
+TestLocalThrows("label: const x = 1;", SyntaxError);
+TestLocalThrows("for (;false;) const x = 1;", SyntaxError);
+TestLocalThrows("switch (true) { case true: const x = 1; }", SyntaxError);
+TestLocalThrows("switch (true) { default: const x = 1; }", SyntaxError);
+
+// Test const declarations without initialisers.
+TestLocalThrows("const x;", SyntaxError);
+TestLocalThrows("const x = 1, y;", SyntaxError);
+TestLocalThrows("const x, y = 1;", SyntaxError);
+
+// Test const declarations without initialisers in statement positions.
+TestLocalThrows("if (true) const x;", SyntaxError);
+TestLocalThrows("if (true) {} else const x;", SyntaxError);
+TestLocalThrows("do const x; while (false)", SyntaxError);
+TestLocalThrows("while (false) const x;", SyntaxError);
+TestLocalThrows("label: const x;", SyntaxError);
+TestLocalThrows("for (;false;) const x;", SyntaxError);
+TestLocalThrows("switch (true) { case true: const x; }", SyntaxError);
+TestLocalThrows("switch (true) { default: const x; }", SyntaxError);
+
+// Test var declarations in statement positions.
 TestLocalDoesNotThrow("if (true) var x;");
 TestLocalDoesNotThrow("if (true) {} else var x;");
 TestLocalDoesNotThrow("do var x; while (false)");
@@ -93,24 +121,15 @@
   {
     function g1() { }
   }
-  // Non-strict statement positions.
-  if (true) function g2() { }
-  if (true) {} else function g3() { }
-  do function g4() { } while (false)
-  while (false) function g5() { }
-  label: function g6() { }
-  for (;false;) function g7() { }
-  switch (true) { case true: function g8() { } }
-  switch (true) { default: function g9() { } }
 }
 f();
 
 // Test function declarations in statement position in strict mode.
-TestLocalThrows("function f() { 'use strict'; if (true) function g() {}", SyntaxError);
-TestLocalThrows("function f() { 'use strict'; if (true) {} else function g() {}", SyntaxError);
-TestLocalThrows("function f() { 'use strict'; do function g() {} while (false)", SyntaxError);
-TestLocalThrows("function f() { 'use strict'; while (false) function g() {}", SyntaxError);
-TestLocalThrows("function f() { 'use strict'; label: function g() {}", SyntaxError);
-TestLocalThrows("function f() { 'use strict'; for (;false;) function g() {}", SyntaxError);
-TestLocalThrows("function f() { 'use strict'; switch (true) { case true: function g() {} }", SyntaxError);
-TestLocalThrows("function f() { 'use strict'; switch (true) { default: function g() {} }", SyntaxError);
+TestLocalThrows("function f() { if (true) function g() {}", SyntaxError);
+TestLocalThrows("function f() { if (true) {} else function g() {}", SyntaxError);
+TestLocalThrows("function f() { do function g() {} while (false)", SyntaxError);
+TestLocalThrows("function f() { while (false) function g() {}", SyntaxError);
+TestLocalThrows("function f() { label: function g() {}", SyntaxError);
+TestLocalThrows("function f() { for (;false;) function g() {}", SyntaxError);
+TestLocalThrows("function f() { switch (true) { case true: function g() {} }", SyntaxError);
+TestLocalThrows("function f() { switch (true) { default: function g() {} }", SyntaxError);
diff --git a/test/mjsunit/harmony/block-let-semantics.js b/test/mjsunit/harmony/block-let-semantics.js
index 94020a4..f45b72f 100644
--- a/test/mjsunit/harmony/block-let-semantics.js
+++ b/test/mjsunit/harmony/block-let-semantics.js
@@ -61,6 +61,7 @@
 TestAll('let x = x += 1');
 TestAll('let x = x++');
 TestAll('let x = ++x');
+TestAll('const x = x + 1');
 
 // Use before initialization in prior statement.
 TestAll('x + 1; let x;');
@@ -68,18 +69,21 @@
 TestAll('x += 1; let x;');
 TestAll('++x; let x;');
 TestAll('x++; let x;');
+TestAll('let y = x; const x = 1;');
 
 TestAll('f(); let x; function f() { return x + 1; }');
 TestAll('f(); let x; function f() { x = 1; }');
 TestAll('f(); let x; function f() { x += 1; }');
 TestAll('f(); let x; function f() { ++x; }');
 TestAll('f(); let x; function f() { x++; }');
+TestAll('f(); const x = 1; function f() { return x; }');
 
 TestAll('f()(); let x; function f() { return function() { return x + 1; } }');
 TestAll('f()(); let x; function f() { return function() { x = 1; } }');
 TestAll('f()(); let x; function f() { return function() { x += 1; } }');
 TestAll('f()(); let x; function f() { return function() { ++x; } }');
 TestAll('f()(); let x; function f() { return function() { x++; } }');
+TestAll('f()(); const x = 1; function f() { return function() { return x; } }');
 
 // Use before initialization with a dynamic lookup.
 TestAll('eval("x + 1;"); let x;');
@@ -87,6 +91,7 @@
 TestAll('eval("x += 1;"); let x;');
 TestAll('eval("++x;"); let x;');
 TestAll('eval("x++;"); let x;');
+TestAll('eval("x"); const x = 1;');
 
 // Use before initialization with check for eval-shadowed bindings.
 TestAll('function f() { eval("var y = 2;"); x + 1; }; f(); let x;');
@@ -139,10 +144,31 @@
     function h() {
       return b + c;
     }
-    let b = 3;
+    let c = 3;
   }
   assertEquals(5, n());
+
+  {
+    o = i;
+    function i() {
+      return d;
+    }
+    let d = 4;
+  }
+  assertEquals(4, o());
+
+  try {
+    throw 5;
+  } catch(e) {
+    p = j;
+    function j() {
+      return e + f;
+    }
+    let f = 6;
+  }
+  assertEquals(11, p());
 }
+f2();
 
 // Test that resolution of let bound variables works with scopes that call eval.
 function outer() {
diff --git a/test/mjsunit/harmony/block-scoping.js b/test/mjsunit/harmony/block-scoping.js
index c70b3b6..0d0526a 100644
--- a/test/mjsunit/harmony/block-scoping.js
+++ b/test/mjsunit/harmony/block-scoping.js
@@ -44,12 +44,16 @@
 function f2(one) {
   var x = one + 1;
   let y = one + 2;
+  const u = one + 4;
   {
     let z = one + 3;
+    const v = one + 5;
     assertEquals(1, eval('one'));
     assertEquals(2, eval('x'));
     assertEquals(3, eval('y'));
     assertEquals(4, eval('z'));
+    assertEquals(5, eval('u'));
+    assertEquals(6, eval('v'));
   }
 }
 f2(1);
@@ -59,12 +63,17 @@
 function f3(one) {
   var x = one + 1;
   let y = one + 2;
+  const u = one + 4;
   {
     let z = one + 3;
+    const v = one + 5;
     assertEquals(1, one);
     assertEquals(2, x);
     assertEquals(3, y);
     assertEquals(4, z);
+    assertEquals(5, u);
+    assertEquals(6, v);
+
   }
 }
 f3(1);
@@ -74,13 +83,17 @@
 function f4(one) {
   var x = one + 1;
   let y = one + 2;
+  const u = one + 4;
   {
     let z = one + 3;
+    const v = one + 5;
     function f() {
       assertEquals(1, eval('one'));
       assertEquals(2, eval('x'));
       assertEquals(3, eval('y'));
       assertEquals(4, eval('z'));
+      assertEquals(5, eval('u'));
+      assertEquals(6, eval('v'));
     };
   }
 }
@@ -91,13 +104,17 @@
 function f5(one) {
   var x = one + 1;
   let y = one + 2;
+  const u = one + 4;
   {
     let z = one + 3;
+    const v = one + 5;
     function f() {
       assertEquals(1, one);
       assertEquals(2, x);
       assertEquals(3, y);
       assertEquals(4, z);
+      assertEquals(5, u);
+      assertEquals(6, v);
     };
   }
 }
@@ -107,8 +124,10 @@
 // Return from block.
 function f6() {
   let x = 1;
+  const u = 3;
   {
     let y = 2;
+    const v = 4;
     return x + y;
   }
 }
@@ -120,13 +139,26 @@
   let b = 1;
   var c = 1;
   var d = 1;
-  { // let variables shadowing argument, let and var variables
+  const e = 1;
+  { // let variables shadowing argument, let, const and var variables
     let a = 2;
     let b = 2;
     let c = 2;
+    let e = 2;
     assertEquals(2,a);
     assertEquals(2,b);
     assertEquals(2,c);
+    assertEquals(2,e);
+  }
+  { // const variables shadowing argument, let, const and var variables
+    const a = 2;
+    const b = 2;
+    const c = 2;
+    const e = 2;
+    assertEquals(2,a);
+    assertEquals(2,b);
+    assertEquals(2,c);
+    assertEquals(2,e);
   }
   try {
     throw 'stuff1';
@@ -156,6 +188,12 @@
   } catch (c) {
     // catch variable shadowing var variable
     assertEquals('stuff3',c);
+    {
+      // const variable shadowing catch variable
+      const c = 3;
+      assertEquals(3,c);
+    }
+    assertEquals('stuff3',c);
     try {
       throw 'stuff4';
     } catch(c) {
@@ -178,14 +216,16 @@
     c = 2;
   }
   assertEquals(1,c);
-  (function(a,b,c) {
-    // arguments shadowing argument, let and var variable
+  (function(a,b,c,e) {
+    // arguments shadowing argument, let, const and var variable
     a = 2;
     b = 2;
     c = 2;
+    e = 2;
     assertEquals(2,a);
     assertEquals(2,b);
     assertEquals(2,c);
+    assertEquals(2,e);
     // var variable shadowing var variable
     var d = 2;
   })(1,1);
@@ -193,24 +233,30 @@
   assertEquals(1,b);
   assertEquals(1,c);
   assertEquals(1,d);
+  assertEquals(1,e);
 }
 f7(1);
 
 
-// Ensure let variables are block local and var variables function local.
+// Ensure let and const variables are block local
+// and var variables function local.
 function f8() {
   var let_accessors = [];
   var var_accessors = [];
+  var const_accessors = [];
   for (var i = 0; i < 10; i++) {
     let x = i;
     var y = i;
+    const z = i;
     let_accessors[i] = function() { return x; }
     var_accessors[i] = function() { return y; }
+    const_accessors[i] = function() { return z; }
   }
   for (var j = 0; j < 10; j++) {
     y = j + 10;
     assertEquals(j, let_accessors[j]());
     assertEquals(y, var_accessors[j]());
+    assertEquals(j, const_accessors[j]());
   }
 }
 f8();
diff --git a/test/mjsunit/harmony/collections.js b/test/mjsunit/harmony/collections.js
new file mode 100644
index 0000000..1ad1c6f
--- /dev/null
+++ b/test/mjsunit/harmony/collections.js
@@ -0,0 +1,273 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-collections --expose-gc
+
+
+// Test valid getter and setter calls on Sets.
+function TestValidSetCalls(m) {
+  assertDoesNotThrow(function () { m.add(new Object) });
+  assertDoesNotThrow(function () { m.has(new Object) });
+  assertDoesNotThrow(function () { m.delete(new Object) });
+}
+TestValidSetCalls(new Set);
+
+
+// Test valid getter and setter calls on Maps and WeakMaps
+function TestValidMapCalls(m) {
+  assertDoesNotThrow(function () { m.get(new Object) });
+  assertDoesNotThrow(function () { m.set(new Object) });
+  assertDoesNotThrow(function () { m.has(new Object) });
+  assertDoesNotThrow(function () { m.delete(new Object) });
+}
+TestValidMapCalls(new Map);
+TestValidMapCalls(new WeakMap);
+
+
+// Test invalid getter and setter calls for WeakMap only
+function TestInvalidCalls(m) {
+  assertThrows(function () { m.get(undefined) }, TypeError);
+  assertThrows(function () { m.set(undefined, 0) }, TypeError);
+  assertThrows(function () { m.get(0) }, TypeError);
+  assertThrows(function () { m.set(0, 0) }, TypeError);
+  assertThrows(function () { m.get('a-key') }, TypeError);
+  assertThrows(function () { m.set('a-key', 0) }, TypeError);
+}
+TestInvalidCalls(new WeakMap);
+
+
+// Test expected behavior for Sets
+function TestSet(set, key) {
+  assertFalse(set.has(key));
+  set.add(key);
+  assertTrue(set.has(key));
+  set.delete(key);
+  assertFalse(set.has(key));
+}
+function TestSetBehavior(set) {
+  for (i = 0; i < 20; i++) {
+    TestSet(set, new Object);
+  }
+}
+TestSet(new Set, 23);
+TestSet(new Set, 'foo');
+TestSetBehavior(new Set);
+
+
+// Test expected mapping behavior for Maps and WeakMaps
+function TestMapping(map, key, value) {
+  map.set(key, value);
+  assertSame(value, map.get(key));
+}
+function TestMapBehavior1(m) {
+  TestMapping(m, new Object, 23);
+  TestMapping(m, new Object, 'the-value');
+  TestMapping(m, new Object, new Object);
+}
+TestMapBehavior1(new Map);
+TestMapBehavior1(new WeakMap);
+
+
+// Test expected mapping behavior for Maps only
+function TestMapBehavior2(m) {
+  for (var i = 0; i < 20; i++) {
+    TestMapping(m, i, new Object);
+    TestMapping(m, i / 10, new Object);
+    TestMapping(m, 'key-' + i, new Object);
+  }
+  var keys = [ +0, -0, +Infinity, -Infinity, true, false ];
+  for (var i = 0; i < keys.length; i++) {
+    TestMapping(m, keys[i], new Object);
+  }
+}
+TestMapBehavior2(new Map);
+
+
+// Test expected querying behavior of Maps and WeakMaps
+function TestQuery(m) {
+  var key = new Object;
+  TestMapping(m, key, 'to-be-present');
+  assertTrue(m.has(key));
+  assertFalse(m.has(new Object));
+  TestMapping(m, key, undefined);
+  assertFalse(m.has(key));
+  assertFalse(m.has(new Object));
+}
+TestQuery(new Map);
+TestQuery(new WeakMap);
+
+
+// Test expected deletion behavior of Maps and WeakMaps
+function TestDelete(m) {
+  var key = new Object;
+  TestMapping(m, key, 'to-be-deleted');
+  assertTrue(m.delete(key));
+  assertFalse(m.delete(key));
+  assertFalse(m.delete(new Object));
+  assertSame(m.get(key), undefined);
+}
+TestDelete(new Map);
+TestDelete(new WeakMap);
+
+
+// Test GC of Maps and WeakMaps with entry
+function TestGC1(m) {
+  var key = new Object;
+  m.set(key, 'not-collected');
+  gc();
+  assertSame('not-collected', m.get(key));
+}
+TestGC1(new Map);
+TestGC1(new WeakMap);
+
+
+// Test GC of Maps and WeakMaps with chained entries
+function TestGC2(m) {
+  var head = new Object;
+  for (key = head, i = 0; i < 10; i++, key = m.get(key)) {
+    m.set(key, new Object);
+  }
+  gc();
+  var count = 0;
+  for (key = head; key != undefined; key = m.get(key)) {
+    count++;
+  }
+  assertEquals(11, count);
+}
+TestGC2(new Map);
+TestGC2(new WeakMap);
+
+
+// Test property attribute [[Enumerable]]
+function TestEnumerable(func) {
+  function props(x) {
+    var array = [];
+    for (var p in x) array.push(p);
+    return array.sort();
+  }
+  assertArrayEquals([], props(func));
+  assertArrayEquals([], props(func.prototype));
+  assertArrayEquals([], props(new func()));
+}
+TestEnumerable(Set);
+TestEnumerable(Map);
+TestEnumerable(WeakMap);
+
+
+// Test arbitrary properties on Maps and WeakMaps
+function TestArbitrary(m) {
+  function TestProperty(map, property, value) {
+    map[property] = value;
+    assertEquals(value, map[property]);
+  }
+  for (i = 0; i < 20; i++) {
+    TestProperty(m, i, 'val' + i);
+    TestProperty(m, 'foo' + i, 'bar' + i);
+  }
+  TestMapping(m, new Object, 'foobar');
+}
+TestArbitrary(new Map);
+TestArbitrary(new WeakMap);
+
+
+// Test direct constructor call
+assertTrue(Set() instanceof Set);
+assertTrue(Map() instanceof Map);
+assertTrue(WeakMap() instanceof WeakMap);
+
+
+// Test whether NaN values as keys are treated correctly.
+var s = new Set;
+assertFalse(s.has(NaN));
+assertFalse(s.has(NaN + 1));
+assertFalse(s.has(23));
+s.add(NaN);
+assertTrue(s.has(NaN));
+assertTrue(s.has(NaN + 1));
+assertFalse(s.has(23));
+var m = new Map;
+assertFalse(m.has(NaN));
+assertFalse(m.has(NaN + 1));
+assertFalse(m.has(23));
+m.set(NaN, 'a-value');
+assertTrue(m.has(NaN));
+assertTrue(m.has(NaN + 1));
+assertFalse(m.has(23));
+
+
+// Test some common JavaScript idioms for Sets
+var s = new Set;
+assertTrue(s instanceof Set);
+assertTrue(Set.prototype.add instanceof Function)
+assertTrue(Set.prototype.has instanceof Function)
+assertTrue(Set.prototype.delete instanceof Function)
+
+
+// Test some common JavaScript idioms for Maps
+var m = new Map;
+assertTrue(m instanceof Map);
+assertTrue(Map.prototype.set instanceof Function)
+assertTrue(Map.prototype.get instanceof Function)
+assertTrue(Map.prototype.has instanceof Function)
+assertTrue(Map.prototype.delete instanceof Function)
+
+
+// Test some common JavaScript idioms for WeakMaps
+var m = new WeakMap;
+assertTrue(m instanceof WeakMap);
+assertTrue(WeakMap.prototype.set instanceof Function)
+assertTrue(WeakMap.prototype.get instanceof Function)
+assertTrue(WeakMap.prototype.has instanceof Function)
+assertTrue(WeakMap.prototype.delete instanceof Function)
+
+
+// Regression test for WeakMap prototype.
+assertTrue(WeakMap.prototype.constructor === WeakMap)
+assertTrue(Object.getPrototypeOf(WeakMap.prototype) === Object.prototype)
+
+
+// Regression test for issue 1617: The prototype of the WeakMap constructor
+// needs to be unique (i.e. different from the one of the Object constructor).
+assertFalse(WeakMap.prototype === Object.prototype);
+var o = Object.create({});
+assertFalse("get" in o);
+assertFalse("set" in o);
+assertEquals(undefined, o.get);
+assertEquals(undefined, o.set);
+var o = Object.create({}, { myValue: {
+  value: 10,
+  enumerable: false,
+  configurable: true,
+  writable: true
+}});
+assertEquals(10, o.myValue);
+
+
+// Stress Test
+// There is a proposed stress-test available at the es-discuss mailing list
+// which cannot be reasonably automated.  Check it out by hand if you like:
+// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html
\ No newline at end of file
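A hedged aside on the NaN assertions above (not part of the committed test): Set and Map compare keys with an equality under which NaN matches itself, unlike ===, and NaN + 1 still evaluates to NaN, which is why both lookups succeed. A minimal sketch, assuming a shell with --harmony-collections:

  var s = new Set();
  s.add(NaN);
  NaN === NaN;     // false
  s.has(NaN);      // true: key comparison treats NaN as equal to NaN
  s.has(NaN + 1);  // true: NaN + 1 is still NaN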
diff --git a/test/mjsunit/harmony/debug-blockscopes.js b/test/mjsunit/harmony/debug-blockscopes.js
index 020f527..4c49d9a 100644
--- a/test/mjsunit/harmony/debug-blockscopes.js
+++ b/test/mjsunit/harmony/debug-blockscopes.js
@@ -464,3 +464,112 @@
 };
 closure_1(1)();
 EndTest();
+
+
+// Simple for-in loop over the keys of an object.
+BeginTest("For loop 1");
+
+function for_loop_1() {
+  for (let x in {y:undefined}) {
+    debugger;
+  }
+}
+
+listener_delegate = function(exec_state) {
+  CheckScopeChain([debug.ScopeType.Block,
+                   debug.ScopeType.Local,
+                   debug.ScopeType.Global], exec_state);
+  CheckScopeContent({x:'y'}, 0, exec_state);
+  // The function scope contains a temporary iteration variable.
+  CheckScopeContent({x:'y'}, 1, exec_state);
+};
+for_loop_1();
+EndTest();
+
+
+// For-in loop over the keys of an object with a block scoped let variable
+// shadowing the iteration variable.
+BeginTest("For loop 2");
+
+function for_loop_2() {
+  for (let x in {y:undefined}) {
+    let x = 3;
+    debugger;
+  }
+}
+
+listener_delegate = function(exec_state) {
+  CheckScopeChain([debug.ScopeType.Block,
+                   debug.ScopeType.Block,
+                   debug.ScopeType.Local,
+                   debug.ScopeType.Global], exec_state);
+  CheckScopeContent({x:3}, 0, exec_state);
+  CheckScopeContent({x:'y'}, 1, exec_state);
+  // The function scope contains a temporary iteration variable.
+  CheckScopeContent({x:'y'}, 2, exec_state);
+};
+for_loop_2();
+EndTest();
+
+
+// Simple for loop.
+BeginTest("For loop 3");
+
+function for_loop_3() {
+  for (let x = 3; x < 4; ++x) {
+    debugger;
+  }
+}
+
+listener_delegate = function(exec_state) {
+  CheckScopeChain([debug.ScopeType.Block,
+                   debug.ScopeType.Local,
+                   debug.ScopeType.Global], exec_state);
+  CheckScopeContent({x:3}, 0, exec_state);
+  CheckScopeContent({}, 1, exec_state);
+};
+for_loop_3();
+EndTest();
+
+
+// For loop with a block scoped let variable shadowing the iteration variable.
+BeginTest("For loop 4");
+
+function for_loop_4() {
+  for (let x = 3; x < 4; ++x) {
+    let x = 5;
+    debugger;
+  }
+}
+
+listener_delegate = function(exec_state) {
+  CheckScopeChain([debug.ScopeType.Block,
+                   debug.ScopeType.Block,
+                   debug.ScopeType.Local,
+                   debug.ScopeType.Global], exec_state);
+  CheckScopeContent({x:5}, 0, exec_state);
+  CheckScopeContent({x:3}, 1, exec_state);
+  CheckScopeContent({}, 2, exec_state);
+};
+for_loop_4();
+EndTest();
+
+
+// For loop with two variable declarations.
+BeginTest("For loop 5");
+
+function for_loop_5() {
+  for (let x = 3, y = 5; x < 4; ++x) {
+    debugger;
+  }
+}
+
+listener_delegate = function(exec_state) {
+  CheckScopeChain([debug.ScopeType.Block,
+                   debug.ScopeType.Local,
+                   debug.ScopeType.Global], exec_state);
+  CheckScopeContent({x:3,y:5}, 0, exec_state);
+  CheckScopeContent({}, 1, exec_state);
+};
+for_loop_5();
+EndTest();
diff --git a/test/mjsunit/harmony/proxies-for.js b/test/mjsunit/harmony/proxies-for.js
new file mode 100644
index 0000000..3d419c6
--- /dev/null
+++ b/test/mjsunit/harmony/proxies-for.js
@@ -0,0 +1,168 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-proxies
+
+
+// Helper.
+
+function TestWithProxies(test, x, y, z) {
+  test(Proxy.create, x, y, z)
+  test(function(h) {return Proxy.createFunction(h, function() {})}, x, y, z)
+}
+
+
+// Iterate over a proxy.
+
+function TestForIn(properties, handler) {
+  TestWithProxies(TestForIn2, properties, handler)
+}
+
+function TestForIn2(create, properties, handler) {
+  var p = create(handler)
+  var found = []
+  for (var x in p) found.push(x)
+  assertArrayEquals(properties, found)
+}
+
+TestForIn(["0", "a"], {
+  enumerate: function() { return [0, "a"] }
+})
+
+TestForIn(["null", "a"], {
+  enumerate: function() { return this.enumerate2() },
+  enumerate2: function() { return [null, "a"] }
+})
+
+TestForIn(["b", "d"], {
+  getPropertyNames: function() { return ["a", "b", "c", "d", "e"] },
+  getPropertyDescriptor: function(k) {
+    switch (k) {
+      case "a": return {enumerable: false, value: "3"};
+      case "b": return {enumerable: true, get get() {}};
+      case "c": return {value: 4};
+      case "d": return {get enumerable() { return true }};
+      default: return undefined;
+    }
+  }
+})
+
+TestForIn(["b", "a", "0", "c"], Proxy.create({
+  get: function(pr, pk) {
+    return function() { return ["b", "a", 0, "c"] }
+  }
+}))
+
+
+
+// Iterate over an object with a proxy prototype.
+
+function TestForInDerived(properties, handler) {
+  TestWithProxies(TestForInDerived2, properties, handler)
+}
+
+function TestForInDerived2(create, properties, handler) {
+  var p = create(handler)
+  var o = Object.create(p)
+  o.z = 0
+  var found = []
+  for (var x in o) found.push(x)
+  assertArrayEquals(["z"].concat(properties), found)
+
+  var oo = Object.create(o)
+  oo.y = 0
+  var found = []
+  for (var x in oo) found.push(x)
+  assertArrayEquals(["y", "z"].concat(properties), found)
+}
+
+TestForInDerived(["0", "a"], {
+  enumerate: function() { return [0, "a"] },
+  getPropertyDescriptor: function(k) {
+    return k == "0" || k == "a" ? {} : undefined
+  }
+})
+
+TestForInDerived(["null", "a"], {
+  enumerate: function() { return this.enumerate2() },
+  enumerate2: function() { return [null, "a"] },
+  getPropertyDescriptor: function(k) {
+    return k == "null" || k == "a" ? {} : undefined
+  }
+})
+
+TestForInDerived(["b", "d"], {
+  getPropertyNames: function() { return ["a", "b", "c", "d", "e"] },
+  getPropertyDescriptor: function(k) {
+    switch (k) {
+      case "a": return {enumerable: false, value: "3"};
+      case "b": return {enumerable: true, get get() {}};
+      case "c": return {value: 4};
+      case "d": return {get enumerable() { return true }};
+      default: return undefined;
+    }
+  }
+})
+
+
+
+// Throw exception in enumerate trap.
+
+function TestForInThrow(handler) {
+  TestWithProxies(TestForInThrow2, handler)
+}
+
+function TestForInThrow2(create, handler) {
+  var p = create(handler)
+  var o = Object.create(p)
+  assertThrows(function(){ for (var x in p) {} }, "myexn")
+  assertThrows(function(){ for (var x in o) {} }, "myexn")
+}
+
+TestForInThrow({
+  enumerate: function() { throw "myexn" }
+})
+
+TestForInThrow({
+  enumerate: function() { return this.enumerate2() },
+  enumerate2: function() { throw "myexn" }
+})
+
+TestForInThrow({
+  getPropertyNames: function() { throw "myexn" }
+})
+
+TestForInThrow({
+  getPropertyNames: function() { return ["a"] },
+  getPropertyDescriptor: function() { throw "myexn" }
+})
+
+TestForInThrow(Proxy.create({
+  get: function(pr, pk) {
+    return function() { throw "myexn" }
+  }
+}))
diff --git a/test/mjsunit/harmony/proxies-function.js b/test/mjsunit/harmony/proxies-function.js
index 541bca8..6a88d19 100644
--- a/test/mjsunit/harmony/proxies-function.js
+++ b/test/mjsunit/harmony/proxies-function.js
@@ -38,6 +38,13 @@
 }
 
 
+// Ensures that checking the "length" property of a function proxy doesn't
+// crash due to the lack of a [[Get]] method.
+var handler = {
+  get : function(r, n) { return n == "length" ? 2 : undefined }
+}
+
+
 // Calling (call, Function.prototype.call, Function.prototype.apply,
 //          Function.prototype.bind).
 
@@ -46,81 +53,167 @@
 
 function TestCall(isStrict, callTrap) {
   assertEquals(42, callTrap(5, 37))
-  // TODO(rossberg): unrelated bug: this does not succeed for optimized code:
-  // assertEquals(isStrict ? undefined : global_object, receiver)
+  assertEquals(isStrict ? undefined : global_object, receiver)
 
-  var f = Proxy.createFunction({}, callTrap)
+  var handler = {
+    get: function(r, k) {
+      return k == "length" ? 2 : Function.prototype[k]
+    }
+  }
+  var f = Proxy.createFunction(handler, callTrap)
+
   receiver = 333
   assertEquals(42, f(11, 31))
   assertEquals(isStrict ? undefined : global_object, receiver)
-  var o = {}
+  var o = {f: f}
+  receiver = 333
+  assertEquals(42, o.f(10, 32))
+  assertSame(o, receiver)
+  receiver = 333
+  assertEquals(42, o["f"](9, 33))
+  assertSame(o, receiver)
+  receiver = 333
+  assertEquals(42, (1, o).f(8, 34))
+  assertSame(o, receiver)
+  receiver = 333
+  assertEquals(42, (1, o)["f"](7, 35))
+  assertSame(o, receiver)
+  receiver = 333
+  assertEquals(42, f.call(o, 32, 10))
+  assertSame(o, receiver)
+  receiver = 333
+  assertEquals(42, f.call(null, 33, 9))
+  assertSame(isStrict ? null : global_object, receiver)
+  receiver = 333
+  assertEquals(44, f.call(2, 21, 23))
+  assertSame(2, receiver.valueOf())
+  receiver = 333
   assertEquals(42, Function.prototype.call.call(f, o, 20, 22))
-  assertEquals(o, receiver)
+  assertSame(o, receiver)
+  receiver = 333
   assertEquals(43, Function.prototype.call.call(f, null, 20, 23))
-  assertEquals(isStrict ? null : global_object, receiver)
+  assertSame(isStrict ? null : global_object, receiver)
   assertEquals(44, Function.prototype.call.call(f, 2, 21, 23))
   assertEquals(2, receiver.valueOf())
   receiver = 333
+  assertEquals(32, f.apply(o, [16, 16]))
+  assertSame(o, receiver)
+  receiver = 333
   assertEquals(32, Function.prototype.apply.call(f, o, [17, 15]))
-  assertEquals(o, receiver)
+  assertSame(o, receiver)
+
   var ff = Function.prototype.bind.call(f, o, 12)
+  assertTrue(ff.length <= 1)  // TODO(rossberg): Not spec'ed yet, be lax.
   receiver = 333
   assertEquals(42, ff(30))
-  assertEquals(o, receiver)
+  assertSame(o, receiver)
   receiver = 333
   assertEquals(32, Function.prototype.apply.call(ff, {}, [20]))
-  assertEquals(o, receiver)
+  assertSame(o, receiver)
+
+  var fff = Function.prototype.bind.call(ff, o, 30)
+  assertEquals(0, fff.length)
+  receiver = 333
+  assertEquals(42, fff())
+  assertSame(o, receiver)
+  receiver = 333
+  assertEquals(42, Function.prototype.call.call(fff, {}))
+  assertSame(o, receiver)
 
   var f = CreateFrozen({}, callTrap)
   receiver = 333
   assertEquals(42, f(11, 31))
-  // TODO(rossberg): unrelated bug: this does not succeed for optimized code.
-  // assertEquals(isStrict ? undefined : global, receiver)
+  assertSame(isStrict ? undefined : global_object, receiver)
+  var o = {f: f}
+  receiver = 333
+  assertEquals(42, o.f(10, 32))
+  assertSame(o, receiver)
+  receiver = 333
+  assertEquals(42, o["f"](9, 33))
+  assertSame(o, receiver)
+  receiver = 333
+  assertEquals(42, (1, o).f(8, 34))
+  assertSame(o, receiver)
+  receiver = 333
+  assertEquals(42, (1, o)["f"](7, 35))
+  assertSame(o, receiver)
   receiver = 333
   assertEquals(42, Function.prototype.call.call(f, o, 20, 22))
-  assertEquals(o, receiver)
+  assertSame(o, receiver)
   receiver = 333
   assertEquals(32, Function.prototype.apply.call(f, o, [17, 15]))
-  assertEquals(o, receiver)
+  assertSame(o, receiver)
   receiver = 333
   assertEquals(42, ff(30))
-  assertEquals(o, receiver)
+  assertSame(o, receiver)
   receiver = 333
   assertEquals(32, Function.prototype.apply.call(ff, {}, [20]))
-  assertEquals(o, receiver)
+  assertSame(o, receiver)
 }
 
 TestCall(false, function(x, y) {
-  receiver = this; return x + y
+  receiver = this
+  return x + y
 })
 
 TestCall(true, function(x, y) {
-  "use strict";
-  receiver = this; return x + y
+  "use strict"
+  receiver = this
+  return x + y
 })
 
-TestCall(false, Proxy.createFunction({}, function(x, y) {
-  receiver = this; return x + y
+TestCall(false, function() {
+  receiver = this; return arguments[0] + arguments[1]
+})
+
+TestCall(false, Proxy.createFunction(handler, function(x, y) {
+  receiver = this
+  return x + y
 }))
 
-TestCall(true, Proxy.createFunction({}, function(x, y) {
-  "use strict";
-  receiver = this; return x + y
+TestCall(true, Proxy.createFunction(handler, function(x, y) {
+  "use strict"
+  receiver = this
+  return x + y
 }))
 
-TestCall(false, CreateFrozen({}, function(x, y) {
-  receiver = this; return x + y
+TestCall(false, CreateFrozen(handler, function(x, y) {
+  receiver = this
+  return x + y
 }))
 
 
+
+// Using intrinsics as call traps.
+
+function TestCallIntrinsic(type, callTrap) {
+  var f = Proxy.createFunction({}, callTrap)
+  var x = f()
+  assertTrue(typeof x == type)
+}
+
+TestCallIntrinsic("boolean", Boolean)
+TestCallIntrinsic("number", Number)
+TestCallIntrinsic("string", String)
+TestCallIntrinsic("object", Object)
+TestCallIntrinsic("function", Function)
+
+
+
+// Throwing from call trap.
+
 function TestCallThrow(callTrap) {
   var f = Proxy.createFunction({}, callTrap)
   assertThrows(function(){ f(11) }, "myexn")
+  assertThrows(function(){ ({x: f}).x(11) }, "myexn")
+  assertThrows(function(){ ({x: f})["x"](11) }, "myexn")
   assertThrows(function(){ Function.prototype.call.call(f, {}, 2) }, "myexn")
   assertThrows(function(){ Function.prototype.apply.call(f, {}, [1]) }, "myexn")
 
   var f = CreateFrozen({}, callTrap)
   assertThrows(function(){ f(11) }, "myexn")
+  assertThrows(function(){ ({x: f}).x(11) }, "myexn")
+  assertThrows(function(){ ({x: f})["x"](11) }, "myexn")
   assertThrows(function(){ Function.prototype.call.call(f, {}, 2) }, "myexn")
   assertThrows(function(){ Function.prototype.apply.call(f, {}, [1]) }, "myexn")
 }
@@ -137,24 +230,48 @@
 var receiver
 
 var handlerWithPrototype = {
-  fix: function() { return {prototype: prototype} },
-  get: function(r, n) { assertEquals("prototype", n); return prototype }
+  fix: function() { return { prototype: { value: prototype } }; },
+  get: function(r, n) {
+    if (n == "length") return 2;
+    assertEquals("prototype", n);
+    return prototype;
+  }
 }
 
 var handlerSansPrototype = {
-  fix: function() { return {} },
-  get: function(r, n) { assertEquals("prototype", n); return undefined }
+  fix: function() { return { length: { value: 2 } } },
+  get: function(r, n) {
+    if (n == "length") return 2;
+    assertEquals("prototype", n);
+    return undefined;
+  }
 }
 
-function ReturnUndef(x, y) { "use strict"; receiver = this; this.sum = x + y }
-function ReturnThis(x, y) { "use strict"; receiver = this; this.sum = x + y; return this }
-function ReturnNew(x, y) { "use strict"; receiver = this; return {sum: x + y} }
+function ReturnUndef(x, y) {
+  "use strict";
+  receiver = this;
+  this.sum = x + y;
+}
+
+function ReturnThis(x, y) {
+  "use strict";
+  receiver = this;
+  this.sum = x + y;
+  return this;
+}
+
+function ReturnNew(x, y) {
+  "use strict";
+  receiver = this;
+  return {sum: x + y};
+}
+
 function ReturnNewWithProto(x, y) {
   "use strict";
   receiver = this;
-  var result = Object.create(prototype)
-  result.sum = x + y
-  return result
+  var result = Object.create(prototype);
+  result.sum = x + y;
+  return result;
 }
 
 function TestConstruct(proto, constructTrap) {
@@ -165,15 +282,13 @@
 function TestConstruct2(proto, constructTrap, handler) {
   var f = Proxy.createFunction(handler, function() {}, constructTrap)
   var o = new f(11, 31)
-  // TODO(rossberg): doesn't hold, due to unrelated bug.
-  // assertEquals(undefined, receiver)
+  assertEquals(undefined, receiver)
   assertEquals(42, o.sum)
   assertSame(proto, Object.getPrototypeOf(o))
 
   var f = CreateFrozen(handler, function() {}, constructTrap)
   var o = new f(11, 32)
-  // TODO(rossberg): doesn't hold, due to unrelated bug.
-  // assertEquals(undefined, receiver)
+  assertEquals(undefined, receiver)
   assertEquals(43, o.sum)
   assertSame(proto, Object.getPrototypeOf(o))
 }
@@ -181,13 +296,16 @@
 TestConstruct(Object.prototype, ReturnNew)
 TestConstruct(prototype, ReturnNewWithProto)
 
-TestConstruct(Object.prototype, Proxy.createFunction({}, ReturnNew))
-TestConstruct(prototype, Proxy.createFunction({}, ReturnNewWithProto))
+TestConstruct(Object.prototype, Proxy.createFunction(handler, ReturnNew))
+TestConstruct(prototype, Proxy.createFunction(handler, ReturnNewWithProto))
 
-TestConstruct(Object.prototype, CreateFrozen({}, ReturnNew))
-TestConstruct(prototype, CreateFrozen({}, ReturnNewWithProto))
+TestConstruct(Object.prototype, CreateFrozen(handler, ReturnNew))
+TestConstruct(prototype, CreateFrozen(handler, ReturnNewWithProto))
 
 
+
+// Construction with derived construct trap.
+
 function TestConstructFromCall(proto, returnsThis, callTrap) {
   TestConstructFromCall2(proto, returnsThis, callTrap, handlerWithPrototype)
   TestConstructFromCall2(proto, returnsThis, callTrap, handlerSansPrototype)
@@ -212,10 +330,14 @@
 TestConstructFromCall(Object.prototype, false, ReturnNew)
 TestConstructFromCall(prototype, false, ReturnNewWithProto)
 
-TestConstructFromCall(Object.prototype, true, Proxy.createFunction({}, ReturnUndef))
-TestConstructFromCall(Object.prototype, true, Proxy.createFunction({}, ReturnThis))
-TestConstructFromCall(Object.prototype, false, Proxy.createFunction({}, ReturnNew))
-TestConstructFromCall(prototype, false, Proxy.createFunction({}, ReturnNewWithProto))
+TestConstructFromCall(Object.prototype, true,
+                      Proxy.createFunction(handler, ReturnUndef))
+TestConstructFromCall(Object.prototype, true,
+                      Proxy.createFunction(handler, ReturnThis))
+TestConstructFromCall(Object.prototype, false,
+                      Proxy.createFunction(handler, ReturnNew))
+TestConstructFromCall(prototype, false,
+                      Proxy.createFunction(handler, ReturnNewWithProto))
 
 TestConstructFromCall(Object.prototype, true, CreateFrozen({}, ReturnUndef))
 TestConstructFromCall(Object.prototype, true, CreateFrozen({}, ReturnThis))
@@ -232,26 +354,44 @@
 TestConstructFromCall(Object.prototype, false, ReturnNew)
 TestConstructFromCall(prototype, false, ReturnNewWithProto)
 
-TestConstructFromCall(Object.prototype, true, Proxy.createFunction({}, ReturnUndef))
-TestConstructFromCall(Object.prototype, true, Proxy.createFunction({}, ReturnThis))
-TestConstructFromCall(Object.prototype, false, Proxy.createFunction({}, ReturnNew))
-TestConstructFromCall(prototype, false, Proxy.createFunction({}, ReturnNewWithProto))
+TestConstructFromCall(Object.prototype, true,
+                      Proxy.createFunction(handler, ReturnUndef))
+TestConstructFromCall(Object.prototype, true,
+                      Proxy.createFunction(handler, ReturnThis))
+TestConstructFromCall(Object.prototype, false,
+                      Proxy.createFunction(handler, ReturnNew))
+TestConstructFromCall(prototype, false,
+                      Proxy.createFunction(handler, ReturnNewWithProto))
 
-TestConstructFromCall(prototype, true, Proxy.createFunction(handlerWithPrototype, ReturnUndef))
-TestConstructFromCall(prototype, true, Proxy.createFunction(handlerWithPrototype, ReturnThis))
-TestConstructFromCall(Object.prototype, false, Proxy.createFunction(handlerWithPrototype, ReturnNew))
-TestConstructFromCall(prototype, false, Proxy.createFunction(handlerWithPrototype, ReturnNewWithProto))
+TestConstructFromCall(prototype, true,
+                      Proxy.createFunction(handlerWithPrototype, ReturnUndef))
+TestConstructFromCall(prototype, true,
+                      Proxy.createFunction(handlerWithPrototype, ReturnThis))
+TestConstructFromCall(Object.prototype, false,
+                      Proxy.createFunction(handlerWithPrototype, ReturnNew))
+TestConstructFromCall(prototype, false,
+                      Proxy.createFunction(handlerWithPrototype,
+                                           ReturnNewWithProto))
 
-TestConstructFromCall(prototype, true, CreateFrozen(handlerWithPrototype, ReturnUndef))
-TestConstructFromCall(prototype, true, CreateFrozen(handlerWithPrototype, ReturnThis))
-TestConstructFromCall(Object.prototype, false, CreateFrozen(handlerWithPrototype, ReturnNew))
-TestConstructFromCall(prototype, false, CreateFrozen(handlerWithPrototype, ReturnNewWithProto))
+TestConstructFromCall(prototype, true,
+                      CreateFrozen(handlerWithPrototype, ReturnUndef))
+TestConstructFromCall(prototype, true,
+                      CreateFrozen(handlerWithPrototype, ReturnThis))
+TestConstructFromCall(Object.prototype, false,
+                      CreateFrozen(handlerWithPrototype, ReturnNew))
+TestConstructFromCall(prototype, false,
+                      CreateFrozen(handlerWithPrototype, ReturnNewWithProto))
 
 
+
+// Throwing from the construct trap.
+
 function TestConstructThrow(trap) {
-  TestConstructThrow2(Proxy.createFunction({fix: function() {return {}}}, trap))
-  TestConstructThrow2(Proxy.createFunction({fix: function() {return {}}},
-    function() {}, trap))
+  TestConstructThrow2(Proxy.createFunction({ fix: function() {return {};} },
+                                           trap))
+  TestConstructThrow2(Proxy.createFunction({ fix: function() {return {};} },
+                                           function() {},
+                                           trap))
 }
 
 function TestConstructThrow2(f) {
@@ -266,13 +406,13 @@
 
 
 
-// Getters and setters.
+// Using function proxies as getters and setters.
 
 var value
 var receiver
 
 function TestAccessorCall(getterCallTrap, setterCallTrap) {
-  var handler = {fix: function() { return {} }}
+  var handler = { fix: function() { return {} } }
   var pgetter = Proxy.createFunction(handler, getterCallTrap)
   var psetter = Proxy.createFunction(handler, setterCallTrap)
 
diff --git a/test/mjsunit/harmony/proxies-hash.js b/test/mjsunit/harmony/proxies-hash.js
index 2bf1830..abfc0f5 100644
--- a/test/mjsunit/harmony/proxies-hash.js
+++ b/test/mjsunit/harmony/proxies-hash.js
@@ -25,42 +25,98 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Flags: --harmony-proxies --harmony-weakmaps
+// Flags: --harmony-proxies --harmony-collections
 
 
 // Helper.
 
-function TestWithProxies(test, handler) {
-  test(handler, Proxy.create)
-  test(handler, function(h) {return Proxy.createFunction(h, function() {})})
+function TestWithProxies(test, construct, handler) {
+  test(construct, handler, Proxy.create)
+  test(construct, handler, function(h) {
+    return Proxy.createFunction(h, function() {})
+  })
 }
 
 
-// Weak maps.
+// Sets.
 
-function TestWeakMap(fix) {
-  TestWithProxies(TestWeakMap2, fix)
+function TestSet(construct, fix) {
+  TestWithProxies(TestSet2, construct, fix)
 }
 
-function TestWeakMap2(fix, create) {
+function TestSet2(construct, fix, create) {
   var handler = {fix: function() { return {} }}
   var p1 = create(handler)
   var p2 = create(handler)
   var p3 = create(handler)
   fix(p3)
 
-  var m = new WeakMap
+  var s = construct();
+  s.add(p1);
+  s.add(p2);
+  assertTrue(s.has(p1));
+  assertTrue(s.has(p2));
+  assertFalse(s.has(p3));
+
+  fix(p1)
+  fix(p2)
+  assertTrue(s.has(p1));
+  assertTrue(s.has(p2));
+  assertFalse(s.has(p3));
+
+  s.delete(p2);
+  assertTrue(s.has(p1));
+  assertFalse(s.has(p2));
+  assertFalse(s.has(p3));
+}
+
+TestSet(Set, Object.seal)
+TestSet(Set, Object.freeze)
+TestSet(Set, Object.preventExtensions)
+
+
+// Maps and weak maps.
+
+function TestMap(construct, fix) {
+  TestWithProxies(TestMap2, construct, fix)
+}
+
+function TestMap2(construct, fix, create) {
+  var handler = {fix: function() { return {} }}
+  var p1 = create(handler)
+  var p2 = create(handler)
+  var p3 = create(handler)
+  fix(p3)
+
+  var m = construct();
   m.set(p1, 123);
   m.set(p2, 321);
+  assertTrue(m.has(p1));
+  assertTrue(m.has(p2));
+  assertFalse(m.has(p3));
   assertSame(123, m.get(p1));
   assertSame(321, m.get(p2));
 
   fix(p1)
   fix(p2)
+  assertTrue(m.has(p1));
+  assertTrue(m.has(p2));
+  assertFalse(m.has(p3));
   assertSame(123, m.get(p1));
   assertSame(321, m.get(p2));
+
+  m.delete(p2);
+  assertTrue(m.has(p1));
+  assertFalse(m.has(p2));
+  assertFalse(m.has(p3));
+  assertSame(123, m.get(p1));
+  assertSame(undefined, m.get(p2));
 }
 
-TestWeakMap(Object.seal)
-TestWeakMap(Object.freeze)
-TestWeakMap(Object.preventExtensions)
+TestMap(Map, Object.seal)
+TestMap(Map, Object.freeze)
+TestMap(Map, Object.preventExtensions)
+
+TestMap(WeakMap, Object.seal)
+TestMap(WeakMap, Object.freeze)
+TestMap(WeakMap, Object.preventExtensions)
diff --git a/test/mjsunit/harmony/proxies.js b/test/mjsunit/harmony/proxies.js
index ad8d86a..1ce7a32 100644
--- a/test/mjsunit/harmony/proxies.js
+++ b/test/mjsunit/harmony/proxies.js
@@ -28,9 +28,6 @@
 // Flags: --harmony-proxies
 
 
-// TODO(rossberg): for-in not implemented on proxies.
-
-
 // Helper.
 
 function TestWithProxies(test, x, y, z) {
@@ -138,6 +135,10 @@
   assertEquals("b", key)
   assertEquals(42, p[99])
   assertEquals("99", key)
+  assertEquals(42, (function(n) { return p[n] })("c"))
+  assertEquals("c", key)
+  assertEquals(42, (function(n) { return p[n] })(101))
+  assertEquals("101", key)
 
   var o = Object.create(p, {x: {value: 88}})
   assertEquals(42, o.a)
@@ -148,6 +149,11 @@
   assertEquals("99", key)
   assertEquals(88, o.x)
   assertEquals(88, o["x"])
+  assertEquals(42, (function(n) { return o[n] })("c"))
+  assertEquals("c", key)
+  assertEquals(42, (function(n) { return o[n] })(101))
+  assertEquals("101", key)
+  assertEquals(88, (function(n) { return o[n] })("x"))
 }
 
 TestGet({
@@ -201,6 +207,10 @@
   assertEquals(55, p[101].call(p))
   assertEquals(55, p.withargs(45, 5))
   assertEquals(55, p.withargs.call(p, 11, 22))
+  assertEquals(55, (function(n) { return p[n]() })("f"))
+  assertEquals(55, (function(n) { return p[n].call(p) })("f"))
+  assertEquals(55, (function(n) { return p[n](15, 20) })("withargs"))
+  assertEquals(55, (function(n) { return p[n].call(p, 13, 21) })("withargs"))
   assertEquals("6655", "66" + p)  // calls p.toString
 
   var o = Object.create(p, {g: {value: function(x) { return x + 88 }}})
@@ -216,6 +226,13 @@
   assertEquals(90, o.g(2))
   assertEquals(91, o.g.call(o, 3))
   assertEquals(92, o.g.call(p, 4))
+  assertEquals(55, (function(n) { return o[n]() })("f"))
+  assertEquals(55, (function(n) { return o[n].call(o) })("f"))
+  assertEquals(55, (function(n) { return o[n](15, 20) })("withargs"))
+  assertEquals(55, (function(n) { return o[n].call(o, 13, 21) })("withargs"))
+  assertEquals(93, (function(n) { return o[n](5) })("g"))
+  assertEquals(94, (function(n) { return o[n].call(o, 6) })("g"))
+  assertEquals(95, (function(n) { return o[n].call(p, 7) })("g"))
   assertEquals("6655", "66" + o)  // calls o.toString
 }
 
@@ -282,14 +299,15 @@
   assertThrows(function(){ p.a }, "myexn")
   assertThrows(function(){ p["b"] }, "myexn")
   assertThrows(function(){ p[3] }, "myexn")
+  assertThrows(function(){ (function(n) { p[n] })("c") }, "myexn")
+  assertThrows(function(){ (function(n) { p[n] })(99) }, "myexn")
 
   var o = Object.create(p, {x: {value: 88}, '4': {value: 89}})
   assertThrows(function(){ o.a }, "myexn")
   assertThrows(function(){ o["b"] }, "myexn")
   assertThrows(function(){ o[3] }, "myexn")
-  assertEquals(88, o.x)
-  assertEquals(88, o["x"])
-  assertEquals(89, o[4])
+  assertThrows(function(){ (function(n) { o[n] })("c") }, "myexn")
+  assertThrows(function(){ (function(n) { o[n] })(99) }, "myexn")
 }
 
 TestGetThrow({
@@ -353,6 +371,13 @@
   assertEquals(44, p[77] = 44)
   assertEquals("77", key)
   assertEquals(44, val)
+
+  assertEquals(45, (function(n) { return p[n] = 45 })("c"))
+  assertEquals("c", key)
+  assertEquals(45, val)
+  assertEquals(46, (function(n) { return p[n] = 46 })(99))
+  assertEquals("99", key)
+  assertEquals(46, val)
 }
 
 TestSet({
@@ -434,6 +459,8 @@
   assertThrows(function(){ p.a = 42 }, "myexn")
   assertThrows(function(){ p["b"] = 42 }, "myexn")
   assertThrows(function(){ p[22] = 42 }, "myexn")
+  assertThrows(function(){ (function(n) { p[n] = 45 })("c") }, "myexn")
+  assertThrows(function(){ (function(n) { p[n] = 46 })(99) }, "myexn")
 }
 
 TestSetThrow({
@@ -719,17 +746,17 @@
   assertEquals("zzz", key)
   assertEquals(0, Object.getOwnPropertyNames(desc).length)
 
-// TODO(rossberg): This test requires for-in on proxies.
-//  var d = create({
-//    get: function(r, k) { return (k === "value") ? 77 : void 0 },
-//    getOwnPropertyNames: function() { return ["value"] }
-//  })
-//  assertEquals(1, Object.getOwnPropertyNames(d).length)
-//  assertEquals(77, d.value)
-//  assertEquals(p, Object.defineProperty(p, "p", d))
-//  assertEquals("p", key)
-//  assertEquals(1, Object.getOwnPropertyNames(desc).length)
-//  assertEquals(77, desc.value)
+  var d = create({
+    get: function(r, k) { return (k === "value") ? 77 : void 0 },
+    getOwnPropertyNames: function() { return ["value"] },
+    enumerate: function() { return ["value"] }
+  })
+  assertEquals(1, Object.getOwnPropertyNames(d).length)
+  assertEquals(77, d.value)
+  assertEquals(p, Object.defineProperty(p, "p", d))
+  assertEquals("p", key)
+  assertEquals(1, Object.getOwnPropertyNames(desc).length)
+  assertEquals(77, desc.value)
 
   var props = {
     '11': {},
@@ -774,17 +801,16 @@
   assertThrows(function(){ Object.defineProperty(p, "a", {value: 44})}, "myexn")
   assertThrows(function(){ Object.defineProperty(p, 0, {value: 44})}, "myexn")
 
-// TODO(rossberg): These tests require for-in on proxies.
-//  var d1 = create({
-//    get: function(r, k) { throw "myexn" },
-//    getOwnPropertyNames: function() { return ["value"] }
-//  })
-//  assertThrows(function(){ Object.defineProperty(p, "p", d1) }, "myexn")
-//  var d2 = create({
-//    get: function(r, k) { return 77 },
-//    getOwnPropertyNames: function() { throw "myexn" }
-//  })
-//  assertThrows(function(){ Object.defineProperty(p, "p", d2) }, "myexn")
+  var d1 = create({
+    get: function(r, k) { throw "myexn" },
+    getOwnPropertyNames: function() { return ["value"] }
+  })
+  assertThrows(function(){ Object.defineProperty(p, "p", d1) }, "myexn")
+  var d2 = create({
+    get: function(r, k) { return 77 },
+    getOwnPropertyNames: function() { throw "myexn" }
+  })
+  assertThrows(function(){ Object.defineProperty(p, "p", d2) }, "myexn")
 
   var props = {bla: {get value() { throw "otherexn" }}}
   assertThrows(function(){ Object.defineProperties(p, props) }, "otherexn")
@@ -1468,7 +1494,7 @@
   var p1 = Proxy.create({})
   var p2 = Proxy.create({}, o1)
   var p3 = Proxy.create({}, p2)
-  var p4 = Proxy.create({}, 666)
+  var p4 = Proxy.create({}, null)
   var o2 = Object.create(p3)
 
   assertSame(Object.getPrototypeOf(o1), Object.prototype)
@@ -1606,7 +1632,9 @@
 
 TestKeys(["a", "0"], {
   getOwnPropertyNames: function() { return ["a", 23, "zz", "", 0] },
-  getOwnPropertyDescriptor: function(k) { return {enumerable: k.length == 1} }
+  getOwnPropertyDescriptor: function(k) {
+    return k == "" ? undefined : {enumerable: k.length == 1}
+  }
 })
 
 TestKeys(["23", "zz", ""], {
@@ -1620,10 +1648,12 @@
 
 TestKeys(["a", "b", "c", "5"], {
   get getOwnPropertyNames() {
-    return function() { return ["0", 4, "a", "b", "c", 5] }
+    return function() { return ["0", 4, "a", "b", "c", 5, "ety"] }
   },
   get getOwnPropertyDescriptor() {
-    return function(k) { return {enumerable: k >= "44"} }
+    return function(k) {
+      return k == "ety" ? undefined : {enumerable: k >= "44"}
+    }
   }
 })
 
diff --git a/test/mjsunit/harmony/weakmaps.js b/test/mjsunit/harmony/weakmaps.js
deleted file mode 100644
index 7b5dcaf..0000000
--- a/test/mjsunit/harmony/weakmaps.js
+++ /dev/null
@@ -1,167 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --harmony-weakmaps --expose-gc
-
-
-// Test valid getter and setter calls
-var m = new WeakMap;
-assertDoesNotThrow(function () { m.get(new Object) });
-assertDoesNotThrow(function () { m.set(new Object) });
-assertDoesNotThrow(function () { m.has(new Object) });
-assertDoesNotThrow(function () { m.delete(new Object) });
-
-
-// Test invalid getter and setter calls
-var m = new WeakMap;
-assertThrows(function () { m.get(undefined) }, TypeError);
-assertThrows(function () { m.set(undefined, 0) }, TypeError);
-assertThrows(function () { m.get(0) }, TypeError);
-assertThrows(function () { m.set(0, 0) }, TypeError);
-assertThrows(function () { m.get('a-key') }, TypeError);
-assertThrows(function () { m.set('a-key', 0) }, TypeError);
-
-
-// Test expected mapping behavior
-var m = new WeakMap;
-function TestMapping(map, key, value) {
-  map.set(key, value);
-  assertSame(value, map.get(key));
-}
-TestMapping(m, new Object, 23);
-TestMapping(m, new Object, 'the-value');
-TestMapping(m, new Object, new Object);
-
-
-// Test expected querying behavior
-var m = new WeakMap;
-var key = new Object;
-TestMapping(m, key, 'to-be-present');
-assertTrue(m.has(key));
-assertFalse(m.has(new Object));
-TestMapping(m, key, undefined);
-assertFalse(m.has(key));
-assertFalse(m.has(new Object));
-
-
-// Test expected deletion behavior
-var m = new WeakMap;
-var key = new Object;
-TestMapping(m, key, 'to-be-deleted');
-assertTrue(m.delete(key));
-assertFalse(m.delete(key));
-assertFalse(m.delete(new Object));
-assertSame(m.get(key), undefined);
-
-
-// Test GC of map with entry
-var m = new WeakMap;
-var key = new Object;
-m.set(key, 'not-collected');
-gc();
-assertSame('not-collected', m.get(key));
-
-
-// Test GC of map with chained entries
-var m = new WeakMap;
-var head = new Object;
-for (key = head, i = 0; i < 10; i++, key = m.get(key)) {
-  m.set(key, new Object);
-}
-gc();
-var count = 0;
-for (key = head; key != undefined; key = m.get(key)) {
-  count++;
-}
-assertEquals(11, count);
-
-
-// Test property attribute [[Enumerable]]
-var m = new WeakMap;
-function props(x) {
-  var array = [];
-  for (var p in x) array.push(p);
-  return array.sort();
-}
-assertArrayEquals([], props(WeakMap));
-assertArrayEquals([], props(WeakMap.prototype));
-assertArrayEquals([], props(m));
-
-
-// Test arbitrary properties on weak maps
-var m = new WeakMap;
-function TestProperty(map, property, value) {
-  map[property] = value;
-  assertEquals(value, map[property]);
-}
-for (i = 0; i < 20; i++) {
-  TestProperty(m, i, 'val' + i);
-  TestProperty(m, 'foo' + i, 'bar' + i);
-}
-TestMapping(m, new Object, 'foobar');
-
-
-// Test direct constructor call
-var m = WeakMap();
-assertTrue(m instanceof WeakMap);
-
-
-// Test some common JavaScript idioms
-var m = new WeakMap;
-assertTrue(m instanceof WeakMap);
-assertTrue(WeakMap.prototype.set instanceof Function)
-assertTrue(WeakMap.prototype.get instanceof Function)
-assertTrue(WeakMap.prototype.has instanceof Function)
-assertTrue(WeakMap.prototype.delete instanceof Function)
-
-
-// Regression test for WeakMap prototype.
-assertTrue(WeakMap.prototype.constructor === WeakMap)
-assertTrue(Object.getPrototypeOf(WeakMap.prototype) === Object.prototype)
-
-
-// Regression test for issue 1617: The prototype of the WeakMap constructor
-// needs to be unique (i.e. different from the one of the Object constructor).
-assertFalse(WeakMap.prototype === Object.prototype);
-var o = Object.create({});
-assertFalse("get" in o);
-assertFalse("set" in o);
-assertEquals(undefined, o.get);
-assertEquals(undefined, o.set);
-var o = Object.create({}, { myValue: {
-  value: 10,
-  enumerable: false,
-  configurable: true,
-  writable: true
-}});
-assertEquals(10, o.myValue);
-
-
-// Stress Test
-// There is a proposed stress-test available at the es-discuss mailing list
-// which cannot be reasonably automated.  Check it out by hand if you like:
-// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html
diff --git a/test/mjsunit/mjsunit.status b/test/mjsunit/mjsunit.status
index 941e0e8..8a1b68b 100644
--- a/test/mjsunit/mjsunit.status
+++ b/test/mjsunit/mjsunit.status
@@ -65,6 +65,20 @@
 debug-liveedit-check-stack: SKIP
 debug-liveedit-patch-positions-replace: SKIP
 
+# Liveedit messes with the frame heights - see bug 1791
+debug-liveedit-1: SKIP
+debug-liveedit-2: SKIP
+debug-liveedit-3: SKIP
+debug-liveedit-breakpoints: SKIP
+debug-liveedit-check-stack: SKIP
+debug-liveedit-diff: SKIP
+debug-liveedit-newsource: SKIP
+debug-liveedit-patch-positions: SKIP
+debug-liveedit-patch-positions-replace: SKIP
+debug-liveedit-utils: SKIP
+
+
+
 ##############################################################################
 [ $arch == arm ]
 
diff --git a/test/mjsunit/object-define-properties.js b/test/mjsunit/object-define-properties.js
index 128df69..6d5032e 100644
--- a/test/mjsunit/object-define-properties.js
+++ b/test/mjsunit/object-define-properties.js
@@ -54,3 +54,19 @@
 
 assertEquals(x.foo, 10);
 assertEquals(x.bar, 42);
+
+
+// Make sure that all property descriptors are calculated before any
+// modifications are done.
+
+var object = {};
+
+assertThrows(function() {
+    Object.defineProperties(object, {
+      foo: { value: 1 },
+      bar: { value: 2, get: function() { return 3; } }
+    });
+  }, TypeError);
+
+assertEquals(undefined, object.foo);
+assertEquals(undefined, object.bar);
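A hedged note on the new test above (not part of the commit): the bar descriptor mixes a data attribute (value) with an accessor attribute (get), so converting it to a property descriptor throws a TypeError, and because Object.defineProperties converts and validates every descriptor before defining any property, foo is never created either. A minimal sketch of the same mechanism:

  var target = {};
  try {
    Object.defineProperties(target, {
      ok:  { value: 1 },
      bad: { value: 2, get: function() { return 3; } }  // data + accessor: TypeError
    });
  } catch (e) {}      // TypeError raised while the descriptors are being read
  "ok" in target;     // false: nothing was applied before the throw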
diff --git a/test/mjsunit/cyclic-error-to-string.js b/test/mjsunit/optimized-typeof.js
similarity index 79%
copy from test/mjsunit/cyclic-error-to-string.js
copy to test/mjsunit/optimized-typeof.js
index 2502b53..b0c0725 100644
--- a/test/mjsunit/cyclic-error-to-string.js
+++ b/test/mjsunit/optimized-typeof.js
@@ -25,22 +25,23 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Test printing of cyclic errors which return the empty string for
-// compatibility with Safari and Firefox.
+// Flags: --allow-natives-syntax
 
-var e = new Error();
-assertEquals('Error', e + '');
+function typeofDirectly() {
+  return typeof({}) === "undefined";
+}
 
-e = new Error();
-e.name = e;
-e.message = e;
-e.stack = e;
-e.arguments = e;
-assertEquals(': ', e + '');
+typeofDirectly();
+typeofDirectly();
+%OptimizeFunctionOnNextCall(typeofDirectly);
+typeofDirectly();
 
-e = new Error();
-e.name = [ e ];
-e.message = [ e ];
-e.stack = [ e ];
-e.arguments = [ e ];
-assertEquals(': ', e + '');
+function typeofViaVariable() {
+  var foo = typeof({})
+  return foo === "undefined";
+}
+
+typeofViaVariable();
+typeofViaVariable();
+%OptimizeFunctionOnNextCall(typeofViaVariable);
+typeofViaVariable();
diff --git a/test/mjsunit/regexp-static.js b/test/mjsunit/regexp-static.js
index 0f84968..8f283f6 100644
--- a/test/mjsunit/regexp-static.js
+++ b/test/mjsunit/regexp-static.js
@@ -25,18 +25,6 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Test that we throw exceptions when calling test and exec with no
-// input.  This is not part of the spec, but we do it for
-// compatibility with JSC.
-assertThrows("/a/.test()");
-assertThrows("/a/.exec()");
-
-// Test that we do not throw exceptions once the static RegExp.input
-// field has been set.
-RegExp.input = "a";
-assertDoesNotThrow("/a/.test()");
-assertDoesNotThrow("/a/.exec()");
-
 // Test the (deprecated as of JS 1.5) properties of the RegExp function.
 var re = /((\d+)\.(\d+))/;
 var s = 'abc123.456def';
@@ -166,3 +154,8 @@
 var foo = "lsdfj sldkfj sdklfj læsdfjl sdkfjlsdk fjsdl fjsdljskdj flsj flsdkj flskd regexp: /foobar/\nldkfj sdlkfj sdkl";
 assertTrue(/^([a-z]+): (.*)/.test(foo.substring(foo.indexOf("regexp:"))), "regexp: setup");
 assertEquals("regexp", RegExp.$1, "RegExp.$1");
+
+
+// Check that calling with no argument is the same as calling with undefined.
+assertTrue(/^undefined$/.test());
+assertEquals(["undefined"], /^undefined$/.exec());
diff --git a/test/mjsunit/cyclic-error-to-string.js b/test/mjsunit/regress/regress-100409.js
similarity index 79%
copy from test/mjsunit/cyclic-error-to-string.js
copy to test/mjsunit/regress/regress-100409.js
index 2502b53..c29250f 100644
--- a/test/mjsunit/cyclic-error-to-string.js
+++ b/test/mjsunit/regress/regress-100409.js
@@ -25,22 +25,31 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Test printing of cyclic errors which return the empty string for
-// compatibility with Safari and Firefox.
+// Flags: --allow-natives-syntax
 
-var e = new Error();
-assertEquals('Error', e + '');
+function outer () {
+  var val = 0;
 
-e = new Error();
-e.name = e;
-e.message = e;
-e.stack = e;
-e.arguments = e;
-assertEquals(': ', e + '');
+  function foo () {
+    val = 0;
+    val;
+    var z = false;
+    var y = true;
+    if (!z) {
+      while (z = !z) {
+        if (y) val++;
+      }
+    }
+    return val++;
+  }
 
-e = new Error();
-e.name = [ e ];
-e.message = [ e ];
-e.stack = [ e ];
-e.arguments = [ e ];
-assertEquals(': ', e + '');
+  return foo;
+}
+
+
+var foo = outer();
+
+assertEquals(1, foo());
+assertEquals(1, foo());
+%OptimizeFunctionOnNextCall(foo);
+assertEquals(1, foo());
diff --git a/test/mjsunit/cyclic-error-to-string.js b/test/mjsunit/regress/regress-100702.js
similarity index 79%
copy from test/mjsunit/cyclic-error-to-string.js
copy to test/mjsunit/regress/regress-100702.js
index 2502b53..46494ab 100644
--- a/test/mjsunit/cyclic-error-to-string.js
+++ b/test/mjsunit/regress/regress-100702.js
@@ -25,22 +25,20 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Test printing of cyclic errors which return the empty string for
-// compatibility with Safari and Firefox.
+// Regression test for correct handling of non-object receiver values
+// passed to built-in array functions.
 
-var e = new Error();
-assertEquals('Error', e + '');
+String.prototype.isThatMe = function () {
+  assertFalse(this === str);
+};
 
-e = new Error();
-e.name = e;
-e.message = e;
-e.stack = e;
-e.arguments = e;
-assertEquals(': ', e + '');
+var str = "abc";
+str.isThatMe();
+str.isThatMe.call(str);
 
-e = new Error();
-e.name = [ e ];
-e.message = [ e ];
-e.stack = [ e ];
-e.arguments = [ e ];
-assertEquals(': ', e + '');
+var arr = [1];
+arr.forEach("".isThatMe, str);
+arr.filter("".isThatMe, str);
+arr.some("".isThatMe, str);
+arr.every("".isThatMe, str);
+arr.map("".isThatMe, str);
diff --git a/test/mjsunit/regress/regress-1229.js b/test/mjsunit/regress/regress-1229.js
index e16d278..c0dcba9 100644
--- a/test/mjsunit/regress/regress-1229.js
+++ b/test/mjsunit/regress/regress-1229.js
@@ -35,10 +35,10 @@
   assertEquals(3, z);
 }
 
-var bound_arg = [1];
+var foob = foo.bind({}, 1);
 
 function f(y, z) {
-  return %NewObjectFromBound(foo, bound_arg);
+  return %NewObjectFromBound(foob);
 }
 
 // Check that %NewObjectFromBound looks at correct frame for inlined function.
diff --git a/test/mjsunit/stack-traces-2.js b/test/mjsunit/stack-traces-2.js
new file mode 100644
index 0000000..165c4df
--- /dev/null
+++ b/test/mjsunit/stack-traces-2.js
@@ -0,0 +1,87 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --builtins-in-stack-traces
+
+
+// Poisonous object that throws a reference error if an attempt is made to
+// convert it to a primitive value.
+var thrower = { valueOf: function() { FAIL; },
+                toString: function() { FAIL; } };
+
+// Tests that a native constructor function is included in the
+// stack trace.
+function testTraceNativeConstructor(nativeFunc) {
+  var nativeFuncName = nativeFunc.name;
+  try {
+    new nativeFunc(thrower);
+    assertUnreachable(nativeFuncName);
+  } catch (e) {
+    assertTrue(e.stack.indexOf(nativeFuncName) >= 0, nativeFuncName);
+  }
+}
+
+// Tests that a native conversion function is included in the
+// stack trace.
+function testTraceNativeConversion(nativeFunc) {
+  var nativeFuncName = nativeFunc.name;
+  try {
+    nativeFunc(thrower);
+    assertUnreachable(nativeFuncName);
+  } catch (e) {
+    assertTrue(e.stack.indexOf(nativeFuncName) >= 0, nativeFuncName);
+  }
+}
+
+
+function testNotOmittedBuiltin(throwing, included) {
+  try {
+    throwing();
+    assertUnreachable(included);
+  } catch (e) {
+    assertTrue(e.stack.indexOf(included) >= 0, included);
+  }
+}
+
+
+testTraceNativeConversion(String);  // Does ToString on argument.
+testTraceNativeConversion(Number);  // Does ToNumber on argument.
+testTraceNativeConversion(RegExp);  // Does ToString on argument.
+
+testTraceNativeConstructor(String);  // Does ToString on argument.
+testTraceNativeConstructor(Number);  // Does ToNumber on argument.
+testTraceNativeConstructor(RegExp);  // Does ToString on argument.
+testTraceNativeConstructor(Date);    // Does ToNumber on argument.
+
+// QuickSort has the builtins object as its receiver and is a non-native
+// builtin. It should not be omitted with the --builtins-in-stack-traces flag.
+testNotOmittedBuiltin(function(){ [thrower, 2].sort(function (a,b) {
+                                                     (b < a) - (a < b); });
+                      }, "QuickSort");
+
+// Not omitted even though ADD from runtime.js is a non-native builtin.
+testNotOmittedBuiltin(function(){ thrower + 2; }, "ADD");
\ No newline at end of file
diff --git a/test/mjsunit/stack-traces.js b/test/mjsunit/stack-traces.js
index 47a5cc5..536e71b 100644
--- a/test/mjsunit/stack-traces.js
+++ b/test/mjsunit/stack-traces.js
@@ -194,6 +194,46 @@
 }
 
 
+// Poisonous object that throws a reference error if an attempt is made to
+// convert it to a primitive value.
+var thrower = { valueOf: function() { FAIL; },
+                toString: function() { FAIL; } };
+
+// Tests that a native constructor function is included in the
+// stack trace.
+function testTraceNativeConstructor(nativeFunc) {
+  var nativeFuncName = nativeFunc.name;
+  try {
+    new nativeFunc(thrower);
+    assertUnreachable(nativeFuncName);
+  } catch (e) {
+    assertTrue(e.stack.indexOf(nativeFuncName) >= 0, nativeFuncName);
+  }
+}
+
+// Tests that a native conversion function is included in the
+// stack trace.
+function testTraceNativeConversion(nativeFunc) {
+  var nativeFuncName = nativeFunc.name;
+  try {
+    nativeFunc(thrower);
+    assertUnreachable(nativeFuncName);
+  } catch (e) {
+    assertTrue(e.stack.indexOf(nativeFuncName) >= 0, nativeFuncName);
+  }
+}
+
+
+function testOmittedBuiltin(throwing, omitted) {
+  try {
+    throwing();
+    assertUnreachable(omitted);
+  } catch (e) {
+    assertTrue(e.stack.indexOf(omitted) < 0, omitted);
+  }
+}
+
+
 testTrace("testArrayNative", testArrayNative, ["Array.map (native)"]);
 testTrace("testNested", testNested, ["at one", "at two", "at three"]);
 testTrace("testMethodNameInference", testMethodNameInference, ["at Foo.bar"]);
@@ -217,3 +257,21 @@
 testCallerCensorship();
 testUnintendedCallerCensorship();
 testErrorsDuringFormatting();
+
+testTraceNativeConversion(String);  // Does ToString on argument.
+testTraceNativeConversion(Number);  // Does ToNumber on argument.
+testTraceNativeConversion(RegExp);  // Does ToString on argument.
+
+testTraceNativeConstructor(String);  // Does ToString on argument.
+testTraceNativeConstructor(Number);  // Does ToNumber on argument.
+testTraceNativeConstructor(RegExp);  // Does ToString on argument.
+testTraceNativeConstructor(Date);    // Does ToNumber on argument.
+
+// Omitted because QuickSort has the builtins object as its receiver and is a
+// non-native builtin.
+testOmittedBuiltin(function(){ [thrower, 2].sort(function (a,b) {
+                                                     (b < a) - (a < b); });
+                   }, "QuickSort");
+
+// Omitted because ADD from runtime.js is a non-native builtin.
+testOmittedBuiltin(function(){ thrower + 2; }, "ADD");
\ No newline at end of file
diff --git a/test/mjsunit/strict-mode.js b/test/mjsunit/strict-mode.js
index 30234ba..9c9bdfd 100644
--- a/test/mjsunit/strict-mode.js
+++ b/test/mjsunit/strict-mode.js
@@ -1051,14 +1051,20 @@
   }
   assertThrows(function() { strict.caller; }, TypeError);
   assertThrows(function() { strict.arguments; }, TypeError);
+  assertThrows(function() { strict.caller = 42; }, TypeError);
+  assertThrows(function() { strict.arguments = 42; }, TypeError);
 
   var another = new Function("'use strict'");
   assertThrows(function() { another.caller; }, TypeError);
   assertThrows(function() { another.arguments; }, TypeError);
+  assertThrows(function() { another.caller = 42; }, TypeError);
+  assertThrows(function() { another.arguments = 42; }, TypeError);
 
   var third = (function() { "use strict"; return function() {}; })();
   assertThrows(function() { third.caller; }, TypeError);
   assertThrows(function() { third.arguments; }, TypeError);
+  assertThrows(function() { third.caller = 42; }, TypeError);
+  assertThrows(function() { third.arguments = 42; }, TypeError);
 
   CheckPillDescriptor(strict, "caller");
   CheckPillDescriptor(strict, "arguments");
diff --git a/test/mjsunit/to_number_order.js b/test/mjsunit/to_number_order.js
index d17e600..50e4bc7 100644
--- a/test/mjsunit/to_number_order.js
+++ b/test/mjsunit/to_number_order.js
@@ -161,7 +161,7 @@
 
 x = "";
 assertFalse(a > b, "Compare objects a > b");
-assertEquals("fiskhest", x, "Compare objects a > b valueOf order");
+assertEquals("hestfisk", x, "Compare objects a > b valueOf order");
 
 x = "";
 assertFalse(a > void(0), "Compare objects a > undefined");
@@ -195,7 +195,7 @@
 
   x = "";
   assertFalse(a > b, "Compare objects a > b");
-  assertEquals("fiskhest", x, "Compare objects a > b valueOf order");
+  assertEquals("hestfisk", x, "Compare objects a > b valueOf order");
 
   x = "";
   assertFalse(a > void(0), "Compare objects a > undefined");
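A hedged sketch of why the expected string changes from "fiskhest" to "hestfisk" (not part of the commit): ES5 sections 11.8.2 and 11.8.5 convert the left operand of > to a primitive before the right one, so for a > b the valueOf of a runs first. Assuming the test's objects append "hest" and "fisk" respectively:

  var x = "";
  var a = { valueOf: function () { x += "hest"; return 1; } };
  var b = { valueOf: function () { x += "fisk"; return 2; } };
  a > b;
  // x == "hestfisk" under ES5; ES3 converted b first, giving "fiskhest"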
diff --git a/test/mozilla/mozilla.status b/test/mozilla/mozilla.status
index 6a5c086..e31a630 100644
--- a/test/mozilla/mozilla.status
+++ b/test/mozilla/mozilla.status
@@ -300,6 +300,11 @@
 js1_2/regexp/beginLine: FAIL_OK
 js1_2/regexp/endLine: FAIL_OK
 
+# We no longer let calls to test and exec with no argument implicitly
+# use the previous input.
+js1_2/regexp/RegExp_input: FAIL_OK
+js1_2/regexp/RegExp_input_as_array: FAIL_OK
+
 
 # To be compatible with safari typeof a regexp yields 'function';
 # in firefox it yields 'object'.
@@ -410,12 +415,6 @@
 js1_5/extensions/regress-455413: FAIL_OK
 
 
-# The spec specifies reverse evaluation order for < and >=.
-# See section 11.8.2 and 11.8.5.
-# We implement the spec here but the test tests the more straigtforward order.
-ecma_3/Operators/order-01: FAIL_OK
-
-
 # Uses Mozilla-specific QName, XML, XMLList and Iterator.
 js1_5/Regress/regress-407323: FAIL_OK
 js1_5/Regress/regress-407957: FAIL_OK
diff --git a/test/sputnik/sputnik.status b/test/sputnik/sputnik.status
index 99db598..135540e 100644
--- a/test/sputnik/sputnik.status
+++ b/test/sputnik/sputnik.status
@@ -30,10 +30,6 @@
 
 ############################### BUGS ###################################
 
-# A bound function should fail on access to 'caller' and 'arguments'.
-S15.3.4.5_A1: FAIL
-S15.3.4.5_A2: FAIL
-
 # '__proto__' should be treated as a normal property in JSON.
 S15.12.2_A1: FAIL
 
@@ -46,12 +42,6 @@
 S15.8.2.18_A7: PASS || FAIL_OK
 S15.8.2.13_A23: PASS || FAIL_OK
 
-# We allow calls to regexp exec() with no arguments to fail for
-# compatibility reasons.
-S15.10.6.2_A1_T16: FAIL_OK
-S15.10.6.2_A12: FAIL_OK
-S15.10.6.3_A1_T16: FAIL_OK
-
 # Sputnik tests (r97) assume RegExp.prototype is an Object, not a RegExp.
 S15.10.6_A2: FAIL_OK
 
@@ -162,6 +152,10 @@
 S9.9_A1: FAIL_OK
 S9.9_A2: FAIL_OK
 
+# The expected evaluation order of comparison operations changed.
+S11.8.2_A2.3_T1: FAIL_OK
+S11.8.3_A2.3_T1: FAIL_OK
+
 # Calls builtins without an explicit receiver which means that
 # undefined is passed to the builtin. The tests expect the global
 # object to be passed which was true in ES3 but not in ES5.
@@ -187,6 +181,10 @@
 S15.1.1.3_A2_T1: FAIL_OK  # undefined
 S15.1.1.3_A2_T2: FAIL_OK  # undefined
 
+# Function.prototype.apply can handle an arbitrary object as its argument list.
+S15.3.4.3_A6_T1: FAIL_OK
+S15.3.4.3_A6_T4: FAIL_OK
+
 # Array.prototype.to[Locale]String is generic in ES5.
 S15.4.4.2_A2_T1: FAIL_OK
 S15.4.4.3_A2_T1: FAIL_OK
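(Editor's note on the two new S15.3.4.3 entries above: those Sputnik tests assume the ES3
behaviour of apply() throwing a TypeError for anything other than an Array or an arguments
object.  A small sketch of the ES5 behaviour now implemented, using a hypothetical function:

    function sum(x, y) { return x + y; }
    // Any array-like object is accepted as the argument list in ES5.
    sum.apply(null, { length: 2, 0: 3, 1: 4 });  // 7; the ES3-era tests expect a TypeError
)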
diff --git a/test/test262/test262.status b/test/test262/test262.status
index 1a61954..3eefbd7 100644
--- a/test/test262/test262.status
+++ b/test/test262/test262.status
@@ -30,10 +30,6 @@
 
 ############################### BUGS ###################################
 
-# A bound function should fail on access to 'caller' and 'arguments'.
-S15.3.4.5_A1: FAIL
-S15.3.4.5_A2: FAIL
-
 # '__proto__' should be treated as a normal property in JSON.
 S15.12.2_A1: FAIL
 
@@ -43,22 +39,6 @@
 # V8 Bug: http://code.google.com/p/v8/issues/detail?id=1624
 S10.4.2.1_A1: FAIL
 
-# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1752
-S11.8.2_A2.3_T1: FAIL
-S11.8.3_A2.3_T1: FAIL
-11.8.2-1: FAIL
-11.8.2-2: FAIL
-11.8.2-3: FAIL
-11.8.2-4: FAIL
-11.8.3-1: FAIL
-11.8.3-2: FAIL
-11.8.3-3: FAIL
-11.8.3-4: FAIL
-11.8.3-5: FAIL
-
-# V8 Bug.
-S13.2.3_A1: FAIL
-
 # V8 Bug: http://code.google.com/p/v8/issues/detail?id=1530
 S15.3.3.1_A4: FAIL
 
@@ -165,12 +145,6 @@
 S15.8.2.18_A7: PASS || FAIL_OK
 S15.8.2.13_A23: PASS || FAIL_OK
 
-# We allow calls to regexp exec() with no arguments to fail for
-# compatibility reasons.
-S15.10.6.2_A1_T16: FAIL_OK
-S15.10.6.2_A12: FAIL_OK
-S15.10.6.3_A1_T16: FAIL_OK
-
 # Sputnik tests (r97) assume RegExp.prototype is an Object, not a RegExp.
 S15.10.6_A2: FAIL_OK
 
@@ -342,80 +316,6 @@
 
 ######################### UNANALYZED FAILURES ##########################
 
-# Bug? Object.defineProperty - Update [[Enumerable]] attribute of 'name'
-#      property to true successfully when [[Enumerable]] attribute of 'name'
-#      is false and [[Configurable]] attribute of 'name' is true,  the 'desc'
-#      is a generic descriptor which only contains [[Enumerable]] attribute
-#      as true, 'name' property is an index data property (8.12.9 step 8)
-15.2.3.6-4-82-18: FAIL
-# Bug? Object.defineProperty - Update [[Enumerable]] attribute of 'name'
-#      property to false successfully when [[Enumerable]] and [[Configurable]]
-#      attributes of 'name' property are true,  the 'desc' is a generic
-#      descriptor which only contains [Enumerable]] attribute as false and
-#      'name' property is an index accessor property (8.12.9 step 8)
-15.2.3.6-4-82-19: FAIL
-# Bug? Object.defineProperty - Update [[Enumerable]] attribute of 'name'
-#      property to false successfully when [[Enumerable]] and [[Configurable]]
-#      attributes of 'name' property are true,  the 'desc' is a generic
-#      descriptor which contains [Enumerable]] attribute as false and
-#      [[Configurable]] property is true, 'name' property is an index accessor
-#      property (8.12.9 step 8)
-15.2.3.6-4-82-20: FAIL
-# Bug? Object.defineProperty - Update [[Configurable]] attribute of 'name'
-#      property to false successfully when [[Enumerable]] and [[Configurable]]
-#      attributes of 'name' property are true, the 'desc' is a generic
-#      descriptor which only contains [[Configurable]] attribute as false,
-#      'name' property is an index accessor property (8.12.9 step 8)
-15.2.3.6-4-82-21: FAIL
-# Bug? Object.defineProperty - Update [[Configurable]] attribute of 'name'
-#      property to false successfully when [[Enumerable]] and [[Configurable]]
-#      attributes of 'name' property are true, the 'desc' is a generic
-#      descriptor which contains [[Enumerable]] attribute as true and
-#      [[Configurable]] attribute is false, 'name' property is an index accessor
-#      property (8.12.9 step 8)
-15.2.3.6-4-82-22: FAIL
-# Bug? Object.defineProperty - Update [[Enumerable]] and [[Configurable]]
-#      attributes of 'name' property to false successfully when [[Enumerable]]
-#      and [[Configurable]] attributes of 'name' property are true, the 'desc'
-#      is a generic descriptor which contains [[Enumerable]] and
-#      [[Configurable]] attributes as false, 'name' property is an index
-#      accessor property (8.12.9 step 8)
-15.2.3.6-4-82-23: FAIL
-# Bug? Object.defineProperty - Update [[Enumerable]] attributes of 'name'
-#      property to true successfully when [[Enumerable]] attribute of 'name' is
-#      false and [[Configurable]] attribute of 'name' is true, the 'desc' is a
-#      generic descriptor which only contains [[Enumerable]] attribute as true,
-#      'name' property is an index accessor property (8.12.9 step 8)
-15.2.3.6-4-82-24: FAIL
-# Bug? Object.defineProperty - 'O' is an Array, 'name' is an array index named
-#      property, 'desc' is accessor descriptor, test updating all attribute
-#      values of 'name' (15.4.5.1 step 4.c)
-15.2.3.6-4-209: FAIL
-# Bug? Object.defineProperty - 'O' is an Array, 'name' is an array index named
-#      property, name is accessor property and 'desc' is accessor descriptor,
-#      test updating the [[Enumerable]] attribute value of 'name' (15.4.5.1 step
-#      4.c)
-15.2.3.6-4-271: FAIL
-# Bug? Object.defineProperty - 'O' is an Array, 'name' is an array index named
-#      property, name is accessor property and 'desc' is accessor descriptor,
-#      test updating the [[Configurable]] attribute value of 'name' (15.4.5.1
-#      step 4.c)
-15.2.3.6-4-272: FAIL
-# Bug? Object.defineProperty - 'O' is an Array, 'name' is an array index named
-#      property, name is accessor property and 'desc' is accessor descriptor,
-#      test updating multiple attribute values of 'name' (15.4.5.1 step 4.c)
-15.2.3.6-4-273: FAIL
-# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
-#      formal parameters, 'name' is own accessor property of 'O' which is also
-#      defined in [[ParameterMap]] of 'O', and 'desc' is accessor descriptor,
-#      test updating multiple attribute values of 'name' (10.6
-#      [[DefineOwnProperty]] step 3 and 5.a.i)
-15.2.3.6-4-291-1: FAIL
-# Bug? Object.defineProperty - 'O' is an Arguments object, 'name' is own
-#      accessor property of 'O', and 'desc' is accessor descriptor, test
-#      updating multiple attribute values of 'name' (10.6 [[DefineOwnProperty]]
-#      step 3)
-15.2.3.6-4-291: FAIL
 # Bug? Object.defineProperty - 'O' is an Arguments object of a function that has
 #      formal parameters, 'name' is own property of 'O' which is also defined in
 #      [[ParameterMap]] of 'O', and 'desc' is data descriptor, test updating
@@ -454,11 +354,6 @@
 #      updating the [[Configurable]] attribute value of 'name' which is defined
 #      as non-configurable (10.6 [[DefineOwnProperty]] step 4 and step 5b)
 15.2.3.6-4-296-1: FAIL
-# Bug? Object.defineProperty - 'O' is an Arguments object, 'name' is an index
-#      named accessor property of 'O' but not defined in [[ParameterMap]] of
-#      'O', and 'desc' is accessor descriptor, test updating multiple attribute
-#      values of 'name' (10.6 [[DefineOwnProperty]] step 3)
-15.2.3.6-4-303: FAIL
 # Bug? ES5 Attributes - indexed property 'P' with attributes [[Writable]]: true,
 #      [[Enumerable]]: true, [[Configurable]]: false is writable using simple
 #      assignment, 'O' is an Arguments object
@@ -519,30 +414,6 @@
 15.2.3.6-4-623: FAIL
 # Bug? ES5 Attributes - all attributes in Date.prototype.toJSON are correct
 15.2.3.6-4-624: FAIL
-# Bug? Object.defineProperties - 'O' is an Array, 'P' is an array index named
-#      property, 'desc' is accessor descriptor, test updating all attribute
-#      values of 'P' (15.4.5.1 step 4.c)
-15.2.3.7-6-a-205: FAIL
-# Bug? Object.defineProperties - 'O' is an Array, 'P' is an array index named
-#      property that already exists on 'O' is accessor property and 'desc' is
-#      accessor descriptor, test updating the [[Enumerable]] attribute value of
-#      'P' (15.4.5.1 step 4.c)
-15.2.3.7-6-a-260: FAIL
-# Bug? Object.defineProperties - 'O' is an Array, 'P' is an array index named
-#      property that already exists on 'O' is accessor property and 'desc' is
-#      accessor descriptor, test updating the [[Configurable]] attribute value
-#      of 'P' (15.4.5.1 step 4.c)
-15.2.3.7-6-a-261: FAIL
-# Bug? Object.defineProperties - 'O' is an Array, 'P' is an array index named
-#      property that already exists on 'O' is accessor property and 'desc' is
-#      accessor descriptor, test updating multiple attribute values of 'P'
-#      (15.4.5.1 step 4.c)
-15.2.3.7-6-a-262: FAIL
-# Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is own accessor
-#      property of 'O' which is also defined in [[ParameterMap]] of 'O', and
-#      'desc' is accessor descriptor, test updating multiple attribute values of
-#      'P' (10.6 [[DefineOwnProperty]] step 3)
-15.2.3.7-6-a-280: FAIL
 # Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is own data
 #      property of 'O' which is also defined in [[ParameterMap]] of 'O', and
 #      'desc' is data descriptor, test updating multiple attribute values of 'P'
@@ -571,32 +442,6 @@
 #      'P' which is defined as non-configurable (10.6 [[DefineOwnProperty]] step
 #      4)
 15.2.3.7-6-a-285: FAIL
-# Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is an array
-#      index named accessor property of 'O' but not defined in [[ParameterMap]]
-#      of 'O', and 'desc' is accessor descriptor, test updating multiple
-#      attribute values of 'P' (10.6 [[DefineOwnProperty]] step 3)
-15.2.3.7-6-a-292: FAIL
-# Bug? Strict Mode - 'this' value is a string which cannot be converted to
-#      wrapper objects when the function is called with an array of arguments
-15.3.4.3-1-s: FAIL
-# Bug? Strict Mode - 'this' value is a number which cannot be converted to
-#      wrapper objects when the function is called with an array of arguments
-15.3.4.3-2-s: FAIL
-# Bug? Strict Mode - 'this' value is a boolean which cannot be converted to
-#      wrapper objects when the function is called with an array of arguments
-15.3.4.3-3-s: FAIL
-# Bug? Function.prototype.bind - [[Get]] attribute of 'caller' property in 'F'
-#      is thrower
-15.3.4.5-20-2: FAIL
-# Bug? Function.prototype.bind - [[Set]] attribute of 'caller' property in 'F'
-#      is thrower
-15.3.4.5-20-3: FAIL
-# Bug? Function.prototype.bind - [[Get]] attribute of 'arguments' property in
-#      'F' is thrower
-15.3.4.5-21-2: FAIL
-# Bug? Function.prototype.bind - [[Set]] attribute of 'arguments' property in
-#      'F' is thrower
-15.3.4.5-21-3: FAIL
 # Bug? Array.prototype.indexOf - decreasing length of array does not delete
 #      non-configurable properties
 15.4.4.14-9-a-19: FAIL
@@ -615,24 +460,9 @@
 # Bug? Array.prototype.map - decreasing length of array does not delete
 #      non-configurable properties
 15.4.4.19-8-b-16: FAIL
-# Bug? Array.prototype.filter - properties can be added to prototype after
-#      current position are visited on an Array-like object
-15.4.4.20-9-b-6: FAIL
 # Bug? Array.prototype.filter - decreasing length of array does not delete
 #      non-configurable properties
 15.4.4.20-9-b-16: FAIL
-# Bug? Array.prototype.filter - element to be retrieved is own data property
-#      that overrides an inherited accessor property on an Array
-15.4.4.20-9-c-i-6: FAIL
-# Bug? Array.prototype.filter - element to be retrieved is own accessor property
-#      that overrides an inherited accessor property on an Array
-15.4.4.20-9-c-i-14: FAIL
-# Bug? Array.prototype.filter - element to be retrieved is inherited accessor
-#      property on an Array
-15.4.4.20-9-c-i-16: FAIL
-# Bug? Array.prototype.filter - element to be retrieved is inherited accessor
-#      property without a get function on an Array
-15.4.4.20-9-c-i-22: FAIL
 # Bug? Array.prototype.reduce - decreasing length of array in step 8 does not
 #      delete non-configurable properties
 15.4.4.21-9-b-16: FAIL
@@ -666,9 +496,6 @@
 # Bug? Date.prototype.toISOString - value of year is Infinity
 #      Date.prototype.toISOString throw the RangeError
 15.9.5.43-0-15: FAIL
-# Bug? Error.prototype.toString return the value of 'msg' when 'name' is empty
-#      string and 'msg' isn't undefined
-15.11.4.4-8-1: FAIL
 
 ############################ SKIPPED TESTS #############################
 
diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp
index 4812930..34ad4c4 100644
--- a/tools/gyp/v8.gyp
+++ b/tools/gyp/v8.gyp
@@ -709,7 +709,7 @@
             'experimental_library_files': [
               '../../src/macros.py',
               '../../src/proxy.js',
-              '../../src/weakmap.js',
+              '../../src/collection.js',
             ],
           },
           'actions': [