Version 3.15.2

Function::GetScriptOrigin supplies sourceURL when script name is not available.  (Chromium issue 159413)

Made error message formatting side-effect-free.  (issue 2398)

Fixed length check in JSON.stringify.  (Chromium issue 160010)

ES6: Added support for the Set and Map clear method.  (issue 2400)

Fixed slack tracking when instance prototype changes. (Chromium issue 157019)

Fixed disabling of code flusher while marking.  (Chromium issue 159140)

Added a test case for object grouping in a scavenger GC.  (issue 2077)

Support shared library build of V8 for Android.  (Chromium issue 158821)

ES6: Added support for the size property on Set and Map.  (issue 2395; see the usage sketch below)

Performance and stability improvements on all platforms.
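
Illustrative sketches for two of the entries above (these are not part of the
patch; names are invented and the snippets assume the d8 shell, so print() is
available):

  // 1. The new size accessor and clear() method on Set and Map.
  var s = new Set();
  s.add(1);
  s.add(2);
  print(s.size);    // 2 -- size is a getter, not a method
  s.clear();        // empties the set by replacing its internal table
  print(s.size);    // 0
  print(s.has(1));  // false

  var m = new Map();
  m.set("answer", 42);
  print(m.size);            // 1
  m.clear();
  print(m.get("answer"));   // undefined

  // 2. The sourceURL fallback picked up by Function::GetScriptOrigin, assuming
  //    the "//@ sourceURL=" magic-comment convention in use at the time.
  eval("function generatedFn() { return 1; }\n//@ sourceURL=generated.js");
  // An embedder calling Function::GetScriptOrigin() on generatedFn now gets
  // "generated.js" as the resource name, because the eval'd script itself has
  // no script name.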

git-svn-id: http://v8.googlecode.com/svn/trunk@12934 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/.gitignore b/.gitignore
index 0bf9313..ec0660f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,3 +50,7 @@
 /xcodebuild
 TAGS
 *.Makefile
+GTAGS
+GRTAGS
+GSYMS
+GPATH
diff --git a/ChangeLog b/ChangeLog
index daf7165..bb9ed30 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,29 @@
+2012-11-12: Version 3.15.2
+
+        Function::GetScriptOrigin supplies sourceURL when script name is
+        not available.  (Chromium issue 159413)
+
+        Made formatting error message side-effect-free.  (issue 2398)
+
+        Fixed length check in JSON.stringify.  (Chromium issue 160010)
+
+        ES6: Added support for Set and Map clear method (issue 2400)
+
+        Fixed slack tracking when instance prototype changes.
+        (Chromium issue 157019)
+
+        Fixed disabling of code flusher while marking.  (Chromium issue 159140)
+
+        Added a test case for object grouping in a scavenger GC (issue 2077)
+
+        Support shared library build of V8 for Android.
+        (Chromium issue 158821)
+
+        ES6: Added support for size to Set and Map (issue 2395)
+
+        Performance and stability improvements on all platforms.
+
+
 2012-11-06: Version 3.15.1
 
         Put incremental code flushing behind a flag. (Chromium issue 159140)
diff --git a/include/v8.h b/include/v8.h
index 89d2974..cf9275d 100644
--- a/include/v8.h
+++ b/include/v8.h
@@ -402,6 +402,18 @@
    * or followed by a global GC epilogue callback.
    */
   inline void MarkIndependent();
+  inline void MarkIndependent(Isolate* isolate);
+
+  /**
+   * Marks the reference to this object partially dependent. Partially
+   * dependent handles only depend on other partially dependent handles and
+   * these dependencies are provided through object groups. It provides a way
+   * to build smaller object groups for young objects that represent only a
+   * subset of all external dependencies. This mark is automatically cleared
+   * after each garbage collection.
+   */
+  inline void MarkPartiallyDependent();
+  inline void MarkPartiallyDependent(Isolate* isolate);
 
   /** Returns true if this handle was previously marked as independent. */
   inline bool IsIndependent() const;
@@ -3256,7 +3268,10 @@
    * After each garbage collection, object groups are removed. It is
    * intended to be used in the before-garbage-collection callback
    * function, for instance to simulate DOM tree connections among JS
-   * wrapper objects.
+   * wrapper objects. Object groups for all dependent handles need to
+   * be provided for kGCTypeMarkSweepCompact collections; for all other
+   * garbage collection types it is sufficient to provide object groups
+   * for partially dependent handles only.
    * See v8-profiler.h for RetainedObjectInfo interface description.
    */
   static void AddObjectGroup(Persistent<Value>* objects,
@@ -3497,6 +3512,11 @@
                        WeakReferenceCallback);
   static void ClearWeak(internal::Object** global_handle);
   static void MarkIndependent(internal::Object** global_handle);
+  static void MarkIndependent(internal::Isolate* isolate,
+                              internal::Object** global_handle);
+  static void MarkPartiallyDependent(internal::Object** global_handle);
+  static void MarkPartiallyDependent(internal::Isolate* isolate,
+                                     internal::Object** global_handle);
   static bool IsGlobalIndependent(internal::Object** global_handle);
   static bool IsGlobalIndependent(internal::Isolate* isolate,
                                   internal::Object** global_handle);
@@ -4102,7 +4122,7 @@
   static const int kNullValueRootIndex = 7;
   static const int kTrueValueRootIndex = 8;
   static const int kFalseValueRootIndex = 9;
-  static const int kEmptySymbolRootIndex = 117;
+  static const int kEmptySymbolRootIndex = 118;
 
   static const int kJSObjectType = 0xaa;
   static const int kFirstNonstringType = 0x80;
@@ -4286,6 +4306,23 @@
 }
 
 template <class T>
+void Persistent<T>::MarkIndependent(Isolate* isolate) {
+  V8::MarkIndependent(reinterpret_cast<internal::Isolate*>(isolate),
+                      reinterpret_cast<internal::Object**>(**this));
+}
+
+template <class T>
+void Persistent<T>::MarkPartiallyDependent() {
+  V8::MarkPartiallyDependent(reinterpret_cast<internal::Object**>(**this));
+}
+
+template <class T>
+void Persistent<T>::MarkPartiallyDependent(Isolate* isolate) {
+  V8::MarkPartiallyDependent(reinterpret_cast<internal::Isolate*>(isolate),
+                             reinterpret_cast<internal::Object**>(**this));
+}
+
+template <class T>
 void Persistent<T>::SetWrapperClassId(uint16_t class_id) {
   V8::SetWrapperClassId(reinterpret_cast<internal::Object**>(**this), class_id);
 }
diff --git a/src/accessors.cc b/src/accessors.cc
index 1bc9221..c2f245c 100644
--- a/src/accessors.cc
+++ b/src/accessors.cc
@@ -95,6 +95,47 @@
 }
 
 
+static MaybeObject* ArraySetLengthObserved(Isolate* isolate,
+                                           Handle<JSArray> array,
+                                           Handle<Object> new_length_handle) {
+  List<Handle<String> > indices;
+  List<Handle<Object> > old_values;
+  Handle<Object> old_length_handle(array->length(), isolate);
+  uint32_t old_length = 0;
+  CHECK(old_length_handle->ToArrayIndex(&old_length));
+  uint32_t new_length = 0;
+  CHECK(new_length_handle->ToArrayIndex(&new_length));
+  // TODO(adamk): This loop can be very slow for arrays in dictionary mode.
+  // Find another way to iterate over arrays with dictionary elements.
+  for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
+    PropertyAttributes attributes = array->GetLocalElementAttribute(i);
+    if (attributes == ABSENT) continue;
+    // A non-configurable property will cause the truncation operation to
+    // stop at this index.
+    if (attributes == DONT_DELETE) break;
+    // TODO(adamk): Don't fetch the old value if it's an accessor.
+    old_values.Add(Object::GetElement(array, i));
+    indices.Add(isolate->factory()->Uint32ToString(i));
+  }
+
+  MaybeObject* result = array->SetElementsLength(*new_length_handle);
+  Handle<Object> hresult;
+  if (!result->ToHandle(&hresult)) return result;
+
+  CHECK(array->length()->ToArrayIndex(&new_length));
+  if (old_length != new_length) {
+    for (int i = 0; i < indices.length(); ++i) {
+      JSObject::EnqueueChangeRecord(
+          array, "deleted", indices[i], old_values[i]);
+    }
+    JSObject::EnqueueChangeRecord(
+        array, "updated", isolate->factory()->length_symbol(),
+        old_length_handle);
+  }
+  return *hresult;
+}
+
+
 MaybeObject* Accessors::ArraySetLength(JSObject* object, Object* value, void*) {
   Isolate* isolate = object->GetIsolate();
 
@@ -112,7 +153,7 @@
   HandleScope scope(isolate);
 
   // Protect raw pointers.
-  Handle<JSObject> object_handle(object, isolate);
+  Handle<JSArray> array_handle(JSArray::cast(object), isolate);
   Handle<Object> value_handle(value, isolate);
 
   bool has_exception;
@@ -122,7 +163,11 @@
   if (has_exception) return Failure::Exception();
 
   if (uint32_v->Number() == number_v->Number()) {
-    return Handle<JSArray>::cast(object_handle)->SetElementsLength(*uint32_v);
+    if (FLAG_harmony_observation && array_handle->map()->is_observed()) {
+      return ArraySetLengthObserved(isolate, array_handle, uint32_v);
+    } else {
+      return array_handle->SetElementsLength(*uint32_v);
+    }
   }
   return isolate->Throw(
       *isolate->factory()->NewRangeError("invalid_array_length",
diff --git a/src/api.cc b/src/api.cc
index 9cb69eb..4820f9d 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -648,6 +648,27 @@
 }
 
 
+void V8::MarkIndependent(i::Isolate* isolate, i::Object** object) {
+  ASSERT(isolate == i::Isolate::Current());
+  LOG_API(isolate, "MarkIndependent");
+  isolate->global_handles()->MarkIndependent(object);
+}
+
+
+void V8::MarkPartiallyDependent(i::Object** object) {
+  i::Isolate* isolate = i::Isolate::Current();
+  LOG_API(isolate, "MarkPartiallyDependent");
+  isolate->global_handles()->MarkPartiallyDependent(object);
+}
+
+
+void V8::MarkPartiallyDependent(i::Isolate* isolate, i::Object** object) {
+  ASSERT(isolate == i::Isolate::Current());
+  LOG_API(isolate, "MarkPartiallyDependent");
+  isolate->global_handles()->MarkPartiallyDependent(object);
+}
+
+
 bool V8::IsGlobalIndependent(i::Object** obj) {
   i::Isolate* isolate = i::Isolate::Current();
   LOG_API(isolate, "IsGlobalIndependent");
@@ -3714,8 +3735,9 @@
   i::Handle<i::JSFunction> func = Utils::OpenHandle(this);
   if (func->shared()->script()->IsScript()) {
     i::Handle<i::Script> script(i::Script::cast(func->shared()->script()));
+    i::Handle<i::Object> scriptName = GetScriptNameOrSourceURL(script);
     v8::ScriptOrigin origin(
-      Utils::ToLocal(i::Handle<i::Object>(script->name())),
+      Utils::ToLocal(scriptName),
       v8::Integer::New(script->line_offset()->value()),
       v8::Integer::New(script->column_offset()->value()));
     return origin;
@@ -6512,6 +6534,7 @@
 
 
 void Testing::DeoptimizeAll() {
+  i::HandleScope scope;
   internal::Deoptimizer::DeoptimizeAll();
 }
 
diff --git a/src/arm/assembler-arm-inl.h b/src/arm/assembler-arm-inl.h
index 2bd78ab..acd61fe 100644
--- a/src/arm/assembler-arm-inl.h
+++ b/src/arm/assembler-arm-inl.h
@@ -165,6 +165,24 @@
 }
 
 
+static const int kNoCodeAgeSequenceLength = 3;
+
+Code* RelocInfo::code_age_stub() {
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  return Code::GetCodeFromTargetAddress(
+      Memory::Address_at(pc_ + Assembler::kInstrSize *
+                         (kNoCodeAgeSequenceLength - 1)));
+}
+
+
+void RelocInfo::set_code_age_stub(Code* stub) {
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  Memory::Address_at(pc_ + Assembler::kInstrSize *
+                     (kNoCodeAgeSequenceLength - 1)) =
+      stub->instruction_start();
+}
+
+
 Address RelocInfo::call_address() {
   // The 2 instructions offset assumes patched debug break slot or return
   // sequence.
@@ -238,6 +256,8 @@
     visitor->VisitGlobalPropertyCell(this);
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     visitor->VisitExternalReference(this);
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    visitor->VisitCodeAgeSequence(this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // TODO(isolates): Get a cached isolate below.
   } else if (((RelocInfo::IsJSReturn(mode) &&
@@ -264,6 +284,8 @@
     StaticVisitor::VisitGlobalPropertyCell(heap, this);
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     StaticVisitor::VisitExternalReference(this);
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    StaticVisitor::VisitCodeAgeSequence(heap, this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index 80abafd..b679efa 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -325,9 +325,7 @@
 Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
     : AssemblerBase(arg_isolate),
       recorded_ast_id_(TypeFeedbackId::None()),
-      positions_recorder_(this),
-      emit_debug_code_(FLAG_debug_code),
-      predictable_code_size_(false) {
+      positions_recorder_(this) {
   if (buffer == NULL) {
     // Do our own buffer management.
     if (buffer_size <= kMinimalBufferSize) {
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 3fab20b..8418aee 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -649,14 +649,6 @@
   Assembler(Isolate* isolate, void* buffer, int buffer_size);
   ~Assembler();
 
-  // Overrides the default provided by FLAG_debug_code.
-  void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
-
-  // Avoids using instructions that vary in size in unpredictable ways between
-  // the snapshot and the running VM.  This is needed by the full compiler so
-  // that it can recompile code with debug support and fix the PC.
-  void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
-
   // GetCode emits any pending (non-emitted) code and fills the descriptor
   // desc. GetCode() is idempotent; it returns the same result if no other
   // Assembler functions are invoked in between GetCode() calls.
@@ -1185,8 +1177,6 @@
   // Jump unconditionally to given label.
   void jmp(Label* L) { b(L, al); }
 
-  bool predictable_code_size() const { return predictable_code_size_; }
-
   static bool use_immediate_embedded_pointer_loads(
       const Assembler* assembler) {
 #ifdef USE_BLX
@@ -1345,8 +1335,6 @@
   // the relocation info.
   TypeFeedbackId recorded_ast_id_;
 
-  bool emit_debug_code() const { return emit_debug_code_; }
-
   int buffer_space() const { return reloc_info_writer.pos() - pc_; }
 
   // Decode branch instruction at pos and return branch target pos
@@ -1497,10 +1485,6 @@
   friend class BlockConstPoolScope;
 
   PositionsRecorder positions_recorder_;
-
-  bool emit_debug_code_;
-  bool predictable_code_size_;
-
   friend class PositionsRecorder;
   friend class EnsureSpace;
 };
@@ -1514,26 +1498,6 @@
 };
 
 
-class PredictableCodeSizeScope {
- public:
-  explicit PredictableCodeSizeScope(Assembler* assembler)
-      : asm_(assembler) {
-    old_value_ = assembler->predictable_code_size();
-    assembler->set_predictable_code_size(true);
-  }
-
-  ~PredictableCodeSizeScope() {
-    if (!old_value_) {
-      asm_->set_predictable_code_size(false);
-    }
-  }
-
- private:
-  Assembler* asm_;
-  bool old_value_;
-};
-
-
 } }  // namespace v8::internal
 
 #endif  // V8_ARM_ASSEMBLER_ARM_H_
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 2d1d7b1..24d14e8 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -1226,6 +1226,39 @@
 }
 
 
+static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
+  // For now, we are relying on the fact that make_code_young doesn't do any
+  // garbage collection which allows us to save/restore the registers without
+  // worrying about which of them contain pointers. We also don't build an
+  // internal frame to make the code faster, since we shouldn't have to do stack
+  // crawls in MakeCodeYoung. This seems a bit fragile.
+
+  // The following registers must be saved and restored when calling through to
+  // the runtime:
+  //   r0 - contains return address (beginning of patch sequence)
+  //   r1 - function object
+  FrameScope scope(masm, StackFrame::MANUAL);
+  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
+  __ PrepareCallCFunction(1, 0, r1);
+  __ CallCFunction(
+      ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
+  __ mov(pc, r0);
+}
+
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
+void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}                                                            \
+void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}
+CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
+
+
 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                              Deoptimizer::BailoutType type) {
   {
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index ceb108f..1e73a55 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -4923,7 +4923,7 @@
   // subject: Subject string
   // regexp_data: RegExp data (FixedArray)
   // r0: Instance type of subject string
-  STATIC_ASSERT(4 == kAsciiStringTag);
+  STATIC_ASSERT(4 == kOneByteStringTag);
   STATIC_ASSERT(kTwoByteStringTag == 0);
   // Find the code object based on the assumptions above.
   __ and_(r0, r0, Operand(kStringEncodingMask));
@@ -5999,23 +5999,28 @@
   STATIC_ASSERT(kSmiTag == 0);
   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
 
-  // I.e., arithmetic shift right by one un-smi-tags.
-  __ mov(r2, Operand(r2, ASR, 1), SetCC);
-  __ mov(r3, Operand(r3, ASR, 1), SetCC, cc);
-  // If either to or from had the smi tag bit set, then carry is set now.
-  __ b(cs, &runtime);  // Either "from" or "to" is not a smi.
+  // Arithmetic shift right by one un-smi-tags. In this case we rotate right
+  // instead because we bail out on non-smi values: ROR and ASR are equivalent
+  // for smis but they set the flags in a way that's easier to optimize.
+  __ mov(r2, Operand(r2, ROR, 1), SetCC);
+  __ mov(r3, Operand(r3, ROR, 1), SetCC, cc);
+  // If either to or from had the smi tag bit set, then C is set now, and N
+  // has the same value: we rotated by 1, so the bottom bit is now the top bit.
   // We want to bailout to runtime here if From is negative.  In that case, the
   // next instruction is not executed and we fall through to bailing out to
-  // runtime.  pl is the opposite of mi.
-  // Both r2 and r3 are untagged integers.
-  __ sub(r2, r2, Operand(r3), SetCC, pl);
-  __ b(mi, &runtime);  // Fail if from > to.
+  // runtime.
+  // Executed if both r2 and r3 are untagged integers.
+  __ sub(r2, r2, Operand(r3), SetCC, cc);
+  // One of the above un-smis or the above SUB could have set N==1.
+  __ b(mi, &runtime);  // Either "from" or "to" is not an smi, or from > to.
 
   // Make sure first argument is a string.
   __ ldr(r0, MemOperand(sp, kStringOffset));
   STATIC_ASSERT(kSmiTag == 0);
-  __ JumpIfSmi(r0, &runtime);
-  Condition is_string = masm->IsObjectStringType(r0, r1);
+  // Do a JumpIfSmi, but fold its jump into the subsequent string test.
+  __ tst(r0, Operand(kSmiTagMask));
+  Condition is_string = masm->IsObjectStringType(r0, r1, ne);
+  ASSERT(is_string == eq);
   __ b(NegateCondition(is_string), &runtime);
 
   // Short-cut for the case of trivial substring.
@@ -6086,7 +6091,7 @@
     // string's encoding is wrong because we always have to recheck encoding of
     // the newly created string's parent anyways due to externalized strings.
     Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
     STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
     __ tst(r1, Operand(kStringEncodingMask));
     __ b(eq, &two_byte_slice);
@@ -6129,7 +6134,7 @@
 
   __ bind(&allocate_result);
   // Sequential acii string.  Allocate the result.
-  STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
   __ tst(r1, Operand(kStringEncodingMask));
   __ b(eq, &two_byte_sequential);
 
@@ -6494,9 +6499,9 @@
   __ tst(r5, Operand(kAsciiDataHintMask), ne);
   __ b(ne, &ascii_data);
   __ eor(r4, r4, Operand(r5));
-  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
-  __ and_(r4, r4, Operand(kAsciiStringTag | kAsciiDataHintTag));
-  __ cmp(r4, Operand(kAsciiStringTag | kAsciiDataHintTag));
+  STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
+  __ and_(r4, r4, Operand(kOneByteStringTag | kAsciiDataHintTag));
+  __ cmp(r4, Operand(kOneByteStringTag | kAsciiDataHintTag));
   __ b(eq, &ascii_data);
 
   // Allocate a two byte cons string.
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 09166c3..209e151 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -452,6 +452,92 @@
 
 #undef __
 
+// add(r0, pc, Operand(-8))
+static const uint32_t kCodeAgePatchFirstInstruction = 0xe24f0008;
+
+static byte* GetNoCodeAgeSequence(uint32_t* length) {
+  // The sequence of instructions that is patched out for aging code is the
+  // following boilerplate stack-building prologue that is found in FUNCTIONS
+  static bool initialized = false;
+  static uint32_t sequence[kNoCodeAgeSequenceLength];
+  byte* byte_sequence = reinterpret_cast<byte*>(sequence);
+  *length = kNoCodeAgeSequenceLength * Assembler::kInstrSize;
+  if (!initialized) {
+    CodePatcher patcher(byte_sequence, kNoCodeAgeSequenceLength);
+    patcher.masm()->stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+    patcher.masm()->LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+    patcher.masm()->add(fp, sp, Operand(2 * kPointerSize));
+    initialized = true;
+  }
+  return byte_sequence;
+}
+
+
+byte* Code::FindPlatformCodeAgeSequence() {
+  byte* start = instruction_start();
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (!memcmp(start, young_sequence, young_length) ||
+      Memory::uint32_at(start) == kCodeAgePatchFirstInstruction) {
+    return start;
+  } else {
+    byte* start_after_strict = NULL;
+    if (kind() == FUNCTION) {
+      start_after_strict = start + kSizeOfFullCodegenStrictModePrologue;
+    } else {
+      ASSERT(kind() == OPTIMIZED_FUNCTION);
+      start_after_strict = start + kSizeOfOptimizedStrictModePrologue;
+    }
+    ASSERT(!memcmp(start_after_strict, young_sequence, young_length) ||
+           Memory::uint32_at(start_after_strict) ==
+           kCodeAgePatchFirstInstruction);
+    return start_after_strict;
+  }
+}
+
+
+bool Code::IsYoungSequence(byte* sequence) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  bool result = !memcmp(sequence, young_sequence, young_length);
+  ASSERT(result ||
+         Memory::uint32_at(sequence) == kCodeAgePatchFirstInstruction);
+  return result;
+}
+
+
+void Code::GetCodeAgeAndParity(byte* sequence, Age* age,
+                               MarkingParity* parity) {
+  if (IsYoungSequence(sequence)) {
+    *age = kNoAge;
+    *parity = NO_MARKING_PARITY;
+  } else {
+    Address target_address = Memory::Address_at(
+        sequence + Assembler::kInstrSize * (kNoCodeAgeSequenceLength - 1));
+    Code* stub = GetCodeFromTargetAddress(target_address);
+    GetCodeAgeAndParity(stub, age, parity);
+  }
+}
+
+
+void Code::PatchPlatformCodeAge(byte* sequence,
+                                Code::Age age,
+                                MarkingParity parity) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (age == kNoAge) {
+    memcpy(sequence, young_sequence, young_length);
+    CPU::FlushICache(sequence, young_length);
+  } else {
+    Code* stub = GetCodeAgeStub(age, parity);
+    CodePatcher patcher(sequence, young_length / Assembler::kInstrSize);
+    patcher.masm()->add(r0, pc, Operand(-8));
+    patcher.masm()->ldr(pc, MemOperand(pc, -4));
+    patcher.masm()->dd(reinterpret_cast<uint32_t>(stub->instruction_start()));
+  }
+}
+
+
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM
diff --git a/src/arm/codegen-arm.h b/src/arm/codegen-arm.h
index c340e6b..c77844d 100644
--- a/src/arm/codegen-arm.h
+++ b/src/arm/codegen-arm.h
@@ -34,6 +34,9 @@
 namespace v8 {
 namespace internal {
 
+static const int kSizeOfFullCodegenStrictModePrologue = 16;
+static const int kSizeOfOptimizedStrictModePrologue = 16;
+
 // Forward declarations
 class CompilationInfo;
 
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index be82283..03d5067 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -149,12 +149,15 @@
   // function calls.
   if (!info->is_classic_mode() || info->is_native()) {
     Label ok;
+    Label begin;
+    __ bind(&begin);
     __ cmp(r5, Operand(0));
     __ b(eq, &ok);
     int receiver_offset = info->scope()->num_parameters() * kPointerSize;
     __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
     __ str(r2, MemOperand(sp, receiver_offset));
     __ bind(&ok);
+    ASSERT_EQ(kSizeOfFullCodegenStrictModePrologue, ok.pos() - begin.pos());
   }
 
   // Open a frame scope to indicate that there is a frame on the stack.  The
@@ -164,12 +167,12 @@
 
   int locals_count = info->scope()->num_stack_slots();
 
-  __ Push(lr, fp, cp, r1);
-  if (locals_count > 0) {
-    // Load undefined value here, so the value is ready for the loop
-    // below.
-    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-  }
+  // The following four instructions must remain together and unmodified for
+  // code aging to work properly.
+  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+  // Load undefined value here, so the value is ready for the loop
+  // below.
+  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
   // Adjust fp to point to caller's fp.
   __ add(fp, sp, Operand(2 * kPointerSize));
 
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index ff6da03..17f3325 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -1881,6 +1881,7 @@
         (instr->representation().IsDouble() &&
          ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
           (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+
     LOperand* external_pointer = UseRegister(instr->elements());
     result = new(zone()) LLoadKeyed(external_pointer, key);
   }
@@ -1905,32 +1906,22 @@
 
 
 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
-  ElementsKind elements_kind = instr->elements_kind();
-  bool needs_write_barrier = instr->NeedsWriteBarrier();
-  LOperand* key = needs_write_barrier
-      ? UseTempRegister(instr->key())
-      : UseRegisterOrConstantAtStart(instr->key());
-  bool val_is_temp_register =
-      elements_kind == EXTERNAL_PIXEL_ELEMENTS ||
-      elements_kind == EXTERNAL_FLOAT_ELEMENTS;
-  LOperand* val = val_is_temp_register || needs_write_barrier
-      ? UseTempRegister(instr->value())
-      : UseRegister(instr->value());
+  LOperand* elements = UseRegisterAtStart(instr->elements());
+  LOperand* key;
+  LOperand* val;
+  if (instr->NeedsWriteBarrier()) {
+    key = UseTempRegister(instr->key());
+    val = UseTempRegister(instr->value());
+  } else {
+    key = UseRegisterOrConstantAtStart(instr->key());
+    val = UseRegisterAtStart(instr->value());
+  }
 
-  LStoreKeyed* result = NULL;
+#ifdef DEBUG
   if (!instr->is_external()) {
     ASSERT(instr->elements()->representation().IsTagged());
-
-    LOperand* object = NULL;
-    if (instr->value()->representation().IsDouble()) {
-      object = UseRegisterAtStart(instr->elements());
-    } else {
-      ASSERT(instr->value()->representation().IsTagged());
-      object = UseTempRegister(instr->elements());
-    }
-
-    result = new(zone()) LStoreKeyed(object, key, val);
   } else {
+    ElementsKind elements_kind = instr->elements_kind();
     ASSERT(
         (instr->value()->representation().IsInteger32() &&
          (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
@@ -1939,11 +1930,10 @@
          ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
           (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
     ASSERT(instr->elements()->representation().IsExternal());
-
-    LOperand* external_pointer = UseRegister(instr->elements());
-    result = new(zone()) LStoreKeyed(external_pointer, key, val);
   }
+#endif
 
+  LStoreKeyed* result = new(zone()) LStoreKeyed(elements, key, val);
   ASSERT(result != NULL);
   return result;
 }
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 8c107d9..7615134 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -138,15 +138,23 @@
   // function calls.
   if (!info_->is_classic_mode() || info_->is_native()) {
     Label ok;
+    Label begin;
+    __ bind(&begin);
     __ cmp(r5, Operand(0));
     __ b(eq, &ok);
     int receiver_offset = scope()->num_parameters() * kPointerSize;
     __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
     __ str(r2, MemOperand(sp, receiver_offset));
     __ bind(&ok);
+    ASSERT_EQ(kSizeOfOptimizedStrictModePrologue, ok.pos() - begin.pos());
   }
 
+  // The following three instructions must remain together and unmodified for
+  // code aging to work properly.
   __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+  // Add unused load of ip to ensure prologue sequence is identical for
+  // full-codegen and lithium-codegen.
+  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
   __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.
 
   // Reserve space for the stack slots needed by the code.
@@ -3040,13 +3048,12 @@
                    (instr->additional_index() << element_size_shift)));
   }
 
+  __ vldr(result, elements, 0);
   if (instr->hydrogen()->RequiresHoleCheck()) {
     __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
     __ cmp(scratch, Operand(kHoleNanUpper32));
     DeoptimizeIf(eq, instr->environment());
   }
-
-  __ vldr(result, elements, 0);
 }
 
 
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 623bd6a..dcc7149 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -422,6 +422,16 @@
 void MacroAssembler::LoadRoot(Register destination,
                               Heap::RootListIndex index,
                               Condition cond) {
+  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
+      !Heap::RootCanBeWrittenAfterInitialization(index)) {
+    Handle<Object> root(isolate()->heap()->roots_array_start()[index]);
+    if (!isolate()->heap()->InNewSpace(*root)) {
+      // The CPU supports fast immediate values, and this root will never
+      // change. We will load it as a relocatable immediate value.
+      mov(destination, Operand(root), LeaveCC, cond);
+      return;
+    }
+  }
   ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
 }
 
@@ -3684,7 +3694,7 @@
   // For ASCII (char-size of 1) we shift the smi tag away to get the length.
   // For UC16 (char-size of 2) we just leave the smi tag in place, thereby
   // getting the length multiplied by 2.
-  ASSERT(kAsciiStringTag == 4 && kStringEncodingMask == 4);
+  ASSERT(kOneByteStringTag == 4 && kStringEncodingMask == 4);
   ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
   ldr(ip, FieldMemOperand(value, String::kLengthOffset));
   tst(instance_type, Operand(kStringEncodingMask));
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index e3e39a3..1d97a6c 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -893,12 +893,15 @@
 
   // Load and check the instance type of an object for being a string.
   // Loads the type into the second argument register.
-  // Returns a condition that will be enabled if the object was a string.
+  // Returns a condition that will be enabled if the object was a string
+  // and the passed-in condition passed. If the passed-in condition failed
+  // then flags remain unchanged.
   Condition IsObjectStringType(Register obj,
-                               Register type) {
-    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
-    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
-    tst(type, Operand(kIsNotStringMask));
+                               Register type,
+                               Condition cond = al) {
+    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset), cond);
+    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset), cond);
+    tst(type, Operand(kIsNotStringMask), cond);
     ASSERT_EQ(0, kStringTag);
     return eq;
   }
@@ -1202,7 +1205,7 @@
   // Souce and destination can be the same register.
   void UntagAndJumpIfNotSmi(Register dst, Register src, Label* non_smi_case);
 
-  // Jump the register contains a smi.
+  // Jump if the register contains a smi.
   inline void JumpIfSmi(Register value, Label* smi_label) {
     tst(value, Operand(kSmiTagMask));
     b(eq, smi_label);
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index 5cdba9c..bd7f1bd 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -1387,7 +1387,14 @@
       }
 
       case ROR: {
-        UNIMPLEMENTED();
+        if (shift_amount == 0) {
+          *carry_out = c_flag_;
+        } else {
+          uint32_t left = static_cast<uint32_t>(result) >> shift_amount;
+          uint32_t right = static_cast<uint32_t>(result) << (32 - shift_amount);
+          result = right | left;
+          *carry_out = (static_cast<uint32_t>(result) >> 31) != 0;
+        }
         break;
       }
 
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index d3b5862..9fc39d4 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -3467,7 +3467,13 @@
   // r1: constructor function
   // r2: initial map
   // r7: undefined
+  ASSERT(function->has_initial_map());
   __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
+#ifdef DEBUG
+  int instance_size = function->initial_map()->instance_size();
+  __ cmp(r3, Operand(instance_size >> kPointerSizeLog2));
+  __ Check(eq, "Instance size of initial map changed.");
+#endif
   __ AllocateInNewSpace(r3, r4, r5, r6, &generic_stub_call, SIZE_IN_WORDS);
 
   // Allocated the JSObject, now initialize the fields. Map is set to initial
@@ -3525,7 +3531,6 @@
   }
 
   // Fill the unused in-object property fields with undefined.
-  ASSERT(function->has_initial_map());
   for (int i = shared->this_property_assignments_count();
        i < function->initial_map()->inobject_properties();
        i++) {
diff --git a/src/array.js b/src/array.js
index 155d565..37053ce 100644
--- a/src/array.js
+++ b/src/array.js
@@ -441,8 +441,8 @@
   }
   n--;
   var value = this[n];
-  this.length = n;
   delete this[n];
+  this.length = n;
   return value;
 }
 
@@ -581,7 +581,7 @@
 
   var first = this[0];
 
-  if (IS_ARRAY(this)) {
+  if (IS_ARRAY(this) && !%IsObserved(this)) {
     SmartMove(this, 0, 1, len, 0);
   } else {
     SimpleMove(this, 0, 1, len, 0);
@@ -602,7 +602,7 @@
   var len = TO_UINT32(this.length);
   var num_arguments = %_ArgumentsLength();
 
-  if (IS_ARRAY(this)) {
+  if (IS_ARRAY(this) && !%IsObserved(this)) {
     SmartMove(this, 0, 0, len, num_arguments);
   } else {
     SimpleMove(this, 0, 0, len, num_arguments);
@@ -649,6 +649,7 @@
   if (end_i < start_i) return result;
 
   if (IS_ARRAY(this) &&
+      !%IsObserved(this) &&
       (end_i > 1000) &&
       (%EstimateNumberOfElements(this) < end_i)) {
     SmartSlice(this, start_i, end_i - start_i, len, result);
@@ -705,7 +706,9 @@
 
   var use_simple_splice = true;
 
-  if (IS_ARRAY(this) && num_additional_args !== del_count) {
+  if (IS_ARRAY(this) &&
+      !%IsObserved(this) &&
+      num_additional_args !== del_count) {
     // If we are only deleting/moving a few things near the end of the
     // array then the simple version is going to be faster, because it
     // doesn't touch most of the array.
diff --git a/src/assembler.cc b/src/assembler.cc
index d81d4ae..87b7804 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -108,7 +108,9 @@
 
 AssemblerBase::AssemblerBase(Isolate* isolate)
     : isolate_(isolate),
-      jit_cookie_(0) {
+      jit_cookie_(0),
+      emit_debug_code_(FLAG_debug_code),
+      predictable_code_size_(false) {
   if (FLAG_mask_constants_with_cookie && isolate != NULL)  {
     jit_cookie_ = V8::RandomPrivate(isolate);
   }
@@ -313,6 +315,7 @@
 #ifdef DEBUG
   byte* begin_pos = pos_;
 #endif
+  ASSERT(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
   ASSERT(rinfo->pc() - last_pc_ >= 0);
   ASSERT(RelocInfo::LAST_STANDARD_NONCOMPACT_ENUM - RelocInfo::LAST_COMPACT_ENUM
          <= kMaxStandardNonCompactModes);
@@ -570,6 +573,15 @@
       }
     }
   }
+  if (code_age_sequence_ != NULL) {
+    byte* old_code_age_sequence = code_age_sequence_;
+    code_age_sequence_ = NULL;
+    if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
+      rinfo_.data_ = 0;
+      rinfo_.pc_ = old_code_age_sequence;
+      return;
+    }
+  }
   done_ = true;
 }
 
@@ -585,6 +597,12 @@
   mode_mask_ = mode_mask;
   last_id_ = 0;
   last_position_ = 0;
+  byte* sequence = code->FindCodeAgeSequence();
+  if (sequence != NULL && !Code::IsYoungSequence(sequence)) {
+    code_age_sequence_ = sequence;
+  } else {
+    code_age_sequence_ = NULL;
+  }
   if (mode_mask_ == 0) pos_ = end_;
   next();
 }
@@ -600,6 +618,7 @@
   mode_mask_ = mode_mask;
   last_id_ = 0;
   last_position_ = 0;
+  code_age_sequence_ = NULL;
   if (mode_mask_ == 0) pos_ = end_;
   next();
 }
@@ -652,6 +671,8 @@
       UNREACHABLE();
 #endif
       return "debug break slot";
+    case RelocInfo::CODE_AGE_SEQUENCE:
+      return "code_age_sequence";
     case RelocInfo::NUMBER_OF_MODES:
       UNREACHABLE();
       return "number_of_modes";
@@ -739,6 +760,9 @@
     case NUMBER_OF_MODES:
       UNREACHABLE();
       break;
+    case CODE_AGE_SEQUENCE:
+      ASSERT(Code::IsYoungSequence(pc_) || code_age_stub()->IsCode());
+      break;
   }
 }
 #endif  // VERIFY_HEAP
@@ -874,6 +898,13 @@
 }
 
 
+ExternalReference ExternalReference::get_make_code_young_function(
+    Isolate* isolate) {
+  return ExternalReference(Redirect(
+      isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
+}
+
+
 ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
   return ExternalReference(isolate->date_cache()->stamp_address());
 }
diff --git a/src/assembler.h b/src/assembler.h
index a0e55cc..037799d 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -59,7 +59,13 @@
   explicit AssemblerBase(Isolate* isolate);
 
   Isolate* isolate() const { return isolate_; }
-  int jit_cookie() { return jit_cookie_; }
+  int jit_cookie() const { return jit_cookie_; }
+
+  bool emit_debug_code() const { return emit_debug_code_; }
+  void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
+
+  bool predictable_code_size() const { return predictable_code_size_; }
+  void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
 
   // Overwrite a host NaN with a quiet target NaN.  Used by mksnapshot for
   // cross-snapshotting.
@@ -68,6 +74,28 @@
  private:
   Isolate* isolate_;
   int jit_cookie_;
+  bool emit_debug_code_;
+  bool predictable_code_size_;
+};
+
+
+// Avoids using instructions that vary in size in unpredictable ways between the
+// snapshot and the running VM.
+class PredictableCodeSizeScope {
+ public:
+  explicit PredictableCodeSizeScope(AssemblerBase* assembler)
+      : assembler_(assembler) {
+    old_value_ = assembler_->predictable_code_size();
+    assembler_->set_predictable_code_size(true);
+  }
+
+  ~PredictableCodeSizeScope() {
+    assembler_->set_predictable_code_size(old_value_);
+  }
+
+ private:
+  AssemblerBase* assembler_;
+  bool old_value_;
 };
 
 
@@ -211,6 +239,12 @@
     // Pseudo-types
     NUMBER_OF_MODES,  // There are at most 15 modes with noncompact encoding.
     NONE,  // never recorded
+    CODE_AGE_SEQUENCE,  // Not stored in RelocInfo array, used explicitly by
+                        // code aging.
+    FIRST_REAL_RELOC_MODE = CODE_TARGET,
+    LAST_REAL_RELOC_MODE = CONST_POOL,
+    FIRST_PSEUDO_RELOC_MODE = CODE_AGE_SEQUENCE,
+    LAST_PSEUDO_RELOC_MODE = CODE_AGE_SEQUENCE,
     LAST_CODE_ENUM = DEBUG_BREAK,
     LAST_GCED_ENUM = GLOBAL_PROPERTY_CELL,
     // Modes <= LAST_COMPACT_ENUM are guaranteed to have compact encoding.
@@ -225,6 +259,15 @@
       : pc_(pc), rmode_(rmode), data_(data), host_(host) {
   }
 
+  static inline bool IsRealRelocMode(Mode mode) {
+    return mode >= FIRST_REAL_RELOC_MODE &&
+        mode <= LAST_REAL_RELOC_MODE;
+  }
+  static inline bool IsPseudoRelocMode(Mode mode) {
+    ASSERT(!IsRealRelocMode(mode));
+    return mode >= FIRST_PSEUDO_RELOC_MODE &&
+        mode <= LAST_PSEUDO_RELOC_MODE;
+  }
   static inline bool IsConstructCall(Mode mode) {
     return mode == CONSTRUCT_CALL;
   }
@@ -262,6 +305,9 @@
   static inline bool IsDebugBreakSlot(Mode mode) {
     return mode == DEBUG_BREAK_SLOT;
   }
+  static inline bool IsCodeAgeSequence(Mode mode) {
+    return mode == CODE_AGE_SEQUENCE;
+  }
   static inline int ModeMask(Mode mode) { return 1 << mode; }
 
   // Accessors
@@ -294,7 +340,8 @@
   INLINE(Handle<JSGlobalPropertyCell> target_cell_handle());
   INLINE(void set_target_cell(JSGlobalPropertyCell* cell,
                               WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
-
+  INLINE(Code* code_age_stub());
+  INLINE(void set_code_age_stub(Code* stub));
 
   // Read the address of the word containing the target_address in an
   // instruction stream.  What this means exactly is architecture-independent.
@@ -487,6 +534,7 @@
 
   byte* pos_;
   byte* end_;
+  byte* code_age_sequence_;
   RelocInfo rinfo_;
   bool done_;
   int mode_mask_;
@@ -595,6 +643,8 @@
   static ExternalReference get_date_field_function(Isolate* isolate);
   static ExternalReference date_cache_stamp(Isolate* isolate);
 
+  static ExternalReference get_make_code_young_function(Isolate* isolate);
+
   // Deoptimization support.
   static ExternalReference new_deoptimizer_function(Isolate* isolate);
   static ExternalReference compute_output_frames_function(Isolate* isolate);
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 5e1f80e..ffa5283 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -1415,6 +1415,11 @@
     INSTALL_NATIVE(JSFunction, "DerivedSetTrap", derived_set_trap);
     INSTALL_NATIVE(JSFunction, "ProxyEnumerate", proxy_enumerate);
   }
+  if (FLAG_harmony_observation) {
+    INSTALL_NATIVE(JSFunction, "NotifyChange", observers_notify_change);
+    INSTALL_NATIVE(JSFunction, "DeliverChangeRecords",
+                   observers_deliver_changes);
+  }
 }
 
 #undef INSTALL_NATIVE
@@ -1828,7 +1833,7 @@
                "native collection.js") == 0) {
       if (!CompileExperimentalBuiltin(isolate(), i)) return false;
     }
-    if (FLAG_harmony_object_observe &&
+    if (FLAG_harmony_observation &&
         strcmp(ExperimentalNatives::GetScriptName(i).start(),
                "native object-observe.js") == 0) {
       if (!CompileExperimentalBuiltin(isolate(), i)) return false;
diff --git a/src/builtins.cc b/src/builtins.cc
index df70cd4..620e4b3 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -510,6 +510,10 @@
   FixedArray* elms = FixedArray::cast(elms_obj);
   JSArray* array = JSArray::cast(receiver);
 
+  if (FLAG_harmony_observation && array->map()->is_observed()) {
+    return CallJsBuiltin(isolate, "ArrayPush", args);
+  }
+
   int len = Smi::cast(array->length())->value();
   int to_add = args.length() - 1;
   if (to_add == 0) {
@@ -566,11 +570,15 @@
   FixedArray* elms = FixedArray::cast(elms_obj);
   JSArray* array = JSArray::cast(receiver);
 
+  if (FLAG_harmony_observation && array->map()->is_observed()) {
+    return CallJsBuiltin(isolate, "ArrayPop", args);
+  }
+
   int len = Smi::cast(array->length())->value();
   if (len == 0) return heap->undefined_value();
 
   // Get top element
-  MaybeObject* top = elms->get(len - 1);
+  Object* top = elms->get(len - 1);
 
   // Set the length.
   array->set_length(Smi::FromInt(len - 1));
@@ -581,9 +589,7 @@
     return top;
   }
 
-  top = array->GetPrototype()->GetElement(len - 1);
-
-  return top;
+  return array->GetPrototype()->GetElement(len - 1);
 }
 
 
@@ -604,6 +610,10 @@
   JSArray* array = JSArray::cast(receiver);
   ASSERT(array->HasFastSmiOrObjectElements());
 
+  if (FLAG_harmony_observation && array->map()->is_observed()) {
+    return CallJsBuiltin(isolate, "ArrayShift", args);
+  }
+
   int len = Smi::cast(array->length())->value();
   if (len == 0) return heap->undefined_value();
 
@@ -646,6 +656,10 @@
   JSArray* array = JSArray::cast(receiver);
   ASSERT(array->HasFastSmiOrObjectElements());
 
+  if (FLAG_harmony_observation && array->map()->is_observed()) {
+    return CallJsBuiltin(isolate, "ArrayUnshift", args);
+  }
+
   int len = Smi::cast(array->length())->value();
   int to_add = args.length() - 1;
   int new_length = len + to_add;
@@ -802,6 +816,10 @@
   JSArray* array = JSArray::cast(receiver);
   ASSERT(array->HasFastSmiOrObjectElements());
 
+  if (FLAG_harmony_observation && array->map()->is_observed()) {
+    return CallJsBuiltin(isolate, "ArraySplice", args);
+  }
+
   int len = Smi::cast(array->length())->value();
 
   int n_arguments = args.length() - 1;
diff --git a/src/builtins.h b/src/builtins.h
index ca70ae5..a2f752e 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -38,6 +38,25 @@
 };
 
 
+#define CODE_AGE_LIST_WITH_ARG(V, A)     \
+  V(Quadragenarian, A)                   \
+  V(Quinquagenarian, A)                  \
+  V(Sexagenarian, A)                     \
+  V(Septuagenarian, A)                   \
+  V(Octogenarian, A)
+
+#define CODE_AGE_LIST_IGNORE_ARG(X, V) V(X)
+
+#define CODE_AGE_LIST(V) \
+  CODE_AGE_LIST_WITH_ARG(CODE_AGE_LIST_IGNORE_ARG, V)
+
+#define DECLARE_CODE_AGE_BUILTIN(C, V)             \
+  V(Make##C##CodeYoungAgainOddMarking, BUILTIN,    \
+    UNINITIALIZED, Code::kNoExtraICState)          \
+  V(Make##C##CodeYoungAgainEvenMarking, BUILTIN,   \
+    UNINITIALIZED, Code::kNoExtraICState)
+
+
 // Define list of builtins implemented in C++.
 #define BUILTIN_LIST_C(V)                                           \
   V(Illegal, NO_EXTRA_ARGUMENTS)                                    \
@@ -195,8 +214,8 @@
                                     Code::kNoExtraICState)              \
                                                                         \
   V(OnStackReplacement,             BUILTIN, UNINITIALIZED,             \
-                                    Code::kNoExtraICState)
-
+                                    Code::kNoExtraICState)              \
+  CODE_AGE_LIST_WITH_ARG(DECLARE_CODE_AGE_BUILTIN, V)
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
 // Define list of builtins used by the debugger implemented in assembly.
@@ -379,6 +398,14 @@
   static void Generate_StringConstructCode(MacroAssembler* masm);
   static void Generate_OnStackReplacement(MacroAssembler* masm);
 
+#define DECLARE_CODE_AGE_BUILTIN_GENERATOR(C)                \
+  static void Generate_Make##C##CodeYoungAgainEvenMarking(   \
+      MacroAssembler* masm);                                 \
+  static void Generate_Make##C##CodeYoungAgainOddMarking(    \
+      MacroAssembler* masm);
+  CODE_AGE_LIST(DECLARE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DECLARE_CODE_AGE_BUILTIN_GENERATOR
+
   static void InitBuiltinFunctionTable();
 
   bool initialized_;
diff --git a/src/code-stubs.h b/src/code-stubs.h
index a843841..e4384e7 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -1171,6 +1171,8 @@
   // non-NULL hook.
   static bool SetFunctionEntryHook(FunctionEntryHook entry_hook);
 
+  static bool HasEntryHook() { return entry_hook_ != NULL; }
+
  private:
   static void EntryHookTrampoline(intptr_t function,
                                   intptr_t stack_pointer);
diff --git a/src/collection.js b/src/collection.js
index d36fe18..b3c2db7 100644
--- a/src/collection.js
+++ b/src/collection.js
@@ -88,6 +88,25 @@
 }
 
 
+function SetGetSize() {
+  if (!IS_SET(this)) {
+    throw MakeTypeError('incompatible_method_receiver',
+                        ['Set.prototype.size', this]);
+  }
+  return %SetGetSize(this);
+}
+
+
+function SetClear() {
+  if (!IS_SET(this)) {
+    throw MakeTypeError('incompatible_method_receiver',
+                        ['Set.prototype.clear', this]);
+  }
+  // Replace the internal table with a new empty table.
+  %SetInitialize(this);
+}
+
+
 function MapConstructor() {
   if (%_IsConstructCall()) {
     %MapInitialize(this);
@@ -145,6 +164,25 @@
 }
 
 
+function MapGetSize() {
+  if (!IS_MAP(this)) {
+    throw MakeTypeError('incompatible_method_receiver',
+                        ['Map.prototype.size', this]);
+  }
+  return %MapGetSize(this);
+}
+
+
+function MapClear() {
+  if (!IS_MAP(this)) {
+    throw MakeTypeError('incompatible_method_receiver',
+                        ['Map.prototype.clear', this]);
+  }
+  // Replace the internal table with a new empty table.
+  %MapInitialize(this);
+}
+
+
 function WeakMapConstructor() {
   if (%_IsConstructCall()) {
     %WeakMapInitialize(this);
@@ -215,18 +253,22 @@
   %SetProperty($Map.prototype, "constructor", $Map, DONT_ENUM);
 
   // Set up the non-enumerable functions on the Set prototype object.
+  InstallGetter($Set.prototype, "size", SetGetSize);
   InstallFunctions($Set.prototype, DONT_ENUM, $Array(
     "add", SetAdd,
     "has", SetHas,
-    "delete", SetDelete
+    "delete", SetDelete,
+    "clear", SetClear
   ));
 
   // Set up the non-enumerable functions on the Map prototype object.
+  InstallGetter($Map.prototype, "size", MapGetSize);
   InstallFunctions($Map.prototype, DONT_ENUM, $Array(
     "get", MapGet,
     "set", MapSet,
     "has", MapHas,
-    "delete", MapDelete
+    "delete", MapDelete,
+    "clear", MapClear
   ));
 
   // Set up the WeakMap constructor function.
diff --git a/src/compiler.cc b/src/compiler.cc
index 8637437..710c61e 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -609,6 +609,7 @@
     if (result->ic_age() != HEAP->global_ic_age()) {
       result->ResetForNewContext(HEAP->global_ic_age());
     }
+    result->code()->MakeYoung();
   }
 
   if (result.is_null()) isolate->ReportPendingMessages();
@@ -670,6 +671,7 @@
     if (result->ic_age() != HEAP->global_ic_age()) {
       result->ResetForNewContext(HEAP->global_ic_age());
     }
+    result->code()->MakeYoung();
   }
 
   return result;
diff --git a/src/contexts.h b/src/contexts.h
index 28e4af5..f44d15d 100644
--- a/src/contexts.h
+++ b/src/contexts.h
@@ -161,7 +161,9 @@
   V(DERIVED_HAS_TRAP_INDEX, JSFunction, derived_has_trap) \
   V(DERIVED_GET_TRAP_INDEX, JSFunction, derived_get_trap) \
   V(DERIVED_SET_TRAP_INDEX, JSFunction, derived_set_trap) \
-  V(PROXY_ENUMERATE, JSFunction, proxy_enumerate) \
+  V(PROXY_ENUMERATE_INDEX, JSFunction, proxy_enumerate) \
+  V(OBSERVERS_NOTIFY_CHANGE_INDEX, JSFunction, observers_notify_change) \
+  V(OBSERVERS_DELIVER_CHANGES_INDEX, JSFunction, observers_deliver_changes) \
   V(RANDOM_SEED_INDEX, ByteArray, random_seed)
 
 // JSFunctions are pairs (context, function code), sometimes also called
@@ -288,7 +290,9 @@
     DERIVED_HAS_TRAP_INDEX,
     DERIVED_GET_TRAP_INDEX,
     DERIVED_SET_TRAP_INDEX,
-    PROXY_ENUMERATE,
+    PROXY_ENUMERATE_INDEX,
+    OBSERVERS_NOTIFY_CHANGE_INDEX,
+    OBSERVERS_DELIVER_CHANGES_INDEX,
     RANDOM_SEED_INDEX,
 
     // Properties from here are treated as weak references by the full GC.
diff --git a/src/debug.cc b/src/debug.cc
index 48c5519..ec25acc 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -261,8 +261,12 @@
   // Create relocation iterators for the two code objects.
   if (reloc_iterator_ != NULL) delete reloc_iterator_;
   if (reloc_iterator_original_ != NULL) delete reloc_iterator_original_;
-  reloc_iterator_ = new RelocIterator(debug_info_->code());
-  reloc_iterator_original_ = new RelocIterator(debug_info_->original_code());
+  reloc_iterator_ = new RelocIterator(
+      debug_info_->code(),
+      ~RelocInfo::ModeMask(RelocInfo::CODE_AGE_SEQUENCE));
+  reloc_iterator_original_ = new RelocIterator(
+      debug_info_->original_code(),
+      ~RelocInfo::ModeMask(RelocInfo::CODE_AGE_SEQUENCE));
 
   // Position at the first break point.
   break_point_ = -1;
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index e757656..9d16211 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -41,8 +41,11 @@
 namespace internal {
 
 DeoptimizerData::DeoptimizerData() {
-  eager_deoptimization_entry_code_ = NULL;
-  lazy_deoptimization_entry_code_ = NULL;
+  eager_deoptimization_entry_code_entries_ = -1;
+  lazy_deoptimization_entry_code_entries_ = -1;
+  size_t deopt_table_size = Deoptimizer::GetMaxDeoptTableSize();
+  eager_deoptimization_entry_code_ = new VirtualMemory(deopt_table_size);
+  lazy_deoptimization_entry_code_ = new VirtualMemory(deopt_table_size);
   current_ = NULL;
   deoptimizing_code_list_ = NULL;
 #ifdef ENABLE_DEBUGGER_SUPPORT
@@ -52,16 +55,11 @@
 
 
 DeoptimizerData::~DeoptimizerData() {
-  if (eager_deoptimization_entry_code_ != NULL) {
-    Isolate::Current()->memory_allocator()->Free(
-        eager_deoptimization_entry_code_);
-    eager_deoptimization_entry_code_ = NULL;
-  }
-  if (lazy_deoptimization_entry_code_ != NULL) {
-    Isolate::Current()->memory_allocator()->Free(
-        lazy_deoptimization_entry_code_);
-    lazy_deoptimization_entry_code_ = NULL;
-  }
+  delete eager_deoptimization_entry_code_;
+  eager_deoptimization_entry_code_ = NULL;
+  delete lazy_deoptimization_entry_code_;
+  lazy_deoptimization_entry_code_ = NULL;
+
   DeoptimizingCodeListNode* current = deoptimizing_code_list_;
   while (current != NULL) {
     DeoptimizingCodeListNode* prev = current;
@@ -103,6 +101,20 @@
 }
 
 
+// No larger than 2K on all platforms
+static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;
+
+
+size_t Deoptimizer::GetMaxDeoptTableSize() {
+  int entries_size =
+      Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
+  int commit_page_size = static_cast<int>(OS::CommitPageSize());
+  int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
+                    commit_page_size) + 1;
+  return static_cast<size_t>(commit_page_size * page_count);
+}
+
+
 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
   ASSERT(isolate == Isolate::Current());
   Deoptimizer* result = isolate->deoptimizer_data()->current_;
@@ -461,44 +473,45 @@
 }
 
 
-Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) {
+Address Deoptimizer::GetDeoptimizationEntry(int id,
+                                            BailoutType type,
+                                            GetEntryMode mode) {
   ASSERT(id >= 0);
-  if (id >= kNumberOfEntries) return NULL;
-  MemoryChunk* base = NULL;
+  if (id >= kMaxNumberOfEntries) return NULL;
+  VirtualMemory* base = NULL;
+  if (mode == ENSURE_ENTRY_CODE) {
+    EnsureCodeForDeoptimizationEntry(type, id);
+  } else {
+    ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
+  }
   DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
   if (type == EAGER) {
-    if (data->eager_deoptimization_entry_code_ == NULL) {
-      data->eager_deoptimization_entry_code_ = CreateCode(type);
-    }
     base = data->eager_deoptimization_entry_code_;
   } else {
-    if (data->lazy_deoptimization_entry_code_ == NULL) {
-      data->lazy_deoptimization_entry_code_ = CreateCode(type);
-    }
     base = data->lazy_deoptimization_entry_code_;
   }
   return
-      static_cast<Address>(base->area_start()) + (id * table_entry_size_);
+      static_cast<Address>(base->address()) + (id * table_entry_size_);
 }
 
 
 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) {
-  MemoryChunk* base = NULL;
+  VirtualMemory* base = NULL;
   DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
   if (type == EAGER) {
     base = data->eager_deoptimization_entry_code_;
   } else {
     base = data->lazy_deoptimization_entry_code_;
   }
+  Address base_casted = reinterpret_cast<Address>(base->address());
   if (base == NULL ||
-      addr < base->area_start() ||
-      addr >= base->area_start() +
-          (kNumberOfEntries * table_entry_size_)) {
+      addr < base->address() ||
+      addr >= base_casted + (kMaxNumberOfEntries * table_entry_size_)) {
     return kNotDeoptimizationEntry;
   }
   ASSERT_EQ(0,
-      static_cast<int>(addr - base->area_start()) % table_entry_size_);
-  return static_cast<int>(addr - base->area_start()) / table_entry_size_;
+            static_cast<int>(addr - base_casted) % table_entry_size_);
+  return static_cast<int>(addr - base_casted) / table_entry_size_;
 }
 
 
@@ -1384,31 +1397,44 @@
 }
 
 
-MemoryChunk* Deoptimizer::CreateCode(BailoutType type) {
+void Deoptimizer::EnsureCodeForDeoptimizationEntry(BailoutType type,
+                                                   int max_entry_id) {
   // We cannot run this if the serializer is enabled because this will
   // cause us to emit relocation information for the external
   // references. This is fine because the deoptimizer's code section
   // isn't meant to be serialized at all.
   ASSERT(!Serializer::enabled());
 
+  ASSERT(type == EAGER || type == LAZY);
+  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
+  int entry_count = (type == EAGER)
+      ? data->eager_deoptimization_entry_code_entries_
+      : data->lazy_deoptimization_entry_code_entries_;
+  if (max_entry_id < entry_count) return;
+  entry_count = Min(Max(entry_count * 2, Deoptimizer::kMinNumberOfEntries),
+                    Deoptimizer::kMaxNumberOfEntries);
+
   MacroAssembler masm(Isolate::Current(), NULL, 16 * KB);
   masm.set_emit_debug_code(false);
-  GenerateDeoptimizationEntries(&masm, kNumberOfEntries, type);
+  GenerateDeoptimizationEntries(&masm, entry_count, type);
   CodeDesc desc;
   masm.GetCode(&desc);
   ASSERT(desc.reloc_size == 0);
 
-  MemoryChunk* chunk =
-      Isolate::Current()->memory_allocator()->AllocateChunk(desc.instr_size,
-                                                            EXECUTABLE,
-                                                            NULL);
-  ASSERT(chunk->area_size() >= desc.instr_size);
-  if (chunk == NULL) {
-    V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table");
+  VirtualMemory* memory = type == EAGER
+      ? data->eager_deoptimization_entry_code_
+      : data->lazy_deoptimization_entry_code_;
+  size_t table_size = Deoptimizer::GetMaxDeoptTableSize();
+  ASSERT(static_cast<int>(table_size) >= desc.instr_size);
+  CHECK(memory->Commit(memory->address(), table_size, true));
+  memcpy(memory->address(), desc.buffer, desc.instr_size);
+  CPU::FlushICache(memory->address(), desc.instr_size);
+
+  if (type == EAGER) {
+    data->eager_deoptimization_entry_code_entries_ = entry_count;
+  } else {
+    data->lazy_deoptimization_entry_code_entries_ = entry_count;
   }
-  memcpy(chunk->area_start(), desc.buffer, desc.instr_size);
-  CPU::FlushICache(chunk->area_start(), desc.instr_size);
-  return chunk;
 }
 
 
diff --git a/src/deoptimizer.h b/src/deoptimizer.h
index f67f986..4aa38ce 100644
--- a/src/deoptimizer.h
+++ b/src/deoptimizer.h
@@ -100,8 +100,10 @@
 #endif
 
  private:
-  MemoryChunk* eager_deoptimization_entry_code_;
-  MemoryChunk* lazy_deoptimization_entry_code_;
+  int eager_deoptimization_entry_code_entries_;
+  int lazy_deoptimization_entry_code_entries_;
+  VirtualMemory* eager_deoptimization_entry_code_;
+  VirtualMemory* lazy_deoptimization_entry_code_;
   Deoptimizer* current_;
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
@@ -226,7 +228,17 @@
 
   static void ComputeOutputFrames(Deoptimizer* deoptimizer);
 
-  static Address GetDeoptimizationEntry(int id, BailoutType type);
+
+  enum GetEntryMode {
+    CALCULATE_ENTRY_ADDRESS,
+    ENSURE_ENTRY_CODE
+  };
+
+
+  static Address GetDeoptimizationEntry(
+      int id,
+      BailoutType type,
+      GetEntryMode mode = ENSURE_ENTRY_CODE);
   static int GetDeoptimizationId(Address addr, BailoutType type);
   static int GetOutputInfo(DeoptimizationOutputData* data,
                            BailoutId node_id,
@@ -283,8 +295,11 @@
 
   int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
 
+  static size_t GetMaxDeoptTableSize();
+
  private:
-  static const int kNumberOfEntries = 16384;
+  static const int kMinNumberOfEntries = 64;
+  static const int kMaxNumberOfEntries = 16384;
 
   Deoptimizer(Isolate* isolate,
               JSFunction* function,
@@ -327,7 +342,8 @@
   void AddArgumentsObjectValue(intptr_t value);
   void AddDoubleValue(intptr_t slot_address, double value);
 
-  static MemoryChunk* CreateCode(BailoutType type);
+  static void EnsureCodeForDeoptimizationEntry(BailoutType type,
+                                               int max_entry_id);
   static void GenerateDeoptimizationEntries(
       MacroAssembler* masm, int count, BailoutType type);
 
diff --git a/src/elements.cc b/src/elements.cc
index 3d18829..8cb48c6 100644
--- a/src/elements.cc
+++ b/src/elements.cc
@@ -528,9 +528,8 @@
                              JSObject* holder,
                              uint32_t key,
                              BackingStore* backing_store) {
-    MaybeObject* element =
-        ElementsAccessorSubclass::GetImpl(receiver, holder, key, backing_store);
-    return !element->IsTheHole();
+    return ElementsAccessorSubclass::GetAttributesImpl(
+        receiver, holder, key, backing_store) != ABSENT;
   }
 
   virtual bool HasElement(Object* receiver,
@@ -564,6 +563,29 @@
            : backing_store->GetHeap()->the_hole_value();
   }
 
+  MUST_USE_RESULT virtual PropertyAttributes GetAttributes(
+      Object* receiver,
+      JSObject* holder,
+      uint32_t key,
+      FixedArrayBase* backing_store) {
+    if (backing_store == NULL) {
+      backing_store = holder->elements();
+    }
+    return ElementsAccessorSubclass::GetAttributesImpl(
+        receiver, holder, key, BackingStore::cast(backing_store));
+  }
+
+  MUST_USE_RESULT static PropertyAttributes GetAttributesImpl(
+      Object* receiver,
+      JSObject* obj,
+      uint32_t key,
+      BackingStore* backing_store) {
+    if (key >= ElementsAccessorSubclass::GetCapacityImpl(backing_store)) {
+      return ABSENT;
+    }
+    return backing_store->is_the_hole(key) ? ABSENT : NONE;
+  }
+
   MUST_USE_RESULT virtual MaybeObject* SetLength(JSArray* array,
                                                  Object* length) {
     return ElementsAccessorSubclass::SetLengthImpl(
@@ -1143,6 +1165,16 @@
         : backing_store->GetHeap()->undefined_value();
   }
 
+  MUST_USE_RESULT static PropertyAttributes GetAttributesImpl(
+      Object* receiver,
+      JSObject* obj,
+      uint32_t key,
+      BackingStore* backing_store) {
+    return
+        key < ExternalElementsAccessorSubclass::GetCapacityImpl(backing_store)
+        ? NONE : ABSENT;
+  }
+
   MUST_USE_RESULT static MaybeObject* SetLengthImpl(
       JSObject* obj,
       Object* length,
@@ -1431,6 +1463,18 @@
     return obj->GetHeap()->the_hole_value();
   }
 
+  MUST_USE_RESULT static PropertyAttributes GetAttributesImpl(
+      Object* receiver,
+      JSObject* obj,
+      uint32_t key,
+      SeededNumberDictionary* backing_store) {
+    int entry = backing_store->FindEntry(key);
+    if (entry != SeededNumberDictionary::kNotFound) {
+      return backing_store->DetailsAt(entry).attributes();
+    }
+    return ABSENT;
+  }
+
   static bool HasElementImpl(Object* receiver,
                              JSObject* holder,
                              uint32_t key,
@@ -1490,6 +1534,22 @@
     }
   }
 
+  MUST_USE_RESULT static PropertyAttributes GetAttributesImpl(
+      Object* receiver,
+      JSObject* obj,
+      uint32_t key,
+      FixedArray* parameter_map) {
+    Object* probe = GetParameterMapArg(obj, parameter_map, key);
+    if (!probe->IsTheHole()) {
+      return NONE;
+    } else {
+      // If not aliased, check the arguments.
+      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
+      return ElementsAccessor::ForArray(arguments)->GetAttributes(
+          receiver, obj, key, arguments);
+    }
+  }
+
   MUST_USE_RESULT static MaybeObject* SetLengthImpl(
       JSObject* obj,
       Object* length,
diff --git a/src/elements.h b/src/elements.h
index 822fca5..8a83f0f 100644
--- a/src/elements.h
+++ b/src/elements.h
@@ -71,6 +71,17 @@
       uint32_t key,
       FixedArrayBase* backing_store = NULL) = 0;
 
+  // Returns an element's attributes, or ABSENT if there is no such
+  // element. This method doesn't iterate up the prototype chain.  The caller
+  // can optionally pass in the backing store to use for the check, which must
+  // be compatible with the ElementsKind of the ElementsAccessor. If
+  // backing_store is NULL, the holder->elements() is used as the backing store.
+  MUST_USE_RESULT virtual PropertyAttributes GetAttributes(
+      Object* receiver,
+      JSObject* holder,
+      uint32_t key,
+      FixedArrayBase* backing_store = NULL) = 0;
+
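
As a rough standalone model of the contract documented above (simplified types, not V8's; the enum values and the hole representation are illustrative assumptions): out-of-range keys and holes report ABSENT, plain fast elements report NONE.

#include <cstdint>
#include <optional>
#include <vector>

enum PropertyAttributes { NONE, READ_ONLY, ABSENT };  // simplified stand-in

// Fast-elements style backing store: each slot holds a value or the hole.
using BackingStore = std::vector<std::optional<int>>;

PropertyAttributes GetAttributes(const BackingStore& store, uint32_t key) {
  if (key >= store.size()) return ABSENT;      // beyond capacity
  if (!store[key].has_value()) return ABSENT;  // the hole
  return NONE;                                 // fast elements carry no flags
}

A dictionary-backed store would instead return the attributes recorded in its per-entry details, as the SeededNumberDictionary variant in this patch does.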
   // Modifies the length data property as specified for JSArrays and resizes the
   // underlying backing store accordingly. The method honors the semantics of
   // changing array sizes as defined in EcmaScript 5.1 15.4.5.2, i.e. array that
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 325bd4b..694dbea 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -144,16 +144,16 @@
 DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
 DEFINE_bool(harmony_collections, false,
             "enable harmony collections (sets, maps, and weak maps)")
-DEFINE_bool(harmony_object_observe, false,
+DEFINE_bool(harmony_observation, false,
             "enable harmony object observation (implies harmony collections")
 DEFINE_bool(harmony, false, "enable all harmony features (except typeof)")
 DEFINE_implication(harmony, harmony_scoping)
 DEFINE_implication(harmony, harmony_modules)
 DEFINE_implication(harmony, harmony_proxies)
 DEFINE_implication(harmony, harmony_collections)
-DEFINE_implication(harmony, harmony_object_observe)
+DEFINE_implication(harmony, harmony_observation)
 DEFINE_implication(harmony_modules, harmony_scoping)
-DEFINE_implication(harmony_object_observe, harmony_collections)
+DEFINE_implication(harmony_observation, harmony_collections)
 
 // Flags for experimental implementation features.
 DEFINE_bool(packed_arrays, true, "optimizes arrays that have no holes")
@@ -396,6 +396,9 @@
             "flush code that we expect not to use again (during full gc)")
 DEFINE_bool(flush_code_incrementally, false,
             "flush code that we expect not to use again (incrementally)")
+DEFINE_bool(age_code, false,
+            "track un-executed functions to age code and flush only "
+            "old code")
 DEFINE_bool(incremental_marking, true, "use incremental marking")
 DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps")
 DEFINE_bool(trace_incremental_marking, false,
diff --git a/src/global-handles.cc b/src/global-handles.cc
index c09ba4b..0006f8e 100644
--- a/src/global-handles.cc
+++ b/src/global-handles.cc
@@ -69,6 +69,7 @@
     class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
     index_ = 0;
     independent_ = false;
+    partially_dependent_ = false;
     in_new_space_list_ = false;
     parameter_or_next_free_.next_free = NULL;
     callback_ = NULL;
@@ -89,6 +90,7 @@
     object_ = object;
     class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
     independent_ = false;
+    partially_dependent_ = false;
     state_  = NORMAL;
     parameter_or_next_free_.parameter = NULL;
     callback_ = NULL;
@@ -154,6 +156,15 @@
   }
   bool is_independent() const { return independent_; }
 
+  void MarkPartiallyDependent(GlobalHandles* global_handles) {
+    ASSERT(state_ != FREE);
+    if (global_handles->isolate()->heap()->InNewSpace(object_)) {
+      partially_dependent_ = true;
+    }
+  }
+  bool is_partially_dependent() const { return partially_dependent_; }
+  void clear_partially_dependent() { partially_dependent_ = false; }
+
   // In-new-space-list flag accessors.
   void set_in_new_space_list(bool v) { in_new_space_list_ = v; }
   bool is_in_new_space_list() const { return in_new_space_list_; }
@@ -260,6 +271,7 @@
   State state_ : 4;
 
   bool independent_ : 1;
+  bool partially_dependent_ : 1;
   bool in_new_space_list_ : 1;
 
   // Handle specific callback.
@@ -448,6 +460,11 @@
 }
 
 
+void GlobalHandles::MarkPartiallyDependent(Object** location) {
+  Node::FromLocation(location)->MarkPartiallyDependent(this);
+}
+
+
 bool GlobalHandles::IsIndependent(Object** location) {
   return Node::FromLocation(location)->is_independent();
 }
@@ -501,8 +518,9 @@
   for (int i = 0; i < new_space_nodes_.length(); ++i) {
     Node* node = new_space_nodes_[i];
     if (node->IsStrongRetainer() ||
-        (node->IsWeakRetainer() && !node->is_independent())) {
-      v->VisitPointer(node->location());
+        (node->IsWeakRetainer() && !node->is_independent() &&
+         !node->is_partially_dependent())) {
+      v->VisitPointer(node->location());
     }
   }
 }
@@ -513,8 +531,8 @@
   for (int i = 0; i < new_space_nodes_.length(); ++i) {
     Node* node = new_space_nodes_[i];
     ASSERT(node->is_in_new_space_list());
-    if (node->is_independent() && node->IsWeak() &&
-        f(isolate_->heap(), node->location())) {
+    if ((node->is_independent() || node->is_partially_dependent()) &&
+        node->IsWeak() && f(isolate_->heap(), node->location())) {
       node->MarkPending();
     }
   }
@@ -525,7 +543,8 @@
   for (int i = 0; i < new_space_nodes_.length(); ++i) {
     Node* node = new_space_nodes_[i];
     ASSERT(node->is_in_new_space_list());
-    if (node->is_independent() && node->IsWeakRetainer()) {
+    if ((node->is_independent() || node->is_partially_dependent()) &&
+        node->IsWeakRetainer()) {
       v->VisitPointer(node->location());
     }
   }
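
A condensed standalone sketch (plain structs, not the V8 node classes) of how this patch changes the scavenger's root set: weak new-space handles that are independent or partially dependent are no longer treated as strong roots and instead go through the weakness predicate.

#include <vector>

struct Node {
  bool strong;               // strong retainer
  bool weak;                 // weak retainer
  bool independent;
  bool partially_dependent;  // flag introduced by this patch
};

// Handles the scavenger must keep alive unconditionally.
std::vector<const Node*> StrongAndDependentRoots(
    const std::vector<Node>& nodes) {
  std::vector<const Node*> roots;
  for (const Node& n : nodes) {
    if (n.strong ||
        (n.weak && !n.independent && !n.partially_dependent)) {
      roots.push_back(&n);
    }
  }
  return roots;
}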
@@ -547,7 +566,10 @@
       // Skip dependent handles. Their weak callbacks might expect to be
       // called between two global garbage collection callbacks which
       // are not called for minor collections.
-      if (!node->is_independent()) continue;
+      if (!node->is_independent() && !node->is_partially_dependent()) {
+        continue;
+      }
+      node->clear_partially_dependent();
       if (node->PostGarbageCollectionProcessing(isolate_, this)) {
         if (initial_post_gc_processing_count != post_gc_processing_count_) {
           // Weak callback triggered another GC and another round of
@@ -563,6 +585,7 @@
     }
   } else {
     for (NodeIterator it(this); !it.done(); it.Advance()) {
+      it.node()->clear_partially_dependent();
       if (it.node()->PostGarbageCollectionProcessing(isolate_, this)) {
         if (initial_post_gc_processing_count != post_gc_processing_count_) {
           // See the comment above.
@@ -610,7 +633,7 @@
 
 void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
   for (NodeIterator it(this); !it.done(); it.Advance()) {
-    if (it.node()->has_wrapper_class_id() && it.node()->IsRetainer()) {
+    if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
       v->VisitEmbedderReference(it.node()->location(),
                                 it.node()->wrapper_class_id());
     }
diff --git a/src/global-handles.h b/src/global-handles.h
index 866317e..482baef 100644
--- a/src/global-handles.h
+++ b/src/global-handles.h
@@ -155,6 +155,9 @@
   // Clear the weakness of a global handle.
   void MarkIndependent(Object** location);
 
+  // Mark the reference to this object as partially dependent.
+  void MarkPartiallyDependent(Object** location);
+
   static bool IsIndependent(Object** location);
 
   // Tells whether global handle is near death.
@@ -195,16 +198,17 @@
   // Iterates over strong and dependent handles. See the note above.
   void IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v);
 
-  // Finds weak independent handles satisfying the callback predicate
-  // and marks them as pending. See the note above.
+  // Finds weak independent or partially dependent handles satisfying
+  // the callback predicate and marks them as pending. See the note above.
   void IdentifyNewSpaceWeakIndependentHandles(WeakSlotCallbackWithHeap f);
 
-  // Iterates over weak independent handles. See the note above.
+  // Iterates over weak independent or partially dependent handles.
+  // See the note above.
   void IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v);
 
   // Add an object group.
   // Should be only used in GC callback function before a collection.
-  // All groups are destroyed after a mark-compact collection.
+  // All groups are destroyed after a garbage collection.
   void AddObjectGroup(Object*** handles,
                       size_t length,
                       v8::RetainedObjectInfo* info);
diff --git a/src/handles.cc b/src/handles.cc
index 46399d6..a6192d8 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -593,6 +593,25 @@
 }
 
 
+Handle<Object> GetScriptNameOrSourceURL(Handle<Script> script) {
+  Isolate* isolate = script->GetIsolate();
+  Handle<String> name_or_source_url_key =
+      isolate->factory()->LookupAsciiSymbol("nameOrSourceURL");
+  Handle<JSValue> script_wrapper = GetScriptWrapper(script);
+  Handle<Object> property = GetProperty(script_wrapper,
+                                        name_or_source_url_key);
+  ASSERT(property->IsJSFunction());
+  Handle<JSFunction> method = Handle<JSFunction>::cast(property);
+  bool caught_exception;
+  Handle<Object> result = Execution::TryCall(method, script_wrapper, 0,
+                                             NULL, &caught_exception);
+  if (caught_exception) {
+    result = isolate->factory()->undefined_value();
+  }
+  return result;
+}
+
+
 static bool ContainsOnlyValidKeys(Handle<FixedArray> array) {
   int len = array->length();
   for (int i = 0; i < len; i++) {
diff --git a/src/handles.h b/src/handles.h
index a1d88c2..b80dbe5 100644
--- a/src/handles.h
+++ b/src/handles.h
@@ -95,6 +95,13 @@
 };
 
 
+// Convenience wrapper for constructing a Handle<T> from a raw pointer.
+template<class T>
+inline Handle<T> handle(T* t) {
+  return Handle<T>(t);
+}
+
+
 class DeferredHandles;
 class HandleScopeImplementer;
 
@@ -260,6 +267,7 @@
 // The safe version does not make heap allocations but may work much slower.
 int GetScriptLineNumberSafe(Handle<Script> script, int code_position);
 int GetScriptColumnNumber(Handle<Script> script, int code_position);
+Handle<Object> GetScriptNameOrSourceURL(Handle<Script> script);
 
 // Computes the enumerable keys from interceptors. Used for debug mirrors and
 // by GetKeysInFixedArrayFor below.
diff --git a/src/heap.cc b/src/heap.cc
index 1f7cf10..b85f1bc 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1333,6 +1333,12 @@
   scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_));
 
   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+
+  while (IterateObjectGroups(&scavenge_visitor)) {
+    new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+  }
+  isolate()->global_handles()->RemoveObjectGroups();
+
   isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
       &IsUnscavengedHeapObject);
   isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots(
@@ -1373,6 +1379,51 @@
 }
 
 
+// TODO(mstarzinger): Unify this method with
+// MarkCompactCollector::MarkObjectGroups().
+bool Heap::IterateObjectGroups(ObjectVisitor* scavenge_visitor) {
+  List<ObjectGroup*>* object_groups =
+    isolate()->global_handles()->object_groups();
+
+  int last = 0;
+  bool changed = false;
+  for (int i = 0; i < object_groups->length(); i++) {
+    ObjectGroup* entry = object_groups->at(i);
+    ASSERT(entry != NULL);
+
+    Object*** objects = entry->objects_;
+    bool group_marked = false;
+    for (size_t j = 0; j < entry->length_; j++) {
+      Object* object = *objects[j];
+      if (object->IsHeapObject()) {
+        if (!IsUnscavengedHeapObject(this, &object)) {
+          group_marked = true;
+          break;
+        }
+      }
+    }
+
+    if (!group_marked) {
+      (*object_groups)[last++] = entry;
+      continue;
+    }
+
+    for (size_t j = 0; j < entry->length_; ++j) {
+      Object* object = *objects[j];
+      if (object->IsHeapObject()) {
+        scavenge_visitor->VisitPointer(&object);
+        changed = true;
+      }
+    }
+
+    entry->Dispose();
+    object_groups->at(i) = NULL;
+  }
+  object_groups->Rewind(last);
+  return changed;
+}
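
A minimal standalone model of the fixed point computed above (simple assumed types; V8 walks real handle locations and uses IsUnscavengedHeapObject): a group with at least one reachable member makes all of its members reachable, groups with no reachable member are kept for a later pass, and the caller re-scavenges while a pass discovered anything new.

#include <cstddef>
#include <set>
#include <vector>

typedef int Object;
typedef std::vector<Object> Group;

// One pass; returns true if any member became newly reachable.
bool IterateObjectGroups(std::vector<Group>* groups,
                         std::set<Object>* reachable) {
  std::vector<Group> kept;
  bool changed = false;
  for (size_t i = 0; i < groups->size(); i++) {
    const Group& group = (*groups)[i];
    bool group_marked = false;
    for (size_t j = 0; j < group.size(); j++) {
      if (reachable->count(group[j])) { group_marked = true; break; }
    }
    if (!group_marked) { kept.push_back(group); continue; }
    for (size_t j = 0; j < group.size(); j++) {
      if (reachable->insert(group[j]).second) changed = true;  // newly marked
    }
    // The group is dropped (disposed) once it has been processed.
  }
  groups->swap(kept);
  return changed;
}

// Usage: while (IterateObjectGroups(&groups, &reachable)) { /* re-scavenge */ }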
+
+
 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
                                                                 Object** p) {
   MapWord first_word = HeapObject::cast(*p)->map_word();
@@ -2844,6 +2895,15 @@
   }
   set_natives_source_cache(FixedArray::cast(obj));
 
+  // Allocate object to hold object observation state.
+  { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+    if (!maybe_obj->ToObject(&obj)) return false;
+  }
+  { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj));
+    if (!maybe_obj->ToObject(&obj)) return false;
+  }
+  set_observation_state(JSObject::cast(obj));
+
   // Handling of script id generation is in FACTORY->NewScript.
   set_last_script_id(undefined_value());
 
@@ -2863,6 +2923,34 @@
 }
 
 
+bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
+  RootListIndex writable_roots[] = {
+    kStoreBufferTopRootIndex,
+    kStackLimitRootIndex,
+    kInstanceofCacheFunctionRootIndex,
+    kInstanceofCacheMapRootIndex,
+    kInstanceofCacheAnswerRootIndex,
+    kCodeStubsRootIndex,
+    kNonMonomorphicCacheRootIndex,
+    kPolymorphicCodeCacheRootIndex,
+    kLastScriptIdRootIndex,
+    kEmptyScriptRootIndex,
+    kRealStackLimitRootIndex,
+    kArgumentsAdaptorDeoptPCOffsetRootIndex,
+    kConstructStubDeoptPCOffsetRootIndex,
+    kGetterStubDeoptPCOffsetRootIndex,
+    kSetterStubDeoptPCOffsetRootIndex,
+    kSymbolTableRootIndex,
+  };
+
+  for (unsigned int i = 0; i < ARRAY_SIZE(writable_roots); i++) {
+    if (root_index == writable_roots[i]) return true;
+  }
+  return false;
+}
+
+
 Object* RegExpResultsCache::Lookup(Heap* heap,
                                    String* key_string,
                                    Object* key_pattern,
diff --git a/src/heap.h b/src/heap.h
index da78b18..ae14302 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -154,7 +154,8 @@
   V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
   V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)           \
   V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)                 \
-  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)
+  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)                 \
+  V(JSObject, observation_state, ObservationState)
 
 #define ROOT_LIST(V)                                  \
   STRONG_ROOT_LIST(V)                                 \
@@ -1451,6 +1452,10 @@
   STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
   STATIC_CHECK(kempty_symbolRootIndex == Internals::kEmptySymbolRootIndex);
 
+  // Generated code can embed direct references to non-writable roots if
+  // they are in new space.
+  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
+
   MUST_USE_RESULT MaybeObject* NumberToString(
       Object* number, bool check_number_string_cache = true);
   MUST_USE_RESULT MaybeObject* Uint32ToString(
@@ -1902,6 +1907,7 @@
   bool PerformGarbageCollection(GarbageCollector collector,
                                 GCTracer* tracer);
 
+  bool IterateObjectGroups(ObjectVisitor* scavenge_visitor);
 
   inline void UpdateOldSpaceLimits();
 
diff --git a/src/ia32/assembler-ia32-inl.h b/src/ia32/assembler-ia32-inl.h
index 7fdf50c..114f878 100644
--- a/src/ia32/assembler-ia32-inl.h
+++ b/src/ia32/assembler-ia32-inl.h
@@ -46,12 +46,21 @@
 namespace internal {
 
 
+static const byte kCallOpcode = 0xE8;
+
+
 // The modes possibly affected by apply must be in kApplyMask.
 void RelocInfo::apply(intptr_t delta) {
   if (rmode_ == RUNTIME_ENTRY || IsCodeTarget(rmode_)) {
     int32_t* p = reinterpret_cast<int32_t*>(pc_);
     *p -= delta;  // Relocate entry.
     CPU::FlushICache(p, sizeof(uint32_t));
+  } else if (rmode_ == CODE_AGE_SEQUENCE) {
+    if (*pc_ == kCallOpcode) {
+      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
+      *p -= delta;  // Relocate entry.
+      CPU::FlushICache(p, sizeof(uint32_t));
+    }
   } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
     // Special handling of js_return when a break point is set (call
     // instruction has been inserted).
@@ -169,6 +178,21 @@
 }
 
 
+Code* RelocInfo::code_age_stub() {
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  ASSERT(*pc_ == kCallOpcode);
+  return Code::GetCodeFromTargetAddress(
+      Assembler::target_address_at(pc_ + 1));
+}
+
+
+void RelocInfo::set_code_age_stub(Code* stub) {
+  ASSERT(*pc_ == kCallOpcode);
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
+}
+
+
 Address RelocInfo::call_address() {
   ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
          (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
@@ -206,7 +230,7 @@
 
 
 bool RelocInfo::IsPatchedReturnSequence() {
-  return *pc_ == 0xE8;
+  return *pc_ == kCallOpcode;
 }
 
 
@@ -227,7 +251,9 @@
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     visitor->VisitExternalReference(this);
     CPU::FlushICache(pc_, sizeof(Address));
-#ifdef ENABLE_DEBUGGER_SUPPORT
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    visitor->VisitCodeAgeSequence(this);
+#ifdef ENABLE_DEBUGGER_SUPPORT
   // TODO(isolates): Get a cached isolate below.
   } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
@@ -255,6 +281,8 @@
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     StaticVisitor::VisitExternalReference(this);
     CPU::FlushICache(pc_, sizeof(Address));
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    StaticVisitor::VisitCodeAgeSequence(heap, this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index 641c0ea..06fc411 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -169,7 +169,7 @@
 const int RelocInfo::kApplyMask =
   RelocInfo::kCodeTargetMask | 1 << RelocInfo::RUNTIME_ENTRY |
     1 << RelocInfo::JS_RETURN | 1 << RelocInfo::INTERNAL_REFERENCE |
-    1 << RelocInfo::DEBUG_BREAK_SLOT;
+    1 << RelocInfo::DEBUG_BREAK_SLOT | 1 << RelocInfo::CODE_AGE_SEQUENCE;
 
 
 bool RelocInfo::IsCodedSpecially() {
@@ -314,8 +314,7 @@
 
 Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
     : AssemblerBase(arg_isolate),
-      positions_recorder_(this),
-      emit_debug_code_(FLAG_debug_code) {
+      positions_recorder_(this) {
   if (buffer == NULL) {
     // Do our own buffer management.
     if (buffer_size <= kMinimalBufferSize) {
diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h
index 6e079de..9fb7baa 100644
--- a/src/ia32/assembler-ia32.h
+++ b/src/ia32/assembler-ia32.h
@@ -584,14 +584,6 @@
   Assembler(Isolate* isolate, void* buffer, int buffer_size);
   ~Assembler();
 
-  // Overrides the default provided by FLAG_debug_code.
-  void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
-
-  // Avoids using instructions that vary in size in unpredictable ways between
-  // the snapshot and the running VM.  This is needed by the full compiler so
-  // that it can recompile code with debug support and fix the PC.
-  void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
-
   // GetCode emits any pending (non-emitted) code and fills the descriptor
   // desc. GetCode() is idempotent; it returns the same result if no other
   // Assembler functions are invoked in between GetCode() calls.
@@ -1125,9 +1117,6 @@
   void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
 
  protected:
-  bool emit_debug_code() const { return emit_debug_code_; }
-  bool predictable_code_size() const { return predictable_code_size_ ; }
-
   void movsd(XMMRegister dst, const Operand& src);
   void movsd(const Operand& dst, XMMRegister src);
 
@@ -1200,10 +1189,6 @@
   RelocInfoWriter reloc_info_writer;
 
   PositionsRecorder positions_recorder_;
-
-  bool emit_debug_code_;
-  bool predictable_code_size_;
-
   friend class PositionsRecorder;
 };
 
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index 9bc15e9..01785bb 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -538,6 +538,42 @@
 }
 
 
+static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
+  // For now, we are relying on the fact that make_code_young doesn't do any
+  // garbage collection which allows us to save/restore the registers without
+  // worrying about which of them contain pointers. We also don't build an
+  // internal frame to make the code faster, since we shouldn't have to do stack
+  // crawls in MakeCodeYoung. This seems a bit fragile.
+
+  // Re-execute the code that was patched back to the young age when
+  // the stub returns.
+  __ sub(Operand(esp, 0), Immediate(5));
+  __ pushad();
+  __ mov(eax, Operand(esp, 8 * kPointerSize));
+  {
+    FrameScope scope(masm, StackFrame::MANUAL);
+    __ PrepareCallCFunction(1, ebx);
+    __ mov(Operand(esp, 0), eax);
+    __ CallCFunction(
+        ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+  }
+  __ popad();
+  __ ret(0);
+}
+
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
+void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}                                                            \
+void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}
+CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
+
+
 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                              Deoptimizer::BailoutType type) {
   {
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 1d23c7e..7ea71e4 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -5723,7 +5723,7 @@
   __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
   __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
   __ and_(ecx, edi);
-  STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
   STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
   __ test(ecx, Immediate(kStringEncodingMask));
   __ j(zero, &non_ascii);
@@ -5751,9 +5751,9 @@
   __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
   __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
   __ xor_(edi, ecx);
-  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
-  __ and_(edi, kAsciiStringTag | kAsciiDataHintTag);
-  __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
+  STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
+  __ and_(edi, kOneByteStringTag | kAsciiDataHintTag);
+  __ cmp(edi, kOneByteStringTag | kAsciiDataHintTag);
   __ j(equal, &ascii_data);
   // Allocate a two byte cons string.
   __ AllocateTwoByteConsString(ecx, edi, no_reg, &call_runtime);
@@ -6324,7 +6324,7 @@
     // string's encoding is wrong because we always have to recheck encoding of
     // the newly created string's parent anyways due to externalized strings.
     Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
     STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
     __ test(ebx, Immediate(kStringEncodingMask));
     __ j(zero, &two_byte_slice, Label::kNear);
@@ -6371,7 +6371,7 @@
   __ push(edx);
   __ push(edi);
   __ SmiUntag(ecx);
-  STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
   __ test_b(ebx, kStringEncodingMask);
   __ j(zero, &two_byte_sequential);
 
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index eb68687..4c79519 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -732,7 +732,7 @@
   // Dispatch on the encoding: ASCII or two-byte.
   Label ascii;
   __ bind(&seq_string);
-  STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
   STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
   __ test(result, Immediate(kStringEncodingMask));
   __ j(not_zero, &ascii, Label::kNear);
@@ -757,6 +757,103 @@
 
 #undef __
 
+static const int kNoCodeAgeSequenceLength = 5;
+
+static byte* GetNoCodeAgeSequence(uint32_t* length) {
+  static bool initialized = false;
+  static byte sequence[kNoCodeAgeSequenceLength];
+  *length = kNoCodeAgeSequenceLength;
+  if (!initialized) {
+    // The sequence of instructions that is patched out for aging code is the
+    // following boilerplate stack-building prologue that is found both in
+    // FUNCTION and OPTIMIZED_FUNCTION code:
+    CodePatcher patcher(sequence, kNoCodeAgeSequenceLength);
+    patcher.masm()->push(ebp);
+    patcher.masm()->mov(ebp, esp);
+    patcher.masm()->push(esi);
+    patcher.masm()->push(edi);
+    initialized = true;
+  }
+  return sequence;
+}
+
+
+byte* Code::FindPlatformCodeAgeSequence() {
+  byte* start = instruction_start();
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (!memcmp(start, young_sequence, young_length) ||
+      *start == kCallOpcode) {
+    return start;
+  } else {
+    if (kind() == FUNCTION) {
+      byte* start_after_strict =
+          start + kSizeOfFullCodegenStrictModePrologue;
+      ASSERT(!memcmp(start_after_strict, young_sequence, young_length) ||
+             start[kSizeOfFullCodegenStrictModePrologue] == kCallOpcode);
+      return start_after_strict;
+    } else {
+      ASSERT(kind() == OPTIMIZED_FUNCTION);
+      start = instruction_start() + kSizeOfOptimizedStrictModePrologue;
+      if (!memcmp(start, young_sequence, young_length) ||
+          *start == kCallOpcode) {
+        return start;
+      }
+      start = instruction_start() + kSizeOfOptimizedAlignStackPrologue;
+      if (!memcmp(start, young_sequence, young_length) ||
+          *start == kCallOpcode) {
+        return start;
+      }
+      start = instruction_start() + kSizeOfOptimizedAlignStackPrologue +
+          kSizeOfOptimizedStrictModePrologue;
+      ASSERT(!memcmp(start, young_sequence, young_length) ||
+             *start == kCallOpcode);
+      return start;
+    }
+  }
+}
+
+
+bool Code::IsYoungSequence(byte* sequence) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  bool result = (!memcmp(sequence, young_sequence, young_length));
+  ASSERT(result || *sequence == kCallOpcode);
+  return result;
+}
+
+
+void Code::GetCodeAgeAndParity(byte* sequence, Age* age,
+                               MarkingParity* parity) {
+  if (IsYoungSequence(sequence)) {
+    *age = kNoAge;
+    *parity = NO_MARKING_PARITY;
+  } else {
+    sequence++;  // Skip the kCallOpcode byte
+    Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
+        Assembler::kCallTargetAddressOffset;
+    Code* stub = GetCodeFromTargetAddress(target_address);
+    GetCodeAgeAndParity(stub, age, parity);
+  }
+}
+
+
+void Code::PatchPlatformCodeAge(byte* sequence,
+                                Code::Age age,
+                                MarkingParity parity) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (age == kNoAge) {
+    memcpy(sequence, young_sequence, young_length);
+    CPU::FlushICache(sequence, young_length);
+  } else {
+    Code* stub = GetCodeAgeStub(age, parity);
+    CodePatcher patcher(sequence, young_length);
+    patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE);
+  }
+}
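
For intuition only, a standalone sketch of the byte-level convention the code above relies on: a young function begins with the usual 5-byte prologue (push ebp; mov ebp, esp; push esi; push edi), while an aged function begins with a 5-byte call (0xE8 plus a 32-bit relative displacement) to an age-specific stub, so detection is a memcmp against the young sequence or a check of the first opcode byte. The young_sequence argument stands in for what GetNoCodeAgeSequence() produces.

#include <cstdint>
#include <cstring>

const uint8_t kCallOpcode = 0xE8;
const size_t kNoCodeAgeSequenceLength = 5;

bool IsYoungSequence(const uint8_t* code, const uint8_t* young_sequence) {
  return std::memcmp(code, young_sequence, kNoCodeAgeSequenceLength) == 0;
}

bool IsAgedSequence(const uint8_t* code) {
  // Aging overwrote the prologue with "call <MakeCodeYoungAgain stub>".
  return code[0] == kCallOpcode;
}

// How the stub (and hence the age) is recovered from an aged prologue:
// x86 call targets are relative to the end of the call instruction.
const uint8_t* AgedCallTarget(const uint8_t* code) {
  int32_t rel32;
  std::memcpy(&rel32, code + 1, sizeof(rel32));
  return code + kNoCodeAgeSequenceLength + rel32;
}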
+
+
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_IA32
diff --git a/src/ia32/codegen-ia32.h b/src/ia32/codegen-ia32.h
index f4ab0b5..a783e9a 100644
--- a/src/ia32/codegen-ia32.h
+++ b/src/ia32/codegen-ia32.h
@@ -37,6 +37,10 @@
 // Forward declarations
 class CompilationInfo;
 
+static const int kSizeOfFullCodegenStrictModePrologue = 34;
+static const int kSizeOfOptimizedStrictModePrologue = 12;
+static const int kSizeOfOptimizedAlignStackPrologue = 44;
+
 // -------------------------------------------------------------------------
 // CodeGenerator
 
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 406537d..159c2ca 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -138,6 +138,8 @@
   // function calls.
   if (!info->is_classic_mode() || info->is_native()) {
     Label ok;
+    Label start;
+    __ bind(&start);
     __ test(ecx, ecx);
     __ j(zero, &ok, Label::kNear);
     // +1 for return address.
@@ -149,6 +151,8 @@
     __ mov(Operand(esp, receiver_offset),
            Immediate(isolate()->factory()->undefined_value()));
     __ bind(&ok);
+    ASSERT(!FLAG_age_code ||
+           (kSizeOfFullCodegenStrictModePrologue == ok.pos() - start.pos()));
   }
 
   // Open a frame scope to indicate that there is a frame on the stack.  The
@@ -3573,7 +3577,7 @@
   __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
   __ and_(scratch, Immediate(
       kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
-  __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
+  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
   __ j(not_equal, &bailout);
   __ add(string_length,
          FieldOperand(string, SeqAsciiString::kLengthOffset));
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 8bd4575..676e73e 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -140,6 +140,8 @@
   // receiver object). ecx is zero for method calls and non-zero for
   // function calls.
   if (!info_->is_classic_mode() || info_->is_native()) {
+    Label begin;
+    __ bind(&begin);
     Label ok;
     __ test(ecx, Operand(ecx));
     __ j(zero, &ok, Label::kNear);
@@ -148,10 +150,14 @@
     __ mov(Operand(esp, receiver_offset),
            Immediate(isolate()->factory()->undefined_value()));
     __ bind(&ok);
+    ASSERT(!FLAG_age_code ||
+           (kSizeOfOptimizedStrictModePrologue == ok.pos() - begin.pos()));
   }
 
 
   if (dynamic_frame_alignment_) {
+    Label begin;
+    __ bind(&begin);
     // Move state of dynamic frame alignment into edx.
     __ mov(edx, Immediate(kNoAlignmentPadding));
 
@@ -174,6 +180,9 @@
     __ j(not_zero, &align_loop, Label::kNear);
     __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue));
     __ bind(&do_not_pad);
+    ASSERT(!FLAG_age_code ||
+           (kSizeOfOptimizedAlignStackPrologue ==
+            do_not_pad.pos() - begin.pos()));
   }
 
   __ push(ebp);  // Caller's frame pointer.
@@ -2766,15 +2775,12 @@
 
 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
   ElementsKind elements_kind = instr->elements_kind();
-  LOperand* key = instr->key();
-  if (!key->IsConstantOperand() &&
-      ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(),
-                                  elements_kind)) {
-    __ SmiUntag(ToRegister(key));
+  if (ExternalArrayOpRequiresTemp<HLoadKeyed>(instr->hydrogen())) {
+    __ SmiUntag(ToRegister(instr->key()));
   }
   Operand operand(BuildFastArrayOperand(
       instr->elements(),
-      key,
+      instr->key(),
       instr->hydrogen()->key()->representation(),
       elements_kind,
       0,
@@ -3844,15 +3850,12 @@
 
 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
   ElementsKind elements_kind = instr->elements_kind();
-  LOperand* key = instr->key();
-  if (!key->IsConstantOperand() &&
-      ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(),
-                                  elements_kind)) {
-    __ SmiUntag(ToRegister(key));
+  if (ExternalArrayOpRequiresTemp<HStoreKeyed>(instr->hydrogen())) {
+    __ SmiUntag(ToRegister(instr->key()));
   }
   Operand operand(BuildFastArrayOperand(
       instr->elements(),
-      key,
+      instr->key(),
       instr->hydrogen()->key()->representation(),
       elements_kind,
       0,
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index 4207410..ba9c97e 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -1933,17 +1933,14 @@
   ASSERT(instr->key()->representation().IsInteger32() ||
          instr->key()->representation().IsTagged());
   ElementsKind elements_kind = instr->elements_kind();
-  bool clobbers_key = ExternalArrayOpRequiresTemp(
-      instr->key()->representation(), elements_kind);
-  LOperand* key = clobbers_key
+  LOperand* elements = UseRegisterAtStart(instr->elements());
+  LOperand* key = instr->is_external() &&
+    ExternalArrayOpRequiresTemp<HLoadKeyed>(instr)
       ? UseTempRegister(instr->key())
       : UseRegisterOrConstantAtStart(instr->key());
-  LLoadKeyed* result = NULL;
 
-  if (!instr->is_external()) {
-    LOperand* obj = UseRegisterAtStart(instr->elements());
-    result = new(zone()) LLoadKeyed(obj, key);
-  } else {
+#ifdef DEBUG
+  if (instr->is_external()) {
     ASSERT(
         (instr->representation().IsInteger32() &&
          (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
@@ -1951,10 +1948,10 @@
         (instr->representation().IsDouble() &&
          ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
           (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
-    LOperand* external_pointer = UseRegister(instr->elements());
-    result = new(zone()) LLoadKeyed(external_pointer, key);
   }
+#endif
 
+  LLoadKeyed* result = new(zone()) LLoadKeyed(elements, key);
   DefineAsRegister(result);
   bool can_deoptimize = instr->RequiresHoleCheck() ||
       (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS);
@@ -1976,34 +1973,27 @@
 
 
 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
-  LStoreKeyed* result = NULL;
+  ElementsKind elements_kind = instr->elements_kind();
+  LOperand* elements;
+  LOperand* val;
+  LOperand* key;
 
   if (!instr->is_external()) {
     ASSERT(instr->elements()->representation().IsTagged());
     ASSERT(instr->key()->representation().IsInteger32() ||
            instr->key()->representation().IsTagged());
 
-    if (instr->value()->representation().IsDouble()) {
-      LOperand* object = UseRegisterAtStart(instr->elements());
-      LOperand* val = UseTempRegister(instr->value());
-      LOperand* key = UseRegisterOrConstantAtStart(instr->key());
-
-      result = new(zone()) LStoreKeyed(object, key, val);
+    if (instr->NeedsWriteBarrier() &&
+        !IsFastDoubleElementsKind(elements_kind)) {
+      val = UseTempRegister(instr->value());
+      key = UseTempRegister(instr->key());
+      elements = UseRegister(instr->elements());
     } else {
-      ASSERT(instr->value()->representation().IsTagged());
-      bool needs_write_barrier = instr->NeedsWriteBarrier();
-
-      LOperand* obj = UseRegister(instr->elements());
-      LOperand* val = needs_write_barrier
-          ? UseTempRegister(instr->value())
-          : UseRegisterAtStart(instr->value());
-      LOperand* key = needs_write_barrier
-          ? UseTempRegister(instr->key())
-          : UseRegisterOrConstantAtStart(instr->key());
-      result = new(zone()) LStoreKeyed(obj, key, val);
+      val = UseRegisterAtStart(instr->value());
+      key = UseRegisterOrConstantAtStart(instr->key());
+      elements = UseRegisterAtStart(instr->elements());
     }
   } else {
-    ElementsKind elements_kind = instr->elements_kind();
     ASSERT(
         (instr->value()->representation().IsInteger32() &&
          (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
@@ -2013,26 +2003,25 @@
           (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
     ASSERT(instr->elements()->representation().IsExternal());
 
-    LOperand* external_pointer = UseRegister(instr->elements());
+    if (ExternalArrayOpRequiresTemp<HStoreKeyed>(instr)) {
+      key = UseTempRegister(instr->key());
+      elements = UseRegister(instr->elements());
+    } else {
+      key = UseRegisterOrConstantAtStart(instr->key());
+      elements = UseRegisterAtStart(instr->elements());
+    }
+
     // Determine if we need a byte register in this case for the value.
     bool val_is_fixed_register =
         elements_kind == EXTERNAL_BYTE_ELEMENTS ||
         elements_kind == EXTERNAL_UNSIGNED_BYTE_ELEMENTS ||
         elements_kind == EXTERNAL_PIXEL_ELEMENTS;
-
-    LOperand* val = val_is_fixed_register
+    val = val_is_fixed_register
         ? UseFixed(instr->value(), eax)
         : UseRegister(instr->value());
-    bool clobbers_key = ExternalArrayOpRequiresTemp(
-        instr->key()->representation(), elements_kind);
-    LOperand* key = clobbers_key
-        ? UseTempRegister(instr->key())
-        : UseRegisterOrConstantAtStart(instr->key());
-    result = new(zone()) LStoreKeyed(external_pointer,
-                                     key,
-                                     val);
   }
 
+  LStoreKeyed* result = new(zone()) LStoreKeyed(elements, key, val);
   ASSERT(result != NULL);
   return result;
 }
diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h
index 88b1ef0..18741d1 100644
--- a/src/ia32/lithium-ia32.h
+++ b/src/ia32/lithium-ia32.h
@@ -1408,13 +1408,16 @@
 };
 
 
-inline static bool ExternalArrayOpRequiresTemp(
-    Representation key_representation,
-    ElementsKind elements_kind) {
+template <class T>
+inline static bool ExternalArrayOpRequiresTemp(T* value) {
+  CHECK(value->IsLoadKeyed() || value->IsStoreKeyed());
+  Representation key_representation = value->key()->representation();
+  ElementsKind elements_kind = value->elements_kind();
+
   // Operations that require the key to be divided by two to be converted into
   // an index cannot fold the scale operation into a load and need an extra
   // temp register to do the work.
-  return key_representation.IsTagged() &&
+  return !value->IsConstant() && key_representation.IsTagged() &&
       (elements_kind == EXTERNAL_BYTE_ELEMENTS ||
        elements_kind == EXTERNAL_UNSIGNED_BYTE_ELEMENTS ||
        elements_kind == EXTERNAL_PIXEL_ELEMENTS);
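
The reasoning behind the predicate, as a standalone arithmetic sketch (the tagging scheme is ia32's; the function below is illustrative, not V8 code): a tagged smi key is the index shifted left by one, and for 2-, 4- and 8-byte external elements that factor of two folds into the addressing-mode scale (x1, x2, x4), but byte-sized and pixel elements would need a "x1/2" scale that does not exist, so the key must first be untagged in a temp register.

#include <cassert>
#include <cstdint>

int32_t SmiTag(int32_t value) { return value << 1; }  // ia32 smi: value << 1
int32_t SmiUntag(int32_t smi) { return smi >> 1; }

// Byte offset of an element given its tagged key and the element size.
int32_t OffsetFromTaggedKey(int32_t tagged_key, int element_size) {
  switch (element_size) {
    case 2: return tagged_key * 1;   // the scale folds the smi tag
    case 4: return tagged_key * 2;
    case 8: return tagged_key * 4;
    default: return SmiUntag(tagged_key) * element_size;  // needs a temp
  }
}

int main() {
  assert(OffsetFromTaggedKey(SmiTag(7), 4) == 7 * 4);
  assert(OffsetFromTaggedKey(SmiTag(7), 1) == 7 * 1);
  return 0;
}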
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 0d0bf03..26d0f92 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -2615,7 +2615,7 @@
   }
   and_(scratch,
        kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
-  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
+  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
   j(not_equal, failure);
 }
 
@@ -2906,7 +2906,7 @@
 
   bind(&not_external);
   // Sequential string, either ASCII or UC16.
-  ASSERT(kAsciiStringTag == 0x04);
+  ASSERT(kOneByteStringTag == 0x04);
   and_(length, Immediate(kStringEncodingMask));
   xor_(length, Immediate(kStringEncodingMask));
   add(length, Immediate(0x04));
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index f5e2d05..11efb72 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -3421,6 +3421,7 @@
 #endif
 
   // Load the initial map and verify that it is in fact a map.
+  // edi: constructor
   __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
   // Will both indicate a NULL and a Smi.
   __ JumpIfSmi(ebx, &generic_stub_call);
@@ -3429,19 +3430,23 @@
 
 #ifdef DEBUG
   // Cannot construct functions this way.
-  // edi: constructor
   // ebx: initial map
   __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
-  __ Assert(not_equal, "Function constructed by construct stub.");
+  __ Check(not_equal, "Function constructed by construct stub.");
 #endif
 
   // Now allocate the JSObject on the heap by moving the new space allocation
   // top forward.
-  // edi: constructor
   // ebx: initial map
+  ASSERT(function->has_initial_map());
+  int instance_size = function->initial_map()->instance_size();
+#ifdef DEBUG
   __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
   __ shl(ecx, kPointerSizeLog2);
-  __ AllocateInNewSpace(ecx, edx, ecx, no_reg,
+  __ cmp(ecx, Immediate(instance_size));
+  __ Check(equal, "Instance size of initial map changed.");
+#endif
+  __ AllocateInNewSpace(instance_size, edx, ecx, no_reg,
                         &generic_stub_call, NO_ALLOCATION_FLAGS);
 
   // Allocated the JSObject, now initialize the fields and add the heap tag.
@@ -3501,7 +3506,6 @@
   }
 
   // Fill the unused in-object property fields with undefined.
-  ASSERT(function->has_initial_map());
   for (int i = shared->this_property_assignments_count();
        i < function->initial_map()->inobject_properties();
        i++) {
diff --git a/src/ic.cc b/src/ic.cc
index dd0bb10..5cc213f 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -1377,6 +1377,11 @@
     return *value;
   }
 
+  // Observed objects are always modified through the runtime.
+  if (FLAG_harmony_observation && receiver->map()->is_observed()) {
+    return receiver->SetProperty(*name, *value, NONE, strict_mode);
+  }
+
   // Use specialized code for setting the length of arrays with fast
   // properties.  Slow properties might indicate redefinition of the
   // length property.
@@ -1902,7 +1907,8 @@
     }
 
     // Update inline cache and stub cache.
-    if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) {
+    if (FLAG_use_ic && !receiver->IsJSGlobalProxy() &&
+        !(FLAG_harmony_observation && receiver->map()->is_observed())) {
       LookupResult lookup(isolate());
       if (LookupForWrite(receiver, name, &lookup)) {
         UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
@@ -1914,8 +1920,10 @@
   }
 
   // Do not use ICs for objects that require access checks (including
-  // the global object).
-  bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded();
+  // the global object), or are observed.
+  bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded() &&
+      !(FLAG_harmony_observation && object->IsJSObject() &&
+          JSObject::cast(*object)->map()->is_observed());
   ASSERT(!(use_ic && object->IsJSGlobalProxy()));
 
   if (use_ic) {
diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc
index e51d6c1..b34d6d9 100644
--- a/src/incremental-marking.cc
+++ b/src/incremental-marking.cc
@@ -175,15 +175,38 @@
 }
 
 
+static void MarkObjectGreyDoNotEnqueue(Object* obj) {
+  if (obj->IsHeapObject()) {
+    HeapObject* heap_obj = HeapObject::cast(obj);
+    MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
+    if (Marking::IsBlack(mark_bit)) {
+      MemoryChunk::IncrementLiveBytesFromGC(heap_obj->address(),
+                                            -heap_obj->Size());
+    }
+    Marking::AnyToGrey(mark_bit);
+  }
+}
+
+
 class IncrementalMarkingMarkingVisitor
     : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
  public:
   static void Initialize() {
     StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
 
+    table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
     table_.Register(kVisitJSRegExp, &VisitJSRegExp);
   }
 
+  static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
+    Context* context = Context::cast(object);
+
+    // We will mark the cache black with a separate pass
+    // when we finish marking.
+    MarkObjectGreyDoNotEnqueue(context->normalized_map_cache());
+    VisitNativeContext(map, context);
+  }
+
   static void VisitJSWeakMap(Map* map, HeapObject* object) {
     Heap* heap = map->GetHeap();
     VisitPointers(heap,
@@ -494,19 +517,6 @@
 }
 
 
-static void MarkObjectGreyDoNotEnqueue(Object* obj) {
-  if (obj->IsHeapObject()) {
-    HeapObject* heap_obj = HeapObject::cast(obj);
-    MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
-    if (Marking::IsBlack(mark_bit)) {
-      MemoryChunk::IncrementLiveBytesFromGC(heap_obj->address(),
-                                            -heap_obj->Size());
-    }
-    Marking::AnyToGrey(mark_bit);
-  }
-}
-
-
 void IncrementalMarking::StartMarking(CompactionFlag flag) {
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Start marking\n");
@@ -619,6 +629,54 @@
 }
 
 
+void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
+  MarkBit map_mark_bit = Marking::MarkBitFrom(map);
+  if (Marking::IsWhite(map_mark_bit)) {
+    WhiteToGreyAndPush(map, map_mark_bit);
+  }
+
+  IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
+
+  MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
+  SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
+              (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
+  Marking::MarkBlack(obj_mark_bit);
+  MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
+}
+
+
+void IncrementalMarking::ProcessMarkingDeque(intptr_t bytes_to_process) {
+  Map* filler_map = heap_->one_pointer_filler_map();
+  while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
+    HeapObject* obj = marking_deque_.Pop();
+
+    // Explicitly skip one word fillers. Incremental markbit patterns are
+    // correct only for objects that occupy at least two words.
+    Map* map = obj->map();
+    if (map == filler_map) continue;
+
+    int size = obj->SizeFromMap(map);
+    bytes_to_process -= size;
+    VisitObject(map, obj, size);
+  }
+}
+
+
+void IncrementalMarking::ProcessMarkingDeque() {
+  Map* filler_map = heap_->one_pointer_filler_map();
+  while (!marking_deque_.IsEmpty()) {
+    HeapObject* obj = marking_deque_.Pop();
+
+    // Explicitly skip one word fillers. Incremental markbit patterns are
+    // correct only for objects that occupy at least two words.
+    Map* map = obj->map();
+    if (map == filler_map) continue;
+
+    VisitObject(map, obj, obj->SizeFromMap(map));
+  }
+}
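
A standalone sketch of the budgeted-draining pattern the two overloads above share (std::deque and a size callback stand in for the marking deque and VisitObject; the filler-map skip is omitted):

#include <cstdint>
#include <deque>
#include <functional>

// Drain work items until the deque is empty or the byte budget is spent; the
// unbudgeted overload in the patch is this loop without the budget check.
void ProcessMarkingDeque(std::deque<int>* deque,
                         intptr_t bytes_to_process,
                         const std::function<int(int)>& visit_and_get_size) {
  while (!deque->empty() && bytes_to_process > 0) {
    int obj = deque->front();
    deque->pop_front();
    bytes_to_process -= visit_and_get_size(obj);
  }
}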
+
+
 void IncrementalMarking::Hurry() {
   if (state() == MARKING) {
     double start = 0.0;
@@ -628,32 +686,7 @@
     }
     // TODO(gc) hurry can mark objects it encounters black as mutator
     // was stopped.
-    Map* filler_map = heap_->one_pointer_filler_map();
-    Map* native_context_map = heap_->native_context_map();
-    while (!marking_deque_.IsEmpty()) {
-      HeapObject* obj = marking_deque_.Pop();
-
-      // Explicitly skip one word fillers. Incremental markbit patterns are
-      // correct only for objects that occupy at least two words.
-      Map* map = obj->map();
-      if (map == filler_map) {
-        continue;
-      } else if (map == native_context_map) {
-        // Native contexts have weak fields.
-        IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
-      } else {
-        MarkBit map_mark_bit = Marking::MarkBitFrom(map);
-        if (Marking::IsWhite(map_mark_bit)) {
-          WhiteToGreyAndPush(map, map_mark_bit);
-        }
-        IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
-      }
-
-      MarkBit mark_bit = Marking::MarkBitFrom(obj);
-      ASSERT(!Marking::IsBlack(mark_bit));
-      Marking::MarkBlack(mark_bit);
-      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-    }
+    ProcessMarkingDeque();
     state_ = COMPLETE;
     if (FLAG_trace_incremental_marking) {
       double end = OS::TimeCurrentMillis();
@@ -792,43 +825,7 @@
       StartMarking(PREVENT_COMPACTION);
     }
   } else if (state_ == MARKING) {
-    Map* filler_map = heap_->one_pointer_filler_map();
-    Map* native_context_map = heap_->native_context_map();
-    while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
-      HeapObject* obj = marking_deque_.Pop();
-
-      // Explicitly skip one word fillers. Incremental markbit patterns are
-      // correct only for objects that occupy at least two words.
-      Map* map = obj->map();
-      if (map == filler_map) continue;
-
-      int size = obj->SizeFromMap(map);
-      bytes_to_process -= size;
-      MarkBit map_mark_bit = Marking::MarkBitFrom(map);
-      if (Marking::IsWhite(map_mark_bit)) {
-        WhiteToGreyAndPush(map, map_mark_bit);
-      }
-
-      // TODO(gc) switch to static visitor instead of normal visitor.
-      if (map == native_context_map) {
-        // Native contexts have weak fields.
-        Context* ctx = Context::cast(obj);
-
-        // We will mark cache black with a separate pass
-        // when we finish marking.
-        MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
-
-        IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
-      } else {
-        IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
-      }
-
-      MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
-      SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
-                  (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
-      Marking::MarkBlack(obj_mark_bit);
-      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
-    }
+    ProcessMarkingDeque(bytes_to_process);
     if (marking_deque_.IsEmpty()) MarkingComplete(action);
   }
 
diff --git a/src/incremental-marking.h b/src/incremental-marking.h
index 1a86fcd..6ae0f59 100644
--- a/src/incremental-marking.h
+++ b/src/incremental-marking.h
@@ -259,6 +259,12 @@
 
   void EnsureMarkingDequeIsCommitted();
 
+  INLINE(void ProcessMarkingDeque());
+
+  INLINE(void ProcessMarkingDeque(intptr_t bytes_to_process));
+
+  INLINE(void VisitObject(Map* map, HeapObject* obj, int size));
+
   Heap* heap_;
 
   State state_;
diff --git a/src/isolate.cc b/src/isolate.cc
index 75e15a4..00979f7 100644
--- a/src/isolate.cc
+++ b/src/isolate.cc
@@ -574,8 +574,6 @@
   Handle<String> column_key = factory()->LookupAsciiSymbol("column");
   Handle<String> line_key = factory()->LookupAsciiSymbol("lineNumber");
   Handle<String> script_key = factory()->LookupAsciiSymbol("scriptName");
-  Handle<String> name_or_source_url_key =
-      factory()->LookupAsciiSymbol("nameOrSourceURL");
   Handle<String> script_name_or_source_url_key =
       factory()->LookupAsciiSymbol("scriptNameOrSourceURL");
   Handle<String> function_key = factory()->LookupAsciiSymbol("functionName");
@@ -635,18 +633,7 @@
       }
 
       if (options & StackTrace::kScriptNameOrSourceURL) {
-        Handle<Object> script_name(script->name(), this);
-        Handle<JSValue> script_wrapper = GetScriptWrapper(script);
-        Handle<Object> property = GetProperty(script_wrapper,
-                                              name_or_source_url_key);
-        ASSERT(property->IsJSFunction());
-        Handle<JSFunction> method = Handle<JSFunction>::cast(property);
-        bool caught_exception;
-        Handle<Object> result = Execution::TryCall(method, script_wrapper, 0,
-                                                   NULL, &caught_exception);
-        if (caught_exception) {
-          result = factory()->undefined_value();
-        }
+        Handle<Object> result = GetScriptNameOrSourceURL(script);
         CHECK_NOT_EMPTY_HANDLE(this,
                                JSObject::SetLocalPropertyIgnoreAttributes(
                                    stack_frame, script_name_or_source_url_key,
@@ -1138,6 +1125,14 @@
               stack_trace_for_uncaught_exceptions_options_);
         }
       }
+      // Stringify custom error objects for the message object.
+      if (exception_handle->IsJSObject() && !IsErrorObject(exception_handle)) {
+        bool failed = false;
+        exception_handle = Execution::ToString(exception_handle, &failed);
+        if (failed) {
+          exception_handle = factory()->LookupAsciiSymbol("exception");
+        }
+      }
       Handle<Object> message_obj = MessageHandler::MakeMessageObject(
           "uncaught_exception",
           location,
diff --git a/src/isolate.h b/src/isolate.h
index b90191d..167b762 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -354,6 +354,7 @@
   V(uint64_t, enabled_cpu_features, 0)                                         \
   V(CpuProfiler*, cpu_profiler, NULL)                                          \
   V(HeapProfiler*, heap_profiler, NULL)                                        \
+  V(bool, observer_delivery_pending, false)                                    \
   ISOLATE_DEBUGGER_INIT_LIST(V)
 
 class Isolate {
diff --git a/src/json-stringifier.h b/src/json-stringifier.h
index 74f38fe..cdb724f 100644
--- a/src/json-stringifier.h
+++ b/src/json-stringifier.h
@@ -690,7 +690,6 @@
   // The <uc16, char> version of this method must not be called.
   ASSERT(sizeof(*dest) >= sizeof(*src));
 
-  *(dest++) = '"';
   for (int i = 0; i < length; i++) {
     SrcChar c = src[i];
     if (DoNotEscape(c)) {
@@ -701,7 +700,6 @@
     }
   }
 
-  *(dest++) = '"';
   current_index_ += static_cast<int>(dest - dest_start);
 }
 
@@ -710,12 +708,13 @@
 void BasicJsonStringifier::SerializeString_(Vector<const Char> vector,
                                             Handle<String> string) {
   int length = vector.length();
+  Append_<is_ascii, char>('"');
   // We make a rough estimate to find out if the current string can be
   // serialized without allocating a new string part. The worst case length of
-  // an escaped character is 6.  Shifting left by 3 is a more pessimistic
-  // estimate than multiplying by 6, but faster to calculate.
-  static const int kEnclosingQuotesLength = 2;
-  if (current_index_ + (length << 3) + kEnclosingQuotesLength < part_length_) {
+  // an escaped character is 6.  Shifting the remaining length right by 3 is
+  // a more pessimistic estimate than dividing by 6, but faster to calculate.
+
+  if (((part_length_ - current_index_) >> 3) > length) {
     if (is_ascii) {
       SerializeStringUnchecked_(
           vector.start(),
@@ -728,7 +727,6 @@
           length);
     }
   } else {
-    Append_<is_ascii, char>('"');
     String* string_location = *string;
     for (int i = 0; i < length; i++) {
       Char c = vector[i];
@@ -744,8 +742,9 @@
         string_location = *string;
       }
     }
-    Append_<is_ascii, char>('"');
   }
+
+  Append_<is_ascii, char>('"');
 }
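
With the quotes now emitted through the bounds-checked Append_ path, the fast
path only needs slack for the escaped payload: a worst-case escape is 6 output
characters, and reserving 8 per input character via the right shift is strictly
more conservative. A tiny self-contained illustration of the new check, with
made-up numbers (not part of the patch):

    #include <cassert>

    // Mirrors ((part_length_ - current_index_) >> 3) > length.
    static bool FitsUncheckedPath(int part_length, int current_index,
                                  int length) {
      return ((part_length - current_index) >> 3) > length;
    }

    int main() {
      assert(FitsUncheckedPath(1024, 0, 100));    // 100 * 6 = 600 fits in 1024
      assert(!FitsUncheckedPath(1024, 1000, 5));  // only 24 characters remain
      return 0;
    }
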
 
 
diff --git a/src/liveedit.cc b/src/liveedit.cc
index 2a3aafc..dc7d4b1 100644
--- a/src/liveedit.cc
+++ b/src/liveedit.cc
@@ -1287,7 +1287,9 @@
           continue;
         }
       }
-      buffer_writer.Write(it.rinfo());
+      if (RelocInfo::IsRealRelocMode(rinfo->rmode())) {
+        buffer_writer.Write(it.rinfo());
+      }
     }
   }
 
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index ebba22b..1af3074 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -62,6 +62,7 @@
       sweep_precisely_(false),
       reduce_memory_footprint_(false),
       abort_incremental_marking_(false),
+      marking_parity_(ODD_MARKING_PARITY),
       compacting_(false),
       was_marked_incrementally_(false),
       tracer_(NULL),
@@ -404,6 +405,13 @@
 
   Finish();
 
+  if (marking_parity_ == EVEN_MARKING_PARITY) {
+    marking_parity_ = ODD_MARKING_PARITY;
+  } else {
+    ASSERT(marking_parity_ == ODD_MARKING_PARITY);
+    marking_parity_ = EVEN_MARKING_PARITY;
+  }
+
   tracer_ = NULL;
 }
 
@@ -952,6 +960,34 @@
 }
 
 
+void CodeFlusher::EvictJSFunctionCandidates() {
+  Object* undefined = isolate_->heap()->undefined_value();
+
+  JSFunction* candidate = jsfunction_candidates_head_;
+  JSFunction* next_candidate;
+  while (candidate != NULL) {
+    next_candidate = GetNextCandidate(candidate);
+    ClearNextCandidate(candidate, undefined);
+    candidate = next_candidate;
+  }
+
+  jsfunction_candidates_head_ = NULL;
+}
+
+
+void CodeFlusher::EvictSharedFunctionInfoCandidates() {
+  SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+  SharedFunctionInfo* next_candidate;
+  while (candidate != NULL) {
+    next_candidate = GetNextCandidate(candidate);
+    ClearNextCandidate(candidate);
+    candidate = next_candidate;
+  }
+
+  shared_function_info_candidates_head_ = NULL;
+}
+
+
 void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) {
   Heap* heap = isolate_->heap();
 
@@ -2369,6 +2405,16 @@
     }
   }
 
+  void VisitCodeAgeSequence(RelocInfo* rinfo) {
+    ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+    Object* stub = rinfo->code_age_stub();
+    ASSERT(stub != NULL);
+    VisitPointer(&stub);
+    if (stub != rinfo->code_age_stub()) {
+      rinfo->set_code_age_stub(Code::cast(stub));
+    }
+  }
+
   void VisitDebugTarget(RelocInfo* rinfo) {
     ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
             rinfo->IsPatchedReturnSequence()) ||
@@ -3629,6 +3675,7 @@
     code_flusher_ = new CodeFlusher(heap()->isolate());
   } else {
     if (code_flusher_ == NULL) return;
+    code_flusher_->EvictAllCandidates();
     delete code_flusher_;
     code_flusher_ = NULL;
   }
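
Evicting before deletion matters because the candidate lists are intrusive: the
"next candidate" link is threaded through a slot inside each candidate object
(see GetNextCandidateSlot below), so deleting the flusher without unthreading
would leave stale links behind in live objects. The pattern, sketched with
hypothetical types (the real code uses GetNextCandidate/ClearNextCandidate as
shown above):

    struct Candidate {
      Candidate* next;  // stands in for the threaded next-candidate slot
    };

    void EvictAll(Candidate** head) {
      Candidate* current = *head;
      while (current != NULL) {
        Candidate* next = current->next;
        current->next = NULL;  // analogous to ClearNextCandidate()
        current = next;
      }
      *head = NULL;  // analogous to resetting *_candidates_head_
    }
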
diff --git a/src/mark-compact.h b/src/mark-compact.h
index 8b1620e..0a4c1ea 100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -441,11 +441,18 @@
     ProcessJSFunctionCandidates();
   }
 
+  void EvictAllCandidates() {
+    EvictJSFunctionCandidates();
+    EvictSharedFunctionInfoCandidates();
+  }
+
   void IteratePointersToFromSpace(ObjectVisitor* v);
 
  private:
   void ProcessJSFunctionCandidates();
   void ProcessSharedFunctionInfoCandidates();
+  void EvictJSFunctionCandidates();
+  void EvictSharedFunctionInfoCandidates();
 
   static JSFunction** GetNextCandidateSlot(JSFunction* candidate) {
     return reinterpret_cast<JSFunction**>(
@@ -653,6 +660,8 @@
 
   bool is_compacting() const { return compacting_; }
 
+  MarkingParity marking_parity() { return marking_parity_; }
+
  private:
   MarkCompactCollector();
   ~MarkCompactCollector();
@@ -685,6 +694,8 @@
 
   bool abort_incremental_marking_;
 
+  MarkingParity marking_parity_;
+
   // True if we are collecting slots to perform evacuation from evacuation
   // candidates.
   bool compacting_;
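
The new marking_parity() accessor exposes the value the collector now flips at
the end of every full collection (see the hunk above), which lets clients tell
consecutive mark-compact cycles apart. A hypothetical consumer, for
illustration only (every name except marking_parity() and the parity constants
is made up):

    static MarkingParity last_seen_parity = ODD_MARKING_PARITY;

    // Runs its bookkeeping at most once per full mark-compact cycle.
    void MaybeDoOncePerFullGC(MarkCompactCollector* collector) {
      MarkingParity current = collector->marking_parity();
      if (current == last_seen_parity) return;  // still the same cycle
      last_seen_parity = current;
      // ... per-cycle work ...
    }
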
diff --git a/src/messages.cc b/src/messages.cc
index 23fd4fd..ce965fc 100644
--- a/src/messages.cc
+++ b/src/messages.cc
@@ -155,7 +155,9 @@
           JSFunction::cast(
               Isolate::Current()->js_builtins_object()->
               GetPropertyNoExceptionThrown(*fmt_str)));
-  Handle<Object> argv[] = { data };
+  Handle<JSMessageObject> message = Handle<JSMessageObject>::cast(data);
+  Handle<Object> argv[] = { Handle<Object>(message->type()),
+                            Handle<Object>(message->arguments()) };
 
   bool caught_exception;
   Handle<Object> result =
diff --git a/src/messages.js b/src/messages.js
index 7d0c6bd..f04bed9 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -26,18 +26,137 @@
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 // -------------------------------------------------------------------
-//
-// If this object gets passed to an error constructor the error will
-// get an accessor for .message that constructs a descriptive error
-// message on access.
-var kAddMessageAccessorsMarker = { };
 
-// This will be lazily initialized when first needed (and forcibly
-// overwritten even though it's const).
-var kMessages = 0;
+var kMessages = {
+  // Error
+  cyclic_proto:                  ["Cyclic __proto__ value"],
+  code_gen_from_strings:         ["%0"],
+  // TypeError
+  unexpected_token:              ["Unexpected token ", "%0"],
+  unexpected_token_number:       ["Unexpected number"],
+  unexpected_token_string:       ["Unexpected string"],
+  unexpected_token_identifier:   ["Unexpected identifier"],
+  unexpected_reserved:           ["Unexpected reserved word"],
+  unexpected_strict_reserved:    ["Unexpected strict mode reserved word"],
+  unexpected_eos:                ["Unexpected end of input"],
+  malformed_regexp:              ["Invalid regular expression: /", "%0", "/: ", "%1"],
+  unterminated_regexp:           ["Invalid regular expression: missing /"],
+  regexp_flags:                  ["Cannot supply flags when constructing one RegExp from another"],
+  incompatible_method_receiver:  ["Method ", "%0", " called on incompatible receiver ", "%1"],
+  invalid_lhs_in_assignment:     ["Invalid left-hand side in assignment"],
+  invalid_lhs_in_for_in:         ["Invalid left-hand side in for-in"],
+  invalid_lhs_in_postfix_op:     ["Invalid left-hand side expression in postfix operation"],
+  invalid_lhs_in_prefix_op:      ["Invalid left-hand side expression in prefix operation"],
+  multiple_defaults_in_switch:   ["More than one default clause in switch statement"],
+  newline_after_throw:           ["Illegal newline after throw"],
+  redeclaration:                 ["%0", " '", "%1", "' has already been declared"],
+  no_catch_or_finally:           ["Missing catch or finally after try"],
+  unknown_label:                 ["Undefined label '", "%0", "'"],
+  uncaught_exception:            ["Uncaught ", "%0"],
+  stack_trace:                   ["Stack Trace:\n", "%0"],
+  called_non_callable:           ["%0", " is not a function"],
+  undefined_method:              ["Object ", "%1", " has no method '", "%0", "'"],
+  property_not_function:         ["Property '", "%0", "' of object ", "%1", " is not a function"],
+  cannot_convert_to_primitive:   ["Cannot convert object to primitive value"],
+  not_constructor:               ["%0", " is not a constructor"],
+  not_defined:                   ["%0", " is not defined"],
+  non_object_property_load:      ["Cannot read property '", "%0", "' of ", "%1"],
+  non_object_property_store:     ["Cannot set property '", "%0", "' of ", "%1"],
+  non_object_property_call:      ["Cannot call method '", "%0", "' of ", "%1"],
+  with_expression:               ["%0", " has no properties"],
+  illegal_invocation:            ["Illegal invocation"],
+  no_setter_in_callback:         ["Cannot set property ", "%0", " of ", "%1", " which has only a getter"],
+  apply_non_function:            ["Function.prototype.apply was called on ", "%0", ", which is a ", "%1", " and not a function"],
+  apply_wrong_args:              ["Function.prototype.apply: Arguments list has wrong type"],
+  invalid_in_operator_use:       ["Cannot use 'in' operator to search for '", "%0", "' in ", "%1"],
+  instanceof_function_expected:  ["Expecting a function in instanceof check, but got ", "%0"],
+  instanceof_nonobject_proto:    ["Function has non-object prototype '", "%0", "' in instanceof check"],
+  null_to_object:                ["Cannot convert null to object"],
+  reduce_no_initial:             ["Reduce of empty array with no initial value"],
+  getter_must_be_callable:       ["Getter must be a function: ", "%0"],
+  setter_must_be_callable:       ["Setter must be a function: ", "%0"],
+  value_and_accessor:            ["Invalid property.  A property cannot both have accessors and be writable or have a value, ", "%0"],
+  proto_object_or_null:          ["Object prototype may only be an Object or null"],
+  property_desc_object:          ["Property description must be an object: ", "%0"],
+  redefine_disallowed:           ["Cannot redefine property: ", "%0"],
+  define_disallowed:             ["Cannot define property:", "%0", ", object is not extensible."],
+  non_extensible_proto:          ["%0", " is not extensible"],
+  handler_non_object:            ["Proxy.", "%0", " called with non-object as handler"],
+  proto_non_object:              ["Proxy.", "%0", " called with non-object as prototype"],
+  trap_function_expected:        ["Proxy.", "%0", " called with non-function for '", "%1", "' trap"],
+  handler_trap_missing:          ["Proxy handler ", "%0", " has no '", "%1", "' trap"],
+  handler_trap_must_be_callable: ["Proxy handler ", "%0", " has non-callable '", "%1", "' trap"],
+  handler_returned_false:        ["Proxy handler ", "%0", " returned false from '", "%1", "' trap"],
+  handler_returned_undefined:    ["Proxy handler ", "%0", " returned undefined from '", "%1", "' trap"],
+  proxy_prop_not_configurable:   ["Proxy handler ", "%0", " returned non-configurable descriptor for property '", "%2", "' from '", "%1", "' trap"],
+  proxy_non_object_prop_names:   ["Trap '", "%1", "' returned non-object ", "%0"],
+  proxy_repeated_prop_name:      ["Trap '", "%1", "' returned repeated property name '", "%2", "'"],
+  invalid_weakmap_key:           ["Invalid value used as weak map key"],
+  not_date_object:               ["this is not a Date object."],
+  observe_non_object:            ["Object.", "%0", " cannot ", "%0", " non-object"],
+  observe_non_function:          ["Object.", "%0", " cannot deliver to non-function"],
+  observe_callback_frozen:       ["Object.observe cannot deliver to a frozen function object"],
+  observe_type_non_string:       ["Invalid changeRecord with non-string 'type' property"],
+  observe_notify_non_notifier:   ["notify called on non-notifier object"],
+  // RangeError
+  invalid_array_length:          ["Invalid array length"],
+  stack_overflow:                ["Maximum call stack size exceeded"],
+  invalid_time_value:            ["Invalid time value"],
+  // SyntaxError
+  unable_to_parse:               ["Parse error"],
+  invalid_regexp_flags:          ["Invalid flags supplied to RegExp constructor '", "%0", "'"],
+  invalid_regexp:                ["Invalid RegExp pattern /", "%0", "/"],
+  illegal_break:                 ["Illegal break statement"],
+  illegal_continue:              ["Illegal continue statement"],
+  illegal_return:                ["Illegal return statement"],
+  illegal_let:                   ["Illegal let declaration outside extended mode"],
+  error_loading_debugger:        ["Error loading debugger"],
+  no_input_to_regexp:            ["No input to ", "%0"],
+  invalid_json:                  ["String '", "%0", "' is not valid JSON"],
+  circular_structure:            ["Converting circular structure to JSON"],
+  called_on_non_object:          ["%0", " called on non-object"],
+  called_on_null_or_undefined:   ["%0", " called on null or undefined"],
+  array_indexof_not_defined:     ["Array.getIndexOf: Argument undefined"],
+  object_not_extensible:         ["Can't add property ", "%0", ", object is not extensible"],
+  illegal_access:                ["Illegal access"],
+  invalid_preparser_data:        ["Invalid preparser data for function ", "%0"],
+  strict_mode_with:              ["Strict mode code may not include a with statement"],
+  strict_catch_variable:         ["Catch variable may not be eval or arguments in strict mode"],
+  too_many_arguments:            ["Too many arguments in function call (only 32766 allowed)"],
+  too_many_parameters:           ["Too many parameters in function definition (only 32766 allowed)"],
+  too_many_variables:            ["Too many variables declared (only 131071 allowed)"],
+  strict_param_name:             ["Parameter name eval or arguments is not allowed in strict mode"],
+  strict_param_dupe:             ["Strict mode function may not have duplicate parameter names"],
+  strict_var_name:               ["Variable name may not be eval or arguments in strict mode"],
+  strict_function_name:          ["Function name may not be eval or arguments in strict mode"],
+  strict_octal_literal:          ["Octal literals are not allowed in strict mode."],
+  strict_duplicate_property:     ["Duplicate data property in object literal not allowed in strict mode"],
+  accessor_data_property:        ["Object literal may not have data and accessor property with the same name"],
+  accessor_get_set:              ["Object literal may not have multiple get/set accessors with the same name"],
+  strict_lhs_assignment:         ["Assignment to eval or arguments is not allowed in strict mode"],
+  strict_lhs_postfix:            ["Postfix increment/decrement may not have eval or arguments operand in strict mode"],
+  strict_lhs_prefix:             ["Prefix increment/decrement may not have eval or arguments operand in strict mode"],
+  strict_reserved_word:          ["Use of future reserved word in strict mode"],
+  strict_delete:                 ["Delete of an unqualified identifier in strict mode."],
+  strict_delete_property:        ["Cannot delete property '", "%0", "' of ", "%1"],
+  strict_const:                  ["Use of const in strict mode."],
+  strict_function:               ["In strict mode code, functions can only be declared at top level or immediately within another function." ],
+  strict_read_only_property:     ["Cannot assign to read only property '", "%0", "' of ", "%1"],
+  strict_cannot_assign:          ["Cannot assign to read only '", "%0", "' in strict mode"],
+  strict_poison_pill:            ["'caller', 'callee', and 'arguments' properties may not be accessed on strict mode functions or the arguments objects for calls to them"],
+  strict_caller:                 ["Illegal access to a strict mode caller function."],
+  unprotected_let:               ["Illegal let declaration in unprotected statement context."],
+  unprotected_const:             ["Illegal const declaration in unprotected statement context."],
+  cant_prevent_ext_external_array_elements: ["Cannot prevent extension of an object with external array elements"],
+  redef_external_array_element:  ["Cannot redefine a property of an object with external array elements"],
+  harmony_const_assign:          ["Assignment to constant variable."],
+  invalid_module_path:           ["Module does not export '", "%0", "', or export is not itself a module"],
+  module_type_error:             ["Module '", "%0", "' used improperly"],
+  module_export_undefined:       ["Export '", "%0", "' is not defined in module"],
+};
 
-function FormatString(format, message) {
-  var args = %MessageGetArguments(message);
+
+function FormatString(format, args) {
   var result = "";
   var arg_num = 0;
   for (var i = 0; i < format.length; i++) {
@@ -48,7 +167,7 @@
       if (arg_num < 4) {
         // str is one of %0, %1, %2 or %3.
         try {
-          str = ToDetailString(args[arg_num]);
+          str = NoSideEffectToString(args[arg_num]);
         } catch (e) {
           if (%IsJSModule(args[arg_num]))
             str = "module";
@@ -65,6 +184,26 @@
 }
 
 
+function NoSideEffectToString(obj) {
+  if (IS_STRING(obj)) return obj;
+  if (IS_NUMBER(obj)) return %_NumberToString(obj);
+  if (IS_BOOLEAN(obj)) return obj ? 'true' : 'false';
+  if (IS_UNDEFINED(obj)) return 'undefined';
+  if (IS_NULL(obj)) return 'null';
+  if (IS_OBJECT(obj) && %GetDataProperty(obj, "toString") === ObjectToString) {
+    var constructor = obj.constructor;
+    if (typeof constructor == "function") {
+      var constructorName = constructor.name;
+      if (IS_STRING(constructorName) && constructorName !== "") {
+        return "#<" + constructorName + ">";
+      }
+    }
+  }
+  if (IsNativeErrorObject(obj)) return %_CallFunction(obj, ErrorToString);
+  return %_CallFunction(obj, ObjectToString);
+}
+
+
 // To check if something is a native error we need to check the
 // concrete native error types. It is not sufficient to use instanceof
 // since it possible to create an object that has Error.prototype on
@@ -115,7 +254,7 @@
   if (IS_UNDEFINED(args)) {
     args = [];
   }
-  var e = new constructor(kAddMessageAccessorsMarker);
+  var e = new constructor(FormatMessage(type, args));
   e.type = type;
   e.arguments = args;
   return e;
@@ -135,160 +274,10 @@
 
 
 // Helper functions; called from the runtime system.
-function FormatMessage(message) {
-  if (kMessages === 0) {
-    var messagesDictionary = [
-      // Error
-      "cyclic_proto",                 ["Cyclic __proto__ value"],
-      "code_gen_from_strings",        ["%0"],
-      // TypeError
-      "unexpected_token",             ["Unexpected token ", "%0"],
-      "unexpected_token_number",      ["Unexpected number"],
-      "unexpected_token_string",      ["Unexpected string"],
-      "unexpected_token_identifier",  ["Unexpected identifier"],
-      "unexpected_reserved",          ["Unexpected reserved word"],
-      "unexpected_strict_reserved",   ["Unexpected strict mode reserved word"],
-      "unexpected_eos",               ["Unexpected end of input"],
-      "malformed_regexp",             ["Invalid regular expression: /", "%0", "/: ", "%1"],
-      "unterminated_regexp",          ["Invalid regular expression: missing /"],
-      "regexp_flags",                 ["Cannot supply flags when constructing one RegExp from another"],
-      "incompatible_method_receiver", ["Method ", "%0", " called on incompatible receiver ", "%1"],
-      "invalid_lhs_in_assignment",    ["Invalid left-hand side in assignment"],
-      "invalid_lhs_in_for_in",        ["Invalid left-hand side in for-in"],
-      "invalid_lhs_in_postfix_op",    ["Invalid left-hand side expression in postfix operation"],
-      "invalid_lhs_in_prefix_op",     ["Invalid left-hand side expression in prefix operation"],
-      "multiple_defaults_in_switch",  ["More than one default clause in switch statement"],
-      "newline_after_throw",          ["Illegal newline after throw"],
-      "redeclaration",                ["%0", " '", "%1", "' has already been declared"],
-      "no_catch_or_finally",          ["Missing catch or finally after try"],
-      "unknown_label",                ["Undefined label '", "%0", "'"],
-      "uncaught_exception",           ["Uncaught ", "%0"],
-      "stack_trace",                  ["Stack Trace:\n", "%0"],
-      "called_non_callable",          ["%0", " is not a function"],
-      "undefined_method",             ["Object ", "%1", " has no method '", "%0", "'"],
-      "property_not_function",        ["Property '", "%0", "' of object ", "%1", " is not a function"],
-      "cannot_convert_to_primitive",  ["Cannot convert object to primitive value"],
-      "not_constructor",              ["%0", " is not a constructor"],
-      "not_defined",                  ["%0", " is not defined"],
-      "non_object_property_load",     ["Cannot read property '", "%0", "' of ", "%1"],
-      "non_object_property_store",    ["Cannot set property '", "%0", "' of ", "%1"],
-      "non_object_property_call",     ["Cannot call method '", "%0", "' of ", "%1"],
-      "with_expression",              ["%0", " has no properties"],
-      "illegal_invocation",           ["Illegal invocation"],
-      "no_setter_in_callback",        ["Cannot set property ", "%0", " of ", "%1", " which has only a getter"],
-      "apply_non_function",           ["Function.prototype.apply was called on ", "%0", ", which is a ", "%1", " and not a function"],
-      "apply_wrong_args",             ["Function.prototype.apply: Arguments list has wrong type"],
-      "invalid_in_operator_use",      ["Cannot use 'in' operator to search for '", "%0", "' in ", "%1"],
-      "instanceof_function_expected", ["Expecting a function in instanceof check, but got ", "%0"],
-      "instanceof_nonobject_proto",   ["Function has non-object prototype '", "%0", "' in instanceof check"],
-      "null_to_object",               ["Cannot convert null to object"],
-      "reduce_no_initial",            ["Reduce of empty array with no initial value"],
-      "getter_must_be_callable",      ["Getter must be a function: ", "%0"],
-      "setter_must_be_callable",      ["Setter must be a function: ", "%0"],
-      "value_and_accessor",           ["Invalid property.  A property cannot both have accessors and be writable or have a value, ", "%0"],
-      "proto_object_or_null",         ["Object prototype may only be an Object or null"],
-      "property_desc_object",         ["Property description must be an object: ", "%0"],
-      "redefine_disallowed",          ["Cannot redefine property: ", "%0"],
-      "define_disallowed",            ["Cannot define property:", "%0", ", object is not extensible."],
-      "non_extensible_proto",         ["%0", " is not extensible"],
-      "handler_non_object",           ["Proxy.", "%0", " called with non-object as handler"],
-      "proto_non_object",             ["Proxy.", "%0", " called with non-object as prototype"],
-      "trap_function_expected",       ["Proxy.", "%0", " called with non-function for '", "%1", "' trap"],
-      "handler_trap_missing",         ["Proxy handler ", "%0", " has no '", "%1", "' trap"],
-      "handler_trap_must_be_callable", ["Proxy handler ", "%0", " has non-callable '", "%1", "' trap"],
-      "handler_returned_false",       ["Proxy handler ", "%0", " returned false from '", "%1", "' trap"],
-      "handler_returned_undefined",   ["Proxy handler ", "%0", " returned undefined from '", "%1", "' trap"],
-      "proxy_prop_not_configurable",  ["Proxy handler ", "%0", " returned non-configurable descriptor for property '", "%2", "' from '", "%1", "' trap"],
-      "proxy_non_object_prop_names",  ["Trap '", "%1", "' returned non-object ", "%0"],
-      "proxy_repeated_prop_name",     ["Trap '", "%1", "' returned repeated property name '", "%2", "'"],
-      "invalid_weakmap_key",          ["Invalid value used as weak map key"],
-      "not_date_object",              ["this is not a Date object."],
-      "observe_non_object",           ["Object.", "%0", " cannot ", "%0", " non-object"],
-      "observe_non_function",         ["Object.", "%0", " cannot deliver to non-function"],
-      "observe_callback_frozen",      ["Object.observe cannot deliver to a frozen function object"],
-      "observe_type_non_string",      ["Object.notify provided changeRecord with non-string 'type' property"],
-      // RangeError
-      "invalid_array_length",         ["Invalid array length"],
-      "stack_overflow",               ["Maximum call stack size exceeded"],
-      "invalid_time_value",           ["Invalid time value"],
-      // SyntaxError
-      "unable_to_parse",              ["Parse error"],
-      "invalid_regexp_flags",         ["Invalid flags supplied to RegExp constructor '", "%0", "'"],
-      "invalid_regexp",               ["Invalid RegExp pattern /", "%0", "/"],
-      "illegal_break",                ["Illegal break statement"],
-      "illegal_continue",             ["Illegal continue statement"],
-      "illegal_return",               ["Illegal return statement"],
-      "illegal_let",                  ["Illegal let declaration outside extended mode"],
-      "error_loading_debugger",       ["Error loading debugger"],
-      "no_input_to_regexp",           ["No input to ", "%0"],
-      "invalid_json",                 ["String '", "%0", "' is not valid JSON"],
-      "circular_structure",           ["Converting circular structure to JSON"],
-      "called_on_non_object",         ["%0", " called on non-object"],
-      "called_on_null_or_undefined",  ["%0", " called on null or undefined"],
-      "array_indexof_not_defined",    ["Array.getIndexOf: Argument undefined"],
-      "object_not_extensible",        ["Can't add property ", "%0", ", object is not extensible"],
-      "illegal_access",               ["Illegal access"],
-      "invalid_preparser_data",       ["Invalid preparser data for function ", "%0"],
-      "strict_mode_with",             ["Strict mode code may not include a with statement"],
-      "strict_catch_variable",        ["Catch variable may not be eval or arguments in strict mode"],
-      "too_many_arguments",           ["Too many arguments in function call (only 32766 allowed)"],
-      "too_many_parameters",          ["Too many parameters in function definition (only 32766 allowed)"],
-      "too_many_variables",           ["Too many variables declared (only 131071 allowed)"],
-      "strict_param_name",            ["Parameter name eval or arguments is not allowed in strict mode"],
-      "strict_param_dupe",            ["Strict mode function may not have duplicate parameter names"],
-      "strict_var_name",              ["Variable name may not be eval or arguments in strict mode"],
-      "strict_function_name",         ["Function name may not be eval or arguments in strict mode"],
-      "strict_octal_literal",         ["Octal literals are not allowed in strict mode."],
-      "strict_duplicate_property",    ["Duplicate data property in object literal not allowed in strict mode"],
-      "accessor_data_property",       ["Object literal may not have data and accessor property with the same name"],
-      "accessor_get_set",             ["Object literal may not have multiple get/set accessors with the same name"],
-      "strict_lhs_assignment",        ["Assignment to eval or arguments is not allowed in strict mode"],
-      "strict_lhs_postfix",           ["Postfix increment/decrement may not have eval or arguments operand in strict mode"],
-      "strict_lhs_prefix",            ["Prefix increment/decrement may not have eval or arguments operand in strict mode"],
-      "strict_reserved_word",         ["Use of future reserved word in strict mode"],
-      "strict_delete",                ["Delete of an unqualified identifier in strict mode."],
-      "strict_delete_property",       ["Cannot delete property '", "%0", "' of ", "%1"],
-      "strict_const",                 ["Use of const in strict mode."],
-      "strict_function",              ["In strict mode code, functions can only be declared at top level or immediately within another function." ],
-      "strict_read_only_property",    ["Cannot assign to read only property '", "%0", "' of ", "%1"],
-      "strict_cannot_assign",         ["Cannot assign to read only '", "%0", "' in strict mode"],
-      "strict_poison_pill",           ["'caller', 'callee', and 'arguments' properties may not be accessed on strict mode functions or the arguments objects for calls to them"],
-      "strict_caller",                ["Illegal access to a strict mode caller function."],
-      "unprotected_let",              ["Illegal let declaration in unprotected statement context."],
-      "unprotected_const",            ["Illegal const declaration in unprotected statement context."],
-      "cant_prevent_ext_external_array_elements", ["Cannot prevent extension of an object with external array elements"],
-      "redef_external_array_element", ["Cannot redefine a property of an object with external array elements"],
-      "harmony_const_assign",         ["Assignment to constant variable."],
-      "invalid_module_path",          ["Module does not export '", "%0", "', or export is not itself a module"],
-      "module_type_error",            ["Module '", "%0", "' used improperly"],
-      "module_export_undefined",      ["Export '", "%0", "' is not defined in module"],
-    ];
-    var messages = { __proto__ : null };
-    for (var i = 0; i < messagesDictionary.length; i += 2) {
-      var key = messagesDictionary[i];
-      var format = messagesDictionary[i + 1];
-
-      for (var j = 0; j < format.length; j++) {
-        %IgnoreAttributesAndSetProperty(format, %_NumberToString(j), format[j],
-                                        DONT_DELETE | READ_ONLY | DONT_ENUM);
-      }
-      %IgnoreAttributesAndSetProperty(format, 'length', format.length,
-                                      DONT_DELETE | READ_ONLY | DONT_ENUM);
-      %PreventExtensions(format);
-      %IgnoreAttributesAndSetProperty(messages,
-                                      key,
-                                      format,
-                                      DONT_DELETE | DONT_ENUM | READ_ONLY);
-    }
-    %PreventExtensions(messages);
-    %IgnoreAttributesAndSetProperty(builtins, "kMessages",
-                                    messages,
-                                    DONT_DELETE | DONT_ENUM | READ_ONLY);
-  }
-  var message_type = %MessageGetType(message);
-  var format = kMessages[message_type];
-  if (!format) return "<unknown message " + message_type + ">";
-  return FormatString(format, message);
+function FormatMessage(type, args) {
+  var format = kMessages[type];
+  if (!format) return "<unknown message " + type + ">";
+  return FormatString(format, args);
 }
 
 
@@ -1166,13 +1155,7 @@
         %IgnoreAttributesAndSetProperty(this, 'stack', void 0, DONT_ENUM);
         %IgnoreAttributesAndSetProperty(this, 'arguments', void 0, DONT_ENUM);
         %IgnoreAttributesAndSetProperty(this, 'type', void 0, DONT_ENUM);
-        if (m === kAddMessageAccessorsMarker) {
-          // DefineOneShotAccessor always inserts a message property and
-          // ignores setters.
-          DefineOneShotAccessor(this, 'message', function (obj) {
-              return FormatMessage(%NewMessageObject(obj.type, obj.arguments));
-          });
-        } else if (!IS_UNDEFINED(m)) {
+        if (!IS_UNDEFINED(m)) {
           %IgnoreAttributesAndSetProperty(
             this, 'message', ToString(m), DONT_ENUM);
         }
@@ -1235,11 +1218,6 @@
     var name = GetPropertyWithoutInvokingMonkeyGetters(error, "name");
     name = IS_UNDEFINED(name) ? "Error" : TO_STRING_INLINE(name);
     var message = GetPropertyWithoutInvokingMonkeyGetters(error, "message");
-    var hasMessage = %_CallFunction(error, "message", ObjectHasOwnProperty);
-    if (type && !hasMessage) {
-      var args = GetPropertyWithoutInvokingMonkeyGetters(error, "arguments");
-      message = FormatMessage(%NewMessageObject(type, args));
-    }
     message = IS_UNDEFINED(message) ? "" : TO_STRING_INLINE(message);
     if (name === "") return message;
     if (message === "") return name;
diff --git a/src/mips/assembler-mips.cc b/src/mips/assembler-mips.cc
index a4563a6..4ce924d 100644
--- a/src/mips/assembler-mips.cc
+++ b/src/mips/assembler-mips.cc
@@ -274,8 +274,7 @@
 Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
     : AssemblerBase(arg_isolate),
       recorded_ast_id_(TypeFeedbackId::None()),
-      positions_recorder_(this),
-      emit_debug_code_(FLAG_debug_code) {
+      positions_recorder_(this) {
   if (buffer == NULL) {
     // Do our own buffer management.
     if (buffer_size <= kMinimalBufferSize) {
diff --git a/src/mips/assembler-mips.h b/src/mips/assembler-mips.h
index 59c45c9..fd2ff0d 100644
--- a/src/mips/assembler-mips.h
+++ b/src/mips/assembler-mips.h
@@ -525,12 +525,6 @@
   Assembler(Isolate* isolate, void* buffer, int buffer_size);
   ~Assembler();
 
-  // Overrides the default provided by FLAG_debug_code.
-  void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
-
-  // Dummy for cross platform compatibility.
-  void set_predictable_code_size(bool value) { }
-
   // GetCode emits any pending (non-emitted) code and fills the descriptor
   // desc. GetCode() is idempotent; it returns the same result if no other
   // Assembler functions are invoked in between GetCode() calls.
@@ -1033,8 +1027,6 @@
   // the relocation info.
   TypeFeedbackId recorded_ast_id_;
 
-  bool emit_debug_code() const { return emit_debug_code_; }
-
   int32_t buffer_space() const { return reloc_info_writer.pos() - pc_; }
 
   // Decode branch instruction at pos and return branch target pos.
@@ -1285,7 +1277,6 @@
   friend class BlockTrampolinePoolScope;
 
   PositionsRecorder positions_recorder_;
-  bool emit_debug_code_;
   friend class PositionsRecorder;
   friend class EnsureSpace;
 };
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index ca31826..7f7d70e 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -5083,7 +5083,7 @@
   // regexp_data: RegExp data (FixedArray)
   // a0: Instance type of subject string
   STATIC_ASSERT(kStringEncodingMask == 4);
-  STATIC_ASSERT(kAsciiStringTag == 4);
+  STATIC_ASSERT(kOneByteStringTag == 4);
   STATIC_ASSERT(kTwoByteStringTag == 0);
   // Find the code object based on the assumptions above.
   __ And(a0, a0, Operand(kStringEncodingMask));  // Non-zero for ASCII.
@@ -6250,7 +6250,7 @@
     // string's encoding is wrong because we always have to recheck encoding of
     // the newly created string's parent anyways due to externalized strings.
     Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
     STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
     __ And(t0, a1, Operand(kStringEncodingMask));
     __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
@@ -6293,7 +6293,7 @@
 
   __ bind(&allocate_result);
   // Sequential ASCII string.  Allocate the result.
-  STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
   __ And(t0, a1, Operand(kStringEncodingMask));
   __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));
 
@@ -6665,9 +6665,10 @@
   __ Branch(&ascii_data, ne, at, Operand(zero_reg));
 
   __ xor_(t0, t0, t1);
-  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
-  __ And(t0, t0, Operand(kAsciiStringTag | kAsciiDataHintTag));
-  __ Branch(&ascii_data, eq, t0, Operand(kAsciiStringTag | kAsciiDataHintTag));
+  STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
+  __ And(t0, t0, Operand(kOneByteStringTag | kAsciiDataHintTag));
+  __ Branch(
+         &ascii_data, eq, t0, Operand(kOneByteStringTag | kAsciiDataHintTag));
 
   // Allocate a two byte cons string.
   __ AllocateTwoByteConsString(v0, t2, t0, t1, &call_runtime);
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index 7ae3342..f79208e 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -2622,138 +2622,8 @@
 }
 
 
-void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
-  Register elements = ToRegister(instr->elements());
-  Register result = ToRegister(instr->result());
-  Register scratch = scratch0();
-  Register store_base = scratch;
-  int offset = 0;
-
-  if (instr->key()->IsConstantOperand()) {
-    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
-    offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
-                                           instr->additional_index());
-    store_base = elements;
-  } else {
-    Register key = EmitLoadRegister(instr->key(), scratch);
-    // Even though the HLoadKeyedFastElement instruction forces the input
-    // representation for the key to be an integer, the input gets replaced
-    // during bound check elimination with the index argument to the bounds
-    // check, which can be tagged, so that case must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsTagged()) {
-      __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
-      __ addu(scratch, elements, scratch);
-    } else {
-      __ sll(scratch, key, kPointerSizeLog2);
-      __ addu(scratch, elements, scratch);
-    }
-    offset = FixedArray::OffsetOfElementAt(instr->additional_index());
-  }
-  __ lw(result, FieldMemOperand(store_base, offset));
-
-  // Check for the hole value.
-  if (instr->hydrogen()->RequiresHoleCheck()) {
-    if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
-      __ And(scratch, result, Operand(kSmiTagMask));
-      DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
-    } else {
-      __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
-      DeoptimizeIf(eq, instr->environment(), result, Operand(scratch));
-    }
-  }
-}
-
-
-void LCodeGen::DoLoadKeyedFastDoubleElement(
-    LLoadKeyedFastDoubleElement* instr) {
-  Register elements = ToRegister(instr->elements());
-  bool key_is_constant = instr->key()->IsConstantOperand();
-  Register key = no_reg;
-  DoubleRegister result = ToDoubleRegister(instr->result());
-  Register scratch = scratch0();
-
-  int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
-  int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
-      ? (element_size_shift - kSmiTagSize) : element_size_shift;
-  int constant_key = 0;
-  if (key_is_constant) {
-    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
-    if (constant_key & 0xF0000000) {
-      Abort("array index constant value too big.");
-    }
-  } else {
-    key = ToRegister(instr->key());
-  }
-
-  if (key_is_constant) {
-    __ Addu(elements, elements,
-        Operand(((constant_key + instr->additional_index()) <<
-                 element_size_shift) +
-                FixedDoubleArray::kHeaderSize - kHeapObjectTag));
-  } else {
-    __ sll(scratch, key, shift_size);
-    __ Addu(elements, elements, Operand(scratch));
-    __ Addu(elements, elements,
-            Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
-                    (instr->additional_index() << element_size_shift)));
-  }
-
-  if (instr->hydrogen()->RequiresHoleCheck()) {
-    __ lw(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
-    DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32));
-  }
-
-  __ ldc1(result, MemOperand(elements));
-}
-
-
-MemOperand LCodeGen::PrepareKeyedOperand(Register key,
-                                         Register base,
-                                         bool key_is_constant,
-                                         int constant_key,
-                                         int element_size,
-                                         int shift_size,
-                                         int additional_index,
-                                         int additional_offset) {
-  if (additional_index != 0 && !key_is_constant) {
-    additional_index *= 1 << (element_size - shift_size);
-    __ Addu(scratch0(), key, Operand(additional_index));
-  }
-
-  if (key_is_constant) {
-    return MemOperand(base,
-                      (constant_key << element_size) + additional_offset);
-  }
-
-  if (additional_index == 0) {
-    if (shift_size >= 0) {
-      __ sll(scratch0(), key, shift_size);
-      __ Addu(scratch0(), base, scratch0());
-      return MemOperand(scratch0());
-    } else {
-      ASSERT_EQ(-1, shift_size);
-      __ srl(scratch0(), key, 1);
-      __ Addu(scratch0(), base, scratch0());
-      return MemOperand(scratch0());
-    }
-  }
-
-  if (shift_size >= 0) {
-    __ sll(scratch0(), scratch0(), shift_size);
-    __ Addu(scratch0(), base, scratch0());
-    return MemOperand(scratch0());
-  } else {
-    ASSERT_EQ(-1, shift_size);
-    __ srl(scratch0(), scratch0(), 1);
-    __ Addu(scratch0(), base, scratch0());
-    return MemOperand(scratch0());
-  }
-}
-
-
-void LCodeGen::DoLoadKeyedSpecializedArrayElement(
-    LLoadKeyedSpecializedArrayElement* instr) {
-  Register external_pointer = ToRegister(instr->external_pointer());
+void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
+  Register external_pointer = ToRegister(instr->elements());
   Register key = no_reg;
   ElementsKind elements_kind = instr->elements_kind();
   bool key_is_constant = instr->key()->IsConstantOperand();
@@ -2834,6 +2704,145 @@
 }
 
 
+void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
+  Register elements = ToRegister(instr->elements());
+  bool key_is_constant = instr->key()->IsConstantOperand();
+  Register key = no_reg;
+  DoubleRegister result = ToDoubleRegister(instr->result());
+  Register scratch = scratch0();
+
+  int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
+  int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+      ? (element_size_shift - kSmiTagSize) : element_size_shift;
+  int constant_key = 0;
+  if (key_is_constant) {
+    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
+    if (constant_key & 0xF0000000) {
+      Abort("array index constant value too big.");
+    }
+  } else {
+    key = ToRegister(instr->key());
+  }
+
+  if (key_is_constant) {
+    __ Addu(elements, elements,
+        Operand(((constant_key + instr->additional_index()) <<
+                 element_size_shift) +
+                FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+  } else {
+    __ sll(scratch, key, shift_size);
+    __ Addu(elements, elements, Operand(scratch));
+    __ Addu(elements, elements,
+            Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
+                    (instr->additional_index() << element_size_shift)));
+  }
+
+  if (instr->hydrogen()->RequiresHoleCheck()) {
+    __ lw(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
+    DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32));
+  }
+
+  __ ldc1(result, MemOperand(elements));
+}
+
+
+void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
+  Register elements = ToRegister(instr->elements());
+  Register result = ToRegister(instr->result());
+  Register scratch = scratch0();
+  Register store_base = scratch;
+  int offset = 0;
+
+  if (instr->key()->IsConstantOperand()) {
+    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
+    offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
+                                           instr->additional_index());
+    store_base = elements;
+  } else {
+    Register key = EmitLoadRegister(instr->key(), scratch0());
+    // Even though the HLoadKeyed instruction forces the input
+    // representation for the key to be an integer, the input gets replaced
+    // during bound check elimination with the index argument to the bounds
+    // check, which can be tagged, so that case must be handled here, too.
+    if (instr->hydrogen()->key()->representation().IsTagged()) {
+      __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
+      __ addu(scratch, elements, scratch);
+    } else {
+      __ sll(scratch, key, kPointerSizeLog2);
+      __ addu(scratch, elements, scratch);
+    }
+    offset = FixedArray::OffsetOfElementAt(instr->additional_index());
+  }
+  __ lw(result, FieldMemOperand(store_base, offset));
+
+  // Check for the hole value.
+  if (instr->hydrogen()->RequiresHoleCheck()) {
+    if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
+      __ And(scratch, result, Operand(kSmiTagMask));
+      DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
+    } else {
+      __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
+      DeoptimizeIf(eq, instr->environment(), result, Operand(scratch));
+    }
+  }
+}
+
+
+void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
+  if (instr->is_external()) {
+    DoLoadKeyedExternalArray(instr);
+  } else if (instr->hydrogen()->representation().IsDouble()) {
+    DoLoadKeyedFixedDoubleArray(instr);
+  } else {
+    DoLoadKeyedFixedArray(instr);
+  }
+}
+
+
+MemOperand LCodeGen::PrepareKeyedOperand(Register key,
+                                         Register base,
+                                         bool key_is_constant,
+                                         int constant_key,
+                                         int element_size,
+                                         int shift_size,
+                                         int additional_index,
+                                         int additional_offset) {
+  if (additional_index != 0 && !key_is_constant) {
+    additional_index *= 1 << (element_size - shift_size);
+    __ Addu(scratch0(), key, Operand(additional_index));
+  }
+
+  if (key_is_constant) {
+    return MemOperand(base,
+                      (constant_key << element_size) + additional_offset);
+  }
+
+  if (additional_index == 0) {
+    if (shift_size >= 0) {
+      __ sll(scratch0(), key, shift_size);
+      __ Addu(scratch0(), base, scratch0());
+      return MemOperand(scratch0());
+    } else {
+      ASSERT_EQ(-1, shift_size);
+      __ srl(scratch0(), key, 1);
+      __ Addu(scratch0(), base, scratch0());
+      return MemOperand(scratch0());
+    }
+  }
+
+  if (shift_size >= 0) {
+    __ sll(scratch0(), scratch0(), shift_size);
+    __ Addu(scratch0(), base, scratch0());
+    return MemOperand(scratch0());
+  } else {
+    ASSERT_EQ(-1, shift_size);
+    __ srl(scratch0(), scratch0(), 1);
+    __ Addu(scratch0(), base, scratch0());
+    return MemOperand(scratch0());
+  }
+}
+
+
 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(a1));
   ASSERT(ToRegister(instr->key()).is(a0));
@@ -3737,108 +3746,8 @@
 }
 
 
-void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
-  Register value = ToRegister(instr->value());
-  Register elements = ToRegister(instr->object());
-  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
-  Register scratch = scratch0();
-  Register store_base = scratch;
-  int offset = 0;
-
-  // Do the store.
-  if (instr->key()->IsConstantOperand()) {
-    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
-    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
-    offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
-                                           instr->additional_index());
-    store_base = elements;
-  } else {
-    // Even though the HLoadKeyedFastElement instruction forces the input
-    // representation for the key to be an integer, the input gets replaced
-    // during bound check elimination with the index argument to the bounds
-    // check, which can be tagged, so that case must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsTagged()) {
-      __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
-      __ addu(scratch, elements, scratch);
-    } else {
-      __ sll(scratch, key, kPointerSizeLog2);
-      __ addu(scratch, elements, scratch);
-    }
-    offset = FixedArray::OffsetOfElementAt(instr->additional_index());
-  }
-  __ sw(value, FieldMemOperand(store_base, offset));
-
-  if (instr->hydrogen()->NeedsWriteBarrier()) {
-    HType type = instr->hydrogen()->value()->type();
-    SmiCheck check_needed =
-        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
-    // Compute address of modified element and store it into key register.
-    __ Addu(key, store_base, Operand(offset - kHeapObjectTag));
-    __ RecordWrite(elements,
-                   key,
-                   value,
-                   kRAHasBeenSaved,
-                   kSaveFPRegs,
-                   EMIT_REMEMBERED_SET,
-                   check_needed);
-  }
-}
-
-
-void LCodeGen::DoStoreKeyedFastDoubleElement(
-    LStoreKeyedFastDoubleElement* instr) {
-  DoubleRegister value = ToDoubleRegister(instr->value());
-  Register elements = ToRegister(instr->elements());
-  Register key = no_reg;
-  Register scratch = scratch0();
-  bool key_is_constant = instr->key()->IsConstantOperand();
-  int constant_key = 0;
-  Label not_nan;
-
-  // Calculate the effective address of the slot in the array to store the
-  // double value.
-  if (key_is_constant) {
-    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
-    if (constant_key & 0xF0000000) {
-      Abort("array index constant value too big.");
-    }
-  } else {
-    key = ToRegister(instr->key());
-  }
-  int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
-  int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
-      ? (element_size_shift - kSmiTagSize) : element_size_shift;
-  if (key_is_constant) {
-    __ Addu(scratch, elements, Operand((constant_key << element_size_shift) +
-            FixedDoubleArray::kHeaderSize - kHeapObjectTag));
-  } else {
-    __ sll(scratch, key, shift_size);
-    __ Addu(scratch, elements, Operand(scratch));
-    __ Addu(scratch, scratch,
-            Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
-  }
-
-  if (instr->NeedsCanonicalization()) {
-    Label is_nan;
-    // Check for NaN. All NaNs must be canonicalized.
-    __ BranchF(NULL, &is_nan, eq, value, value);
-    __ Branch(&not_nan);
-
-    // Only load canonical NaN if the comparison above set the overflow.
-    __ bind(&is_nan);
-    __ Move(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double());
-  }
-
-  __ bind(&not_nan);
-  __ sdc1(value, MemOperand(scratch, instr->additional_index() <<
-      element_size_shift));
-}
-
-
-void LCodeGen::DoStoreKeyedSpecializedArrayElement(
-    LStoreKeyedSpecializedArrayElement* instr) {
-
-  Register external_pointer = ToRegister(instr->external_pointer());
+void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
+  Register external_pointer = ToRegister(instr->elements());
   Register key = no_reg;
   ElementsKind elements_kind = instr->elements_kind();
   bool key_is_constant = instr->key()->IsConstantOperand();
@@ -3909,6 +3818,117 @@
   }
 }
 
+
+void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
+  DoubleRegister value = ToDoubleRegister(instr->value());
+  Register elements = ToRegister(instr->elements());
+  Register key = no_reg;
+  Register scratch = scratch0();
+  bool key_is_constant = instr->key()->IsConstantOperand();
+  int constant_key = 0;
+  Label not_nan;
+
+  // Calculate the effective address of the slot in the array to store the
+  // double value.
+  if (key_is_constant) {
+    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
+    if (constant_key & 0xF0000000) {
+      Abort("array index constant value too big.");
+    }
+  } else {
+    key = ToRegister(instr->key());
+  }
+  int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
+  int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+      ? (element_size_shift - kSmiTagSize) : element_size_shift;
+  if (key_is_constant) {
+    __ Addu(scratch, elements, Operand((constant_key << element_size_shift) +
+            FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+  } else {
+    __ sll(scratch, key, shift_size);
+    __ Addu(scratch, elements, Operand(scratch));
+    __ Addu(scratch, scratch,
+            Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+  }
+
+  if (instr->NeedsCanonicalization()) {
+    Label is_nan;
+    // Check for NaN. All NaNs must be canonicalized.
+    __ BranchF(NULL, &is_nan, eq, value, value);
+    __ Branch(&not_nan);
+
+    // Only load canonical NaN if the comparison above set the overflow.
+    __ bind(&is_nan);
+    __ Move(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double());
+  }
+
+  __ bind(&not_nan);
+  __ sdc1(value, MemOperand(scratch, instr->additional_index() <<
+      element_size_shift));
+}
+
+
+void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
+  Register value = ToRegister(instr->value());
+  Register elements = ToRegister(instr->elements());
+  Register key = instr->key()->IsRegister() ? ToRegister(instr->key())
+      : no_reg;
+  Register scratch = scratch0();
+  Register store_base = scratch;
+  int offset = 0;
+
+  // Do the store.
+  if (instr->key()->IsConstantOperand()) {
+    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
+    offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
+                                           instr->additional_index());
+    store_base = elements;
+  } else {
+    // Even though the HStoreKeyed instruction forces the input
+    // representation for the key to be an integer, the input gets replaced
+    // during bounds check elimination with the index argument to the bounds
+    // check, which can be tagged, so that case must be handled here, too.
+    if (instr->hydrogen()->key()->representation().IsTagged()) {
+      __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
+      __ addu(scratch, elements, scratch);
+    } else {
+      __ sll(scratch, key, kPointerSizeLog2);
+      __ addu(scratch, elements, scratch);
+    }
+    offset = FixedArray::OffsetOfElementAt(instr->additional_index());
+  }
+  __ sw(value, FieldMemOperand(store_base, offset));
+
+  if (instr->hydrogen()->NeedsWriteBarrier()) {
+    HType type = instr->hydrogen()->value()->type();
+    SmiCheck check_needed =
+       type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
+    // Compute address of modified element and store it into key register.
+    __ Addu(key, store_base, Operand(offset - kHeapObjectTag));
+    __ RecordWrite(elements,
+                   key,
+                   value,
+                   kRAHasBeenSaved,
+                   kSaveFPRegs,
+                   EMIT_REMEMBERED_SET,
+                   check_needed);
+  }
+}
+
+
+void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
+  // By cases: external, fast double, fast.
+  if (instr->is_external()) {
+    DoStoreKeyedExternalArray(instr);
+  } else if (instr->hydrogen()->value()->representation().IsDouble()) {
+    DoStoreKeyedFixedDoubleArray(instr);
+  } else {
+    DoStoreKeyedFixedArray(instr);
+  }
+}
+
+
 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(a2));
   ASSERT(ToRegister(instr->key()).is(a1));
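
Note on DoStoreKeyedFixedDoubleArray above: the emitted MIPS sequence reduces to a single address computation followed by the sdc1 store. The following sketch restates that arithmetic in plain C++; it is illustrative only, and the constant values (double shift, smi tag size, heap-object tag, header size) are assumed stand-ins for the real V8 definitions, not part of the patch.

    #include <cstdint>

    // Assumed stand-ins for the V8 constants used by the code above.
    static const int kDoubleShift           = 3;  // ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS)
    static const int kSmiTagSize            = 1;
    static const int kHeapObjectTag         = 1;
    static const int kDoubleArrayHeaderSize = 8;  // placeholder for FixedDoubleArray::kHeaderSize

    // elements is the tagged FixedDoubleArray pointer; key is the raw key
    // register value, which is a smi (index << 1) when key_is_tagged is true.
    uintptr_t DoubleSlotAddress(uintptr_t elements, uintptr_t key,
                                bool key_is_tagged, uint32_t additional_index) {
      // A smi key already carries a factor of two, so shift one bit less.
      int shift = key_is_tagged ? kDoubleShift - kSmiTagSize : kDoubleShift;
      uintptr_t base = elements + (key << shift)
                     + kDoubleArrayHeaderSize - kHeapObjectTag;
      // The codegen folds additional_index into the MemOperand displacement.
      return base + (static_cast<uintptr_t>(additional_index) << kDoubleShift);
    }
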
diff --git a/src/mips/lithium-codegen-mips.h b/src/mips/lithium-codegen-mips.h
index 38c5255..7363eb8 100644
--- a/src/mips/lithium-codegen-mips.h
+++ b/src/mips/lithium-codegen-mips.h
@@ -376,6 +376,12 @@
   };
 
   void EnsureSpaceForLazyDeopt();
+  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
+  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
+  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
+  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
+  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
+  void DoStoreKeyedFixedArray(LStoreKeyed* instr);
 
   Zone* zone_;
   LPlatformChunk* const chunk_;
diff --git a/src/mips/lithium-mips.cc b/src/mips/lithium-mips.cc
index 6dbb6a6..b5eb128 100644
--- a/src/mips/lithium-mips.cc
+++ b/src/mips/lithium-mips.cc
@@ -372,16 +372,7 @@
 }
 
 
-void LStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add("[");
-  key()->PrintTo(stream);
-  stream->Add("] <- ");
-  value()->PrintTo(stream);
-}
-
-
-void LStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
+void LStoreKeyed::PrintDataTo(StringStream* stream) {
   elements()->PrintTo(stream);
   stream->Add("[");
   key()->PrintTo(stream);
@@ -1798,53 +1789,40 @@
 }
 
 
-LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
-    HLoadKeyedFastElement* instr) {
-  ASSERT(instr->representation().IsTagged());
+LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
   ASSERT(instr->key()->representation().IsInteger32() ||
          instr->key()->representation().IsTagged());
-  LOperand* obj = UseRegisterAtStart(instr->object());
-  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
-  LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
-  if (instr->RequiresHoleCheck()) AssignEnvironment(result);
-  return DefineAsRegister(result);
-}
-
-
-LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
-    HLoadKeyedFastDoubleElement* instr) {
-  ASSERT(instr->representation().IsDouble());
-  ASSERT(instr->key()->representation().IsInteger32() ||
-         instr->key()->representation().IsTagged());
-  LOperand* elements = UseTempRegister(instr->elements());
-  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
-  LLoadKeyedFastDoubleElement* result =
-      new(zone()) LLoadKeyedFastDoubleElement(elements, key);
-  return AssignEnvironment(DefineAsRegister(result));
-}
-
-
-LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
-    HLoadKeyedSpecializedArrayElement* instr) {
   ElementsKind elements_kind = instr->elements_kind();
-  ASSERT(
-      (instr->representation().IsInteger32() &&
-       (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
-       (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
-      (instr->representation().IsDouble() &&
-       ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
-       (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
-  ASSERT(instr->key()->representation().IsInteger32() ||
-         instr->key()->representation().IsTagged());
-  LOperand* external_pointer = UseRegister(instr->external_pointer());
-  LOperand* key = UseRegisterOrConstant(instr->key());
-  LLoadKeyedSpecializedArrayElement* result =
-      new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
-  LInstruction* load_instr = DefineAsRegister(result);
+  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
+  LLoadKeyed* result = NULL;
+
+  if (!instr->is_external()) {
+    LOperand* obj = NULL;
+    if (instr->representation().IsDouble()) {
+      obj = UseTempRegister(instr->elements());
+    } else {
+      ASSERT(instr->representation().IsTagged());
+      obj = UseRegisterAtStart(instr->elements());
+    }
+    result = new(zone()) LLoadKeyed(obj, key);
+  } else {
+    ASSERT(
+        (instr->representation().IsInteger32() &&
+         (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
+         (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
+        (instr->representation().IsDouble() &&
+         ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
+          (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+    LOperand* external_pointer = UseRegister(instr->elements());
+    result = new(zone()) LLoadKeyed(external_pointer, key);
+  }
+
+  DefineAsRegister(result);
   // An unsigned int array load might overflow and cause a deopt, make sure it
   // has an environment.
-  return (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) ?
-      AssignEnvironment(load_instr) : load_instr;
+  bool can_deoptimize = instr->RequiresHoleCheck() ||
+      (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS);
+  return can_deoptimize ? AssignEnvironment(result) : result;
 }
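
The can_deoptimize condition above covers two cases: fast-elements loads that may read the hole, and EXTERNAL_UNSIGNED_INT_ELEMENTS loads whose uint32 value may not fit in an int32 result. A minimal sketch of the latter case, with the deoptimization modeled here as an exception (illustrative only, not V8 code):

    #include <cstdint>
    #include <limits>
    #include <stdexcept>

    int32_t LoadUint32ElementOrDeopt(const uint32_t* elements, int index) {
      uint32_t value = elements[index];
      // Values above INT32_MAX cannot be represented as an int32 result,
      // which is why the generated load keeps an environment and can deopt.
      if (value > static_cast<uint32_t>(std::numeric_limits<int32_t>::max())) {
        throw std::runtime_error("deopt: uint32 element does not fit in int32");
      }
      return static_cast<int32_t>(value);
    }
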
 
 
@@ -1858,66 +1836,47 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
-    HStoreKeyedFastElement* instr) {
+LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
+  ElementsKind elements_kind = instr->elements_kind();
   bool needs_write_barrier = instr->NeedsWriteBarrier();
-  ASSERT(instr->value()->representation().IsTagged());
-  ASSERT(instr->object()->representation().IsTagged());
-  ASSERT(instr->key()->representation().IsInteger32() ||
-         instr->key()->representation().IsTagged());
-
-  LOperand* obj = UseTempRegister(instr->object());
-  LOperand* val = needs_write_barrier
-      ? UseTempRegister(instr->value())
-      : UseRegisterAtStart(instr->value());
   LOperand* key = needs_write_barrier
       ? UseTempRegister(instr->key())
       : UseRegisterOrConstantAtStart(instr->key());
-  return new(zone()) LStoreKeyedFastElement(obj, key, val);
-}
-
-
-LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
-    HStoreKeyedFastDoubleElement* instr) {
-  ASSERT(instr->value()->representation().IsDouble());
-  ASSERT(instr->elements()->representation().IsTagged());
-  ASSERT(instr->key()->representation().IsInteger32() ||
-         instr->key()->representation().IsTagged());
-
-  LOperand* elements = UseRegisterAtStart(instr->elements());
-  LOperand* val = UseTempRegister(instr->value());
-  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
-
-  return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
-}
-
-
-LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
-    HStoreKeyedSpecializedArrayElement* instr) {
-  ElementsKind elements_kind = instr->elements_kind();
-  ASSERT(
-      (instr->value()->representation().IsInteger32() &&
-       (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
-       (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
-      (instr->value()->representation().IsDouble() &&
-       ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
-       (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
-  ASSERT(instr->external_pointer()->representation().IsExternal());
-  ASSERT(instr->key()->representation().IsInteger32() ||
-         instr->key()->representation().IsTagged());
-
-  LOperand* external_pointer = UseRegister(instr->external_pointer());
   bool val_is_temp_register =
       elements_kind == EXTERNAL_PIXEL_ELEMENTS ||
       elements_kind == EXTERNAL_FLOAT_ELEMENTS;
-  LOperand* val = val_is_temp_register
+  LOperand* val = val_is_temp_register || needs_write_barrier
       ? UseTempRegister(instr->value())
       : UseRegister(instr->value());
-  LOperand* key = UseRegisterOrConstant(instr->key());
+  LStoreKeyed* result = NULL;
+  if (!instr->is_external()) {
+    ASSERT(instr->elements()->representation().IsTagged());
 
-  return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
-                                                        key,
-                                                        val);
+    LOperand* object = NULL;
+    if (instr->value()->representation().IsDouble()) {
+      object = UseRegisterAtStart(instr->elements());
+    } else {
+      ASSERT(instr->value()->representation().IsTagged());
+      object = UseTempRegister(instr->elements());
+    }
+
+    result = new(zone()) LStoreKeyed(object, key, val);
+  } else {
+    ASSERT(
+        (instr->value()->representation().IsInteger32() &&
+         (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
+         (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
+        (instr->value()->representation().IsDouble() &&
+         ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
+          (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+    ASSERT(instr->elements()->representation().IsExternal());
+
+    LOperand* external_pointer = UseRegister(instr->elements());
+    result = new(zone()) LStoreKeyed(external_pointer, key, val);
+  }
+
+  ASSERT(result != NULL);
+  return result;
 }
 
 
diff --git a/src/mips/lithium-mips.h b/src/mips/lithium-mips.h
index ef425fc..3ea0aef 100644
--- a/src/mips/lithium-mips.h
+++ b/src/mips/lithium-mips.h
@@ -125,10 +125,8 @@
   V(LoadFunctionPrototype)                      \
   V(LoadGlobalCell)                             \
   V(LoadGlobalGeneric)                          \
-  V(LoadKeyedFastDoubleElement)                 \
-  V(LoadKeyedFastElement)                       \
+  V(LoadKeyed)                                  \
   V(LoadKeyedGeneric)                           \
-  V(LoadKeyedSpecializedArrayElement)           \
   V(LoadNamedField)                             \
   V(LoadNamedFieldPolymorphic)                  \
   V(LoadNamedGeneric)                           \
@@ -156,10 +154,8 @@
   V(StoreContextSlot)                           \
   V(StoreGlobalCell)                            \
   V(StoreGlobalGeneric)                         \
-  V(StoreKeyedFastDoubleElement)                \
-  V(StoreKeyedFastElement)                      \
+  V(StoreKeyed)                                 \
   V(StoreKeyedGeneric)                          \
-  V(StoreKeyedSpecializedArrayElement)          \
   V(StoreNamedField)                            \
   V(StoreNamedGeneric)                          \
   V(StringAdd)                                  \
@@ -1337,59 +1333,25 @@
 };
 
 
-class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
+class LLoadKeyed: public LTemplateInstruction<1, 2, 0> {
  public:
-  LLoadKeyedFastElement(LOperand* elements, LOperand* key) {
+  LLoadKeyed(LOperand* elements, LOperand* key) {
     inputs_[0] = elements;
     inputs_[1] = key;
   }
 
   LOperand* elements() { return inputs_[0]; }
   LOperand* key() { return inputs_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastElement, "load-keyed-fast-element")
-  DECLARE_HYDROGEN_ACCESSOR(LoadKeyedFastElement)
-
-  uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
-class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
- public:
-  LLoadKeyedFastDoubleElement(LOperand* elements, LOperand* key) {
-    inputs_[0] = elements;
-    inputs_[1] = key;
-  }
-
-  LOperand* elements() { return inputs_[0]; }
-  LOperand* key() { return inputs_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastDoubleElement,
-                               "load-keyed-fast-double-element")
-  DECLARE_HYDROGEN_ACCESSOR(LoadKeyedFastDoubleElement)
-
-  uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
-class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
- public:
-    LLoadKeyedSpecializedArrayElement(LOperand* external_pointer,
-                                      LOperand* key) {
-    inputs_[0] = external_pointer;
-    inputs_[1] = key;
-  }
-
-  LOperand* external_pointer() { return inputs_[0]; }
-  LOperand* key() { return inputs_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(LoadKeyedSpecializedArrayElement,
-                               "load-keyed-specialized-array-element")
-  DECLARE_HYDROGEN_ACCESSOR(LoadKeyedSpecializedArrayElement)
-
   ElementsKind elements_kind() const {
     return hydrogen()->elements_kind();
   }
+  bool is_external() const {
+    return hydrogen()->is_external();
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed")
+  DECLARE_HYDROGEN_ACCESSOR(LoadKeyed)
+
   uint32_t additional_index() const { return hydrogen()->index_offset(); }
 };
 
@@ -1903,51 +1865,28 @@
 };
 
 
-class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
+class LStoreKeyed: public LTemplateInstruction<0, 3, 0> {
  public:
-  LStoreKeyedFastElement(LOperand* object, LOperand* key, LOperand* value) {
+  LStoreKeyed(LOperand* object, LOperand* key, LOperand* value) {
     inputs_[0] = object;
     inputs_[1] = key;
     inputs_[2] = value;
   }
 
-  LOperand* object() { return inputs_[0]; }
-  LOperand* key() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastElement,
-                               "store-keyed-fast-element")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedFastElement)
-
-  virtual void PrintDataTo(StringStream* stream);
-
-  uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
-class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> {
- public:
-  LStoreKeyedFastDoubleElement(LOperand* elements,
-                               LOperand* key,
-                               LOperand* value) {
-    inputs_[0] = elements;
-    inputs_[1] = key;
-    inputs_[2] = value;
-  }
-
+  bool is_external() const { return hydrogen()->is_external(); }
   LOperand* elements() { return inputs_[0]; }
   LOperand* key() { return inputs_[1]; }
   LOperand* value() { return inputs_[2]; }
+  ElementsKind elements_kind() const {
+    return hydrogen()->elements_kind();
+  }
 
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastDoubleElement,
-                               "store-keyed-fast-double-element")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedFastDoubleElement)
+  DECLARE_CONCRETE_INSTRUCTION(StoreKeyed, "store-keyed")
+  DECLARE_HYDROGEN_ACCESSOR(StoreKeyed)
 
   virtual void PrintDataTo(StringStream* stream);
-
-  uint32_t additional_index() const { return hydrogen()->index_offset(); }
-
   bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); }
+  uint32_t additional_index() const { return hydrogen()->index_offset(); }
 };
 
 
@@ -1971,28 +1910,6 @@
   StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
 };
 
-class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
- public:
-  LStoreKeyedSpecializedArrayElement(LOperand* external_pointer,
-                                     LOperand* key,
-                                     LOperand* value) {
-    inputs_[0] = external_pointer;
-    inputs_[1] = key;
-    inputs_[2] = value;
-  }
-
-  LOperand* external_pointer() { return inputs_[0]; }
-  LOperand* key() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedSpecializedArrayElement,
-                               "store-keyed-specialized-array-element")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedSpecializedArrayElement)
-
-  ElementsKind elements_kind() const { return hydrogen()->elements_kind(); }
-  uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
 
 class LTransitionElementsKind: public LTemplateInstruction<1, 1, 2> {
  public:
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 052387a..aebfe73 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -5272,7 +5272,7 @@
   // For ASCII (char-size of 1) we shift the smi tag away to get the length.
   // For UC16 (char-size of 2) we just leave the smi tag in place, thereby
   // getting the length multiplied by 2.
-  ASSERT(kAsciiStringTag == 4 && kStringEncodingMask == 4);
+  ASSERT(kOneByteStringTag == 4 && kStringEncodingMask == 4);
   ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
   lw(t9, FieldMemOperand(value, String::kLengthOffset));
   And(t8, instance_type, Operand(kStringEncodingMask));
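
The renamed assert above guards the length trick described in the preceding comment: with a one-bit smi tag, the smi-encoded length is length << 1, so shifting the tag away yields the byte count for one-byte strings, while leaving it in place yields the byte count for two-byte strings. A minimal sketch of that relationship (illustrative only, not V8 code):

    #include <cstdint>

    int32_t StringSizeInBytes(int32_t smi_encoded_length, bool is_one_byte) {
      const int kSmiTagSize = 1;  // matches the ASSERT(kSmiTagSize == 1) above
      return is_one_byte
          ? (smi_encoded_length >> kSmiTagSize)  // length * 1 byte per char
          : smi_encoded_length;                  // length * 2 bytes per char
    }
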
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index ba1d177..bd15775 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -3453,7 +3453,7 @@
   // t7: undefined
   __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
   __ Check(ne, "Function constructed by construct stub.",
-      a3, Operand(JS_FUNCTION_TYPE));
+           a3, Operand(JS_FUNCTION_TYPE));
 #endif
 
   // Now allocate the JSObject in new space.
@@ -3461,7 +3461,13 @@
   // a1: constructor function
   // a2: initial map
   // t7: undefined
+  ASSERT(function->has_initial_map());
   __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
+#ifdef DEBUG
+  int instance_size = function->initial_map()->instance_size();
+  __ Check(eq, "Instance size of initial map changed.",
+           a3, Operand(instance_size >> kPointerSizeLog2));
+#endif
   __ AllocateInNewSpace(a3, t4, t5, t6, &generic_stub_call, SIZE_IN_WORDS);
 
   // Allocated the JSObject, now initialize the fields. Map is set to initial
@@ -3524,7 +3530,6 @@
   }
 
   // Fill the unused in-object property fields with undefined.
-  ASSERT(function->has_initial_map());
   for (int i = shared->this_property_assignments_count();
        i < function->initial_map()->inobject_properties();
        i++) {
diff --git a/src/object-observe.js b/src/object-observe.js
index dcf98d8..28aa1f4 100644
--- a/src/object-observe.js
+++ b/src/object-observe.js
@@ -30,21 +30,44 @@
 var InternalObjectIsFrozen = $Object.isFrozen;
 var InternalObjectFreeze = $Object.freeze;
 
-var InternalWeakMapProto = {
-  __proto__: null,
-  set: $WeakMap.prototype.set,
-  get: $WeakMap.prototype.get,
-  has: $WeakMap.prototype.has
+var observationState = %GetObservationState();
+if (IS_UNDEFINED(observationState.observerInfoMap)) {
+  observationState.observerInfoMap = %CreateObjectHashTable();
+  observationState.objectInfoMap = %CreateObjectHashTable();
+  observationState.notifierTargetMap = %CreateObjectHashTable();
+  observationState.activeObservers = new InternalArray;
+  observationState.observerPriority = 0;
 }
 
-function createInternalWeakMap() {
-  var map = new $WeakMap;
-  map.__proto__ = InternalWeakMapProto;
-  return map;
+function InternalObjectHashTable(tableName) {
+  this.tableName = tableName;
 }
 
-var observerInfoMap = createInternalWeakMap();
-var objectInfoMap = createInternalWeakMap();
+InternalObjectHashTable.prototype = {
+  get: function(key) {
+    return %ObjectHashTableGet(observationState[this.tableName], key);
+  },
+  set: function(key, value) {
+    observationState[this.tableName] =
+        %ObjectHashTableSet(observationState[this.tableName], key, value);
+  },
+  has: function(key) {
+    return %ObjectHashTableHas(observationState[this.tableName], key);
+  }
+};
+
+var observerInfoMap = new InternalObjectHashTable('observerInfoMap');
+var objectInfoMap = new InternalObjectHashTable('objectInfoMap');
+var notifierTargetMap = new InternalObjectHashTable('notifierTargetMap');
+
+function CreateObjectInfo(object) {
+  var info = {
+    changeObservers: new InternalArray,
+    notifier: null,
+  };
+  objectInfoMap.set(object, info);
+  return info;
+}
 
 function ObjectObserve(object, callback) {
   if (!IS_SPEC_OBJECT(object))
@@ -55,20 +78,16 @@
     throw MakeTypeError("observe_callback_frozen");
 
   if (!observerInfoMap.has(callback)) {
-    // TODO: setup observerInfo.priority.
     observerInfoMap.set(callback, {
-      pendingChangeRecords: null
+      pendingChangeRecords: null,
+      priority: observationState.observerPriority++,
     });
   }
 
   var objectInfo = objectInfoMap.get(object);
   if (IS_UNDEFINED(objectInfo)) {
-    // TODO: setup objectInfo.notifier
-    objectInfo = {
-      changeObservers: new InternalArray(callback)
-    };
-    objectInfoMap.set(object, objectInfo);
-    return;
+    objectInfo = CreateObjectInfo(object);
+    %SetIsObserved(object, true);
   }
 
   var changeObservers = objectInfo.changeObservers;
@@ -98,9 +117,8 @@
   for (var i = 0; i < observers.length; i++) {
     var observer = observers[i];
     var observerInfo = observerInfoMap.get(observer);
-
-    // TODO: "activate" the observer
-
+    observationState.activeObservers[observerInfo.priority] = observer;
+    %SetObserverDeliveryPending();
     if (IS_NULL(observerInfo.pendingChangeRecords)) {
       observerInfo.pendingChangeRecords = new InternalArray(changeRecord);
     } else {
@@ -109,17 +127,37 @@
   }
 }
 
-function ObjectNotify(object, changeRecord) {
-  // TODO: notifier needs to be [[THIS]]
+function NotifyChange(type, object, name, oldValue) {
+  var objectInfo = objectInfoMap.get(object);
+  var changeRecord = (arguments.length < 4) ?
+      { type: type, object: object, name: name } :
+      { type: type, object: object, name: name, oldValue: oldValue };
+  InternalObjectFreeze(changeRecord);
+  EnqueueChangeRecord(changeRecord, objectInfo.changeObservers);
+}
+
+var notifierPrototype = {};
+
+function ObjectNotifierNotify(changeRecord) {
+  if (!IS_SPEC_OBJECT(this))
+    throw MakeTypeError("called_on_non_object", ["notify"]);
+
+  var target = notifierTargetMap.get(this);
+  if (IS_UNDEFINED(target))
+    throw MakeTypeError("observe_notify_non_notifier");
+
   if (!IS_STRING(changeRecord.type))
     throw MakeTypeError("observe_type_non_string");
 
-  var objectInfo = objectInfoMap.get(object);
+  var objectInfo = objectInfoMap.get(target);
   if (IS_UNDEFINED(objectInfo))
     return;
 
+  if (!objectInfo.changeObservers.length)
+    return;
+
   var newRecord = {
-    object: object  // TODO: Needs to be 'object' retreived from notifier
+    object: target
   };
   for (var prop in changeRecord) {
     if (prop === 'object')
@@ -131,11 +169,29 @@
   EnqueueChangeRecord(newRecord, objectInfo.changeObservers);
 }
 
-function ObjectDeliverChangeRecords(callback) {
-  if (!IS_SPEC_FUNCTION(callback))
-    throw MakeTypeError("observe_non_function", ["deliverChangeRecords"]);
+function ObjectGetNotifier(object) {
+  if (!IS_SPEC_OBJECT(object))
+    throw MakeTypeError("observe_non_object", ["getNotifier"]);
 
-  var observerInfo = observerInfoMap.get(callback);
+  if (InternalObjectIsFrozen(object))
+    return null;
+
+  var objectInfo = objectInfoMap.get(object);
+  if (IS_UNDEFINED(objectInfo))
+    objectInfo = CreateObjectInfo(object);
+
+  if (IS_NULL(objectInfo.notifier)) {
+    objectInfo.notifier = {
+      __proto__: notifierPrototype
+    };
+    notifierTargetMap.set(objectInfo.notifier, object);
+  }
+
+  return objectInfo.notifier;
+}
+
+function DeliverChangeRecordsForObserver(observer) {
+  var observerInfo = observerInfoMap.get(observer);
   if (IS_UNDEFINED(observerInfo))
     return;
 
@@ -147,18 +203,38 @@
   var delivered = [];
   %MoveArrayContents(pendingChangeRecords, delivered);
   try {
-    %Call(void 0, delivered, callback);
+    %Call(void 0, delivered, observer);
   } catch (ex) {}
 }
 
+function ObjectDeliverChangeRecords(callback) {
+  if (!IS_SPEC_FUNCTION(callback))
+    throw MakeTypeError("observe_non_function", ["deliverChangeRecords"]);
+
+  DeliverChangeRecordsForObserver(callback);
+}
+
+function DeliverChangeRecords() {
+  while (observationState.activeObservers.length) {
+    var activeObservers = observationState.activeObservers;
+    observationState.activeObservers = new InternalArray;
+    for (var i in activeObservers) {
+      DeliverChangeRecordsForObserver(activeObservers[i]);
+    }
+  }
+}
+
 function SetupObjectObserve() {
   %CheckIsBootstrapping();
   InstallFunctions($Object, DONT_ENUM, $Array(
     "deliverChangeRecords", ObjectDeliverChangeRecords,
-    "notify", ObjectNotify,  // TODO: Remove when getNotifier is implemented.
+    "getNotifier", ObjectGetNotifier,
     "observe", ObjectObserve,
     "unobserve", ObjectUnobserve
   ));
+  InstallFunctions(notifierPrototype, DONT_ENUM, $Array(
+    "notify", ObjectNotifierNotify
+  ));
 }
 
-SetupObjectObserve();
\ No newline at end of file
+SetupObjectObserve();
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 4e7d8f8..d5815f2 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -289,7 +289,7 @@
 
 bool String::IsAsciiRepresentation() {
   uint32_t type = map()->instance_type();
-  return (type & kStringEncodingMask) == kAsciiStringTag;
+  return (type & kStringEncodingMask) == kOneByteStringTag;
 }
 
 
@@ -305,7 +305,7 @@
   STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
   ASSERT(IsFlat());
   switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
-    case kAsciiStringTag:
+    case kOneByteStringTag:
       return true;
     case kTwoByteStringTag:
       return false;
@@ -321,7 +321,7 @@
   STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
   ASSERT(IsFlat());
   switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
-    case kAsciiStringTag:
+    case kOneByteStringTag:
       return false;
     case kTwoByteStringTag:
       return true;
@@ -333,7 +333,7 @@
 
 bool String::HasOnlyAsciiChars() {
   uint32_t type = map()->instance_type();
-  return (type & kStringEncodingMask) == kAsciiStringTag ||
+  return (type & kStringEncodingMask) == kOneByteStringTag ||
          (type & kAsciiDataHintMask) == kAsciiDataHintTag;
 }
 
@@ -387,7 +387,7 @@
 
 
 bool StringShape::IsSequentialAscii() {
-  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
+  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
 }
 
 
@@ -397,14 +397,14 @@
 
 
 bool StringShape::IsExternalAscii() {
-  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
+  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
 }
 
 
-STATIC_CHECK((kExternalStringTag | kAsciiStringTag) ==
+STATIC_CHECK((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalAsciiRepresentationTag);
 
-STATIC_CHECK(v8::String::ASCII_ENCODING == kAsciiStringTag);
+STATIC_CHECK(v8::String::ASCII_ENCODING == kOneByteStringTag);
 
 
 bool StringShape::IsExternalTwoByte() {
@@ -718,6 +718,11 @@
 }
 
 
+bool Object::IsObjectHashTable() {
+  return IsHashTable();
+}
+
+
 bool Object::IsPrimitive() {
   return IsOddball() || IsNumber() || IsString();
 }
@@ -2445,18 +2450,18 @@
 uint16_t String::Get(int index) {
   ASSERT(index >= 0 && index < length());
   switch (StringShape(this).full_representation_tag()) {
-    case kSeqStringTag | kAsciiStringTag:
+    case kSeqStringTag | kOneByteStringTag:
       return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
     case kSeqStringTag | kTwoByteStringTag:
       return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
-    case kConsStringTag | kAsciiStringTag:
+    case kConsStringTag | kOneByteStringTag:
     case kConsStringTag | kTwoByteStringTag:
       return ConsString::cast(this)->ConsStringGet(index);
-    case kExternalStringTag | kAsciiStringTag:
+    case kExternalStringTag | kOneByteStringTag:
       return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
     case kExternalStringTag | kTwoByteStringTag:
       return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
-    case kSlicedStringTag | kAsciiStringTag:
+    case kSlicedStringTag | kOneByteStringTag:
     case kSlicedStringTag | kTwoByteStringTag:
       return SlicedString::cast(this)->SlicedStringGet(index);
     default:
@@ -3151,6 +3156,16 @@
 }
 
 
+void Map::set_is_observed(bool is_observed) {
+  set_bit_field3(IsObserved::update(bit_field3(), is_observed));
+}
+
+
+bool Map::is_observed() {
+  return IsObserved::decode(bit_field3());
+}
+
+
 void Code::set_flags(Code::Flags flags) {
   STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
   // Make sure that all call stubs have an arguments count.
@@ -4413,42 +4428,6 @@
 }
 
 
-MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
-    Map* initial_map) {
-  Context* native_context = context()->native_context();
-  Object* array_function =
-      native_context->get(Context::ARRAY_FUNCTION_INDEX);
-  if (array_function->IsJSFunction() &&
-      this == JSFunction::cast(array_function)) {
-    // Replace all of the cached initial array maps in the native context with
-    // the appropriate transitioned elements kind maps.
-    Heap* heap = GetHeap();
-    MaybeObject* maybe_maps =
-        heap->AllocateFixedArrayWithHoles(kElementsKindCount);
-    FixedArray* maps;
-    if (!maybe_maps->To(&maps)) return maybe_maps;
-
-    Map* current_map = initial_map;
-    ElementsKind kind = current_map->elements_kind();
-    ASSERT(kind == GetInitialFastElementsKind());
-    maps->set(kind, current_map);
-    for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
-         i < kFastElementsKindCount; ++i) {
-      Map* new_map;
-      ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
-      MaybeObject* maybe_new_map =
-          current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
-      if (!maybe_new_map->To(&new_map)) return maybe_new_map;
-      maps->set(next_kind, new_map);
-      current_map = new_map;
-    }
-    native_context->set_js_array_maps(maps);
-  }
-  set_initial_map(initial_map);
-  return this;
-}
-
-
 bool JSFunction::has_initial_map() {
   return prototype_or_initial_map()->IsMap();
 }
@@ -5065,6 +5044,16 @@
   return GetPropertyAttributeWithReceiver(this, key);
 }
 
+
+PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) {
+  if (IsJSProxy()) {
+    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
+  }
+  return JSObject::cast(this)->GetElementAttributeWithReceiver(
+      this, index, true);
+}
+
+
 // TODO(504): this may be useful in other places too where JSGlobalProxy
 // is used.
 Object* JSObject::BypassGlobalProxy() {
@@ -5089,7 +5078,26 @@
   if (IsJSProxy()) {
     return JSProxy::cast(this)->HasElementWithHandler(index);
   }
-  return JSObject::cast(this)->HasElementWithReceiver(this, index);
+  return JSObject::cast(this)->GetElementAttributeWithReceiver(
+      this, index, true) != ABSENT;
+}
+
+
+bool JSReceiver::HasLocalElement(uint32_t index) {
+  if (IsJSProxy()) {
+    return JSProxy::cast(this)->HasElementWithHandler(index);
+  }
+  return JSObject::cast(this)->GetElementAttributeWithReceiver(
+      this, index, false) != ABSENT;
+}
+
+
+PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) {
+  if (IsJSProxy()) {
+    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
+  }
+  return JSObject::cast(this)->GetElementAttributeWithReceiver(
+      this, index, false);
 }
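
The new Map::set_is_observed()/is_observed() accessors above follow the usual BitField encode/decode pattern on bit_field3. The Map::IsObserved definition itself is not part of this diff, so the bit position in the sketch below is an assumption; only the shape of update() and decode() is meant to be illustrative.

    #include <cstdint>

    // Hypothetical stand-in for Map::IsObserved; the real shift is not shown here.
    struct IsObservedBit {
      static const int kShift = 0;
      static const uint32_t kMask = 1u << kShift;

      static uint32_t update(uint32_t bit_field3, bool value) {
        return (bit_field3 & ~kMask) | (static_cast<uint32_t>(value) << kShift);
      }
      static bool decode(uint32_t bit_field3) {
        return (bit_field3 & kMask) != 0;
      }
    };
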
 
 
diff --git a/src/objects-visiting-inl.h b/src/objects-visiting-inl.h
index d698a8d..71635ca 100644
--- a/src/objects-visiting-inl.h
+++ b/src/objects-visiting-inl.h
@@ -225,6 +225,17 @@
 
 
 template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
+    Heap* heap, RelocInfo* rinfo) {
+  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+  Code* target = rinfo->code_age_stub();
+  ASSERT(target != NULL);
+  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
+  StaticVisitor::MarkObject(heap, target);
+}
+
+
+template<typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
     Map* map, HeapObject* object) {
   FixedBodyVisitor<StaticVisitor,
@@ -276,6 +287,9 @@
   if (FLAG_cleanup_code_caches_at_gc) {
     code->ClearTypeFeedbackCells(heap);
   }
+  if (FLAG_age_code && !Serializer::enabled()) {
+    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
+  }
   code->CodeIterateBody<StaticVisitor>(heap);
 }
 
@@ -449,8 +463,10 @@
   // by optimized version of function.
   MarkBit code_mark = Marking::MarkBitFrom(function->code());
   if (code_mark.Get()) {
-    if (!Marking::MarkBitFrom(shared_info).Get()) {
-      shared_info->set_code_age(0);
+    if (!FLAG_age_code) {
+      if (!Marking::MarkBitFrom(shared_info).Get()) {
+        shared_info->set_code_age(0);
+      }
     }
     return false;
   }
@@ -460,11 +476,16 @@
     return false;
   }
 
-  // We do not flush code for optimized functions.
+  // We do not (yet) flush code for optimized functions.
   if (function->code() != shared_info->code()) {
     return false;
   }
 
+  // Check age of optimized code.
+  if (FLAG_age_code && !function->code()->IsOld()) {
+    return false;
+  }
+
   return IsFlushable(heap, shared_info);
 }
 
@@ -506,20 +527,20 @@
     return false;
   }
 
-  // TODO(mstarzinger): The following will soon be replaced by a new way of
-  // aging code, that is based on an aging stub in the function prologue.
+  if (FLAG_age_code) {
+    return shared_info->code()->IsOld();
+  } else {
+    // How many collections a newly compiled code object will survive before
+    // being flushed.
+    static const int kCodeAgeThreshold = 5;
 
-  // How many collections newly compiled code object will survive before being
-  // flushed.
-  static const int kCodeAgeThreshold = 5;
-
-  // Age this shared function info.
-  if (shared_info->code_age() < kCodeAgeThreshold) {
-    shared_info->set_code_age(shared_info->code_age() + 1);
-    return false;
+    // Age this shared function info.
+    if (shared_info->code_age() < kCodeAgeThreshold) {
+      shared_info->set_code_age(shared_info->code_age() + 1);
+      return false;
+    }
+    return true;
   }
-
-  return true;
 }
 
 
diff --git a/src/objects-visiting.cc b/src/objects-visiting.cc
index a2dc43e..6ae4d7c 100644
--- a/src/objects-visiting.cc
+++ b/src/objects-visiting.cc
@@ -45,7 +45,7 @@
   if (instance_type < FIRST_NONSTRING_TYPE) {
     switch (instance_type & kStringRepresentationMask) {
       case kSeqStringTag:
-        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
+        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
           return kVisitSeqAsciiString;
         } else {
           return kVisitSeqTwoByteString;
diff --git a/src/objects-visiting.h b/src/objects-visiting.h
index 26d1b12..3937e25 100644
--- a/src/objects-visiting.h
+++ b/src/objects-visiting.h
@@ -391,13 +391,11 @@
   static inline void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo);
   static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo);
   static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo);
+  static inline void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo);
   static inline void VisitExternalReference(RelocInfo* rinfo) { }
   static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
 
   // TODO(mstarzinger): This should be made protected once refactoring is done.
-  static inline void VisitNativeContext(Map* map, HeapObject* object);
-
-  // TODO(mstarzinger): This should be made protected once refactoring is done.
   // Mark non-optimized code for functions inlined into the given optimized
   // code. This will prevent it from being flushed.
   static void MarkInlinedFunctionsCode(Heap* heap, Code* code);
@@ -408,6 +406,7 @@
   static inline void VisitSharedFunctionInfo(Map* map, HeapObject* object);
   static inline void VisitJSFunction(Map* map, HeapObject* object);
   static inline void VisitJSRegExp(Map* map, HeapObject* object);
+  static inline void VisitNativeContext(Map* map, HeapObject* object);
 
   // Mark pointers in a Map and its TransitionArray together, possibly
   // treating transitions or back pointers weak.
diff --git a/src/objects.cc b/src/objects.cc
index d0f8ac2..fa5bfed 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -1340,7 +1340,7 @@
         SlicedString::BodyDescriptor::IterateBody(this, v);
         break;
       case kExternalStringTag:
-        if ((type & kStringEncodingMask) == kAsciiStringTag) {
+        if ((type & kStringEncodingMask) == kOneByteStringTag) {
           reinterpret_cast<ExternalAsciiString*>(this)->
               ExternalAsciiStringIterateBody(v);
         } else {
@@ -1677,6 +1677,7 @@
   ASSERT(!IsJSGlobalProxy());
   Map* map_of_this = map();
   Heap* heap = GetHeap();
+  MaybeObject* result;
   if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK &&
       !map_of_this->is_extensible()) {
     if (strict_mode == kNonStrictMode) {
@@ -1688,28 +1689,70 @@
                                  HandleVector(args, 1)));
     }
   }
+
   if (HasFastProperties()) {
     // Ensure the descriptor array does not get too big.
     if (map_of_this->NumberOfOwnDescriptors() <
         DescriptorArray::kMaxNumberOfDescriptors) {
       if (value->IsJSFunction()) {
-        return AddConstantFunctionProperty(name,
-                                           JSFunction::cast(value),
-                                           attributes);
+        result = AddConstantFunctionProperty(name,
+                                             JSFunction::cast(value),
+                                             attributes);
       } else {
-        return AddFastProperty(name, value, attributes, store_mode);
+        result = AddFastProperty(name, value, attributes, store_mode);
       }
     } else {
       // Normalize the object to prevent very large instance descriptors.
       // This eliminates unwanted N^2 allocation and lookup behavior.
       Object* obj;
-      { MaybeObject* maybe_obj =
-            NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
-        if (!maybe_obj->ToObject(&obj)) return maybe_obj;
-      }
+      MaybeObject* maybe = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
+      if (!maybe->To(&obj)) return maybe;
+      result = AddSlowProperty(name, value, attributes);
     }
+  } else {
+    result = AddSlowProperty(name, value, attributes);
   }
-  return AddSlowProperty(name, value, attributes);
+
+  Handle<Object> hresult;
+  if (!result->ToHandle(&hresult)) return result;
+
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    EnqueueChangeRecord(handle(this), "new", handle(name),
+                        handle(heap->the_hole_value()));
+  }
+
+  return *hresult;
+}
+
+
+void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
+                                   const char* type_str,
+                                   Handle<String> name,
+                                   Handle<Object> old_value) {
+  Isolate* isolate = object->GetIsolate();
+  HandleScope scope;
+  Handle<String> type = isolate->factory()->LookupAsciiSymbol(type_str);
+  Handle<Object> args[] = { type, object, name, old_value };
+  bool threw;
+  Execution::Call(Handle<JSFunction>(isolate->observers_notify_change()),
+                  Handle<Object>(isolate->heap()->undefined_value()),
+                  old_value->IsTheHole() ? 3 : 4, args,
+                  &threw);
+  ASSERT(!threw);
+}
+
+
+void JSObject::DeliverChangeRecords(Isolate* isolate) {
+  ASSERT(isolate->observer_delivery_pending());
+  bool threw = false;
+  Execution::Call(
+      isolate->observers_deliver_changes(),
+      isolate->factory()->undefined_value(),
+      0,
+      NULL,
+      &threw);
+  ASSERT(!threw);
+  isolate->set_observer_delivery_pending(false);
 }
 
 
@@ -2733,12 +2776,14 @@
 
 
 MUST_USE_RESULT PropertyAttributes JSProxy::GetElementAttributeWithHandler(
-    JSReceiver* receiver,
+    JSReceiver* receiver_raw,
     uint32_t index) {
   Isolate* isolate = GetIsolate();
   HandleScope scope(isolate);
+  Handle<JSProxy> proxy(this);
+  Handle<JSReceiver> receiver(receiver_raw);
   Handle<String> name = isolate->factory()->Uint32ToString(index);
-  return GetPropertyAttributeWithHandler(receiver, *name);
+  return proxy->GetPropertyAttributeWithHandler(*receiver, *name);
 }
 
 
@@ -2802,7 +2847,7 @@
 }
 
 
-MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
+MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup,
                                             String* name_raw,
                                             Object* value_raw,
                                             PropertyAttributes attributes,
@@ -2829,7 +2874,7 @@
   if (IsAccessCheckNeeded()) {
     if (!heap->isolate()->MayNamedAccess(this, name_raw, v8::ACCESS_SET)) {
       return SetPropertyWithFailedAccessCheck(
-          result, name_raw, value_raw, true, strict_mode);
+          lookup, name_raw, value_raw, true, strict_mode);
     }
   }
 
@@ -2838,7 +2883,7 @@
     if (proto->IsNull()) return value_raw;
     ASSERT(proto->IsJSGlobalObject());
     return JSObject::cast(proto)->SetPropertyForResult(
-        result, name_raw, value_raw, attributes, strict_mode, store_mode);
+        lookup, name_raw, value_raw, attributes, strict_mode, store_mode);
   }
 
   // From this point on everything needs to be handlified, because
@@ -2848,19 +2893,20 @@
   Handle<String> name(name_raw);
   Handle<Object> value(value_raw);
 
-  if (!result->IsProperty() && !self->IsJSContextExtensionObject()) {
+  if (!lookup->IsProperty() && !self->IsJSContextExtensionObject()) {
     bool done = false;
     MaybeObject* result_object = self->SetPropertyViaPrototypes(
         *name, *value, attributes, strict_mode, &done);
     if (done) return result_object;
   }
 
-  if (!result->IsFound()) {
+  if (!lookup->IsFound()) {
     // Neither properties nor transitions found.
     return self->AddProperty(
         *name, *value, attributes, strict_mode, store_mode);
   }
-  if (result->IsProperty() && result->IsReadOnly()) {
+
+  if (lookup->IsProperty() && lookup->IsReadOnly()) {
     if (strict_mode == kStrictMode) {
       Handle<Object> args[] = { name, self };
       return heap->isolate()->Throw(*heap->isolate()->factory()->NewTypeError(
@@ -2870,34 +2916,44 @@
     }
   }
 
+  Handle<Object> old_value(heap->the_hole_value());
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    old_value = handle(lookup->GetLazyValue());
+  }
+
   // This is a real property that is not read-only, or it is a
   // transition or null descriptor and there are no setters in the prototypes.
-  switch (result->type()) {
+  MaybeObject* result = *value;
+  switch (lookup->type()) {
     case NORMAL:
-      return self->SetNormalizedProperty(result, *value);
+      result = self->SetNormalizedProperty(lookup, *value);
+      break;
     case FIELD:
-      return self->FastPropertyAtPut(result->GetFieldIndex(), *value);
+      result = self->FastPropertyAtPut(lookup->GetFieldIndex(), *value);
+      break;
     case CONSTANT_FUNCTION:
       // Only replace the function if necessary.
-      if (*value == result->GetConstantFunction()) return *value;
+      if (*value == lookup->GetConstantFunction()) return *value;
       // Preserve the attributes of this existing property.
-      attributes = result->GetAttributes();
-      return self->ConvertDescriptorToField(*name, *value, attributes);
+      attributes = lookup->GetAttributes();
+      result = self->ConvertDescriptorToField(*name, *value, attributes);
+      break;
     case CALLBACKS: {
-      Object* callback_object = result->GetCallbackObject();
+      Object* callback_object = lookup->GetCallbackObject();
       return self->SetPropertyWithCallback(callback_object,
                                            *name,
                                            *value,
-                                           result->holder(),
+                                           lookup->holder(),
                                            strict_mode);
     }
     case INTERCEPTOR:
-      return self->SetPropertyWithInterceptor(*name,
-                                              *value,
-                                              attributes,
-                                              strict_mode);
+      result = self->SetPropertyWithInterceptor(*name,
+                                                *value,
+                                                attributes,
+                                                strict_mode);
+      break;
     case TRANSITION: {
-      Map* transition_map = result->GetTransitionTarget();
+      Map* transition_map = lookup->GetTransitionTarget();
       int descriptor = transition_map->LastAdded();
 
       DescriptorArray* descriptors = transition_map->instance_descriptors();
@@ -2906,37 +2962,55 @@
       if (details.type() == FIELD) {
         if (attributes == details.attributes()) {
           int field_index = descriptors->GetFieldIndex(descriptor);
-          return self->AddFastPropertyUsingMap(transition_map,
-                                               *name,
-                                               *value,
-                                               field_index);
+          result = self->AddFastPropertyUsingMap(transition_map,
+                                                 *name,
+                                                 *value,
+                                                 field_index);
+        } else {
+          result = self->ConvertDescriptorToField(*name, *value, attributes);
         }
-        return self->ConvertDescriptorToField(*name, *value, attributes);
       } else if (details.type() == CALLBACKS) {
-        return ConvertDescriptorToField(*name, *value, attributes);
-      }
+        result = self->ConvertDescriptorToField(*name, *value, attributes);
+      } else {
+        ASSERT(details.type() == CONSTANT_FUNCTION);
 
-      ASSERT(details.type() == CONSTANT_FUNCTION);
-
-      Object* constant_function = descriptors->GetValue(descriptor);
-      // If the same constant function is being added we can simply
-      // transition to the target map.
-      if (constant_function == *value) {
-        self->set_map(transition_map);
-        return constant_function;
+        Object* constant_function = descriptors->GetValue(descriptor);
+        if (constant_function == *value) {
+          // If the same constant function is being added we can simply
+          // transition to the target map.
+          self->set_map(transition_map);
+          result = constant_function;
+        } else {
+          // Otherwise, replace with a map transition to a new map with a FIELD,
+          // even if the value is a constant function.
+          result = self->ConvertTransitionToMapTransition(
+              lookup->GetTransitionIndex(), *name, *value, attributes);
+        }
       }
-      // Otherwise, replace with a map transition to a new map with a FIELD,
-      // even if the value is a constant function.
-      return ConvertTransitionToMapTransition(
-          result->GetTransitionIndex(), *name, *value, attributes);
+      break;
     }
     case HANDLER:
     case NONEXISTENT:
       UNREACHABLE();
-      return *value;
   }
-  UNREACHABLE();  // keep the compiler happy
-  return *value;
+
+  Handle<Object> hresult;
+  if (!result->ToHandle(&hresult)) return result;
+
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    if (lookup->IsTransition()) {
+      EnqueueChangeRecord(self, "new", name, old_value);
+    } else {
+      LookupResult new_lookup(self->GetIsolate());
+      self->LocalLookup(*name, &new_lookup);
+      ASSERT(!new_lookup.GetLazyValue()->IsTheHole());
+      if (!new_lookup.GetLazyValue()->SameValue(*old_value)) {
+        EnqueueChangeRecord(self, "updated", name, old_value);
+      }
+    }
+  }
+
+  return *hresult;
 }
 
 
@@ -2962,22 +3036,22 @@
 
 
 MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
-    String* name,
-    Object* value,
+    String* name_raw,
+    Object* value_raw,
     PropertyAttributes attributes) {
   // Make sure that the top context does not change when doing callbacks or
   // interceptor calls.
   AssertNoContextChange ncc;
   Isolate* isolate = GetIsolate();
-  LookupResult result(isolate);
-  LocalLookup(name, &result);
-  if (!result.IsFound()) map()->LookupTransition(this, name, &result);
+  LookupResult lookup(isolate);
+  LocalLookup(name_raw, &lookup);
+  if (!lookup.IsFound()) map()->LookupTransition(this, name_raw, &lookup);
   // Check access rights if needed.
   if (IsAccessCheckNeeded()) {
-    if (!isolate->MayNamedAccess(this, name, v8::ACCESS_SET)) {
-      return SetPropertyWithFailedAccessCheck(&result,
-                                              name,
-                                              value,
+    if (!isolate->MayNamedAccess(this, name_raw, v8::ACCESS_SET)) {
+      return SetPropertyWithFailedAccessCheck(&lookup,
+                                              name_raw,
+                                              value_raw,
                                               false,
                                               kNonStrictMode);
     }
@@ -2985,40 +3059,59 @@
 
   if (IsJSGlobalProxy()) {
     Object* proto = GetPrototype();
-    if (proto->IsNull()) return value;
+    if (proto->IsNull()) return value_raw;
     ASSERT(proto->IsJSGlobalObject());
     return JSObject::cast(proto)->SetLocalPropertyIgnoreAttributes(
-        name,
-        value,
+        name_raw,
+        value_raw,
         attributes);
   }
 
   // Check for accessor in prototype chain removed here in clone.
-  if (!result.IsFound()) {
+  if (!lookup.IsFound()) {
     // Neither properties nor transitions found.
-    return AddProperty(name, value, attributes, kNonStrictMode);
+    return AddProperty(name_raw, value_raw, attributes, kNonStrictMode);
+  }
+
+  // From this point on everything needs to be handlified.
+  HandleScope scope(GetIsolate());
+  Handle<JSObject> self(this);
+  Handle<String> name(name_raw);
+  Handle<Object> value(value_raw);
+
+  Handle<Object> old_value(isolate->heap()->the_hole_value());
+  PropertyAttributes old_attributes = ABSENT;
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    old_value = handle(lookup.GetLazyValue());
+    old_attributes = lookup.GetAttributes();
   }
 
   // Check of IsReadOnly removed from here in clone.
-  switch (result.type()) {
+  MaybeObject* result = *value;
+  switch (lookup.type()) {
     case NORMAL: {
       PropertyDetails details = PropertyDetails(attributes, NORMAL);
-      return SetNormalizedProperty(name, value, details);
+      result = self->SetNormalizedProperty(*name, *value, details);
+      break;
     }
     case FIELD:
-      return FastPropertyAtPut(result.GetFieldIndex(), value);
+      result = self->FastPropertyAtPut(lookup.GetFieldIndex(), *value);
+      break;
     case CONSTANT_FUNCTION:
       // Only replace the function if necessary.
-      if (value == result.GetConstantFunction()) return value;
-      // Preserve the attributes of this existing property.
-      attributes = result.GetAttributes();
-      return ConvertDescriptorToField(name, value, attributes);
+      if (*value != lookup.GetConstantFunction()) {
+        // Preserve the attributes of this existing property.
+        attributes = lookup.GetAttributes();
+        result = self->ConvertDescriptorToField(*name, *value, attributes);
+      }
+      break;
     case CALLBACKS:
     case INTERCEPTOR:
       // Override callback in clone
-      return ConvertDescriptorToField(name, value, attributes);
+      result = self->ConvertDescriptorToField(*name, *value, attributes);
+      break;
     case TRANSITION: {
-      Map* transition_map = result.GetTransitionTarget();
+      Map* transition_map = lookup.GetTransitionTarget();
       int descriptor = transition_map->LastAdded();
 
       DescriptorArray* descriptors = transition_map->instance_descriptors();
@@ -3027,29 +3120,48 @@
       if (details.type() == FIELD) {
         if (attributes == details.attributes()) {
           int field_index = descriptors->GetFieldIndex(descriptor);
-          return AddFastPropertyUsingMap(transition_map,
-                                         name,
-                                         value,
-                                         field_index);
+          result = self->AddFastPropertyUsingMap(
+              transition_map, *name, *value, field_index);
+        } else {
+          result = self->ConvertDescriptorToField(*name, *value, attributes);
         }
-        return ConvertDescriptorToField(name, value, attributes);
       } else if (details.type() == CALLBACKS) {
-        return ConvertDescriptorToField(name, value, attributes);
+        result = self->ConvertDescriptorToField(*name, *value, attributes);
+      } else {
+        ASSERT(details.type() == CONSTANT_FUNCTION);
+
+        // Replace transition to CONSTANT FUNCTION with a map transition to a
+        // new map with a FIELD, even if the value is a function.
+        result = self->ConvertTransitionToMapTransition(
+            lookup.GetTransitionIndex(), *name, *value, attributes);
       }
-
-      ASSERT(details.type() == CONSTANT_FUNCTION);
-
-      // Replace transition to CONSTANT FUNCTION with a map transition to a new
-      // map with a FIELD, even if the value is a function.
-      return ConvertTransitionToMapTransition(
-          result.GetTransitionIndex(), name, value, attributes);
+      break;
     }
     case HANDLER:
     case NONEXISTENT:
       UNREACHABLE();
   }
-  UNREACHABLE();  // keep the compiler happy
-  return value;
+
+  Handle<Object> hresult;
+  if (!result->ToHandle(&hresult)) return result;
+
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    if (lookup.IsTransition()) {
+      EnqueueChangeRecord(self, "new", name, old_value);
+    } else {
+      LookupResult new_lookup(isolate);
+      self->LocalLookup(*name, &new_lookup);
+      ASSERT(!new_lookup.GetLazyValue()->IsTheHole());
+      if (old_value->IsTheHole() ||
+          new_lookup.GetAttributes() != old_attributes) {
+        EnqueueChangeRecord(self, "reconfigured", name, old_value);
+      } else if (!new_lookup.GetLazyValue()->SameValue(*old_value)) {
+        EnqueueChangeRecord(self, "updated", name, old_value);
+      }
+    }
+  }
+
+  return *hresult;
 }
 
 
@@ -3130,42 +3242,43 @@
       String* key) {
   uint32_t index = 0;
   if (IsJSObject() && key->AsArrayIndex(&index)) {
-    return JSObject::cast(this)->HasElementWithReceiver(receiver, index)
-        ? NONE : ABSENT;
+    return JSObject::cast(this)->GetElementAttributeWithReceiver(
+        receiver, index, true);
   }
   // Named property.
-  LookupResult result(GetIsolate());
-  Lookup(key, &result);
-  return GetPropertyAttribute(receiver, &result, key, true);
+  LookupResult lookup(GetIsolate());
+  Lookup(key, &lookup);
+  return GetPropertyAttributeForResult(receiver, &lookup, key, true);
 }
 
 
-PropertyAttributes JSReceiver::GetPropertyAttribute(JSReceiver* receiver,
-                                                    LookupResult* result,
-                                                    String* name,
-                                                    bool continue_search) {
+PropertyAttributes JSReceiver::GetPropertyAttributeForResult(
+    JSReceiver* receiver,
+    LookupResult* lookup,
+    String* name,
+    bool continue_search) {
   // Check access rights if needed.
   if (IsAccessCheckNeeded()) {
     JSObject* this_obj = JSObject::cast(this);
     Heap* heap = GetHeap();
     if (!heap->isolate()->MayNamedAccess(this_obj, name, v8::ACCESS_HAS)) {
       return this_obj->GetPropertyAttributeWithFailedAccessCheck(
-          receiver, result, name, continue_search);
+          receiver, lookup, name, continue_search);
     }
   }
-  if (result->IsFound()) {
-    switch (result->type()) {
+  if (lookup->IsFound()) {
+    switch (lookup->type()) {
       case NORMAL:  // fall through
       case FIELD:
       case CONSTANT_FUNCTION:
       case CALLBACKS:
-        return result->GetAttributes();
+        return lookup->GetAttributes();
       case HANDLER: {
-        return JSProxy::cast(result->proxy())->GetPropertyAttributeWithHandler(
+        return JSProxy::cast(lookup->proxy())->GetPropertyAttributeWithHandler(
             receiver, name);
       }
       case INTERCEPTOR:
-        return result->holder()->GetPropertyAttributeWithInterceptor(
+        return lookup->holder()->GetPropertyAttributeWithInterceptor(
             JSObject::cast(receiver), name, continue_search);
       case TRANSITION:
       case NONEXISTENT:
@@ -3180,13 +3293,113 @@
   // Check whether the name is an array index.
   uint32_t index = 0;
   if (IsJSObject() && name->AsArrayIndex(&index)) {
-    if (JSObject::cast(this)->HasLocalElement(index)) return NONE;
-    return ABSENT;
+    return GetLocalElementAttribute(index);
   }
   // Named property.
-  LookupResult result(GetIsolate());
-  LocalLookup(name, &result);
-  return GetPropertyAttribute(this, &result, name, false);
+  LookupResult lookup(GetIsolate());
+  LocalLookup(name, &lookup);
+  return GetPropertyAttributeForResult(this, &lookup, name, false);
+}
+
+
+PropertyAttributes JSObject::GetElementAttributeWithReceiver(
+    JSReceiver* receiver, uint32_t index, bool continue_search) {
+  Isolate* isolate = GetIsolate();
+
+  // Check access rights if needed.
+  if (IsAccessCheckNeeded()) {
+    if (!isolate->MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
+      isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+      return ABSENT;
+    }
+  }
+
+  if (IsJSGlobalProxy()) {
+    Object* proto = GetPrototype();
+    if (proto->IsNull()) return ABSENT;
+    ASSERT(proto->IsJSGlobalObject());
+    return JSObject::cast(proto)->GetElementAttributeWithReceiver(
+        receiver, index, continue_search);
+  }
+
+  // Check for lookup interceptor except when bootstrapping.
+  if (HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
+    return GetElementAttributeWithInterceptor(receiver, index, continue_search);
+  }
+
+  return GetElementAttributeWithoutInterceptor(
+      receiver, index, continue_search);
+}
+
+
+PropertyAttributes JSObject::GetElementAttributeWithInterceptor(
+    JSReceiver* receiver, uint32_t index, bool continue_search) {
+  Isolate* isolate = GetIsolate();
+  // Make sure that the top context does not change when doing
+  // callbacks or interceptor calls.
+  AssertNoContextChange ncc;
+  HandleScope scope(isolate);
+  Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
+  Handle<JSReceiver> hreceiver(receiver);
+  Handle<JSObject> holder(this);
+  CustomArguments args(isolate, interceptor->data(), receiver, this);
+  v8::AccessorInfo info(args.end());
+  if (!interceptor->query()->IsUndefined()) {
+    v8::IndexedPropertyQuery query =
+        v8::ToCData<v8::IndexedPropertyQuery>(interceptor->query());
+    LOG(isolate,
+        ApiIndexedPropertyAccess("interceptor-indexed-has", this, index));
+    v8::Handle<v8::Integer> result;
+    {
+      // Leaving JavaScript.
+      VMState state(isolate, EXTERNAL);
+      result = query(index, info);
+    }
+    if (!result.IsEmpty())
+      return static_cast<PropertyAttributes>(result->Int32Value());
+  } else if (!interceptor->getter()->IsUndefined()) {
+    v8::IndexedPropertyGetter getter =
+        v8::ToCData<v8::IndexedPropertyGetter>(interceptor->getter());
+    LOG(isolate,
+        ApiIndexedPropertyAccess("interceptor-indexed-get-has", this, index));
+    v8::Handle<v8::Value> result;
+    {
+      // Leaving JavaScript.
+      VMState state(isolate, EXTERNAL);
+      result = getter(index, info);
+    }
+    if (!result.IsEmpty()) return DONT_ENUM;
+  }
+
+  return holder->GetElementAttributeWithoutInterceptor(
+      *hreceiver, index, continue_search);
+}
+
+
+PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
+      JSReceiver* receiver, uint32_t index, bool continue_search) {
+  Isolate* isolate = GetIsolate();
+  HandleScope scope(isolate);
+  Handle<JSReceiver> hreceiver(receiver);
+  Handle<JSObject> holder(this);
+  PropertyAttributes attr = holder->GetElementsAccessor()->GetAttributes(
+      *hreceiver, *holder, index);
+  if (attr != ABSENT) return attr;
+
+  if (holder->IsStringObjectWithCharacterAt(index)) {
+    return static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
+  }
+
+  if (!continue_search) return ABSENT;
+
+  Object* pt = holder->GetPrototype();
+  if (pt->IsJSProxy()) {
+    // We need to follow the spec and simulate a call to [[GetOwnProperty]].
+    return JSProxy::cast(pt)->GetElementAttributeWithHandler(*hreceiver, index);
+  }
+  if (pt->IsNull()) return ABSENT;
+  return JSObject::cast(pt)->GetElementAttributeWithReceiver(
+      *hreceiver, index, true);
 }
 
 
@@ -3911,15 +4124,39 @@
     return JSGlobalObject::cast(proto)->DeleteElement(index, mode);
   }
 
-  if (HasIndexedInterceptor()) {
-    // Skip interceptor if forcing deletion.
-    if (mode != FORCE_DELETION) {
-      return DeleteElementWithInterceptor(index);
+  // From this point on everything needs to be handlified.
+  HandleScope scope(isolate);
+  Handle<JSObject> self(this);
+
+  Handle<String> name;
+  Handle<Object> old_value(isolate->heap()->the_hole_value());
+  bool preexists = false;
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    name = isolate->factory()->Uint32ToString(index);
+    preexists = self->HasLocalElement(index);
+    if (preexists) {
+      // TODO(observe): only read & set old_value if it's not an accessor
+      old_value = Object::GetElement(self, index);
     }
-    mode = JSReceiver::FORCE_DELETION;
   }
 
-  return GetElementsAccessor()->Delete(this, index, mode);
+  MaybeObject* result;
+  // Skip interceptor if forcing deletion.
+  if (self->HasIndexedInterceptor() && mode != FORCE_DELETION) {
+    result = self->DeleteElementWithInterceptor(index);
+  } else {
+    result = self->GetElementsAccessor()->Delete(*self, index, mode);
+  }
+
+  Handle<Object> hresult;
+  if (!result->ToHandle(&hresult)) return result;
+
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    if (preexists && !self->HasLocalElement(index))
+      EnqueueChangeRecord(self, "deleted", name, old_value);
+  }
+
+  return *hresult;
 }
 
 
@@ -3953,38 +4190,60 @@
   uint32_t index = 0;
   if (name->AsArrayIndex(&index)) {
     return DeleteElement(index, mode);
+  }
+
+  LookupResult lookup(isolate);
+  LocalLookup(name, &lookup);
+  if (!lookup.IsFound()) return isolate->heap()->true_value();
+  // Ignore attributes if forcing a deletion.
+  if (lookup.IsDontDelete() && mode != FORCE_DELETION) {
+    if (mode == STRICT_DELETION) {
+      // Deleting a non-configurable property in strict mode.
+      HandleScope scope(isolate);
+      Handle<Object> args[2] = { Handle<Object>(name), Handle<Object>(this) };
+      return isolate->Throw(*isolate->factory()->NewTypeError(
+          "strict_delete_property", HandleVector(args, 2)));
+    }
+    return isolate->heap()->false_value();
+  }
+
+  // From this point on everything needs to be handlified.
+  HandleScope scope(isolate);
+  Handle<JSObject> self(this);
+  Handle<String> hname(name);
+
+  Handle<Object> old_value(isolate->heap()->the_hole_value());
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    old_value = handle(lookup.GetLazyValue());
+  }
+  MaybeObject* result;
+
+  // Check for interceptor.
+  if (lookup.IsInterceptor()) {
+    // Skip interceptor if forcing a deletion.
+    if (mode == FORCE_DELETION) {
+      result = self->DeletePropertyPostInterceptor(*hname, mode);
+    } else {
+      result = self->DeletePropertyWithInterceptor(*hname);
+    }
   } else {
-    LookupResult result(isolate);
-    LocalLookup(name, &result);
-    if (!result.IsFound()) return isolate->heap()->true_value();
-    // Ignore attributes if forcing a deletion.
-    if (result.IsDontDelete() && mode != FORCE_DELETION) {
-      if (mode == STRICT_DELETION) {
-        // Deleting a non-configurable property in strict mode.
-        HandleScope scope(isolate);
-        Handle<Object> args[2] = { Handle<Object>(name), Handle<Object>(this) };
-        return isolate->Throw(*isolate->factory()->NewTypeError(
-            "strict_delete_property", HandleVector(args, 2)));
-      }
-      return isolate->heap()->false_value();
-    }
-    // Check for interceptor.
-    if (result.IsInterceptor()) {
-      // Skip interceptor if forcing a deletion.
-      if (mode == FORCE_DELETION) {
-        return DeletePropertyPostInterceptor(name, mode);
-      }
-      return DeletePropertyWithInterceptor(name);
-    }
     // Normalize object if needed.
     Object* obj;
-    { MaybeObject* maybe_obj =
-          NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
-      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
-    }
+    result = self->NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
+    if (!result->To(&obj)) return result;
     // Make sure the properties are normalized before removing the entry.
-    return DeleteNormalizedProperty(name, mode);
+    result = self->DeleteNormalizedProperty(*hname, mode);
   }
+
+  Handle<Object> hresult;
+  if (!result->ToHandle(&hresult)) return result;
+
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    if (!self->HasLocalProperty(*hname))
+      EnqueueChangeRecord(self, "deleted", hname, old_value);
+  }
+
+  return *hresult;
 }
 
 
@@ -4453,7 +4712,9 @@
   // to do a lookup, which seems to be a bit of overkill.
   Heap* heap = GetHeap();
   bool only_attribute_changes = getter->IsNull() && setter->IsNull();
-  if (HasFastProperties() && !only_attribute_changes) {
+  if (HasFastProperties() && !only_attribute_changes &&
+      (map()->NumberOfOwnDescriptors() <
+       DescriptorArray::kMaxNumberOfDescriptors)) {
     MaybeObject* getterOk = heap->undefined_value();
     if (!getter->IsNull()) {
       getterOk = DefineFastAccessor(name, ACCESSOR_GETTER, getter, attributes);
@@ -4583,14 +4844,14 @@
       object->DefineAccessor(*name, *getter, *setter, attributes));
 }
 
-MaybeObject* JSObject::DefineAccessor(String* name,
-                                      Object* getter,
-                                      Object* setter,
+MaybeObject* JSObject::DefineAccessor(String* name_raw,
+                                      Object* getter_raw,
+                                      Object* setter_raw,
                                       PropertyAttributes attributes) {
   Isolate* isolate = GetIsolate();
   // Check access rights if needed.
   if (IsAccessCheckNeeded() &&
-      !isolate->MayNamedAccess(this, name, v8::ACCESS_SET)) {
+      !isolate->MayNamedAccess(this, name_raw, v8::ACCESS_SET)) {
     isolate->ReportFailedAccessCheck(this, v8::ACCESS_SET);
     return isolate->heap()->undefined_value();
   }
@@ -4600,7 +4861,7 @@
     if (proto->IsNull()) return this;
     ASSERT(proto->IsJSGlobalObject());
     return JSObject::cast(proto)->DefineAccessor(
-        name, getter, setter, attributes);
+        name_raw, getter_raw, setter_raw, attributes);
   }
 
   // Make sure that the top context does not change when doing callbacks or
@@ -4608,14 +4869,50 @@
   AssertNoContextChange ncc;
 
   // Try to flatten before operating on the string.
-  name->TryFlatten();
+  name_raw->TryFlatten();
 
-  if (!CanSetCallback(name)) return isolate->heap()->undefined_value();
+  if (!CanSetCallback(name_raw)) return isolate->heap()->undefined_value();
+
+  // From this point on everything needs to be handlified.
+  HandleScope scope(GetIsolate());
+  Handle<JSObject> self(this);
+  Handle<String> name(name_raw);
+  Handle<Object> getter(getter_raw);
+  Handle<Object> setter(setter_raw);
 
   uint32_t index = 0;
-  return name->AsArrayIndex(&index) ?
-      DefineElementAccessor(index, getter, setter, attributes) :
-      DefinePropertyAccessor(name, getter, setter, attributes);
+  bool is_element = name->AsArrayIndex(&index);
+
+  Handle<Object> old_value(isolate->heap()->the_hole_value());
+  bool preexists = false;
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    if (is_element) {
+      preexists = HasLocalElement(index);
+      if (preexists) {
+        // TODO(observe): distinguish the case where it's an accessor
+        old_value = Object::GetElement(self, index);
+      }
+    } else {
+      LookupResult lookup(isolate);
+      LocalLookup(*name, &lookup);
+      preexists = lookup.IsProperty();
+      if (preexists) old_value = handle(lookup.GetLazyValue());
+    }
+  }
+
+  MaybeObject* result = is_element ?
+    self->DefineElementAccessor(index, *getter, *setter, attributes) :
+    self->DefinePropertyAccessor(*name, *getter, *setter, attributes);
+
+  Handle<Object> hresult;
+  if (!result->ToHandle(&hresult)) return result;
+
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    const char* type = preexists ? "reconfigured" : "new";
+    EnqueueChangeRecord(self, type, name, old_value);
+  }
+
+  return *hresult;
 }
 
 
@@ -4695,7 +4992,8 @@
     if (result.IsFound()) {
       Map* target = result.GetTransitionTarget();
       int descriptor_number = target->LastAdded();
-      ASSERT(target->instance_descriptors()->GetKey(descriptor_number) == name);
+      ASSERT(target->instance_descriptors()->GetKey(descriptor_number)
+             ->Equals(name));
       return TryAccessorTransition(
           this, target, descriptor_number, component, accessor, attributes);
     }
@@ -6208,7 +6506,7 @@
     ASSERT(shape.representation_tag() != kConsStringTag &&
            shape.representation_tag() != kSlicedStringTag);
   }
-  if (shape.encoding_tag() == kAsciiStringTag) {
+  if (shape.encoding_tag() == kOneByteStringTag) {
     const char* start;
     if (shape.representation_tag() == kSeqStringTag) {
       start = SeqAsciiString::cast(string)->GetChars();
@@ -6897,7 +7195,7 @@
   while (true) {
     ASSERT(0 <= from && from <= to && to <= source->length());
     switch (StringShape(source).full_representation_tag()) {
-      case kAsciiStringTag | kExternalStringTag: {
+      case kOneByteStringTag | kExternalStringTag: {
         CopyChars(sink,
                   ExternalAsciiString::cast(source)->GetChars() + from,
                   to - from);
@@ -6911,7 +7209,7 @@
                   to - from);
         return;
       }
-      case kAsciiStringTag | kSeqStringTag: {
+      case kOneByteStringTag | kSeqStringTag: {
         CopyChars(sink,
                   SeqAsciiString::cast(source)->GetChars() + from,
                   to - from);
@@ -6923,7 +7221,7 @@
                   to - from);
         return;
       }
-      case kAsciiStringTag | kConsStringTag:
+      case kOneByteStringTag | kConsStringTag:
       case kTwoByteStringTag | kConsStringTag: {
         ConsString* cons_string = ConsString::cast(source);
         String* first = cons_string->first();
@@ -6964,7 +7262,7 @@
         }
         break;
       }
-      case kAsciiStringTag | kSlicedStringTag:
+      case kOneByteStringTag | kSlicedStringTag:
       case kTwoByteStringTag | kSlicedStringTag: {
         SlicedString* slice = SlicedString::cast(source);
         unsigned offset = slice->offset();
@@ -7517,6 +7815,7 @@
     instance_type() == other->instance_type() &&
     bit_field() == other->bit_field() &&
     bit_field2() == other->bit_field2() &&
+    is_observed() == other->is_observed() &&
     function_with_prototype() == other->function_with_prototype();
 }
 
@@ -7635,6 +7934,7 @@
   ASSERT(code != NULL);
   ASSERT(function->context()->native_context() == code_map->get(index - 1));
   function->ReplaceCode(code);
+  code->MakeYoung();
 }
 
 
@@ -7697,6 +7997,35 @@
 }
 
 
+MUST_USE_RESULT static MaybeObject* CacheInitialJSArrayMaps(
+    Context* native_context, Map* initial_map) {
+  // Replace all of the cached initial array maps in the native context with
+  // the appropriate transitioned elements kind maps.
+  Heap* heap = native_context->GetHeap();
+  MaybeObject* maybe_maps =
+      heap->AllocateFixedArrayWithHoles(kElementsKindCount);
+  FixedArray* maps;
+  if (!maybe_maps->To(&maps)) return maybe_maps;
+
+  Map* current_map = initial_map;
+  ElementsKind kind = current_map->elements_kind();
+  ASSERT(kind == GetInitialFastElementsKind());
+  maps->set(kind, current_map);
+  for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
+       i < kFastElementsKindCount; ++i) {
+    Map* new_map;
+    ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
+    MaybeObject* maybe_new_map =
+        current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
+    if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+    maps->set(next_kind, new_map);
+    current_map = new_map;
+  }
+  native_context->set_js_array_maps(maps);
+  return initial_map;
+}
+
+
 MaybeObject* JSFunction::SetInstancePrototype(Object* value) {
   ASSERT(value->IsJSReceiver());
   Heap* heap = GetHeap();
@@ -7711,14 +8040,29 @@
   // Now some logic for the maps of the objects that are created by using this
   // function as a constructor.
   if (has_initial_map()) {
-    // If the function has allocated the initial map
-    // replace it with a copy containing the new prototype.
+    // If the function has allocated the initial map replace it with a
+    // copy containing the new prototype.  Also complete any in-object
+    // slack tracking that is in progress at this point because it is
+    // still tracking the old copy.
+    if (shared()->IsInobjectSlackTrackingInProgress()) {
+      shared()->CompleteInobjectSlackTracking();
+    }
     Map* new_map;
-    MaybeObject* maybe_new_map = initial_map()->Copy();
-    if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+    MaybeObject* maybe_object = initial_map()->Copy();
+    if (!maybe_object->To(&new_map)) return maybe_object;
     new_map->set_prototype(value);
-    MaybeObject* maybe_object = set_initial_map_and_cache_transitions(new_map);
-    if (maybe_object->IsFailure()) return maybe_object;
+
+    // If the function is used as the global Array function, cache the
+    // initial map (and transitioned versions) in the native context.
+    Context* native_context = context()->native_context();
+    Object* array_function = native_context->get(Context::ARRAY_FUNCTION_INDEX);
+    if (array_function->IsJSFunction() &&
+        this == JSFunction::cast(array_function)) {
+      MaybeObject* ok = CacheInitialJSArrayMaps(native_context, new_map);
+      if (ok->IsFailure()) return ok;
+    }
+
+    set_initial_map(new_map);
   } else {
     // Put the value in the initial map field until an initial map is
     // needed.  At that point, a new initial map is created and the
@@ -8231,6 +8575,15 @@
 }
 
 
+void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
+  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+  Object* stub = rinfo->code_age_stub();
+  if (stub) {
+    VisitPointer(&stub);
+  }
+}
+
+
 void ObjectVisitor::VisitCodeEntry(Address entry_address) {
   Object* code = Code::GetObjectFromEntryAddress(entry_address);
   Object* old_code = code;
@@ -8443,6 +8796,99 @@
 }
 
 
+void Code::MakeCodeAgeSequenceYoung(byte* sequence) {
+  PatchPlatformCodeAge(sequence, kNoAge, NO_MARKING_PARITY);
+}
+
+
+void Code::MakeYoung() {
+  byte* sequence = FindCodeAgeSequence();
+  if (sequence != NULL) {
+    PatchPlatformCodeAge(sequence, kNoAge, NO_MARKING_PARITY);
+  }
+}
+
+
+void Code::MakeOlder(MarkingParity current_parity) {
+  byte* sequence = FindCodeAgeSequence();
+  if (sequence != NULL) {
+    Age age;
+    MarkingParity code_parity;
+    GetCodeAgeAndParity(sequence, &age, &code_parity);
+    if (age != kLastCodeAge && code_parity != current_parity) {
+      PatchPlatformCodeAge(sequence, static_cast<Age>(age + 1),
+                           current_parity);
+    }
+  }
+}
+
+
+bool Code::IsOld() {
+  byte* sequence = FindCodeAgeSequence();
+  if (sequence == NULL) return false;
+  Age age;
+  MarkingParity parity;
+  GetCodeAgeAndParity(sequence, &age, &parity);
+  return age >= kSexagenarianCodeAge;
+}
+
+
+byte* Code::FindCodeAgeSequence() {
+  return FLAG_age_code &&
+      strlen(FLAG_stop_at) == 0 &&
+      !ProfileEntryHookStub::HasEntryHook() &&
+      (kind() == OPTIMIZED_FUNCTION ||
+       (kind() == FUNCTION && !has_debug_break_slots()))
+      ? FindPlatformCodeAgeSequence()
+      : NULL;
+}
+
+
+void Code::GetCodeAgeAndParity(Code* code, Age* age,
+                               MarkingParity* parity) {
+  Isolate* isolate = Isolate::Current();
+  Builtins* builtins = isolate->builtins();
+  Code* stub = NULL;
+#define HANDLE_CODE_AGE(AGE)                                            \
+  stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking();             \
+  if (code == stub) {                                                   \
+    *age = k##AGE##CodeAge;                                             \
+    *parity = EVEN_MARKING_PARITY;                                      \
+    return;                                                             \
+  }                                                                     \
+  stub = *builtins->Make##AGE##CodeYoungAgainOddMarking();              \
+  if (code == stub) {                                                   \
+    *age = k##AGE##CodeAge;                                             \
+    *parity = ODD_MARKING_PARITY;                                       \
+    return;                                                             \
+  }
+  CODE_AGE_LIST(HANDLE_CODE_AGE)
+#undef HANDLE_CODE_AGE
+  UNREACHABLE();
+}
+
+
+Code* Code::GetCodeAgeStub(Age age, MarkingParity parity) {
+  Isolate* isolate = Isolate::Current();
+  Builtins* builtins = isolate->builtins();
+  switch (age) {
+#define HANDLE_CODE_AGE(AGE)                                            \
+    case k##AGE##CodeAge: {                                             \
+      Code* stub = parity == EVEN_MARKING_PARITY                        \
+          ? *builtins->Make##AGE##CodeYoungAgainEvenMarking()           \
+          : *builtins->Make##AGE##CodeYoungAgainOddMarking();           \
+      return stub;                                                      \
+    }
+    CODE_AGE_LIST(HANDLE_CODE_AGE)
+#undef HANDLE_CODE_AGE
+    default:
+      UNREACHABLE();
+      break;
+  }
+  return NULL;
+}
+
+
 #ifdef ENABLE_DISASSEMBLER
 
 void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
@@ -9106,64 +9552,7 @@
 }
 
 
-bool JSObject::HasElementWithInterceptor(JSReceiver* receiver, uint32_t index) {
-  Isolate* isolate = GetIsolate();
-  // Make sure that the top context does not change when doing
-  // callbacks or interceptor calls.
-  AssertNoContextChange ncc;
-  HandleScope scope(isolate);
-  Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
-  Handle<JSReceiver> receiver_handle(receiver);
-  Handle<JSObject> holder_handle(this);
-  CustomArguments args(isolate, interceptor->data(), receiver, this);
-  v8::AccessorInfo info(args.end());
-  if (!interceptor->query()->IsUndefined()) {
-    v8::IndexedPropertyQuery query =
-        v8::ToCData<v8::IndexedPropertyQuery>(interceptor->query());
-    LOG(isolate,
-        ApiIndexedPropertyAccess("interceptor-indexed-has", this, index));
-    v8::Handle<v8::Integer> result;
-    {
-      // Leaving JavaScript.
-      VMState state(isolate, EXTERNAL);
-      result = query(index, info);
-    }
-    if (!result.IsEmpty()) {
-      ASSERT(result->IsInt32());
-      return true;  // absence of property is signaled by empty handle.
-    }
-  } else if (!interceptor->getter()->IsUndefined()) {
-    v8::IndexedPropertyGetter getter =
-        v8::ToCData<v8::IndexedPropertyGetter>(interceptor->getter());
-    LOG(isolate,
-        ApiIndexedPropertyAccess("interceptor-indexed-has-get", this, index));
-    v8::Handle<v8::Value> result;
-    {
-      // Leaving JavaScript.
-      VMState state(isolate, EXTERNAL);
-      result = getter(index, info);
-    }
-    if (!result.IsEmpty()) return true;
-  }
-
-  if (holder_handle->GetElementsAccessor()->HasElement(
-          *receiver_handle, *holder_handle, index)) {
-    return true;
-  }
-
-  if (holder_handle->IsStringObjectWithCharacterAt(index)) return true;
-  Object* pt = holder_handle->GetPrototype();
-  if (pt->IsJSProxy()) {
-    // We need to follow the spec and simulate a call to [[GetOwnProperty]].
-    return JSProxy::cast(pt)->GetElementAttributeWithHandler(
-        receiver, index) != ABSENT;
-  }
-  if (pt->IsNull()) return false;
-  return JSObject::cast(pt)->HasElementWithReceiver(*receiver_handle, index);
-}
-
-
-JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
+JSObject::LocalElementType JSObject::GetLocalElementType(uint32_t index) {
   // Check access rights if needed.
   if (IsAccessCheckNeeded()) {
     Heap* heap = GetHeap();
@@ -9177,13 +9566,13 @@
     Object* proto = GetPrototype();
     if (proto->IsNull()) return UNDEFINED_ELEMENT;
     ASSERT(proto->IsJSGlobalObject());
-    return JSObject::cast(proto)->HasLocalElement(index);
+    return JSObject::cast(proto)->GetLocalElementType(index);
   }
 
   // Check for lookup interceptor
   if (HasIndexedInterceptor()) {
-    return HasElementWithInterceptor(this, index) ? INTERCEPTED_ELEMENT
-                                                  : UNDEFINED_ELEMENT;
+    return GetElementAttributeWithInterceptor(this, index, false) != ABSENT
+        ? INTERCEPTED_ELEMENT : UNDEFINED_ELEMENT;
   }
 
   // Handle [] on String objects.
@@ -9272,40 +9661,6 @@
 }
 
 
-bool JSObject::HasElementWithReceiver(JSReceiver* receiver, uint32_t index) {
-  // Check access rights if needed.
-  if (IsAccessCheckNeeded()) {
-    Heap* heap = GetHeap();
-    if (!heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
-      heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
-      return false;
-    }
-  }
-
-  // Check for lookup interceptor
-  if (HasIndexedInterceptor()) {
-    return HasElementWithInterceptor(receiver, index);
-  }
-
-  ElementsAccessor* accessor = GetElementsAccessor();
-  if (accessor->HasElement(receiver, this, index)) {
-    return true;
-  }
-
-  // Handle [] on String objects.
-  if (this->IsStringObjectWithCharacterAt(index)) return true;
-
-  Object* pt = GetPrototype();
-  if (pt->IsNull()) return false;
-  if (pt->IsJSProxy()) {
-    // We need to follow the spec and simulate a call to [[GetOwnProperty]].
-    return JSProxy::cast(pt)->GetElementAttributeWithHandler(
-        receiver, index) != ABSENT;
-  }
-  return JSObject::cast(pt)->HasElementWithReceiver(receiver, index);
-}
-
-
 MaybeObject* JSObject::SetElementWithInterceptor(uint32_t index,
                                                  Object* value,
                                                  PropertyAttributes attributes,
@@ -9903,28 +10258,31 @@
 
 
 MaybeObject* JSObject::SetElement(uint32_t index,
-                                  Object* value,
+                                  Object* value_raw,
                                   PropertyAttributes attributes,
                                   StrictModeFlag strict_mode,
                                   bool check_prototype,
                                   SetPropertyMode set_mode) {
+  Isolate* isolate = GetIsolate();
+  HandleScope scope(isolate);
+  Handle<JSObject> self(this);
+  Handle<Object> value(value_raw);
+
   // Check access rights if needed.
   if (IsAccessCheckNeeded()) {
     Heap* heap = GetHeap();
-    if (!heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_SET)) {
-      HandleScope scope(heap->isolate());
-      Handle<Object> value_handle(value);
-      heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_SET);
-      return *value_handle;
+    if (!heap->isolate()->MayIndexedAccess(*self, index, v8::ACCESS_SET)) {
+      heap->isolate()->ReportFailedAccessCheck(*self, v8::ACCESS_SET);
+      return *value;
     }
   }
 
   if (IsJSGlobalProxy()) {
     Object* proto = GetPrototype();
-    if (proto->IsNull()) return value;
+    if (proto->IsNull()) return *value;
     ASSERT(proto->IsJSGlobalObject());
     return JSObject::cast(proto)->SetElement(index,
-                                             value,
+                                             *value,
                                              attributes,
                                              strict_mode,
                                              check_prototype,
@@ -9933,10 +10291,8 @@
 
   // Don't allow element properties to be redefined for external arrays.
   if (HasExternalArrayElements() && set_mode == DEFINE_PROPERTY) {
-    Isolate* isolate = GetHeap()->isolate();
-    Handle<Object> receiver(this);
     Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
-    Handle<Object> args[] = { receiver, number };
+    Handle<Object> args[] = { self, number };
     Handle<Object> error = isolate->factory()->NewTypeError(
         "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args)));
     return isolate->Throw(*error);
@@ -9951,22 +10307,55 @@
     dictionary->set_requires_slow_elements();
   }
 
-  // Check for lookup interceptor
-  if (HasIndexedInterceptor()) {
-    return SetElementWithInterceptor(index,
-                                     value,
-                                     attributes,
-                                     strict_mode,
-                                     check_prototype,
-                                     set_mode);
+  // From here on, everything has to be handlified.
+  Handle<String> name;
+  Handle<Object> old_value(isolate->heap()->the_hole_value());
+  Handle<Object> old_array_length;
+  PropertyAttributes old_attributes = ABSENT;
+  bool preexists = false;
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    name = isolate->factory()->Uint32ToString(index);
+    preexists = self->HasLocalElement(index);
+    if (preexists) {
+      old_attributes = self->GetLocalPropertyAttribute(*name);
+      // TODO(observe): only read & set old_value if we have a data property
+      old_value = Object::GetElement(self, index);
+    } else if (self->IsJSArray()) {
+      // Store old array length in case adding an element grows the array.
+      old_array_length = handle(Handle<JSArray>::cast(self)->length());
+    }
   }
 
-  return SetElementWithoutInterceptor(index,
-                                      value,
-                                      attributes,
-                                      strict_mode,
-                                      check_prototype,
-                                      set_mode);
+  // Check for lookup interceptor
+  MaybeObject* result = self->HasIndexedInterceptor()
+    ? self->SetElementWithInterceptor(
+        index, *value, attributes, strict_mode, check_prototype, set_mode)
+    : self->SetElementWithoutInterceptor(
+        index, *value, attributes, strict_mode, check_prototype, set_mode);
+
+  Handle<Object> hresult;
+  if (!result->ToHandle(&hresult)) return result;
+
+  if (FLAG_harmony_observation && map()->is_observed()) {
+    PropertyAttributes new_attributes = self->GetLocalPropertyAttribute(*name);
+    if (!preexists) {
+      EnqueueChangeRecord(self, "new", name, old_value);
+      if (self->IsJSArray() &&
+          !old_array_length->SameValue(Handle<JSArray>::cast(self)->length())) {
+        EnqueueChangeRecord(self, "updated",
+                            isolate->factory()->length_symbol(),
+                            old_array_length);
+      }
+    } else if (new_attributes != old_attributes || old_value->IsTheHole()) {
+      EnqueueChangeRecord(self, "reconfigured", name, old_value);
+    } else {
+      Handle<Object> new_value = Object::GetElement(self, index);
+      if (!new_value->SameValue(*old_value))
+        EnqueueChangeRecord(self, "updated", name, old_value);
+    }
+  }
+
+  return *hresult;
 }
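
All of the handlified property paths above (the set, delete, define-accessor and
element paths) funnel their Object.observe notifications through
EnqueueChangeRecord with one of four type strings. A minimal script-level sketch
of what that produces, run under d8 with --harmony-observation and assuming the
Object.observe entry point from the accompanying object-observe.js (not part of
this diff) plus the proposal's record shape (type, name, object, oldValue):

  // Illustrative only.
  var o = { a: 1 };
  Object.observe(o, function(records) {
    records.forEach(function(r) { print(r.type + " " + r.name); });
  });

  o.b = 2;                                                // "new b"
  o.a = 3;                                                // "updated a" (oldValue: 1)
  Object.defineProperty(o, "a", { get: function() {} });  // "reconfigured a"
  delete o.b;                                             // "deleted b" (oldValue: 2)
  // Records are queued here and delivered in a batch once V8 returns to the
  // embedder (see the src/v8.cc change further down).
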
 
 
diff --git a/src/objects.h b/src/objects.h
index 0d1a69c..c86c0b2 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -194,6 +194,18 @@
   OWN_DESCRIPTORS
 };
 
+// The GC maintains a bit of information, the MarkingParity, which toggles
+// from odd to even and back every time marking is completed. Incremental
+// marking can visit an object twice during a marking phase, so algorithms that
+// piggy-back on marking can use the parity to ensure that they only
+// perform an operation on an object once per marking phase: they record the
+// MarkingParity when they visit an object, and only re-visit the object when it
+// is marked again and the MarkingParity changes.
+enum MarkingParity {
+  NO_MARKING_PARITY,
+  ODD_MARKING_PARITY,
+  EVEN_MARKING_PARITY
+};
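
The comment above describes a general once-per-marking-cycle idiom;
Code::MakeOlder earlier in this patch applies it to code aging. A toy sketch of
the idiom, written in JavaScript purely for illustration -- none of these names
exist in V8:

  // Toy model, not V8 code: do piggy-backed work at most once per marking
  // cycle, even if the marker visits the same object twice.
  var currentParity = "EVEN";
  function onMarkingCompleted() {
    currentParity = (currentParity === "EVEN") ? "ODD" : "EVEN";
  }
  function visitDuringMarking(obj) {
    if (obj.lastParity === currentParity) return;  // already handled this cycle
    obj.lastParity = currentParity;
    // ... perform the once-per-cycle operation, e.g. age the object's code ...
  }
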
 
 // Instance size sentinel for objects of variable size.
 const int kVariableSizeSentinel = 0;
@@ -467,7 +479,7 @@
 // two-byte characters or one-byte characters.
 const uint32_t kStringEncodingMask = 0x4;
 const uint32_t kTwoByteStringTag = 0x0;
-const uint32_t kAsciiStringTag = 0x4;
+const uint32_t kOneByteStringTag = 0x4;
 
 // If bit 7 is clear, the low-order 2 bits indicate the representation
 // of the string.
@@ -518,39 +530,39 @@
 enum InstanceType {
   // String types.
   SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag | kSeqStringTag,
-  ASCII_SYMBOL_TYPE = kAsciiStringTag | kSymbolTag | kSeqStringTag,
+  ASCII_SYMBOL_TYPE = kOneByteStringTag | kSymbolTag | kSeqStringTag,
   CONS_SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag | kConsStringTag,
-  CONS_ASCII_SYMBOL_TYPE = kAsciiStringTag | kSymbolTag | kConsStringTag,
+  CONS_ASCII_SYMBOL_TYPE = kOneByteStringTag | kSymbolTag | kConsStringTag,
   SHORT_EXTERNAL_SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag |
                                kExternalStringTag | kShortExternalStringTag,
   SHORT_EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE =
       kTwoByteStringTag | kSymbolTag | kExternalStringTag |
       kAsciiDataHintTag | kShortExternalStringTag,
-  SHORT_EXTERNAL_ASCII_SYMBOL_TYPE = kAsciiStringTag | kExternalStringTag |
+  SHORT_EXTERNAL_ASCII_SYMBOL_TYPE = kOneByteStringTag | kExternalStringTag |
                                      kSymbolTag | kShortExternalStringTag,
   EXTERNAL_SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag | kExternalStringTag,
   EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE =
       kTwoByteStringTag | kSymbolTag | kExternalStringTag | kAsciiDataHintTag,
   EXTERNAL_ASCII_SYMBOL_TYPE =
-      kAsciiStringTag | kSymbolTag | kExternalStringTag,
+      kOneByteStringTag | kSymbolTag | kExternalStringTag,
   STRING_TYPE = kTwoByteStringTag | kSeqStringTag,
-  ASCII_STRING_TYPE = kAsciiStringTag | kSeqStringTag,
+  ASCII_STRING_TYPE = kOneByteStringTag | kSeqStringTag,
   CONS_STRING_TYPE = kTwoByteStringTag | kConsStringTag,
-  CONS_ASCII_STRING_TYPE = kAsciiStringTag | kConsStringTag,
+  CONS_ASCII_STRING_TYPE = kOneByteStringTag | kConsStringTag,
   SLICED_STRING_TYPE = kTwoByteStringTag | kSlicedStringTag,
-  SLICED_ASCII_STRING_TYPE = kAsciiStringTag | kSlicedStringTag,
+  SLICED_ASCII_STRING_TYPE = kOneByteStringTag | kSlicedStringTag,
   SHORT_EXTERNAL_STRING_TYPE =
       kTwoByteStringTag | kExternalStringTag | kShortExternalStringTag,
   SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE =
       kTwoByteStringTag | kExternalStringTag |
       kAsciiDataHintTag | kShortExternalStringTag,
   SHORT_EXTERNAL_ASCII_STRING_TYPE =
-      kAsciiStringTag | kExternalStringTag | kShortExternalStringTag,
+      kOneByteStringTag | kExternalStringTag | kShortExternalStringTag,
   EXTERNAL_STRING_TYPE = kTwoByteStringTag | kExternalStringTag,
   EXTERNAL_STRING_WITH_ASCII_DATA_TYPE =
       kTwoByteStringTag | kExternalStringTag | kAsciiDataHintTag,
   // LAST_STRING_TYPE
-  EXTERNAL_ASCII_STRING_TYPE = kAsciiStringTag | kExternalStringTag,
+  EXTERNAL_ASCII_STRING_TYPE = kOneByteStringTag | kExternalStringTag,
   PRIVATE_EXTERNAL_ASCII_STRING_TYPE = EXTERNAL_ASCII_STRING_TYPE,
 
   // Objects allocated in their own spaces (never in new space).
@@ -769,6 +781,13 @@
     return true;
   }
 
+  template<typename T>
+  inline bool ToHandle(Handle<T>* obj) {
+    if (IsFailure()) return false;
+    *obj = handle(T::cast(reinterpret_cast<Object*>(this)));
+    return true;
+  }
+
 #ifdef OBJECT_PRINT
   // Prints this object with details.
   inline void Print() {
@@ -866,6 +885,7 @@
   V(UndetectableObject)                        \
   V(AccessCheckNeeded)                         \
   V(JSGlobalPropertyCell)                      \
+  V(ObjectHashTable)                           \
 
 
 class JSReceiver;
@@ -1462,10 +1482,14 @@
                                                       String* name);
   PropertyAttributes GetLocalPropertyAttribute(String* name);
 
+  inline PropertyAttributes GetElementAttribute(uint32_t index);
+  inline PropertyAttributes GetLocalElementAttribute(uint32_t index);
+
   // Can cause a GC.
   inline bool HasProperty(String* name);
   inline bool HasLocalProperty(String* name);
   inline bool HasElement(uint32_t index);
+  inline bool HasLocalElement(uint32_t index);
 
   // Return the object's prototype (might be Heap::null_value()).
   inline Object* GetPrototype();
@@ -1490,10 +1514,10 @@
   Smi* GenerateIdentityHash();
 
  private:
-  PropertyAttributes GetPropertyAttribute(JSReceiver* receiver,
-                                          LookupResult* result,
-                                          String* name,
-                                          bool continue_search);
+  PropertyAttributes GetPropertyAttributeForResult(JSReceiver* receiver,
+                                                   LookupResult* result,
+                                                   String* name,
+                                                   bool continue_search);
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSReceiver);
 };
@@ -1680,12 +1704,16 @@
       LookupResult* result,
       String* name,
       bool continue_search);
+  PropertyAttributes GetElementAttributeWithReceiver(JSReceiver* receiver,
+                                                     uint32_t index,
+                                                     bool continue_search);
 
   static void DefineAccessor(Handle<JSObject> object,
                              Handle<String> name,
                              Handle<Object> getter,
                              Handle<Object> setter,
                              PropertyAttributes attributes);
+  // Can cause GC.
   MUST_USE_RESULT MaybeObject* DefineAccessor(String* name,
                                               Object* getter,
                                               Object* setter,
@@ -1761,6 +1789,7 @@
 
   static Handle<Object> DeleteProperty(Handle<JSObject> obj,
                                        Handle<String> name);
+  // Can cause GC.
   MUST_USE_RESULT MaybeObject* DeleteProperty(String* name, DeleteMode mode);
 
   static Handle<Object> DeleteElement(Handle<JSObject> obj, uint32_t index);
@@ -1799,9 +1828,6 @@
   // be represented as a double and not a Smi.
   bool ShouldConvertToFastDoubleElements(bool* has_smi_only_elements);
 
-  // Tells whether the index'th element is present.
-  bool HasElementWithReceiver(JSReceiver* receiver, uint32_t index);
-
   // Computes the new capacity when expanding the elements of a JSObject.
   static int NewElementsCapacity(int old_capacity) {
     // (old_capacity + 50%) + 16
@@ -1826,9 +1852,7 @@
     DICTIONARY_ELEMENT
   };
 
-  LocalElementType HasLocalElement(uint32_t index);
-
-  bool HasElementWithInterceptor(JSReceiver* receiver, uint32_t index);
+  LocalElementType GetLocalElementType(uint32_t index);
 
   MUST_USE_RESULT MaybeObject* SetFastElement(uint32_t index,
                                               Object* value,
@@ -2008,7 +2032,7 @@
                                                Object* value,
                                                PropertyAttributes attributes);
 
-  // Add a property to an object.
+  // Add a property to an object. May cause GC.
   MUST_USE_RESULT MaybeObject* AddProperty(
       String* name,
       Object* value,
@@ -2179,6 +2203,15 @@
     static inline int SizeOf(Map* map, HeapObject* object);
   };
 
+  // Enqueue change record for Object.observe. May cause GC.
+  static void EnqueueChangeRecord(Handle<JSObject> object,
+                                  const char* type,
+                                  Handle<String> name,
+                                  Handle<Object> old_value);
+
+  // Deliver change records to observers. May cause GC.
+  static void DeliverChangeRecords(Isolate* isolate);
+
  private:
   friend class DictionaryElementsAccessor;
 
@@ -2186,6 +2219,14 @@
                                                       Object* structure,
                                                       uint32_t index,
                                                       Object* holder);
+  MUST_USE_RESULT PropertyAttributes GetElementAttributeWithInterceptor(
+      JSReceiver* receiver,
+      uint32_t index,
+      bool continue_search);
+  MUST_USE_RESULT PropertyAttributes GetElementAttributeWithoutInterceptor(
+      JSReceiver* receiver,
+      uint32_t index,
+      bool continue_search);
   MUST_USE_RESULT MaybeObject* SetElementWithCallback(
       Object* structure,
       uint32_t index,
@@ -4522,6 +4563,23 @@
   void ClearInlineCaches();
   void ClearTypeFeedbackCells(Heap* heap);
 
+#define DECLARE_CODE_AGE_ENUM(X) k##X##CodeAge,
+  enum Age {
+    kNoAge = 0,
+    CODE_AGE_LIST(DECLARE_CODE_AGE_ENUM)
+    kAfterLastCodeAge,
+    kLastCodeAge = kAfterLastCodeAge - 1,
+    kCodeAgeCount = kAfterLastCodeAge - 1
+  };
+#undef DECLARE_CODE_AGE_ENUM
+
+  // Code aging
+  static void MakeCodeAgeSequenceYoung(byte* sequence);
+  void MakeYoung();
+  void MakeOlder(MarkingParity);
+  static bool IsYoungSequence(byte* sequence);
+  bool IsOld();
+
   // Max loop nesting marker used to postpone OSR. We don't take loop
   // nesting that is deeper than 5 levels into account.
   static const int kMaxLoopNestingMarker = 6;
@@ -4650,6 +4708,21 @@
       TypeField::kMask | CacheHolderField::kMask;
 
  private:
+  friend class RelocIterator;
+
+  // Code aging
+  byte* FindCodeAgeSequence();
+  static void  GetCodeAgeAndParity(Code* code, Age* age,
+                                   MarkingParity* parity);
+  static void GetCodeAgeAndParity(byte* sequence, Age* age,
+                                  MarkingParity* parity);
+  static Code* GetCodeAgeStub(Age age, MarkingParity parity);
+
+  // Code aging -- platform-specific
+  byte* FindPlatformCodeAgeSequence();
+  static void PatchPlatformCodeAge(byte* sequence, Age age,
+                                   MarkingParity parity);
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
 };
 
@@ -4701,6 +4774,7 @@
   class FunctionWithPrototype:      public BitField<bool, 23,  1> {};
   class DictionaryMap:              public BitField<bool, 24,  1> {};
   class OwnsDescriptors:            public BitField<bool, 25,  1> {};
+  class IsObserved:                 public BitField<bool, 26,  1> {};
 
   // Tells whether the object in the prototype property will be used
   // for instances created from this function.  If the prototype
@@ -4968,6 +5042,8 @@
 
   inline bool owns_descriptors();
   inline void set_owns_descriptors(bool is_shared);
+  inline bool is_observed();
+  inline void set_is_observed(bool is_observed);
 
   MUST_USE_RESULT MaybeObject* RawCopy(int instance_size);
   MUST_USE_RESULT MaybeObject* CopyWithPreallocatedFieldDescriptors();
@@ -6111,8 +6187,6 @@
   // The initial map for an object created by this constructor.
   inline Map* initial_map();
   inline void set_initial_map(Map* value);
-  MUST_USE_RESULT inline MaybeObject* set_initial_map_and_cache_transitions(
-      Map* value);
   inline bool has_initial_map();
 
   // Get and set the prototype property on a JSFunction. If the
@@ -8897,6 +8971,10 @@
   // Visits a debug call target in the instruction stream.
   virtual void VisitDebugTarget(RelocInfo* rinfo);
 
+  // Visits the byte sequence in a function's prologue that contains information
+  // about the code's age.
+  virtual void VisitCodeAgeSequence(RelocInfo* rinfo);
+
   // Handy shorthand for visiting a single pointer.
   virtual void VisitPointer(Object** p) { VisitPointers(p, p + 1); }
 
diff --git a/src/property.h b/src/property.h
index 9eb4194..3faa28b 100644
--- a/src/property.h
+++ b/src/property.h
@@ -290,7 +290,7 @@
       case CONSTANT_FUNCTION:
         return GetConstantFunction();
       default:
-        return Smi::FromInt(0);
+        return Isolate::Current()->heap()->the_hole_value();
     }
   }
 
diff --git a/src/runtime.cc b/src/runtime.cc
index e90e3a9..11ba4c8 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -783,6 +783,15 @@
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetGetSize) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 1);
+  CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0);
+  Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
+  return Smi::FromInt(table->NumberOfElements());
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_MapInitialize) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
@@ -842,6 +851,15 @@
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_MapGetSize) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 1);
+  CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0);
+  Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
+  return Smi::FromInt(table->NumberOfElements());
+}
+
+
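
These two runtime functions back the new size accessor on Set and Map; the
JavaScript side is expected to expose them as getters, for example via the
InstallGetter helper added to v8natives.js further down. With harmony
collections enabled (e.g. d8 --harmony), the observable behaviour is:

  var s = new Set();
  s.add("a");
  s.add("b");
  s.add("a");        // duplicate key, not counted twice
  print(s.size);     // 2

  var m = new Map();
  m.set("k", 1);
  m.delete("k");
  print(m.size);     // 0
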
 RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapInitialize) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
@@ -1073,7 +1091,7 @@
   // This could be an element.
   uint32_t index;
   if (name->AsArrayIndex(&index)) {
-    switch (obj->HasLocalElement(index)) {
+    switch (obj->GetLocalElementType(index)) {
       case JSObject::UNDEFINED_ELEMENT:
         return heap->undefined_value();
 
@@ -4220,6 +4238,33 @@
 }
 
 
+// Returns the value of a named data property without invoking accessors or
+// interceptors; returns undefined for anything else.
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDataProperty) {
+  ASSERT(args.length() == 2);
+  CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0);
+  CONVERT_ARG_HANDLE_CHECKED(String, key, 1);
+  LookupResult lookup(isolate);
+  object->LookupRealNamedProperty(*key, &lookup);
+  if (!lookup.IsFound()) return isolate->heap()->undefined_value();
+  switch (lookup.type()) {
+    case NORMAL:
+      return lookup.holder()->GetNormalizedProperty(&lookup);
+    case FIELD:
+      return lookup.holder()->FastPropertyAt(lookup.GetFieldIndex());
+    case CONSTANT_FUNCTION:
+      return lookup.GetConstantFunction();
+    case CALLBACKS:
+    case HANDLER:
+    case INTERCEPTOR:
+    case TRANSITION:
+      return isolate->heap()->undefined_value();
+    case NONEXISTENT:
+      UNREACHABLE();
+  }
+  return isolate->heap()->undefined_value();
+}
+
+
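
Runtime_GetDataProperty reads an own named property while skipping interceptors
and accessors entirely, yielding undefined for anything that is not a plain
data property. A small sketch, assuming natives syntax is enabled
(d8 --allow-natives-syntax) so the function is callable as %GetDataProperty:

  var o = { y: 42 };
  Object.defineProperty(o, "x", {
    get: function() { throw new Error("never reached"); }
  });

  print(%GetDataProperty(o, "y"));  // 42
  print(%GetDataProperty(o, "x"));  // undefined -- the getter is not invoked
  print(%GetDataProperty(o, "z"));  // undefined -- no such property
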
 MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
                                         Handle<Object> object,
                                         Handle<Object> key,
@@ -4681,7 +4726,7 @@
 
   uint32_t index;
   if (key->AsArrayIndex(&index)) {
-    JSObject::LocalElementType type = object->HasLocalElement(index);
+    JSObject::LocalElementType type = object->GetLocalElementType(index);
     switch (type) {
       case JSObject::UNDEFINED_ELEMENT:
       case JSObject::STRING_CHARACTER_ELEMENT:
@@ -13083,33 +13128,6 @@
 }
 
 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_NewMessageObject) {
-  HandleScope scope(isolate);
-  CONVERT_ARG_HANDLE_CHECKED(String, type, 0);
-  CONVERT_ARG_HANDLE_CHECKED(JSArray, arguments, 1);
-  return *isolate->factory()->NewJSMessageObject(
-      type,
-      arguments,
-      0,
-      0,
-      isolate->factory()->undefined_value(),
-      isolate->factory()->undefined_value(),
-      isolate->factory()->undefined_value());
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetType) {
-  CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
-  return message->type();
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetArguments) {
-  CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
-  return message->arguments();
-}
-
-
 RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetStartPosition) {
   CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
   return Smi::FromInt(message->start_position());
@@ -13222,6 +13240,78 @@
   return isolate->heap()->ToBoolean(obj1->map() == obj2->map());
 }
 
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_IsObserved) {
+  ASSERT(args.length() == 1);
+  CONVERT_ARG_CHECKED(JSReceiver, obj, 0);
+  return isolate->heap()->ToBoolean(obj->map()->is_observed());
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetIsObserved) {
+  ASSERT(args.length() == 2);
+  CONVERT_ARG_CHECKED(JSReceiver, obj, 0);
+  CONVERT_BOOLEAN_ARG_CHECKED(is_observed, 1);
+  if (obj->map()->is_observed() != is_observed) {
+    MaybeObject* maybe = obj->map()->Copy();
+    Map* map;
+    if (!maybe->To(&map)) return maybe;
+    map->set_is_observed(is_observed);
+    obj->set_map(map);
+  }
+  return isolate->heap()->undefined_value();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetObserverDeliveryPending) {
+  ASSERT(args.length() == 0);
+  isolate->set_observer_delivery_pending(true);
+  return isolate->heap()->undefined_value();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetObservationState) {
+  ASSERT(args.length() == 0);
+  return isolate->heap()->observation_state();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateObjectHashTable) {
+  ASSERT(args.length() == 0);
+  return ObjectHashTable::Allocate(0);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ObjectHashTableGet) {
+  NoHandleAllocation ha;
+  ASSERT(args.length() == 2);
+  CONVERT_ARG_CHECKED(ObjectHashTable, table, 0);
+  Object* key = args[1];
+  Object* lookup = table->Lookup(key);
+  return lookup->IsTheHole() ? isolate->heap()->undefined_value() : lookup;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ObjectHashTableSet) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 3);
+  CONVERT_ARG_HANDLE_CHECKED(ObjectHashTable, table, 0);
+  Handle<Object> key = args.at<Object>(1);
+  Handle<Object> value = args.at<Object>(2);
+  return *PutIntoObjectHashTable(table, key, value);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ObjectHashTableHas) {
+  NoHandleAllocation ha;
+  ASSERT(args.length() == 2);
+  CONVERT_ARG_CHECKED(ObjectHashTable, table, 0);
+  Object* key = args[1];
+  Object* lookup = table->Lookup(key);
+  return isolate->heap()->ToBoolean(!lookup->IsTheHole());
+}
+
+
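
The Runtime_ObjectHashTable* entries give the observation library
(object-observe.js, not part of this diff) an identity-keyed side table, for
example for mapping observed objects to observer lists, while
Runtime_GetObservationState presumably hands out the per-isolate root that
holds such state. A hedged sketch of the table's behaviour, again under
--allow-natives-syntax; the real bookkeeping layout lives in the library
script:

  var table = %CreateObjectHashTable();
  var key = {};

  // Set may grow and reallocate the table, so always keep the returned value.
  table = %ObjectHashTableSet(table, key, "observer-list-for-key");
  print(%ObjectHashTableHas(table, key));   // true
  print(%ObjectHashTableGet(table, key));   // observer-list-for-key
  print(%ObjectHashTableHas(table, {}));    // false -- lookup is by identity
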
 // ----------------------------------------------------------------------------
 // Implementation of Runtime
 
diff --git a/src/runtime.h b/src/runtime.h
index c802906..f63844c 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -267,6 +267,7 @@
   F(DefineOrRedefineDataProperty, 4, 1) \
   F(DefineOrRedefineAccessorProperty, 5, 1) \
   F(IgnoreAttributesAndSetProperty, -1 /* 3 or 4 */, 1) \
+  F(GetDataProperty, 2, 1) \
   \
   /* Arrays */ \
   F(RemoveArrayHoles, 2, 1) \
@@ -302,6 +303,7 @@
   F(SetAdd, 2, 1) \
   F(SetHas, 2, 1) \
   F(SetDelete, 2, 1) \
+  F(SetGetSize, 1, 1) \
   \
   /* Harmony maps */ \
   F(MapInitialize, 1, 1) \
@@ -309,6 +311,7 @@
   F(MapHas, 2, 1) \
   F(MapDelete, 2, 1) \
   F(MapSet, 3, 1) \
+  F(MapGetSize, 1, 1) \
   \
   /* Harmony weakmaps */ \
   F(WeakMapInitialize, 1, 1) \
@@ -317,6 +320,16 @@
   F(WeakMapDelete, 2, 1) \
   F(WeakMapSet, 3, 1) \
   \
+  /* Harmony observe */ \
+  F(IsObserved, 1, 1) \
+  F(SetIsObserved, 2, 1) \
+  F(SetObserverDeliveryPending, 0, 1) \
+  F(GetObservationState, 0, 1) \
+  F(CreateObjectHashTable, 0, 1) \
+  F(ObjectHashTableGet, 2, 1) \
+  F(ObjectHashTableSet, 3, 1) \
+  F(ObjectHashTableHas, 2, 1) \
+  \
   /* Statements */ \
   F(NewClosure, 3, 1) \
   F(NewObject, 1, 1) \
@@ -364,9 +377,6 @@
   F(GetFromCache, 2, 1) \
   \
   /* Message objects */ \
-  F(NewMessageObject, 2, 1) \
-  F(MessageGetType, 1, 1) \
-  F(MessageGetArguments, 1, 1) \
   F(MessageGetStartPosition, 1, 1) \
   F(MessageGetScript, 1, 1) \
   \
diff --git a/src/serialize.cc b/src/serialize.cc
index b1aa0ed..dfc5574 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -523,6 +523,10 @@
       UNCLASSIFIED,
       50,
       "pending_message_script");
+  Add(ExternalReference::get_make_code_young_function(isolate).address(),
+      UNCLASSIFIED,
+      51,
+      "Code::MakeCodeYoung");
 }
 
 
diff --git a/src/v8.cc b/src/v8.cc
index 2407037..7d01582 100644
--- a/src/v8.cc
+++ b/src/v8.cc
@@ -38,6 +38,7 @@
 #include "hydrogen.h"
 #include "lithium-allocator.h"
 #include "log.h"
+#include "objects.h"
 #include "once.h"
 #include "platform.h"
 #include "runtime-profiler.h"
@@ -216,14 +217,22 @@
 
 
 void V8::FireCallCompletedCallback(Isolate* isolate) {
-  if (call_completed_callbacks_ == NULL) return;
+  bool has_call_completed_callbacks = call_completed_callbacks_ != NULL;
+  bool observer_delivery_pending =
+      FLAG_harmony_observation && isolate->observer_delivery_pending();
+  if (!has_call_completed_callbacks && !observer_delivery_pending) return;
   HandleScopeImplementer* handle_scope_implementer =
       isolate->handle_scope_implementer();
   if (!handle_scope_implementer->CallDepthIsZero()) return;
   // Fire callbacks.  Increase call depth to prevent recursive callbacks.
   handle_scope_implementer->IncrementCallDepth();
-  for (int i = 0; i < call_completed_callbacks_->length(); i++) {
-    call_completed_callbacks_->at(i)();
+  if (observer_delivery_pending) {
+    JSObject::DeliverChangeRecords(isolate);
+  }
+  if (has_call_completed_callbacks) {
+    for (int i = 0; i < call_completed_callbacks_->length(); i++) {
+      call_completed_callbacks_->at(i)();
+    }
   }
   handle_scope_implementer->DecrementCallDepth();
 }
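
This is the delivery point for the change records queued by the objects.cc
paths above: once the embedder call depth reaches zero, pending records are
flushed through JSObject::DeliverChangeRecords before the ordinary
call-completed callbacks run. Under the same assumptions as the earlier
Object.observe sketch, mutations made inside a single call into JS are
therefore delivered as one batch:

  var o = {};
  Object.observe(o, function(records) {
    print("delivered " + records.length + " records");
  });

  o.p = 1;
  o.q = 2;
  print("still inside the script");
  // Output order: "still inside the script" first, then "delivered 2 records"
  // once V8 has returned to the embedder.
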
diff --git a/src/v8natives.js b/src/v8natives.js
index e2e6429..20fc74d 100644
--- a/src/v8natives.js
+++ b/src/v8natives.js
@@ -60,7 +60,17 @@
   %ToFastProperties(object);
 }
 
-// Prevents changes to the prototype of a built-infunction.
+
+// Helper function to install a getter only property.
+function InstallGetter(object, name, getter) {
+  %FunctionSetName(getter, name);
+  %FunctionRemovePrototype(getter);
+  %DefineOrRedefineAccessorProperty(object, name, getter, null, DONT_ENUM);
+  %SetNativeFlag(getter);
+}
+
+
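
InstallGetter is the helper library scripts can use to expose getter-only
properties such as the new Set/Map size accessor; the actual call sites live in
the collection library script rather than in this hunk. A hypothetical use,
valid only inside a natives script where % syntax and DONT_ENUM are available:

  // Hypothetical example -- not a call site from this patch.
  var point = { x_: 3, y_: 4 };
  InstallGetter(point, "magnitude", function() {
    return Math.sqrt(this.x_ * this.x_ + this.y_ * this.y_);
  });
  // point.magnitude is 5; the property is non-enumerable and has no setter.
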
+// Prevents changes to the prototype of a built-in function.
 // The "prototype" property of the function object is made non-configurable,
 // and the prototype object is made non-extensible. The latter prevents
 // changing the __proto__ property.
diff --git a/src/version.cc b/src/version.cc
index d1dacd5..3132b5e 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,8 +34,8 @@
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     15
-#define BUILD_NUMBER      1
-#define PATCH_LEVEL       2
+#define BUILD_NUMBER      2
+#define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0
diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h
index d022340..f864174 100644
--- a/src/x64/assembler-x64-inl.h
+++ b/src/x64/assembler-x64-inl.h
@@ -42,6 +42,9 @@
 // Implementation of Assembler
 
 
+static const byte kCallOpcode = 0xE8;
+
+
 void Assembler::emitl(uint32_t x) {
   Memory::uint32_at(pc_) = x;
   pc_ += sizeof(uint32_t);
@@ -217,6 +220,12 @@
   } else if (IsCodeTarget(rmode_)) {
     Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
     CPU::FlushICache(pc_, sizeof(int32_t));
+  } else if (rmode_ == CODE_AGE_SEQUENCE) {
+    if (*pc_ == kCallOpcode) {
+      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
+      *p -= static_cast<int32_t>(delta);  // Relocate entry.
+      CPU::FlushICache(p, sizeof(uint32_t));
+    }
   }
 }
 
@@ -355,6 +364,21 @@
 }
 
 
+Code* RelocInfo::code_age_stub() {
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  ASSERT(*pc_ == kCallOpcode);
+  return Code::GetCodeFromTargetAddress(
+      Assembler::target_address_at(pc_ + 1));
+}
+
+
+void RelocInfo::set_code_age_stub(Code* stub) {
+  ASSERT(*pc_ == kCallOpcode);
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
+}
+
+
 Address RelocInfo::call_address() {
   ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
          (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
@@ -408,6 +432,8 @@
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     visitor->VisitExternalReference(this);
     CPU::FlushICache(pc_, sizeof(Address));
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    visitor->VisitCodeAgeSequence(this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // TODO(isolates): Get a cached isolate below.
   } else if (((RelocInfo::IsJSReturn(mode) &&
@@ -436,6 +462,8 @@
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     StaticVisitor::VisitExternalReference(this);
     CPU::FlushICache(pc_, sizeof(Address));
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    StaticVisitor::VisitCodeAgeSequence(heap, this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index 862a735..1f5bea9 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -349,9 +349,7 @@
 Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
     : AssemblerBase(arg_isolate),
       code_targets_(100),
-      positions_recorder_(this),
-      emit_debug_code_(FLAG_debug_code),
-      predictable_code_size_(false) {
+      positions_recorder_(this) {
   if (buffer == NULL) {
     // Do our own buffer management.
     if (buffer_size <= kMinimalBufferSize) {
@@ -1238,13 +1236,13 @@
     // Determine whether we can use 1-byte offsets for backwards branches,
     // which have a max range of 128 bytes.
 
-    // We also need to check the predictable_code_size_ flag here, because
-    // on x64, when the full code generator recompiles code for debugging, some
-    // places need to be padded out to a certain size. The debugger is keeping
-    // track of how often it did this so that it can adjust return addresses on
-    // the stack, but if the size of jump instructions can also change, that's
-    // not enough and the calculated offsets would be incorrect.
-    if (is_int8(offs - short_size) && !predictable_code_size_) {
+    // We also need to check predictable_code_size() flag here, because on x64,
+    // when the full code generator recompiles code for debugging, some places
+    // need to be padded out to a certain size. The debugger is keeping track of
+    // how often it did this so that it can adjust return addresses on the
+    // stack, but if the size of jump instructions can also change, that's not
+    // enough and the calculated offsets would be incorrect.
+    if (is_int8(offs - short_size) && !predictable_code_size()) {
       // 0111 tttn #8-bit disp.
       emit(0x70 | cc);
       emit((offs - short_size) & 0xFF);
@@ -1301,7 +1299,7 @@
   if (L->is_bound()) {
     int offs = L->pos() - pc_offset() - 1;
     ASSERT(offs <= 0);
-    if (is_int8(offs - short_size) && !predictable_code_size_) {
+    if (is_int8(offs - short_size) && !predictable_code_size()) {
       // 1110 1011 #8-bit disp.
       emit(0xEB);
       emit((offs - short_size) & 0xFF);
@@ -3047,7 +3045,8 @@
 
 
 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
-                                  1 << RelocInfo::INTERNAL_REFERENCE;
+    1 << RelocInfo::INTERNAL_REFERENCE |
+    1 << RelocInfo::CODE_AGE_SEQUENCE;
 
 
 bool RelocInfo::IsCodedSpecially() {
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index cd10d72..5f9e147 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -558,14 +558,6 @@
   Assembler(Isolate* isolate, void* buffer, int buffer_size);
   ~Assembler();
 
-  // Overrides the default provided by FLAG_debug_code.
-  void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
-
-  // Avoids using instructions that vary in size in unpredictable ways between
-  // the snapshot and the running VM.  This is needed by the full compiler so
-  // that it can recompile code with debug support and fix the PC.
-  void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
-
   // GetCode emits any pending (non-emitted) code and fills the descriptor
   // desc. GetCode() is idempotent; it returns the same result if no other
   // Assembler functions are invoked in between GetCode() calls.
@@ -1449,10 +1441,6 @@
   byte byte_at(int pos)  { return buffer_[pos]; }
   void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
 
- protected:
-  bool emit_debug_code() const { return emit_debug_code_; }
-  bool predictable_code_size() const { return predictable_code_size_; }
-
  private:
   byte* addr_at(int pos)  { return buffer_ + pos; }
   uint32_t long_at(int pos)  {
@@ -1654,10 +1642,6 @@
   List< Handle<Code> > code_targets_;
 
   PositionsRecorder positions_recorder_;
-
-  bool emit_debug_code_;
-  bool predictable_code_size_;
-
   friend class PositionsRecorder;
 };
 
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 9e4153a..ed0ec68 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -606,6 +606,46 @@
 }
 
 
+static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
+  // For now, we are relying on the fact that make_code_young doesn't do any
+  // garbage collection which allows us to save/restore the registers without
+  // worrying about which of them contain pointers. We also don't build an
+  // internal frame to make the code faster, since we shouldn't have to do stack
+  // crawls in MakeCodeYoung. This seems a bit fragile.
+
+  // Re-execute the code that was patched back to the young age when
+  // the stub returns.
+  __ subq(Operand(rsp, 0), Immediate(5));
+  __ Pushad();
+#ifdef _WIN64
+  __ movq(rcx, Operand(rsp, kNumSafepointRegisters * kPointerSize));
+#else
+  __ movq(rdi, Operand(rsp, kNumSafepointRegisters * kPointerSize));
+#endif
+  {  // NOLINT
+    FrameScope scope(masm, StackFrame::MANUAL);
+    __ PrepareCallCFunction(1);
+    __ CallCFunction(
+        ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+  }
+  __ Popad();
+  __ ret(0);
+}
+
+
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
+void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}                                                            \
+void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}
+CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
+
+
 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                              Deoptimizer::BailoutType type) {
   // Enter an internal frame.
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index f0f9c5d..a8e52e9 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -4740,7 +4740,7 @@
   Label non_ascii, allocated, ascii_data;
   __ movl(rcx, r8);
   __ and_(rcx, r9);
-  STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
   STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
   __ testl(rcx, Immediate(kStringEncodingMask));
   __ j(zero, &non_ascii);
@@ -4766,9 +4766,9 @@
   __ testb(rcx, Immediate(kAsciiDataHintMask));
   __ j(not_zero, &ascii_data);
   __ xor_(r8, r9);
-  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
-  __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
-  __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
+  STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
+  __ andb(r8, Immediate(kOneByteStringTag | kAsciiDataHintTag));
+  __ cmpb(r8, Immediate(kOneByteStringTag | kAsciiDataHintTag));
   __ j(equal, &ascii_data);
   // Allocate a two byte cons string.
   __ AllocateTwoByteConsString(rcx, rdi, no_reg, &call_runtime);
@@ -5288,7 +5288,7 @@
     // string's encoding is wrong because we always have to recheck encoding of
     // the newly created string's parent anyways due to externalized strings.
     Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
     STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
     __ testb(rbx, Immediate(kStringEncodingMask));
     __ j(zero, &two_byte_slice, Label::kNear);
@@ -5332,7 +5332,7 @@
   __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
 
   __ bind(&sequential_string);
-  STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
   __ testb(rbx, Immediate(kStringEncodingMask));
   __ j(zero, &two_byte_sequential);
 
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index 2924810..ffccf47 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -551,7 +551,7 @@
   // Dispatch on the encoding: ASCII or two-byte.
   Label ascii;
   __ bind(&seq_string);
-  STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
   STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
   __ testb(result, Immediate(kStringEncodingMask));
   __ j(not_zero, &ascii, Label::kNear);
@@ -577,6 +577,91 @@
 
 #undef __
 
+
+static const int kNoCodeAgeSequenceLength = 6;
+
+static byte* GetNoCodeAgeSequence(uint32_t* length) {
+  static bool initialized = false;
+  static byte sequence[kNoCodeAgeSequenceLength];
+  *length = kNoCodeAgeSequenceLength;
+  if (!initialized) {
+    // The sequence of instructions that is patched out for aging code is the
+    // following boilerplate stack-building prologue that is found both in
+    // FUNCTION and OPTIMIZED_FUNCTION code:
+    CodePatcher patcher(sequence, kNoCodeAgeSequenceLength);
+    patcher.masm()->push(rbp);
+    patcher.masm()->movq(rbp, rsp);
+    patcher.masm()->push(rsi);
+    patcher.masm()->push(rdi);
+    initialized = true;
+  }
+  return sequence;
+}
+
+
+byte* Code::FindPlatformCodeAgeSequence() {
+  byte* start = instruction_start();
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (!memcmp(start, young_sequence, young_length) ||
+      *start == kCallOpcode) {
+    return start;
+  } else {
+    byte* start_after_strict = NULL;
+    if (kind() == FUNCTION) {
+      start_after_strict = start + kSizeOfFullCodegenStrictModePrologue;
+    } else {
+      ASSERT(kind() == OPTIMIZED_FUNCTION);
+      start_after_strict = start + kSizeOfOptimizedStrictModePrologue;
+    }
+    ASSERT(!memcmp(start_after_strict, young_sequence, young_length) ||
+           *start_after_strict == kCallOpcode);
+    return start_after_strict;
+  }
+}
+
+
+bool Code::IsYoungSequence(byte* sequence) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  bool result = (!memcmp(sequence, young_sequence, young_length));
+  ASSERT(result || *sequence == kCallOpcode);
+  return result;
+}
+
+
+void Code::GetCodeAgeAndParity(byte* sequence, Age* age,
+                               MarkingParity* parity) {
+  if (IsYoungSequence(sequence)) {
+    *age = kNoAge;
+    *parity = NO_MARKING_PARITY;
+  } else {
+    sequence++;  // Skip the kCallOpcode byte
+    Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
+        Assembler::kCallTargetAddressOffset;
+    Code* stub = GetCodeFromTargetAddress(target_address);
+    GetCodeAgeAndParity(stub, age, parity);
+  }
+}
+
+
+void Code::PatchPlatformCodeAge(byte* sequence,
+                                Code::Age age,
+                                MarkingParity parity) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (age == kNoAge) {
+    memcpy(sequence, young_sequence, young_length);
+    CPU::FlushICache(sequence, young_length);
+  } else {
+    Code* stub = GetCodeAgeStub(age, parity);
+    CodePatcher patcher(sequence, young_length);
+    patcher.masm()->call(stub->instruction_start());
+    patcher.masm()->nop();
+  }
+}
+
+
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64
diff --git a/src/x64/codegen-x64.h b/src/x64/codegen-x64.h
index 2e80751..5d8bbff 100644
--- a/src/x64/codegen-x64.h
+++ b/src/x64/codegen-x64.h
@@ -39,6 +39,8 @@
 
 enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
 
+static const int kSizeOfFullCodegenStrictModePrologue = 14;
+static const int kSizeOfOptimizedStrictModePrologue = 14;
 
 // -------------------------------------------------------------------------
 // CodeGenerator
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 475fb9d..a198739 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -138,6 +138,8 @@
   // function calls.
   if (!info->is_classic_mode() || info->is_native()) {
     Label ok;
+    Label begin;
+    __ bind(&begin);
     __ testq(rcx, rcx);
     __ j(zero, &ok, Label::kNear);
     // +1 for return address.
@@ -145,6 +147,8 @@
     __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
     __ movq(Operand(rsp, receiver_offset), kScratchRegister);
     __ bind(&ok);
+    ASSERT(!FLAG_age_code ||
+           (kSizeOfFullCodegenStrictModePrologue == ok.pos() - begin.pos()));
   }
 
   // Open a frame scope to indicate that there is a frame on the stack.  The
@@ -3551,7 +3555,7 @@
   __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
   __ andb(scratch, Immediate(
       kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
-  __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
+  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
   __ j(not_equal, &bailout);
   __ AddSmiField(string_length,
                  FieldOperand(string, SeqAsciiString::kLengthOffset));
@@ -3590,7 +3594,7 @@
   __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
   __ andb(scratch, Immediate(
       kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
-  __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
+  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
   __ j(not_equal, &bailout);
 
   // Live registers:
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 4f8f10e..4596aca 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -133,6 +133,8 @@
   // object). rcx is zero for method calls and non-zero for function
   // calls.
   if (!info_->is_classic_mode() || info_->is_native()) {
+    Label begin;
+    __ bind(&begin);
     Label ok;
     __ testq(rcx, rcx);
     __ j(zero, &ok, Label::kNear);
@@ -141,6 +143,8 @@
     __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
     __ movq(Operand(rsp, receiver_offset), kScratchRegister);
     __ bind(&ok);
+    ASSERT(!FLAG_age_code ||
+           (kSizeOfOptimizedStrictModePrologue == ok.pos() - begin.pos()));
   }
 
   __ push(rbp);  // Caller's frame pointer.
@@ -2620,24 +2624,30 @@
 }
 
 
-void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
-  ElementsKind elements_kind = instr->elements_kind();
-  LOperand* key = instr->key();
-  if (!key->IsConstantOperand()) {
-    Register key_reg = ToRegister(key);
+template <class T>
+inline void LCodeGen::PrepareKeyForKeyedOp(T* hydrogen_instr, LOperand* key) {
+  if (ArrayOpClobbersKey<T>(hydrogen_instr)) {
     // Even though the HLoad/StoreKeyed (in this case) instructions force
     // the input representation for the key to be an integer, the input
     // gets replaced during bound check elimination with the index argument
     // to the bounds check, which can be tagged, so that case must be
     // handled here, too.
-    if (instr->hydrogen()->key()->representation().IsTagged()) {
+    Register key_reg = ToRegister(key);
+    if (hydrogen_instr->key()->representation().IsTagged()) {
       __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
+    } else if (hydrogen_instr->IsDehoisted()) {
       // Sign extend key because it could be a 32 bit negative value
       // and the dehoisted address computation happens in 64 bits
       __ movsxlq(key_reg, key_reg);
     }
   }
+}
+
+
+void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
+  ElementsKind elements_kind = instr->elements_kind();
+  LOperand* key = instr->key();
+  PrepareKeyForKeyedOp(instr->hydrogen(), key);
   Operand operand(BuildFastArrayOperand(
       instr->elements(),
       key,
@@ -2697,21 +2707,7 @@
 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
   XMMRegister result(ToDoubleRegister(instr->result()));
   LOperand* key = instr->key();
-  if (!key->IsConstantOperand()) {
-    Register key_reg = ToRegister(key);
-    // Even though the HLoad/StoreKeyed instructions force the input
-    // representation for the key to be an integer, the input gets replaced
-    // during bound check elimination with the index argument to the bounds
-    // check, which can be tagged, so that case must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsTagged()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
-      // Sign extend key because it could be a 32 bit negative value
-      // and the dehoisted address computation happens in 64 bits
-      __ movsxlq(key_reg, key_reg);
-    }
-  }
-
+  PrepareKeyForKeyedOp<HLoadKeyed>(instr->hydrogen(), key);
   if (instr->hydrogen()->RequiresHoleCheck()) {
     int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
         sizeof(kHoleNanLower32);
@@ -2738,21 +2734,7 @@
 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
   Register result = ToRegister(instr->result());
   LOperand* key = instr->key();
-  if (!key->IsConstantOperand()) {
-    Register key_reg = ToRegister(key);
-    // Even though the HLoad/StoreKeyedFastElement instructions force
-    // the input representation for the key to be an integer, the input
-    // gets replaced during bound check elimination with the index
-    // argument to the bounds check, which can be tagged, so that
-    // case must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsTagged()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
-      // Sign extend key because it could be a 32 bit negative value
-      // and the dehoisted address computation happens in 64 bits
-      __ movsxlq(key_reg, key_reg);
-    }
-  }
+  PrepareKeyForKeyedOp<HLoadKeyed>(instr->hydrogen(), key);
 
   // Load the result.
   __ movq(result,
@@ -3747,21 +3729,7 @@
 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
   ElementsKind elements_kind = instr->elements_kind();
   LOperand* key = instr->key();
-  if (!key->IsConstantOperand()) {
-    Register key_reg = ToRegister(key);
-    // Even though the HLoad/StoreKeyedFastElement instructions force
-    // the input representation for the key to be an integer, the input
-    // gets replaced during bound check elimination with the index
-    // argument to the bounds check, which can be tagged, so that case
-    // must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsTagged()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
-      // Sign extend key because it could be a 32 bit negative value
-      // and the dehoisted address computation happens in 64 bits
-      __ movsxlq(key_reg, key_reg);
-    }
-  }
+  PrepareKeyForKeyedOp<HStoreKeyed>(instr->hydrogen(), key);
   Operand operand(BuildFastArrayOperand(
       instr->elements(),
       key,
@@ -3811,22 +3779,7 @@
 void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
   XMMRegister value = ToDoubleRegister(instr->value());
   LOperand* key = instr->key();
-  if (!key->IsConstantOperand()) {
-    Register key_reg = ToRegister(key);
-    // Even though the HLoad/StoreKeyedFastElement instructions force
-    // the input representation for the key to be an integer, the
-    // input gets replaced during bound check elimination with the index
-    // argument to the bounds check, which can be tagged, so that case
-    // must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsTagged()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
-      // Sign extend key because it could be a 32 bit negative value
-      // and the dehoisted address computation happens in 64 bits
-      __ movsxlq(key_reg, key_reg);
-    }
-  }
-
+  PrepareKeyForKeyedOp<HStoreKeyed>(instr->hydrogen(), key);
   if (instr->NeedsCanonicalization()) {
     Label have_value;
 
@@ -3855,21 +3808,7 @@
   Register value = ToRegister(instr->value());
   Register elements = ToRegister(instr->elements());
   LOperand* key = instr->key();
-  if (!key->IsConstantOperand()) {
-    Register key_reg = ToRegister(key);
-    // Even though the HLoad/StoreKeyedFastElement instructions force
-    // the input representation for the key to be an integer, the
-    // input gets replaced during bound check elimination with the index
-    // argument to the bounds check, which can be tagged, so that case
-    // must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsTagged()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
-      // Sign extend key because it could be a 32 bit negative value
-      // and the dehoisted address computation happens in 64 bits
-      __ movsxlq(key_reg, key_reg);
-    }
-  }
+  PrepareKeyForKeyedOp<HStoreKeyed>(instr->hydrogen(), key);
 
   Operand operand =
       BuildFastArrayOperand(instr->elements(),
diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h
index e068f14..0f8a62a 100644
--- a/src/x64/lithium-codegen-x64.h
+++ b/src/x64/lithium-codegen-x64.h
@@ -341,6 +341,8 @@
   void DoStoreKeyedExternalArray(LStoreKeyed* instr);
   void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
   void DoStoreKeyedFixedArray(LStoreKeyed* instr);
+  template <class T>
+      void PrepareKeyForKeyedOp(T* hydrogen_instr, LOperand* key);
 
   Zone* zone_;
   LPlatformChunk* const chunk_;
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index a874a24..c6004e5 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -1844,16 +1844,15 @@
   ASSERT(instr->key()->representation().IsInteger32() ||
          instr->key()->representation().IsTagged());
   ElementsKind elements_kind = instr->elements_kind();
-  bool clobbers_key = instr->key()->representation().IsTagged();
+  bool clobbers_key = ArrayOpClobbersKey<HLoadKeyed>(instr);
   LOperand* key = clobbers_key
       ? UseTempRegister(instr->key())
       : UseRegisterOrConstantAtStart(instr->key());
-  LLoadKeyed* result = NULL;
+  LOperand* elements = UseRegisterAtStart(instr->elements());
+  LLoadKeyed* result = new(zone()) LLoadKeyed(elements, key);
 
-  if (!instr->is_external()) {
-    LOperand* obj = UseRegisterAtStart(instr->elements());
-    result = new(zone()) LLoadKeyed(obj, key);
-  } else {
+#ifdef DEBUG
+  if (instr->is_external()) {
     ASSERT(
         (instr->representation().IsInteger32() &&
          (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
@@ -1861,9 +1860,8 @@
         (instr->representation().IsDouble() &&
          ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
           (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
-    LOperand* external_pointer = UseRegister(instr->elements());
-    result = new(zone()) LLoadKeyed(external_pointer, key);
   }
+#endif
 
   DefineAsRegister(result);
   bool can_deoptimize = instr->RequiresHoleCheck() ||
@@ -1884,33 +1882,21 @@
 
 
 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
-  ElementsKind elements_kind = instr->elements_kind();
   bool needs_write_barrier = instr->NeedsWriteBarrier();
-  bool clobbers_key = instr->key()->representation().IsTagged();
+  bool clobbers_key = ArrayOpClobbersKey<HStoreKeyed>(instr);
   LOperand* key = (clobbers_key || needs_write_barrier)
       ? UseTempRegister(instr->key())
       : UseRegisterOrConstantAtStart(instr->key());
-  bool val_is_temp_register =
-      elements_kind == EXTERNAL_PIXEL_ELEMENTS ||
-      elements_kind == EXTERNAL_FLOAT_ELEMENTS;
-  LOperand* val = (needs_write_barrier || val_is_temp_register)
+  LOperand* val = needs_write_barrier
       ? UseTempRegister(instr->value())
       : UseRegisterAtStart(instr->value());
-  LStoreKeyed* result = NULL;
+  LOperand* elements = UseRegisterAtStart(instr->elements());
 
+#ifdef DEBUG
   if (!instr->is_external()) {
     ASSERT(instr->elements()->representation().IsTagged());
-
-    LOperand* object = NULL;
-    if (instr->value()->representation().IsDouble()) {
-      object = UseRegisterAtStart(instr->elements());
-    } else {
-      ASSERT(instr->value()->representation().IsTagged());
-      object = UseTempRegister(instr->elements());
-    }
-
-    result = new(zone()) LStoreKeyed(object, key, val);
   } else {
+    ElementsKind elements_kind = instr->elements_kind();
     ASSERT(
         (instr->value()->representation().IsInteger32() &&
          (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
@@ -1919,11 +1905,10 @@
          ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
           (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
     ASSERT(instr->elements()->representation().IsExternal());
-
-    LOperand* external_pointer = UseRegister(instr->elements());
-    result = new(zone()) LStoreKeyed(external_pointer, key, val);
   }
+#endif
 
+  LStoreKeyed* result = new(zone()) LStoreKeyed(elements, key, val);
   ASSERT(result != NULL);
   return result;
 }
diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h
index 79ce968..5439028 100644
--- a/src/x64/lithium-x64.h
+++ b/src/x64/lithium-x64.h
@@ -1371,6 +1371,14 @@
 };
 
 
+template <class T>
+inline static bool ArrayOpClobbersKey(T *value) {
+  CHECK(value->IsLoadKeyed() || value->IsStoreKeyed());
+  return !value->IsConstant() && (value->key()->representation().IsTagged()
+                                  || value->IsDehoisted());
+}
+
+
 class LLoadKeyedGeneric: public LTemplateInstruction<1, 2, 0> {
  public:
   LLoadKeyedGeneric(LOperand* obj, LOperand* key) {
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 7750674..962c2e8 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -2228,7 +2228,7 @@
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
 
   andl(scratch, Immediate(kFlatAsciiStringMask));
-  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
+  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kOneByteStringTag));
   j(not_equal, failure, near_jump);
 }
 
@@ -4503,7 +4503,7 @@
 
   bind(&not_external);
   // Sequential string, either ASCII or UC16.
-  ASSERT(kAsciiStringTag == 0x04);
+  ASSERT(kOneByteStringTag == 0x04);
   and_(length, Immediate(kStringEncodingMask));
   xor_(length, Immediate(kStringEncodingMask));
   addq(length, Immediate(0x04));
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index cd71086..b120efb 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -3240,6 +3240,7 @@
 #endif
 
   // Load the initial map and verify that it is in fact a map.
+  // rdi: constructor
   __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
   // Will both indicate a NULL and a Smi.
   STATIC_ASSERT(kSmiTag == 0);
@@ -3249,18 +3250,22 @@
 
 #ifdef DEBUG
   // Cannot construct functions this way.
-  // rdi: constructor
   // rbx: initial map
   __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
-  __ Assert(not_equal, "Function constructed by construct stub.");
+  __ Check(not_equal, "Function constructed by construct stub.");
 #endif
 
   // Now allocate the JSObject in new space.
-  // rdi: constructor
   // rbx: initial map
+  ASSERT(function->has_initial_map());
+  int instance_size = function->initial_map()->instance_size();
+#ifdef DEBUG
   __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
   __ shl(rcx, Immediate(kPointerSizeLog2));
-  __ AllocateInNewSpace(rcx, rdx, rcx, no_reg,
+  __ cmpq(rcx, Immediate(instance_size));
+  __ Check(equal, "Instance size of initial map changed.");
+#endif
+  __ AllocateInNewSpace(instance_size, rdx, rcx, no_reg,
                         &generic_stub_call, NO_ALLOCATION_FLAGS);
 
   // Allocated the JSObject, now initialize the fields and add the heap tag.
@@ -3306,7 +3311,6 @@
   }
 
   // Fill the unused in-object property fields with undefined.
-  ASSERT(function->has_initial_map());
   for (int i = shared->this_property_assignments_count();
        i < function->initial_map()->inobject_properties();
        i++) {
diff --git a/test/cctest/cctest.gyp b/test/cctest/cctest.gyp
index 66d848c..80eecfd 100644
--- a/test/cctest/cctest.gyp
+++ b/test/cctest/cctest.gyp
@@ -79,6 +79,7 @@
         'test-lockers.cc',
         'test-log.cc',
         'test-mark-compact.cc',
+        'test-object-observe.cc',
         'test-parsing.cc',
         'test-platform-tls.cc',
         'test-profile-generator.cc',
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index 3cf9d8a..41eb68f 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -2447,6 +2447,100 @@
 }
 
 
+// TODO(mstarzinger): This should be a THREADED_TEST but causes failures
+// on the buildbots, so was made non-threaded for the time being.
+TEST(ApiObjectGroupsCycleForScavenger) {
+  HandleScope scope;
+  LocalContext env;
+
+  WeakCallCounter counter(1234);
+
+  Persistent<Object> g1s1;
+  Persistent<Object> g1s2;
+  Persistent<Object> g2s1;
+  Persistent<Object> g2s2;
+  Persistent<Object> g3s1;
+  Persistent<Object> g3s2;
+
+  {
+    HandleScope scope;
+    g1s1 = Persistent<Object>::New(Object::New());
+    g1s2 = Persistent<Object>::New(Object::New());
+    g1s1.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g1s2.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+
+    g2s1 = Persistent<Object>::New(Object::New());
+    g2s2 = Persistent<Object>::New(Object::New());
+    g2s1.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g2s2.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+
+    g3s1 = Persistent<Object>::New(Object::New());
+    g3s2 = Persistent<Object>::New(Object::New());
+    g3s1.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+    g3s2.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+  }
+
+  // Make a root.
+  Persistent<Object> root = Persistent<Object>::New(g1s1);
+  root.MarkPartiallyDependent();
+
+  // Connect groups.  We're building the following cycle:
+  // G1: { g1s1, g2s1 }, g1s1 implicitly references g2s1, ditto for other
+  // groups.
+  {
+    g1s1.MarkPartiallyDependent();
+    g1s2.MarkPartiallyDependent();
+    g2s1.MarkPartiallyDependent();
+    g2s2.MarkPartiallyDependent();
+    g3s1.MarkPartiallyDependent();
+    g3s2.MarkPartiallyDependent();
+    Persistent<Value> g1_objects[] = { g1s1, g1s2 };
+    Persistent<Value> g2_objects[] = { g2s1, g2s2 };
+    Persistent<Value> g3_objects[] = { g3s1, g3s2 };
+    V8::AddObjectGroup(g1_objects, 2);
+    g1s1->Set(v8_str("x"), g2s1);
+    V8::AddObjectGroup(g2_objects, 2);
+    g2s1->Set(v8_str("x"), g3s1);
+    V8::AddObjectGroup(g3_objects, 2);
+    g3s1->Set(v8_str("x"), g1s1);
+  }
+
+  HEAP->CollectGarbage(i::NEW_SPACE);
+
+  // All objects should be alive.
+  CHECK_EQ(0, counter.NumberOfWeakCalls());
+
+  // Weaken the root.
+  root.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+  root.MarkPartiallyDependent();
+
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  // Groups are deleted, rebuild groups.
+  {
+    g1s1.MarkPartiallyDependent(isolate);
+    g1s2.MarkPartiallyDependent(isolate);
+    g2s1.MarkPartiallyDependent(isolate);
+    g2s2.MarkPartiallyDependent(isolate);
+    g3s1.MarkPartiallyDependent(isolate);
+    g3s2.MarkPartiallyDependent(isolate);
+    Persistent<Value> g1_objects[] = { g1s1, g1s2 };
+    Persistent<Value> g2_objects[] = { g2s1, g2s2 };
+    Persistent<Value> g3_objects[] = { g3s1, g3s2 };
+    V8::AddObjectGroup(g1_objects, 2);
+    g1s1->Set(v8_str("x"), g2s1);
+    V8::AddObjectGroup(g2_objects, 2);
+    g2s1->Set(v8_str("x"), g3s1);
+    V8::AddObjectGroup(g3_objects, 2);
+    g3s1->Set(v8_str("x"), g1s1);
+  }
+
+  HEAP->CollectGarbage(i::NEW_SPACE);
+
+  // All objects should be gone. 7 global handles in total.
+  CHECK_EQ(7, counter.NumberOfWeakCalls());
+}
+
+
 THREADED_TEST(ScriptException) {
   v8::HandleScope scope;
   LocalContext env;
@@ -5316,23 +5410,28 @@
   v8::Persistent<Context> context = Context::New();
   Context::Scope context_scope(context);
 
-  v8::Persistent<v8::Object> object_a;
+  v8::Persistent<v8::Object> object_a, object_b;
 
   {
     v8::HandleScope handle_scope;
     object_a = v8::Persistent<v8::Object>::New(v8::Object::New());
+    object_b = v8::Persistent<v8::Object>::New(v8::Object::New());
   }
 
   v8::Isolate* isolate = v8::Isolate::GetCurrent();
   bool object_a_disposed = false;
+  bool object_b_disposed = false;
   object_a.MakeWeak(&object_a_disposed, &DisposeAndSetFlag);
+  object_b.MakeWeak(&object_b_disposed, &DisposeAndSetFlag);
   CHECK(!object_a.IsIndependent());
-  CHECK(!object_a.IsIndependent(isolate));
+  CHECK(!object_b.IsIndependent(isolate));
   object_a.MarkIndependent();
+  object_b.MarkIndependent(isolate);
   CHECK(object_a.IsIndependent());
-  CHECK(object_a.IsIndependent(isolate));
+  CHECK(object_b.IsIndependent(isolate));
   HEAP->PerformScavenge();
   CHECK(object_a_disposed);
+  CHECK(object_b_disposed);
 }
 
 
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 811973b..2bb3af6 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -1133,6 +1133,65 @@
 }
 
 
+TEST(TestCodeFlushingIncrementalAbort) {
+  // If we do not flush code this test is invalid.
+  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
+  i::FLAG_allow_natives_syntax = true;
+  InitializeVM();
+  v8::HandleScope scope;
+  const char* source = "function foo() {"
+                       "  var x = 42;"
+                       "  var y = 42;"
+                       "  var z = x + y;"
+                       "};"
+                       "foo()";
+  Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
+
+  // This compile will add the code to the compilation cache.
+  { v8::HandleScope scope;
+    CompileRun(source);
+  }
+
+  // Check function is compiled.
+  Object* func_value = Isolate::Current()->context()->global_object()->
+      GetProperty(*foo_name)->ToObjectChecked();
+  CHECK(func_value->IsJSFunction());
+  Handle<JSFunction> function(JSFunction::cast(func_value));
+  CHECK(function->shared()->is_compiled());
+
+  // The code will survive at least two GCs.
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  CHECK(function->shared()->is_compiled());
+
+  // Bump the code age so that flushing is triggered.
+  const int kAgingThreshold = 6;
+  function->shared()->set_code_age(kAgingThreshold);
+
+  // Simulate incremental marking so that the function is enqueued as
+  // code flushing candidate.
+  SimulateIncrementalMarking();
+
+  // Enable the debugger and add a breakpoint while incremental marking
+  // is running so that incremental marking aborts and code flushing is
+  // disabled.
+  int position = 0;
+  Handle<Object> breakpoint_object(Smi::FromInt(0));
+  ISOLATE->debug()->SetBreakPoint(function, breakpoint_object, &position);
+  ISOLATE->debug()->ClearAllBreakPoints();
+
+  // Force optimization now that code flushing is disabled.
+  { v8::HandleScope scope;
+    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
+  }
+
+  // Simulate one final GC to make sure the candidate queue is sane.
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
+  CHECK(function->is_compiled() || !function->IsOptimized());
+}
+
+
 // Count the number of native contexts in the weak list of native contexts.
 int CountNativeContexts() {
   int count = 0;
diff --git a/test/cctest/test-object-observe.cc b/test/cctest/test-object-observe.cc
new file mode 100644
index 0000000..374dca4
--- /dev/null
+++ b/test/cctest/test-object-observe.cc
@@ -0,0 +1,196 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "cctest.h"
+
+using namespace v8;
+
+namespace {
+// Need to create a new isolate when FLAG_harmony_observation is on.
+class HarmonyIsolate {
+ public:
+  HarmonyIsolate() {
+    i::FLAG_harmony_observation = true;
+    isolate_ = Isolate::New();
+    isolate_->Enter();
+  }
+
+  ~HarmonyIsolate() {
+    isolate_->Exit();
+    isolate_->Dispose();
+  }
+
+ private:
+  Isolate* isolate_;
+};
+}
+
+TEST(PerIsolateState) {
+  HarmonyIsolate isolate;
+  HandleScope scope;
+  LocalContext context1;
+  CompileRun(
+      "var count = 0;"
+      "var calls = 0;"
+      "var observer = function(records) { count = records.length; calls++ };"
+      "var obj = {};"
+      "Object.observe(obj, observer);");
+  Handle<Value> observer = CompileRun("observer");
+  Handle<Value> obj = CompileRun("obj");
+  Handle<Value> notify_fun1 = CompileRun(
+      "(function() { obj.foo = 'bar'; })");
+  Handle<Value> notify_fun2;
+  {
+    LocalContext context2;
+    context2->Global()->Set(String::New("obj"), obj);
+    notify_fun2 = CompileRun(
+        "(function() { obj.foo = 'baz'; })");
+  }
+  Handle<Value> notify_fun3;
+  {
+    LocalContext context3;
+    context3->Global()->Set(String::New("obj"), obj);
+    notify_fun3 = CompileRun(
+        "(function() { obj.foo = 'bat'; })");
+  }
+  {
+    LocalContext context4;
+    context4->Global()->Set(String::New("observer"), observer);
+    context4->Global()->Set(String::New("fun1"), notify_fun1);
+    context4->Global()->Set(String::New("fun2"), notify_fun2);
+    context4->Global()->Set(String::New("fun3"), notify_fun3);
+    CompileRun("fun1(); fun2(); fun3(); Object.deliverChangeRecords(observer)");
+  }
+  CHECK_EQ(1, CompileRun("calls")->Int32Value());
+  CHECK_EQ(3, CompileRun("count")->Int32Value());
+}
+
+TEST(EndOfMicrotaskDelivery) {
+  HarmonyIsolate isolate;
+  HandleScope scope;
+  LocalContext context;
+  CompileRun(
+      "var obj = {};"
+      "var count = 0;"
+      "var observer = function(records) { count = records.length };"
+      "Object.observe(obj, observer);"
+      "obj.foo = 'bar';");
+  CHECK_EQ(1, CompileRun("count")->Int32Value());
+}
+
+TEST(DeliveryOrdering) {
+  HarmonyIsolate isolate;
+  HandleScope scope;
+  LocalContext context;
+  CompileRun(
+      "var obj1 = {};"
+      "var obj2 = {};"
+      "var ordering = [];"
+      "function observer2() { ordering.push(2); };"
+      "function observer1() { ordering.push(1); };"
+      "function observer3() { ordering.push(3); };"
+      "Object.observe(obj1, observer1);"
+      "Object.observe(obj1, observer2);"
+      "Object.observe(obj1, observer3);"
+      "obj1.foo = 'bar';");
+  CHECK_EQ(3, CompileRun("ordering.length")->Int32Value());
+  CHECK_EQ(1, CompileRun("ordering[0]")->Int32Value());
+  CHECK_EQ(2, CompileRun("ordering[1]")->Int32Value());
+  CHECK_EQ(3, CompileRun("ordering[2]")->Int32Value());
+  CompileRun(
+      "ordering = [];"
+      "Object.observe(obj2, observer3);"
+      "Object.observe(obj2, observer2);"
+      "Object.observe(obj2, observer1);"
+      "obj2.foo = 'baz'");
+  CHECK_EQ(3, CompileRun("ordering.length")->Int32Value());
+  CHECK_EQ(1, CompileRun("ordering[0]")->Int32Value());
+  CHECK_EQ(2, CompileRun("ordering[1]")->Int32Value());
+  CHECK_EQ(3, CompileRun("ordering[2]")->Int32Value());
+}
+
+TEST(DeliveryOrderingReentrant) {
+  HarmonyIsolate isolate;
+  HandleScope scope;
+  LocalContext context;
+  CompileRun(
+      "var obj = {};"
+      "var reentered = false;"
+      "var ordering = [];"
+      "function observer1() { ordering.push(1); };"
+      "function observer2() {"
+      "  if (!reentered) {"
+      "    obj.foo = 'baz';"
+      "    reentered = true;"
+      "  }"
+      "  ordering.push(2);"
+      "};"
+      "function observer3() { ordering.push(3); };"
+      "Object.observe(obj, observer1);"
+      "Object.observe(obj, observer2);"
+      "Object.observe(obj, observer3);"
+      "obj.foo = 'bar';");
+  CHECK_EQ(5, CompileRun("ordering.length")->Int32Value());
+  CHECK_EQ(1, CompileRun("ordering[0]")->Int32Value());
+  CHECK_EQ(2, CompileRun("ordering[1]")->Int32Value());
+  CHECK_EQ(3, CompileRun("ordering[2]")->Int32Value());
+  // Note that we re-deliver to observers 1 and 2, while observer3
+  // already received the second record during the first round.
+  CHECK_EQ(1, CompileRun("ordering[3]")->Int32Value());
+  CHECK_EQ(2, CompileRun("ordering[4]")->Int32Value());
+}
+
+TEST(ObjectHashTableGrowth) {
+  HarmonyIsolate isolate;
+  HandleScope scope;
+  // Initializing this context sets up initial hash tables.
+  LocalContext context;
+  Handle<Value> obj = CompileRun("obj = {};");
+  Handle<Value> observer = CompileRun(
+      "var ran = false;"
+      "(function() { ran = true })");
+  {
+    // As does initializing this context.
+    LocalContext context2;
+    context2->Global()->Set(String::New("obj"), obj);
+    context2->Global()->Set(String::New("observer"), observer);
+    CompileRun(
+        "var objArr = [];"
+        // 100 objects should be enough to make the hash table grow
+        // (and thus relocate).
+        "for (var i = 0; i < 100; ++i) {"
+        "  objArr.push({});"
+        "  Object.observe(objArr[objArr.length-1], function(){});"
+        "}"
+        "Object.observe(obj, observer);");
+  }
+  // obj is now marked "is_observed", but our map has moved.
+  CompileRun("obj.foo = 'bar'");
+  CHECK(CompileRun("ran")->BooleanValue());
+}
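The mjsunit changes below replace the earlier Object.notify(obj, record) entry point with the Object.getNotifier API. A minimal sketch of the new surface, assuming --harmony-observation (the delivered record's object field is always the observed object, regardless of what the caller passes):

var obj = {};
var observer = function(records) {
  // Each record arrives with object === obj, plus type/name and any expando
  // properties copied from the notified record.
};
Object.observe(obj, observer);
var notifier = Object.getNotifier(obj);     // same notifier on every call
notifier.notify({ type: 'updated', name: 'foo' });
Object.deliverChangeRecords(observer);      // force synchronous delivery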
diff --git a/test/mjsunit/error-accessors.js b/test/mjsunit/error-accessors.js
new file mode 100644
index 0000000..9581050
--- /dev/null
+++ b/test/mjsunit/error-accessors.js
@@ -0,0 +1,54 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that the message property of error objects is a data property.
+
+var o;
+
+// message is constructed using the constructor.
+var error1 = new Error("custom message");
+o = {};
+o.__proto__ = error1;
+
+assertEquals("custom message",
+             Object.getOwnPropertyDescriptor(error1, "message").value);
+o.message = "another message";
+assertEquals("another message", o.message);
+assertEquals("custom message", error1.message);
+
+// message is constructed by the runtime.
+var error2;
+try { x.x } catch (e) { error2 = e; }
+o = {};
+o.__proto__ = error2;
+
+assertEquals("x is not defined",
+             Object.getOwnPropertyDescriptor(error2, "message").value);
+o.message = "another message";
+assertEquals("another message", o.message);
+assertEquals("x is not defined", error2.message);
+
diff --git a/test/mjsunit/harmony/collections.js b/test/mjsunit/harmony/collections.js
index f3db7ea..0219f39 100644
--- a/test/mjsunit/harmony/collections.js
+++ b/test/mjsunit/harmony/collections.js
@@ -313,4 +313,60 @@
 // Stress Test
 // There is a proposed stress-test available at the es-discuss mailing list
 // which cannot be reasonably automated.  Check it out by hand if you like:
-// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html
\ No newline at end of file
+// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html
+
+
+// Set and Map size getters
+var setSizeDescriptor = Object.getOwnPropertyDescriptor(Set.prototype, 'size');
+assertEquals(undefined, setSizeDescriptor.value);
+assertEquals(undefined, setSizeDescriptor.set);
+assertTrue(setSizeDescriptor.get instanceof Function);
+assertEquals(undefined, setSizeDescriptor.get.prototype);
+assertFalse(setSizeDescriptor.enumerable);
+assertTrue(setSizeDescriptor.configurable);
+
+var s = new Set();
+assertFalse(s.hasOwnProperty('size'));
+for (var i = 0; i < 10; i++) {
+  assertEquals(i, s.size);
+  s.add(i);
+}
+for (var i = 9; i >= 0; i--) {
+  s.delete(i);
+  assertEquals(i, s.size);
+}
+
+
+var mapSizeDescriptor = Object.getOwnPropertyDescriptor(Map.prototype, 'size');
+assertEquals(undefined, mapSizeDescriptor.value);
+assertEquals(undefined, mapSizeDescriptor.set);
+assertTrue(mapSizeDescriptor.get instanceof Function);
+assertEquals(undefined, mapSizeDescriptor.get.prototype);
+assertFalse(mapSizeDescriptor.enumerable);
+assertTrue(mapSizeDescriptor.configurable);
+
+var m = new Map();
+assertFalse(m.hasOwnProperty('size'));
+for (var i = 0; i < 10; i++) {
+  assertEquals(i, m.size);
+  m.set(i, i);
+}
+for (var i = 9; i >= 0; i--) {
+  m.delete(i);
+  assertEquals(i, m.size);
+}
+
+// Test clear
+var s = new Set();
+s.add(42);
+assertTrue(s.has(42));
+s.clear();
+assertFalse(s.has(42));
+assertEquals(0, s.size);
+
+var m = new Map();
+m.set(42, true);
+assertTrue(m.has(42));
+m.clear();
+assertFalse(m.has(42));
+assertEquals(0, m.size);
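As the descriptor checks above spell out, size is an accessor inherited from the prototype rather than an own data property, so the getter can be pulled off Set.prototype (or Map.prototype) and applied to any instance; a small sketch in the same test style:

var sizeGetter = Object.getOwnPropertyDescriptor(Set.prototype, 'size').get;
var set = new Set();
set.add('a');
set.add('b');
assertEquals(2, sizeGetter.call(set));  // same value as set.size
assertEquals(2, set.size);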
diff --git a/test/mjsunit/harmony/object-observe.js b/test/mjsunit/harmony/object-observe.js
index 07656d3..945841b 100644
--- a/test/mjsunit/harmony/object-observe.js
+++ b/test/mjsunit/harmony/object-observe.js
@@ -25,7 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Flags: --harmony-object-observe
+// Flags: --harmony-observation
 
 var allObservers = [];
 function reset() {
@@ -88,6 +88,7 @@
 Object.defineProperty(changeRecordWithAccessor, 'name', {
   get: function() {
     recordCreated = true;
+    return "bar";
   },
   enumerable: true
 })
@@ -100,26 +101,42 @@
 // Object.unobserve
 assertThrows(function() { Object.unobserve(4, observer.callback); }, TypeError);
 
-// Object.notify
-assertThrows(function() { Object.notify(obj, {}); }, TypeError);
-assertThrows(function() { Object.notify(obj, { type: 4 }); }, TypeError);
-Object.notify(obj, changeRecordWithAccessor);
+// Object.getNotifier
+var notifier = Object.getNotifier(obj);
+assertSame(notifier, Object.getNotifier(obj));
+assertEquals(null, Object.getNotifier(Object.freeze({})));
+assertFalse(notifier.hasOwnProperty('notify'));
+assertEquals([], Object.keys(notifier));
+var notifyDesc = Object.getOwnPropertyDescriptor(notifier.__proto__, 'notify');
+assertTrue(notifyDesc.configurable);
+assertTrue(notifyDesc.writable);
+assertFalse(notifyDesc.enumerable);
+assertThrows(function() { notifier.notify({}); }, TypeError);
+assertThrows(function() { notifier.notify({ type: 4 }); }, TypeError);
+var notify = notifier.notify;
+assertThrows(function() { notify.call(undefined, { type: 'a' }); }, TypeError);
+assertThrows(function() { notify.call(null, { type: 'a' }); }, TypeError);
+assertThrows(function() { notify.call(5, { type: 'a' }); }, TypeError);
+assertThrows(function() { notify.call('hello', { type: 'a' }); }, TypeError);
+assertThrows(function() { notify.call(false, { type: 'a' }); }, TypeError);
+assertThrows(function() { notify.call({}, { type: 'a' }); }, TypeError);
 assertFalse(recordCreated);
+notifier.notify(changeRecordWithAccessor);
+assertFalse(recordCreated);  // not observed yet
 
 // Object.deliverChangeRecords
 assertThrows(function() { Object.deliverChangeRecords(nonFunction); }, TypeError);
 
 // Multiple records are delivered.
 Object.observe(obj, observer.callback);
-Object.notify(obj, {
-  object: obj,
+notifier.notify({
   type: 'updated',
   name: 'foo',
   expando: 1
 });
 
-Object.notify(obj, {
-  object: obj,
+notifier.notify({
+  object: notifier,  // object property is ignored
   type: 'deleted',
   name: 'bar',
   expando2: 'str'
@@ -139,7 +156,7 @@
 reset();
 Object.observe(obj, observer.callback);
 Object.observe(obj, observer.callback);
-Object.notify(obj, {
+Object.getNotifier(obj).notify({
   type: 'foo',
 });
 Object.deliverChangeRecords(observer.callback);
@@ -148,7 +165,7 @@
 // Observation can be stopped.
 reset();
 Object.unobserve(obj, observer.callback);
-Object.notify(obj, {
+Object.getNotifier(obj).notify({
   type: 'foo',
 });
 Object.deliverChangeRecords(observer.callback);
@@ -158,7 +175,7 @@
 reset();
 Object.unobserve(obj, observer.callback);
 Object.unobserve(obj, observer.callback);
-Object.notify(obj, {
+Object.getNotifier(obj).notify({
   type: 'foo',
 });
 Object.deliverChangeRecords(observer.callback);
@@ -166,11 +183,11 @@
 
 // Re-observation works and only includes changeRecords after of call.
 reset();
-Object.notify(obj, {
+Object.getNotifier(obj).notify({
   type: 'foo',
 });
 Object.observe(obj, observer.callback);
-Object.notify(obj, {
+Object.getNotifier(obj).notify({
   type: 'foo',
 });
 records = undefined;
@@ -180,31 +197,31 @@
 // Observing a continuous stream of changes, while itermittantly unobserving.
 reset();
 Object.observe(obj, observer.callback);
-Object.notify(obj, {
+Object.getNotifier(obj).notify({
   type: 'foo',
   val: 1
 });
 
 Object.unobserve(obj, observer.callback);
-Object.notify(obj, {
+Object.getNotifier(obj).notify({
   type: 'foo',
   val: 2
 });
 
 Object.observe(obj, observer.callback);
-Object.notify(obj, {
+Object.getNotifier(obj).notify({
   type: 'foo',
   val: 3
 });
 
 Object.unobserve(obj, observer.callback);
-Object.notify(obj, {
+Object.getNotifier(obj).notify({
   type: 'foo',
   val: 4
 });
 
 Object.observe(obj, observer.callback);
-Object.notify(obj, {
+Object.getNotifier(obj).notify({
   type: 'foo',
   val: 5
 });
@@ -217,25 +234,358 @@
   { object: obj, type: 'foo', val: 5 }
 ]);
 
-// Observing multiple objects; records appear in order;.
+// Observing multiple objects; records appear in order.
 reset();
 var obj2 = {};
 var obj3 = {}
 Object.observe(obj, observer.callback);
 Object.observe(obj3, observer.callback);
 Object.observe(obj2, observer.callback);
-Object.notify(obj, {
-  type: 'foo',
+Object.getNotifier(obj).notify({
+  type: 'foo1',
 });
-Object.notify(obj2, {
-  type: 'foo',
+Object.getNotifier(obj2).notify({
+  type: 'foo2',
 });
-Object.notify(obj3, {
-  type: 'foo',
+Object.getNotifier(obj3).notify({
+  type: 'foo3',
 });
+Object.observe(obj3, observer.callback);
 Object.deliverChangeRecords(observer.callback);
 observer.assertCallbackRecords([
-  { object: obj, type: 'foo' },
-  { object: obj2, type: 'foo' },
-  { object: obj3, type: 'foo' }
-]);
\ No newline at end of file
+  { object: obj, type: 'foo1' },
+  { object: obj2, type: 'foo2' },
+  { object: obj3, type: 'foo3' }
+]);
+
+// Observing named properties.
+reset();
+var obj = {a: 1}
+Object.observe(obj, observer.callback);
+obj.a = 2;
+obj["a"] = 3;
+delete obj.a;
+obj.a = 4;
+obj.a = 4;  // ignored
+obj.a = 5;
+Object.defineProperty(obj, "a", {value: 6});
+Object.defineProperty(obj, "a", {writable: false});
+obj.a = 7;  // ignored
+Object.defineProperty(obj, "a", {value: 8});
+Object.defineProperty(obj, "a", {value: 7, writable: true});
+Object.defineProperty(obj, "a", {get: function() {}});
+Object.defineProperty(obj, "a", {get: function() {}});
+delete obj.a;
+delete obj.a;
+Object.defineProperty(obj, "a", {get: function() {}, configurable: true});
+Object.defineProperty(obj, "a", {value: 9, writable: true});
+obj.a = 10;
+delete obj.a;
+Object.defineProperty(obj, "a", {value: 11, configurable: true});
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: obj, name: "a", type: "updated", oldValue: 1 },
+  { object: obj, name: "a", type: "updated", oldValue: 2 },
+  { object: obj, name: "a", type: "deleted", oldValue: 3 },
+  { object: obj, name: "a", type: "new" },
+  { object: obj, name: "a", type: "updated", oldValue: 4 },
+  { object: obj, name: "a", type: "updated", oldValue: 5 },
+  { object: obj, name: "a", type: "reconfigured", oldValue: 6 },
+  { object: obj, name: "a", type: "updated", oldValue: 6 },
+  { object: obj, name: "a", type: "reconfigured", oldValue: 8 },
+  { object: obj, name: "a", type: "reconfigured", oldValue: 7 },
+  { object: obj, name: "a", type: "reconfigured" },
+  { object: obj, name: "a", type: "deleted" },
+  { object: obj, name: "a", type: "new" },
+  { object: obj, name: "a", type: "reconfigured" },
+  { object: obj, name: "a", type: "updated", oldValue: 9 },
+  { object: obj, name: "a", type: "deleted", oldValue: 10 },
+  { object: obj, name: "a", type: "new" },
+]);
+
+// Observing indexed properties.
+reset();
+var obj = {'1': 1}
+Object.observe(obj, observer.callback);
+obj[1] = 2;
+obj[1] = 3;
+delete obj[1];
+obj[1] = 4;
+obj[1] = 4;  // ignored
+obj[1] = 5;
+Object.defineProperty(obj, "1", {value: 6});
+Object.defineProperty(obj, "1", {writable: false});
+obj[1] = 7;  // ignored
+Object.defineProperty(obj, "1", {value: 8});
+Object.defineProperty(obj, "1", {value: 7, writable: true});
+Object.defineProperty(obj, "1", {get: function() {}});
+delete obj[1];
+delete obj[1];
+Object.defineProperty(obj, "1", {get: function() {}, configurable: true});
+Object.defineProperty(obj, "1", {value: 9, writable: true});
+obj[1] = 10;
+delete obj[1];
+Object.defineProperty(obj, "1", {value: 11, configurable: true});
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: obj, name: "1", type: "updated", oldValue: 1 },
+  { object: obj, name: "1", type: "updated", oldValue: 2 },
+  { object: obj, name: "1", type: "deleted", oldValue: 3 },
+  { object: obj, name: "1", type: "new" },
+  { object: obj, name: "1", type: "updated", oldValue: 4 },
+  { object: obj, name: "1", type: "updated", oldValue: 5 },
+  { object: obj, name: "1", type: "reconfigured", oldValue: 6 },
+  { object: obj, name: "1", type: "updated", oldValue: 6 },
+  { object: obj, name: "1", type: "reconfigured", oldValue: 8 },
+  { object: obj, name: "1", type: "reconfigured", oldValue: 7 },
+  // TODO(observe): oldValue should not be present below.
+  { object: obj, name: "1", type: "deleted", oldValue: undefined },
+  { object: obj, name: "1", type: "new" },
+  // TODO(observe): oldValue should be absent below, and type = "reconfigured".
+  { object: obj, name: "1", type: "updated", oldValue: undefined },
+  { object: obj, name: "1", type: "updated", oldValue: 9 },
+  { object: obj, name: "1", type: "deleted", oldValue: 10 },
+  { object: obj, name: "1", type: "new" },
+]);
+
+// Observing array length (including truncation).
+reset();
+var arr = ['a', 'b', 'c', 'd'];
+var arr2 = ['alpha', 'beta'];
+var arr3 = ['hello'];
+arr3[2] = 'goodbye';
+arr3.length = 6;
+// TODO(adamk): Enable this test case when it can run in a reasonable
+// amount of time.
+//var slow_arr = new Array(1000000000);
+//slow_arr[500000000] = 'hello';
+Object.defineProperty(arr, '0', {configurable: false});
+Object.defineProperty(arr, '2', {get: function(){}});
+Object.defineProperty(arr2, '0', {get: function(){}, configurable: false});
+Object.observe(arr, observer.callback);
+Object.observe(arr2, observer.callback);
+Object.observe(arr3, observer.callback);
+arr.length = 2;
+arr.length = 0;
+arr.length = 10;
+arr2.length = 0;
+arr2.length = 1; // no change expected
+arr3.length = 0;
+Object.defineProperty(arr3, 'length', {value: 5});
+Object.defineProperty(arr3, 'length', {value: 10, writable: false});
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: arr, name: '3', type: 'deleted', oldValue: 'd' },
+  // TODO(adamk): oldValue should not be present below
+  { object: arr, name: '2', type: 'deleted', oldValue: undefined },
+  { object: arr, name: 'length', type: 'updated', oldValue: 4 },
+  { object: arr, name: '1', type: 'deleted', oldValue: 'b' },
+  { object: arr, name: 'length', type: 'updated', oldValue: 2 },
+  { object: arr, name: 'length', type: 'updated', oldValue: 1 },
+  { object: arr2, name: '1', type: 'deleted', oldValue: 'beta' },
+  { object: arr2, name: 'length', type: 'updated', oldValue: 2 },
+  { object: arr3, name: '2', type: 'deleted', oldValue: 'goodbye' },
+  { object: arr3, name: '0', type: 'deleted', oldValue: 'hello' },
+  { object: arr3, name: 'length', type: 'updated', oldValue: 6 },
+  { object: arr3, name: 'length', type: 'updated', oldValue: 0 },
+  { object: arr3, name: 'length', type: 'updated', oldValue: 5 },
+  // TODO(adamk): This record should be merged with the above
+  { object: arr3, name: 'length', type: 'reconfigured' },
+]);
+
+// Assignments in loops (checking different IC states).
+reset();
+var obj = {};
+Object.observe(obj, observer.callback);
+for (var i = 0; i < 5; i++) {
+  obj["a" + i] = i;
+}
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: obj, name: "a0", type: "new" },
+  { object: obj, name: "a1", type: "new" },
+  { object: obj, name: "a2", type: "new" },
+  { object: obj, name: "a3", type: "new" },
+  { object: obj, name: "a4", type: "new" },
+]);
+
+reset();
+var obj = {};
+Object.observe(obj, observer.callback);
+for (var i = 0; i < 5; i++) {
+  obj[i] = i;
+}
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: obj, name: "0", type: "new" },
+  { object: obj, name: "1", type: "new" },
+  { object: obj, name: "2", type: "new" },
+  { object: obj, name: "3", type: "new" },
+  { object: obj, name: "4", type: "new" },
+]);
+
+// Adding elements past the end of an array should notify on length.
+reset();
+var arr = [1, 2, 3];
+Object.observe(arr, observer.callback);
+arr[3] = 10;
+arr[100] = 20;
+Object.defineProperty(arr, '200', {value: 7});
+Object.defineProperty(arr, '400', {get: function(){}});
+arr[50] = 30; // no length change expected
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: arr, name: '3', type: 'new' },
+  { object: arr, name: 'length', type: 'updated', oldValue: 3 },
+  { object: arr, name: '100', type: 'new' },
+  { object: arr, name: 'length', type: 'updated', oldValue: 4 },
+  { object: arr, name: '200', type: 'new' },
+  { object: arr, name: 'length', type: 'updated', oldValue: 101 },
+  { object: arr, name: '400', type: 'new' },
+  { object: arr, name: 'length', type: 'updated', oldValue: 201 },
+  { object: arr, name: '50', type: 'new' },
+]);
+
+// Tests for array methods, first on arrays and then on plain objects.
+//
+// === ARRAYS ===
+//
+// Push
+reset();
+var array = [1, 2];
+Object.observe(array, observer.callback);
+array.push(3, 4);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: array, name: '2', type: 'new' },
+  { object: array, name: 'length', type: 'updated', oldValue: 2 },
+  { object: array, name: '3', type: 'new' },
+  { object: array, name: 'length', type: 'updated', oldValue: 3 },
+]);
+
+// Pop
+reset();
+var array = [1, 2];
+Object.observe(array, observer.callback);
+array.pop();
+array.pop();
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: array, name: '1', type: 'deleted', oldValue: 2 },
+  { object: array, name: 'length', type: 'updated', oldValue: 2 },
+  { object: array, name: '0', type: 'deleted', oldValue: 1 },
+  { object: array, name: 'length', type: 'updated', oldValue: 1 },
+]);
+
+// Shift
+reset();
+var array = [1, 2];
+Object.observe(array, observer.callback);
+array.shift();
+array.shift();
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: array, name: '0', type: 'updated', oldValue: 1 },
+  { object: array, name: '1', type: 'deleted', oldValue: 2 },
+  { object: array, name: 'length', type: 'updated', oldValue: 2 },
+  { object: array, name: '0', type: 'deleted', oldValue: 2 },
+  { object: array, name: 'length', type: 'updated', oldValue: 1 },
+]);
+
+// Unshift
+reset();
+var array = [1, 2];
+Object.observe(array, observer.callback);
+array.unshift(3, 4);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: array, name: '3', type: 'new' },
+  { object: array, name: 'length', type: 'updated', oldValue: 2 },
+  { object: array, name: '2', type: 'new' },
+  { object: array, name: '0', type: 'updated', oldValue: 1 },
+  { object: array, name: '1', type: 'updated', oldValue: 2 },
+]);
+
+// Splice
+reset();
+var array = [1, 2, 3];
+Object.observe(array, observer.callback);
+array.splice(1, 1, 4, 5);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: array, name: '3', type: 'new' },
+  { object: array, name: 'length', type: 'updated', oldValue: 3 },
+  { object: array, name: '1', type: 'updated', oldValue: 2 },
+  { object: array, name: '2', type: 'updated', oldValue: 3 },
+]);
+
+//
+// === PLAIN OBJECTS ===
+//
+// Push
+reset();
+var array = {0: 1, 1: 2, length: 2};
+Object.observe(array, observer.callback);
+Array.prototype.push.call(array, 3, 4);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: array, name: '2', type: 'new' },
+  { object: array, name: '3', type: 'new' },
+  { object: array, name: 'length', type: 'updated', oldValue: 2 },
+]);
+
+// Pop
+reset();
+var array = {0: 1, 1: 2, length: 2};
+Object.observe(array, observer.callback);
+Array.prototype.pop.call(array);
+Array.prototype.pop.call(array);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: array, name: '1', type: 'deleted', oldValue: 2 },
+  { object: array, name: 'length', type: 'updated', oldValue: 2 },
+  { object: array, name: '0', type: 'deleted', oldValue: 1 },
+  { object: array, name: 'length', type: 'updated', oldValue: 1 },
+]);
+
+// Shift
+reset();
+var array = {0: 1, 1: 2, length: 2};
+Object.observe(array, observer.callback);
+Array.prototype.shift.call(array);
+Array.prototype.shift.call(array);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: array, name: '0', type: 'updated', oldValue: 1 },
+  { object: array, name: '1', type: 'deleted', oldValue: 2 },
+  { object: array, name: 'length', type: 'updated', oldValue: 2 },
+  { object: array, name: '0', type: 'deleted', oldValue: 2 },
+  { object: array, name: 'length', type: 'updated', oldValue: 1 },
+]);
+
+// Unshift
+reset();
+var array = {0: 1, 1: 2, length: 2};
+Object.observe(array, observer.callback);
+Array.prototype.unshift.call(array, 3, 4);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: array, name: '3', type: 'new' },
+  { object: array, name: '2', type: 'new' },
+  { object: array, name: '0', type: 'updated', oldValue: 1 },
+  { object: array, name: '1', type: 'updated', oldValue: 2 },
+  { object: array, name: 'length', type: 'updated', oldValue: 2 },
+]);
+
+// Splice
+reset();
+var array = {0: 1, 1: 2, 2: 3, length: 3};
+Object.observe(array, observer.callback);
+Array.prototype.splice.call(array, 1, 1, 4, 5);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+  { object: array, name: '3', type: 'new' },
+  { object: array, name: '1', type: 'updated', oldValue: 2 },
+  { object: array, name: '2', type: 'updated', oldValue: 3 },
+  { object: array, name: 'length', type: 'updated', oldValue: 3 },
+]);
diff --git a/test/mjsunit/harmony/proxies.js b/test/mjsunit/harmony/proxies.js
index 7170ffd..04fc769 100644
--- a/test/mjsunit/harmony/proxies.js
+++ b/test/mjsunit/harmony/proxies.js
@@ -649,6 +649,11 @@
 
 TestSetForDerived(
   function(k) {
+    // TODO(yangguo): issue 2398 - throwing an error causes formatting of
+    // the message string, which is observable through this handler.
+    // We ignore keys that are accessed during message string formatting.
+    if (k == "toString" || k == "valueOf") return;
+
     key = k;
     switch (k) {
       case "p_writable": return {writable: true, configurable: true}
diff --git a/test/mjsunit/json-recursive.js b/test/mjsunit/json-recursive.js
index e9c15bb..7a8c547 100644
--- a/test/mjsunit/json-recursive.js
+++ b/test/mjsunit/json-recursive.js
@@ -42,19 +42,14 @@
              RangeError);
 
 
-var depth1 = 1500;
-var depth2 = 10000;
+var depth = 10000;
 var deepArray = [];
-for (var i = 0; i < depth1; i++) deepArray = [deepArray];
-JSON.stringify(deepArray);
-for (var i = depth1; i < depth2; i++) deepArray = [deepArray];
+for (var i = 0; i < depth; i++) deepArray = [deepArray];
 assertThrows(function() { JSON.stringify(deepArray); }, RangeError);
 
 
 var deepObject = {};
-for (var i = 0; i < depth1; i++) deepObject = { next: deepObject };
-JSON.stringify(deepObject);
-for (var i = depth1; i < depth2; i++) deepObject = { next: deepObject };
+for (var i = 0; i < depth; i++) deepObject = { next: deepObject };
 assertThrows(function() { JSON.stringify(deepObject); }, RangeError);
 
 
diff --git a/test/mjsunit/mjsunit.status b/test/mjsunit/mjsunit.status
index 2e1a8f4..0bf378b 100644
--- a/test/mjsunit/mjsunit.status
+++ b/test/mjsunit/mjsunit.status
@@ -66,6 +66,10 @@
 json-recursive: PASS, (PASS || FAIL) if $mode == debug
 
 ##############################################################################
+# Skip long-running test that times out in debug mode.
+regress/regress-crbug-160010: PASS, SKIP if $mode == debug
+
+##############################################################################
 # This test sets the umask on a per-process basis and hence cannot be
 # used in multi-threaded runs.
 # On android there is no /tmp directory.
diff --git a/test/mjsunit/regress/regress-2398.js b/test/mjsunit/regress/regress-2398.js
new file mode 100644
index 0000000..1c66e7f
--- /dev/null
+++ b/test/mjsunit/regress/regress-2398.js
@@ -0,0 +1,41 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"use strict";
+
+var observed = false;
+
+var object = { get toString() { observed = true; } };
+Object.defineProperty(object, "ro", { value: 1 });
+
+try {
+  object.ro = 2;  // TypeError caused by trying to write to read-only.
+} catch (e) {
+  e.message;  // Forces formatting of the message object.
+}
+
+assertFalse(observed);
diff --git a/test/mjsunit/regress/regress-crbug-157019.js b/test/mjsunit/regress/regress-crbug-157019.js
new file mode 100644
index 0000000..1c54089
--- /dev/null
+++ b/test/mjsunit/regress/regress-crbug-157019.js
@@ -0,0 +1,54 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --nocrankshaft
+
+function makeConstructor() {
+  return function() {
+    this.a = 1;
+    this.b = 2;
+  };
+}
+
+var c1 = makeConstructor();
+var o1 = new c1();
+
+c1.prototype = {};
+
+for (var i = 0; i < 10; i++) {
+  var o = new c1();
+  for (var j = 0; j < 8; j++) {
+    o["x" + j] = 0;
+  }
+}
+
+var c2 = makeConstructor();
+var o2 = new c2();
+
+for (var i = 0; i < 50000; i++) {
+  new c2();
+}
diff --git a/test/mjsunit/regress/regress-crbug-160010.js b/test/mjsunit/regress/regress-crbug-160010.js
new file mode 100644
index 0000000..266e545
--- /dev/null
+++ b/test/mjsunit/regress/regress-crbug-160010.js
@@ -0,0 +1,33 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var str = "a";
+for (var i = 0; i < 28; i++) {
+  str += str;
+}
+JSON.stringify(str);
+
diff --git a/tools/gen-postmortem-metadata.py b/tools/gen-postmortem-metadata.py
index f59cfd3..71f58bf 100644
--- a/tools/gen-postmortem-metadata.py
+++ b/tools/gen-postmortem-metadata.py
@@ -61,7 +61,7 @@
 
     { 'name': 'StringEncodingMask',     'value': 'kStringEncodingMask' },
     { 'name': 'TwoByteStringTag',       'value': 'kTwoByteStringTag' },
-    { 'name': 'AsciiStringTag',         'value': 'kAsciiStringTag' },
+    { 'name': 'AsciiStringTag',         'value': 'kOneByteStringTag' },
 
     { 'name': 'StringRepresentationMask',
         'value': 'kStringRepresentationMask' },
diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp
index c24314f..aad07c7 100644
--- a/tools/gyp/v8.gyp
+++ b/tools/gyp/v8.gyp
@@ -69,6 +69,14 @@
                 ],
               },
               'conditions': [
+                ['OS=="android"', {
+                  'libraries': [
+                    '-llog',
+                  ],
+                  'include_dirs': [
+                    'src/common/android/include',
+                  ],
+                }],
                 ['OS=="mac"', {
                   'xcode_settings': {
                     'OTHER_LDFLAGS': ['-dynamiclib', '-all_load']