Version 3.9.12

Fix the negative lookup stub to handle deleted entries in a dictionary. (issue 1964)

Add a new API that lets the host supply a callback function. The callback resolves the location of a return address on the stack to the location where a return-address-rewriting profiler stashed the original return address.
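
For reference, a minimal sketch of how an embedder might hook up the new callback; only the ReturnAddressLocationResolver typedef and v8::V8::SetReturnAddressLocationResolver come from this change, while the profiler-side lookup (LookupStashedReturnAddress) is a hypothetical placeholder:

#include <stdint.h>
#include <v8.h>

// Hypothetical profiler-side lookup: returns the address of the stashed
// original return address if the given stack slot was rewritten, 0 otherwise.
extern uintptr_t LookupStashedReturnAddress(uintptr_t return_addr_location);

static uintptr_t ResolveReturnAddress(uintptr_t return_addr_location) {
  uintptr_t stashed = LookupStashedReturnAddress(return_addr_location);
  // Hand V8 the profiler's copy if this slot was rewritten, otherwise the
  // unmodified location, as the ReturnAddressLocationResolver contract asks.
  return stashed != 0 ? stashed : return_addr_location;
}

void InstallReturnAddressResolver() {
  // Register once at startup, before V8 needs to walk any stack frames.
  v8::V8::SetReturnAddressLocationResolver(ResolveReturnAddress);
}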

When compiling for-in, pass the correct context value to the increment instruction. (Chromium issue http://crbug.com/115646)

Update breakpoints set with a partial file name after compilation. (issue 1853)

git-svn-id: http://v8.googlecode.com/svn/trunk@10852 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/ChangeLog b/ChangeLog
index 8922db6..5b852bb 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,20 @@
+2012-02-28: Version 3.9.12
+
+        Fix the negative lookup stub to handle deleted entries in a dictionary.
+        (issue 1964)
+
+        Add a new API that lets the host supply a callback function. The
+        callback resolves the location of a return address on the stack to
+        the location where a return-address-rewriting profiler stashed the
+        original return address.
+
+        When compiling for-in, pass the correct context value to the
+        increment instruction. (Chromium issue http://crbug.com/115646)
+
+        Update breakpoints set with a partial file name after compilation.
+        (issue 1853)
+
+
 2012-02-27: Version 3.9.11
 
         Make 'module' a context-sensitive keyword (V8 issue 1957).
diff --git a/include/v8-profiler.h b/include/v8-profiler.h
index 5a3a40f..fc3ee78 100644
--- a/include/v8-profiler.h
+++ b/include/v8-profiler.h
@@ -284,14 +284,8 @@
    * the objects that are reachable only from this object. In other
    * words, the size of memory that will be reclaimed having this node
    * collected.
-   *
-   * Exact retained size calculation has O(N) (number of nodes)
-   * computational complexity, while approximate has O(1). It is
-   * assumed that initially heap profiling tools provide approximate
-   * sizes for all nodes, and then exact sizes are calculated for the
-   * most 'interesting' nodes.
    */
-  int GetRetainedSize(bool exact) const;
+  int GetRetainedSize() const;
 
   /** Returns child nodes count of the node. */
   int GetChildrenCount() const;
diff --git a/include/v8.h b/include/v8.h
index a29cd9f..893df12 100644
--- a/include/v8.h
+++ b/include/v8.h
@@ -2858,6 +2858,20 @@
 
 
 /**
+ * ReturnAddressLocationResolver is used as a callback function when v8 is
+ * resolving the location of a return address on the stack. Profilers that
+ * change the return address on the stack can use this to resolve the stack
+ * location to wherever the profiler stashed the original return address.
+ * When invoked, return_addr_location will point to a location on the stack
+ * where a machine return address resides. This function should return either
+ * the same pointer or a pointer to the profiler's copy of the original
+ * return address.
+ */
+typedef uintptr_t (*ReturnAddressLocationResolver)(
+    uintptr_t return_addr_location);
+
+
+/**
  * Interface for iterating though all external resources in the heap.
  */
 class V8EXPORT ExternalResourceVisitor {  // NOLINT
@@ -3111,6 +3125,13 @@
   static void SetEntropySource(EntropySource source);
 
   /**
+   * Allows the host application to provide a callback that lets v8
+   * cooperate with a profiler that rewrites return addresses on the stack.
+   */
+  static void SetReturnAddressLocationResolver(
+      ReturnAddressLocationResolver return_address_resolver);
+
+  /**
    * Adjusts the amount of registered external memory.  Used to give
    * V8 an indication of the amount of externally allocated memory
    * that is kept alive by JavaScript objects.  V8 uses this to decide
diff --git a/src/api.cc b/src/api.cc
index 775c884..4498286 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -4026,6 +4026,12 @@
 }
 
 
+void v8::V8::SetReturnAddressLocationResolver(
+      ReturnAddressLocationResolver return_address_resolver) {
+  i::V8::SetReturnAddressLocationResolver(return_address_resolver);
+}
+
+
 bool v8::V8::Dispose() {
   i::Isolate* isolate = i::Isolate::Current();
   if (!ApiCheck(isolate != NULL && isolate->IsDefaultIsolate(),
@@ -5867,10 +5873,10 @@
 }
 
 
-int HeapGraphNode::GetRetainedSize(bool exact) const {
+int HeapGraphNode::GetRetainedSize() const {
   i::Isolate* isolate = i::Isolate::Current();
   IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainedSize");
-  return ToInternal(this)->RetainedSize(exact);
+  return ToInternal(this)->retained_size();
 }
 
 
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 993addc..c99e778 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -978,6 +978,11 @@
                         NullCallWrapper(), CALL_AS_METHOD);
     }
 
+    // Store offset of return address for deoptimizer.
+    if (!is_api_function && !count_constructions) {
+      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+    }
+
     // Restore context from the frame.
     // r0: result
     // sp[0]: receiver
@@ -1740,7 +1745,9 @@
   __ bind(&invoke);
   __ Call(r3);
 
+  // Store offset of return address for deoptimizer.
   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
   // Exit frame and return.
   LeaveArgumentsAdaptorFrame(masm);
   __ Jump(lr);
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 62e6c80..25563ed 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -6812,7 +6812,7 @@
   // not equal to the name and kProbes-th slot is not used (its name is the
   // undefined value), it guarantees the hash table doesn't contain the
   // property. It's true even if some slots represent deleted properties
-  // (their names are the null value).
+  // (their names are the hole value).
   for (int i = 0; i < kInlinedProbes; i++) {
     // scratch0 points to properties hash.
     // Compute the masked index: (hash + i + i * i) & mask.
@@ -6840,10 +6840,17 @@
     __ b(eq, done);
 
     if (i != kInlinedProbes - 1) {
+      // Load the hole ready for use below:
+      __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
+
       // Stop if found the property.
       __ cmp(entity_name, Operand(Handle<String>(name)));
       __ b(eq, miss);
 
+      Label the_hole;
+      __ cmp(entity_name, tmp);
+      __ b(eq, &the_hole);
+
       // Check if the entry name is not a symbol.
       __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
       __ ldrb(entity_name,
@@ -6851,6 +6858,8 @@
       __ tst(entity_name, Operand(kIsSymbolMask));
       __ b(eq, miss);
 
+      __ bind(&the_hole);
+
       // Restore the properties.
       __ ldr(properties,
              FieldMemOperand(receiver, JSObject::kPropertiesOffset));
diff --git a/src/arm/deoptimizer-arm.cc b/src/arm/deoptimizer-arm.cc
index 76d8954..d9a4d4b 100644
--- a/src/arm/deoptimizer-arm.cc
+++ b/src/arm/deoptimizer-arm.cc
@@ -351,7 +351,6 @@
   }
 
   unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
-  unsigned input_frame_size = input_->GetFrameSize();
   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
 
   // Allocate and store the output frame description.
@@ -373,16 +372,13 @@
   // Compute the incoming parameter translation.
   int parameter_count = height;
   unsigned output_offset = output_frame_size;
-  unsigned input_offset = input_frame_size;
   for (int i = 0; i < parameter_count; ++i) {
     output_offset -= kPointerSize;
     DoTranslateCommand(iterator, frame_index, output_offset);
   }
-  input_offset -= (parameter_count * kPointerSize);
 
   // Read caller's PC from the previous frame.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   intptr_t callers_pc = output_[frame_index - 1]->GetPc();
   output_frame->SetFrameSlot(output_offset, callers_pc);
   if (FLAG_trace_deopt) {
@@ -392,7 +388,6 @@
 
   // Read caller's FP from the previous frame, and set this frame's FP.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   intptr_t value = output_[frame_index - 1]->GetFp();
   output_frame->SetFrameSlot(output_offset, value);
   intptr_t fp_value = top_address + output_offset;
@@ -404,7 +399,6 @@
 
   // A marker value is used in place of the context.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   intptr_t context = reinterpret_cast<intptr_t>(
       Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   output_frame->SetFrameSlot(output_offset, context);
@@ -415,7 +409,6 @@
 
   // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   value = reinterpret_cast<intptr_t>(function);
   output_frame->SetFrameSlot(output_offset, value);
   if (FLAG_trace_deopt) {
@@ -425,7 +418,6 @@
 
   // Number of incoming arguments.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
   output_frame->SetFrameSlot(output_offset, value);
   if (FLAG_trace_deopt) {
@@ -445,6 +437,119 @@
 }
 
 
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+                                              int frame_index) {
+  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+  unsigned height = iterator->Next();
+  unsigned height_in_bytes = height * kPointerSize;
+  if (FLAG_trace_deopt) {
+    PrintF("  translating construct stub => height=%d\n", height_in_bytes);
+  }
+
+  unsigned fixed_frame_size = 7 * kPointerSize;
+  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+  // Allocate and store the output frame description.
+  FrameDescription* output_frame =
+      new(output_frame_size) FrameDescription(output_frame_size, function);
+  output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+  // Construct stub cannot be topmost or bottommost.
+  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+  ASSERT(output_[frame_index] == NULL);
+  output_[frame_index] = output_frame;
+
+  // The top address of the frame is computed from the previous
+  // frame's top and this frame's size.
+  uint32_t top_address;
+  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+  output_frame->SetTop(top_address);
+
+  // Compute the incoming parameter translation.
+  int parameter_count = height;
+  unsigned output_offset = output_frame_size;
+  for (int i = 0; i < parameter_count; ++i) {
+    output_offset -= kPointerSize;
+    DoTranslateCommand(iterator, frame_index, output_offset);
+  }
+
+  // Read caller's PC from the previous frame.
+  output_offset -= kPointerSize;
+  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+  output_frame->SetFrameSlot(output_offset, callers_pc);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+           top_address + output_offset, output_offset, callers_pc);
+  }
+
+  // Read caller's FP from the previous frame, and set this frame's FP.
+  output_offset -= kPointerSize;
+  intptr_t value = output_[frame_index - 1]->GetFp();
+  output_frame->SetFrameSlot(output_offset, value);
+  intptr_t fp_value = top_address + output_offset;
+  output_frame->SetFp(fp_value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+           fp_value, output_offset, value);
+  }
+
+  // The context is read from the previous frame.
+  output_offset -= kPointerSize;
+  value = output_[frame_index - 1]->GetContext();
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; context\n",
+           top_address + output_offset, output_offset, value);
+  }
+
+  // A marker value is used in place of the function.
+  output_offset -= kPointerSize;
+  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
+           top_address + output_offset, output_offset, value);
+  }
+
+  // Number of incoming arguments.
+  output_offset -= kPointerSize;
+  value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+           top_address + output_offset, output_offset, value, height - 1);
+  }
+
+  // Constructor function being invoked by the stub.
+  output_offset -= kPointerSize;
+  value = reinterpret_cast<intptr_t>(function);
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; constructor function\n",
+           top_address + output_offset, output_offset, value);
+  }
+
+  // The newly allocated object was passed as receiver in the artificial
+  // constructor stub environment created by HEnvironment::CopyForInlining().
+  output_offset -= kPointerSize;
+  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
+           top_address + output_offset, output_offset, value);
+  }
+
+  ASSERT(0 == output_offset);
+
+  Builtins* builtins = isolate_->builtins();
+  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+  uint32_t pc = reinterpret_cast<uint32_t>(
+      construct_stub->instruction_start() +
+      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+  output_frame->SetPc(pc);
+}
+
+
 // This code is very similar to ia32 code, but relies on register names (fp, sp)
 // and how the frame is laid out.
 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
@@ -557,9 +662,8 @@
     value = reinterpret_cast<intptr_t>(function->context());
   }
   output_frame->SetFrameSlot(output_offset, value);
-  if (is_topmost) {
-    output_frame->SetRegister(cp.code(), value);
-  }
+  output_frame->SetContext(value);
+  if (is_topmost) output_frame->SetRegister(cp.code(), value);
   if (FLAG_trace_deopt) {
     PrintF("    0x%08x: [top + %d] <- 0x%08x ; context\n",
            top_address + output_offset, output_offset, value);
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 51e9208..418e0c5 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -2382,6 +2382,7 @@
 
   CallConstructStub stub(flags);
   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(r0);
 }
 
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index a934aac..4e20404 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -995,10 +995,11 @@
   LEnvironment* outer =
       CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
   int ast_id = hydrogen_env->ast_id();
-  ASSERT(ast_id != AstNode::kNoNumber || hydrogen_env->is_arguments_adaptor());
+  ASSERT(ast_id != AstNode::kNoNumber ||
+         hydrogen_env->frame_type() != JS_FUNCTION);
   int value_count = hydrogen_env->length();
   LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
-                                          hydrogen_env->is_arguments_adaptor(),
+                                          hydrogen_env->frame_type(),
                                           ast_id,
                                           hydrogen_env->parameter_count(),
                                           argument_count_,
@@ -1020,7 +1021,7 @@
     result->AddValue(op, value->representation());
   }
 
-  if (!hydrogen_env->is_arguments_adaptor()) {
+  if (hydrogen_env->frame_type() == JS_FUNCTION) {
     *argument_index_accumulator = argument_index;
   }
 
@@ -2093,6 +2094,12 @@
 }
 
 
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+  LAllocateObject* result = new LAllocateObject();
+  return AssignPointerMap(DefineAsRegister(result));
+}
+
+
 LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
   return MarkAsCall(DefineFixed(new LFastLiteral, r0), instr);
 }
@@ -2245,7 +2252,8 @@
                                                instr->arguments_count(),
                                                instr->function(),
                                                undefined,
-                                               instr->call_kind());
+                                               instr->call_kind(),
+                                               instr->is_construct());
   current_block_->UpdateEnvironment(inner);
   chunk_->AddInlinedClosure(instr->closure());
   return NULL;
diff --git a/src/arm/lithium-arm.h b/src/arm/lithium-arm.h
index 1846922..81566c4 100644
--- a/src/arm/lithium-arm.h
+++ b/src/arm/lithium-arm.h
@@ -49,6 +49,7 @@
 #define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)    \
   V(AccessArgumentsAt)                          \
   V(AddI)                                       \
+  V(AllocateObject)                             \
   V(ApplyArguments)                             \
   V(ArgumentsElements)                          \
   V(ArgumentsLength)                            \
@@ -1922,6 +1923,13 @@
 };
 
 
+class LAllocateObject: public LTemplateInstruction<1, 0, 0> {
+ public:
+  DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+  DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+};
+
+
 class LFastLiteral: public LTemplateInstruction<1, 0, 0> {
  public:
   DECLARE_CONCRETE_INSTRUCTION(FastLiteral, "fast-literal")
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 8045556..630ae8d 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -479,10 +479,18 @@
 
   WriteTranslation(environment->outer(), translation);
   int closure_id = DefineDeoptimizationLiteral(environment->closure());
-  if (environment->is_arguments_adaptor()) {
-    translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
-  } else {
-    translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+  switch (environment->frame_type()) {
+    case JS_FUNCTION:
+      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+      break;
+    case JS_CONSTRUCT:
+      translation->BeginConstructStubFrame(closure_id, translation_size);
+      break;
+    case ARGUMENTS_ADAPTOR:
+      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+      break;
+    default:
+      UNREACHABLE();
   }
   for (int i = 0; i < translation_size; ++i) {
     LOperand* value = environment->values()->at(i);
@@ -619,7 +627,7 @@
     int jsframe_count = 0;
     for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
       ++frame_count;
-      if (!e->is_arguments_adaptor()) {
+      if (e->frame_type() == JS_FUNCTION) {
         ++jsframe_count;
       }
     }
@@ -4322,6 +4330,44 @@
 }
 
 
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+  class DeferredAllocateObject: public LDeferredCode {
+   public:
+    DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+        : LDeferredCode(codegen), instr_(instr) { }
+    virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+    virtual LInstruction* instr() { return instr_; }
+   private:
+    LAllocateObject* instr_;
+  };
+
+  DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+  // TODO(mstarzinger): Implement inlined version instead of jumping to
+  // deferred runtime call.
+  __ jmp(deferred->entry());
+
+  __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+  Register result = ToRegister(instr->result());
+  Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+
+  // TODO(3095996): Get rid of this. For now, we need to make the
+  // result register contain a valid pointer because it is already
+  // contained in the register pointer map.
+  __ mov(result, Operand(0));
+
+  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+  __ LoadHeapObject(r0, constructor);
+  __ push(r0);
+  CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
+  __ StoreToSafepointRegisterSlot(r0, result);
+}
+
+
 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
   Heap* heap = isolate()->heap();
   ElementsKind boilerplate_elements_kind =
diff --git a/src/arm/lithium-codegen-arm.h b/src/arm/lithium-codegen-arm.h
index 00823e1..e23dc29 100644
--- a/src/arm/lithium-codegen-arm.h
+++ b/src/arm/lithium-codegen-arm.h
@@ -116,6 +116,7 @@
   void DoDeferredStackCheck(LStackCheck* instr);
   void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
   void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
+  void DoDeferredAllocateObject(LAllocateObject* instr);
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
 
diff --git a/src/ast.cc b/src/ast.cc
index ca3ab78..61923bb 100644
--- a/src/ast.cc
+++ b/src/ast.cc
@@ -596,6 +596,14 @@
 }
 
 
+void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
+  is_monomorphic_ = oracle->CallNewIsMonomorphic(this);
+  if (is_monomorphic_) {
+    target_ = oracle->GetCallNewTarget(this);
+  }
+}
+
+
 void CompareOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
   TypeInfo info = oracle->CompareType(this);
   if (info.IsSmi()) {
diff --git a/src/ast.h b/src/ast.h
index 3acd121..74c4c61 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -1528,6 +1528,13 @@
   ZoneList<Expression*>* arguments() const { return arguments_; }
   virtual int position() const { return pos_; }
 
+  void RecordTypeFeedback(TypeFeedbackOracle* oracle);
+  virtual bool IsMonomorphic() { return is_monomorphic_; }
+  Handle<JSFunction> target() { return target_; }
+
+  // Bailout support.
+  int ReturnId() const { return return_id_; }
+
  protected:
   template<class> friend class AstNodeFactory;
 
@@ -1538,12 +1545,19 @@
       : Expression(isolate),
         expression_(expression),
         arguments_(arguments),
-        pos_(pos) { }
+        pos_(pos),
+        is_monomorphic_(false),
+        return_id_(GetNextId(isolate)) { }
 
  private:
   Expression* expression_;
   ZoneList<Expression*>* arguments_;
   int pos_;
+
+  bool is_monomorphic_;
+  Handle<JSFunction> target_;
+
+  int return_id_;
 };
 
 
diff --git a/src/debug-debugger.js b/src/debug-debugger.js
index 91c70a0..802f622 100644
--- a/src/debug-debugger.js
+++ b/src/debug-debugger.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -478,7 +478,8 @@
 function UpdateScriptBreakPoints(script) {
   for (var i = 0; i < script_break_points.length; i++) {
     var break_point = script_break_points[i];
-    if ((break_point.type() == Debug.ScriptBreakPointType.ScriptName) &&
+    if ((break_point.type() == Debug.ScriptBreakPointType.ScriptName ||
+         break_point.type() == Debug.ScriptBreakPointType.ScriptRegExp) &&
         break_point.matchesScript(script)) {
       break_point.set(script);
     }
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index 55ecc71..d069a45 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -170,8 +170,16 @@
       deoptimizer->output_[frame_index - 1]->GetFrameType() ==
       StackFrame::ARGUMENTS_ADAPTOR;
 
-  DeoptimizedFrameInfo* info =
-      new DeoptimizedFrameInfo(deoptimizer, frame_index, has_arguments_adaptor);
+  int construct_offset = has_arguments_adaptor ? 2 : 1;
+  bool has_construct_stub =
+      frame_index >= construct_offset &&
+      deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
+      StackFrame::CONSTRUCT;
+
+  DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
+                                                        frame_index,
+                                                        has_arguments_adaptor,
+                                                        has_construct_stub);
   isolate->deoptimizer_data()->deoptimized_frame_info_ = info;
 
   // Get the "simulated" top and size for the requested frame.
@@ -570,6 +578,9 @@
       case Translation::ARGUMENTS_ADAPTOR_FRAME:
         DoComputeArgumentsAdaptorFrame(&iterator, i);
         break;
+      case Translation::CONSTRUCT_STUB_FRAME:
+        DoComputeConstructStubFrame(&iterator, i);
+        break;
       default:
         UNREACHABLE();
         break;
@@ -686,6 +697,7 @@
     case Translation::BEGIN:
     case Translation::JS_FRAME:
     case Translation::ARGUMENTS_ADAPTOR_FRAME:
+    case Translation::CONSTRUCT_STUB_FRAME:
     case Translation::DUPLICATE:
       UNREACHABLE();
       return;
@@ -873,6 +885,7 @@
     case Translation::BEGIN:
     case Translation::JS_FRAME:
     case Translation::ARGUMENTS_ADAPTOR_FRAME:
+    case Translation::CONSTRUCT_STUB_FRAME:
     case Translation::DUPLICATE:
       UNREACHABLE();  // Malformed input.
        return false;
@@ -1206,7 +1219,8 @@
       function_(function),
       top_(kZapUint32),
       pc_(kZapUint32),
-      fp_(kZapUint32) {
+      fp_(kZapUint32),
+      context_(kZapUint32) {
   // Zap all the registers.
   for (int r = 0; r < Register::kNumRegisters; r++) {
     SetRegister(r, kZapUint32);
@@ -1320,6 +1334,13 @@
 }
 
 
+void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
+  buffer_->Add(CONSTRUCT_STUB_FRAME);
+  buffer_->Add(literal_id);
+  buffer_->Add(height);
+}
+
+
 void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
   buffer_->Add(ARGUMENTS_ADAPTOR_FRAME);
   buffer_->Add(literal_id);
@@ -1402,6 +1423,7 @@
       return 1;
     case BEGIN:
     case ARGUMENTS_ADAPTOR_FRAME:
+    case CONSTRUCT_STUB_FRAME:
       return 2;
     case JS_FRAME:
       return 3;
@@ -1421,6 +1443,8 @@
       return "JS_FRAME";
     case ARGUMENTS_ADAPTOR_FRAME:
       return "ARGUMENTS_ADAPTOR_FRAME";
+    case CONSTRUCT_STUB_FRAME:
+      return "CONSTRUCT_STUB_FRAME";
     case REGISTER:
       return "REGISTER";
     case INT32_REGISTER:
@@ -1476,6 +1500,7 @@
     case Translation::BEGIN:
     case Translation::JS_FRAME:
     case Translation::ARGUMENTS_ADAPTOR_FRAME:
+    case Translation::CONSTRUCT_STUB_FRAME:
       // Peeled off before getting here.
       break;
 
@@ -1598,10 +1623,13 @@
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
 
-DeoptimizedFrameInfo::DeoptimizedFrameInfo(
-    Deoptimizer* deoptimizer, int frame_index, bool has_arguments_adaptor) {
+DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
+                                           int frame_index,
+                                           bool has_arguments_adaptor,
+                                           bool has_construct_stub) {
   FrameDescription* output_frame = deoptimizer->output_[frame_index];
-  SetFunction(output_frame->GetFunction());
+  function_ = output_frame->GetFunction();
+  has_construct_stub_ = has_construct_stub;
   expression_count_ = output_frame->GetExpressionCount();
   expression_stack_ = new Object*[expression_count_];
   // Get the source position using the unoptimized code.
diff --git a/src/deoptimizer.h b/src/deoptimizer.h
index 68bc48d..7699222 100644
--- a/src/deoptimizer.h
+++ b/src/deoptimizer.h
@@ -283,6 +283,8 @@
   void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
   void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                       int frame_index);
+  void DoComputeConstructStubFrame(TranslationIterator* iterator,
+                                   int frame_index);
   void DoTranslateCommand(TranslationIterator* iterator,
                           int frame_index,
                           unsigned output_offset);
@@ -431,6 +433,9 @@
   intptr_t GetFp() const { return fp_; }
   void SetFp(intptr_t fp) { fp_ = fp; }
 
+  intptr_t GetContext() const { return context_; }
+  void SetContext(intptr_t context) { context_ = context; }
+
   Smi* GetState() const { return state_; }
   void SetState(Smi* state) { state_ = state; }
 
@@ -492,6 +497,7 @@
   intptr_t top_;
   intptr_t pc_;
   intptr_t fp_;
+  intptr_t context_;
   StackFrame::Type type_;
   Smi* state_;
 #ifdef DEBUG
@@ -556,6 +562,7 @@
   enum Opcode {
     BEGIN,
     JS_FRAME,
+    CONSTRUCT_STUB_FRAME,
     ARGUMENTS_ADAPTOR_FRAME,
     REGISTER,
     INT32_REGISTER,
@@ -584,6 +591,7 @@
   // Commands.
   void BeginJSFrame(int node_id, int literal_id, unsigned height);
   void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
+  void BeginConstructStubFrame(int literal_id, unsigned height);
   void StoreRegister(Register reg);
   void StoreInt32Register(Register reg);
   void StoreDoubleRegister(DoubleRegister reg);
@@ -716,7 +724,8 @@
  public:
   DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                        int frame_index,
-                       bool has_arguments_adaptor);
+                       bool has_arguments_adaptor,
+                       bool has_construct_stub);
   virtual ~DeoptimizedFrameInfo();
 
   // GC support.
@@ -733,6 +742,12 @@
     return function_;
   }
 
+  // Check if this frame is preceded by a construct stub frame.  The bottom-most
+  // inlined frame might still be called by an uninlined construct stub.
+  bool HasConstructStub() {
+    return has_construct_stub_;
+  }
+
   // Get an incoming argument.
   Object* GetParameter(int index) {
     ASSERT(0 <= index && index < parameters_count());
@@ -750,11 +765,6 @@
   }
 
  private:
-  // Set the frame function.
-  void SetFunction(JSFunction* function) {
-    function_ = function;
-  }
-
   // Set an incoming argument.
   void SetParameter(int index, Object* obj) {
     ASSERT(0 <= index && index < parameters_count());
@@ -768,6 +778,7 @@
   }
 
   JSFunction* function_;
+  bool has_construct_stub_;
   int parameters_count_;
   int expression_count_;
   Object** parameters_;
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 2968445..71af9ae 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -167,9 +167,10 @@
 DEFINE_bool(trace_osr, false, "trace on-stack replacement")
 DEFINE_int(stress_runs, 0, "number of stress runs")
 DEFINE_bool(optimize_closures, true, "optimize closures")
+DEFINE_bool(inline_construct, false, "inline constructor calls")
 DEFINE_int(loop_weight, 1, "loop weight for representation inference")
 
-DEFINE_bool(optimize_for_in, false,
+DEFINE_bool(optimize_for_in, true,
             "optimize functions containing for-in loops")
 
 // Experimental profiler changes.
diff --git a/src/frames-inl.h b/src/frames-inl.h
index 010233a..27a526c 100644
--- a/src/frames-inl.h
+++ b/src/frames-inl.h
@@ -191,7 +191,7 @@
 inline bool StandardFrame::IsConstructFrame(Address fp) {
   Object* marker =
       Memory::Object_at(fp + StandardFrameConstants::kMarkerOffset);
-  return marker == Smi::FromInt(CONSTRUCT);
+  return marker == Smi::FromInt(StackFrame::CONSTRUCT);
 }
 
 
diff --git a/src/frames.cc b/src/frames.cc
index 40df12c..be537c9 100644
--- a/src/frames.cc
+++ b/src/frames.cc
@@ -41,6 +41,22 @@
 namespace v8 {
 namespace internal {
 
+
+static ReturnAddressLocationResolver return_address_location_resolver = NULL;
+
+
+// Resolves pc_address through the return address location resolver, if set.
+static inline Address* ResolveReturnAddressLocation(Address* pc_address) {
+  if (return_address_location_resolver == NULL) {
+    return pc_address;
+  } else {
+    return reinterpret_cast<Address*>(
+        return_address_location_resolver(
+            reinterpret_cast<uintptr_t>(pc_address)));
+  }
+}
+
+
 // Iterator that supports traversing the stack handlers of a
 // particular frame. Needs to know the top of the handler chain.
 class StackHandlerIterator BASE_EMBEDDED {
@@ -155,8 +171,8 @@
     ASSERT(fp_ != NULL);
     state.fp = fp_;
     state.sp = sp_;
-    state.pc_address =
-        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp_));
+    state.pc_address = ResolveReturnAddressLocation(
+        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp_)));
     type = StackFrame::ComputeType(isolate(), &state);
   }
   if (SingletonFor(type) == NULL) return;
@@ -414,6 +430,13 @@
 }
 
 
+void StackFrame::SetReturnAddressLocationResolver(
+    ReturnAddressLocationResolver resolver) {
+  ASSERT(return_address_location_resolver == NULL);
+  return_address_location_resolver = resolver;
+}
+
+
 StackFrame::Type StackFrame::ComputeType(Isolate* isolate, State* state) {
   ASSERT(state->fp != NULL);
   if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
@@ -488,8 +511,8 @@
   // Set up the caller state.
   state->sp = caller_sp();
   state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
-  state->pc_address
-      = reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset);
+  state->pc_address = ResolveReturnAddressLocation(
+      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
 }
 
 
@@ -523,7 +546,8 @@
 void ExitFrame::FillState(Address fp, Address sp, State* state) {
   state->sp = sp;
   state->fp = fp;
-  state->pc_address = reinterpret_cast<Address*>(sp - 1 * kPointerSize);
+  state->pc_address = ResolveReturnAddressLocation(
+      reinterpret_cast<Address*>(sp - 1 * kPointerSize));
 }
 
 
@@ -558,7 +582,8 @@
 void StandardFrame::ComputeCallerState(State* state) const {
   state->sp = caller_sp();
   state->fp = caller_fp();
-  state->pc_address = reinterpret_cast<Address*>(ComputePCAddress(fp()));
+  state->pc_address = ResolveReturnAddressLocation(
+      reinterpret_cast<Address*>(ComputePCAddress(fp())));
 }
 
 
@@ -818,14 +843,11 @@
 
   // We create the summary in reverse order because the frames
   // in the deoptimization translation are ordered bottom-to-top.
+  bool is_constructor = IsConstructor();
   int i = jsframe_count;
   while (i > 0) {
     opcode = static_cast<Translation::Opcode>(it.Next());
     if (opcode == Translation::JS_FRAME) {
-      // We don't inline constructor calls, so only the first, outermost
-      // frame can be a constructor frame in case of inlining.
-      bool is_constructor = (i == jsframe_count) && IsConstructor();
-
       i--;
       int ast_id = it.Next();
       int function_id = it.Next();
@@ -875,11 +897,18 @@
 
       FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
       frames->Add(summary);
+      is_constructor = false;
+    } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
+      // The next encountered JS_FRAME will be marked as a constructor call.
+      it.Skip(Translation::NumberOfOperandsFor(opcode));
+      ASSERT(!is_constructor);
+      is_constructor = true;
     } else {
       // Skip over operands to advance to the next opcode.
       it.Skip(Translation::NumberOfOperandsFor(opcode));
     }
   }
+  ASSERT(!is_constructor);
 }
 
 
diff --git a/src/frames.h b/src/frames.h
index e550f76..9071555 100644
--- a/src/frames.h
+++ b/src/frames.h
@@ -241,6 +241,11 @@
   virtual void Iterate(ObjectVisitor* v) const = 0;
   static void IteratePc(ObjectVisitor* v, Address* pc_address, Code* holder);
 
+  // Sets a callback function for return-address rewriting profilers
+  // to resolve the location of a return address to the location of the
+  // profiler's stashed return address.
+  static void SetReturnAddressLocationResolver(
+      ReturnAddressLocationResolver resolver);
 
   // Printing support.
   enum PrintMode { OVERVIEW, DETAILS };
diff --git a/src/heap.h b/src/heap.h
index 70c3146..d205b9c 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -150,7 +150,8 @@
   V(Script, empty_script, EmptyScript)                                         \
   V(Smi, real_stack_limit, RealStackLimit)                                     \
   V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames)        \
-  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)
+  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
+  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)
 
 #define ROOT_LIST(V)                                  \
   STRONG_ROOT_LIST(V)                                 \
@@ -1567,6 +1568,11 @@
     set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
   }
 
+  void SetConstructStubDeoptPCOffset(int pc_offset) {
+    ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
+    set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+  }
+
  private:
   Heap();
 
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index 6cd9998..d09e3a3 100644
--- a/src/hydrogen-instructions.cc
+++ b/src/hydrogen-instructions.cc
@@ -1923,6 +1923,11 @@
 }
 
 
+HType HAllocateObject::CalculateInferredType() {
+  return HType::JSObject();
+}
+
+
 HType HFastLiteral::CalculateInferredType() {
   // TODO(mstarzinger): Be smarter, could also be JSArray here.
   return HType::JSObject();
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index 92645a2..a0df009 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -62,6 +62,7 @@
   V(AbnormalExit)                              \
   V(AccessArgumentsAt)                         \
   V(Add)                                       \
+  V(AllocateObject)                            \
   V(ApplyArguments)                            \
   V(ArgumentsElements)                         \
   V(ArgumentsLength)                           \
@@ -1376,11 +1377,13 @@
   HEnterInlined(Handle<JSFunction> closure,
                 int arguments_count,
                 FunctionLiteral* function,
-                CallKind call_kind)
+                CallKind call_kind,
+                bool is_construct)
       : closure_(closure),
         arguments_count_(arguments_count),
         function_(function),
-        call_kind_(call_kind) {
+        call_kind_(call_kind),
+        is_construct_(is_construct) {
   }
 
   virtual void PrintDataTo(StringStream* stream);
@@ -1389,6 +1392,7 @@
   int arguments_count() const { return arguments_count_; }
   FunctionLiteral* function() const { return function_; }
   CallKind call_kind() const { return call_kind_; }
+  bool is_construct() const { return is_construct_; }
 
   virtual Representation RequiredInputRepresentation(int index) {
     return Representation::None();
@@ -1401,6 +1405,7 @@
   int arguments_count_;
   FunctionLiteral* function_;
   CallKind call_kind_;
+  bool is_construct_;
 };
 
 
@@ -4358,6 +4363,29 @@
 };
 
 
+class HAllocateObject: public HTemplateInstruction<1> {
+ public:
+  HAllocateObject(HValue* context, Handle<JSFunction> constructor)
+      : constructor_(constructor) {
+    SetOperandAt(0, context);
+    set_representation(Representation::Tagged());
+  }
+
+  HValue* context() { return OperandAt(0); }
+  Handle<JSFunction> constructor() { return constructor_; }
+
+  virtual Representation RequiredInputRepresentation(int index) {
+    return Representation::Tagged();
+  }
+  virtual HType CalculateInferredType();
+
+  DECLARE_CONCRETE_INSTRUCTION(AllocateObject)
+
+ private:
+  Handle<JSFunction> constructor_;
+};
+
+
 template <int V>
 class HMaterializedLiteral: public HTemplateInstruction<V> {
  public:
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 4908586..4307a0d 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -600,7 +600,7 @@
 HGraphBuilder::HGraphBuilder(CompilationInfo* info,
                              TypeFeedbackOracle* oracle)
     : function_state_(NULL),
-      initial_function_state_(this, info, oracle, false),
+      initial_function_state_(this, info, oracle, NORMAL_RETURN),
       ast_context_(NULL),
       break_scope_(NULL),
       graph_(NULL),
@@ -2156,12 +2156,12 @@
 FunctionState::FunctionState(HGraphBuilder* owner,
                              CompilationInfo* info,
                              TypeFeedbackOracle* oracle,
-                             bool drop_extra)
+                             ReturnHandlingFlag return_handling)
     : owner_(owner),
       compilation_info_(info),
       oracle_(oracle),
       call_context_(NULL),
-      drop_extra_(drop_extra),
+      return_handling_(return_handling),
       function_return_(NULL),
       test_context_(NULL),
       outer_(owner->function_state()) {
@@ -2204,7 +2204,7 @@
       for_typeof_(false) {
   owner->set_ast_context(this);  // Push.
 #ifdef DEBUG
-  ASSERT(!owner->environment()->is_arguments_adaptor());
+  ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
   original_length_ = owner->environment()->length();
 #endif
 }
@@ -2219,7 +2219,7 @@
   ASSERT(owner()->HasStackOverflow() ||
          owner()->current_block() == NULL ||
          (owner()->environment()->length() == original_length_ &&
-          !owner()->environment()->is_arguments_adaptor()));
+          owner()->environment()->frame_type() == JS_FUNCTION));
 }
 
 
@@ -2227,7 +2227,7 @@
   ASSERT(owner()->HasStackOverflow() ||
          owner()->current_block() == NULL ||
          (owner()->environment()->length() == original_length_ + 1 &&
-          !owner()->environment()->is_arguments_adaptor()));
+          owner()->environment()->frame_type() == JS_FUNCTION));
 }
 
 
@@ -2590,8 +2590,8 @@
 }
 
 
-template <int V>
-HInstruction* HGraphBuilder::PreProcessCall(HCall<V>* call) {
+template <class Instruction>
+HInstruction* HGraphBuilder::PreProcessCall(Instruction* call) {
   int count = call->argument_count();
   ZoneList<HValue*> arguments(count);
   for (int i = 0; i < count; ++i) {
@@ -2819,7 +2819,38 @@
     CHECK_ALIVE(VisitForValue(stmt->expression()));
     HValue* result = environment()->Pop();
     current_block()->FinishExit(new(zone()) HReturn(result));
-    set_current_block(NULL);
+  } else if (function_state()->is_construct()) {
+    // Return from an inlined construct call.  In a test context the return
+    // value will always evaluate to true; in a value context the return value
+    // needs to be a JSObject.
+    if (context->IsTest()) {
+      TestContext* test = TestContext::cast(context);
+      CHECK_ALIVE(VisitForEffect(stmt->expression()));
+      current_block()->Goto(test->if_true(), function_state()->drop_extra());
+    } else if (context->IsEffect()) {
+      CHECK_ALIVE(VisitForEffect(stmt->expression()));
+      current_block()->Goto(function_return(), function_state()->drop_extra());
+    } else {
+      ASSERT(context->IsValue());
+      CHECK_ALIVE(VisitForValue(stmt->expression()));
+      HValue* return_value = Pop();
+      HValue* receiver = environment()->Lookup(0);
+      HHasInstanceTypeAndBranch* typecheck =
+          new(zone()) HHasInstanceTypeAndBranch(return_value,
+                                                FIRST_SPEC_OBJECT_TYPE,
+                                                LAST_SPEC_OBJECT_TYPE);
+      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
+      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
+      typecheck->SetSuccessorAt(0, if_spec_object);
+      typecheck->SetSuccessorAt(1, not_spec_object);
+      current_block()->Finish(typecheck);
+      if_spec_object->AddLeaveInlined(return_value,
+                                      function_return(),
+                                      function_state()->drop_extra());
+      not_spec_object->AddLeaveInlined(receiver,
+                                       function_return(),
+                                       function_state()->drop_extra());
+    }
   } else {
     // Return from an inlined function, visit the subexpression in the
     // expression context of the call.
@@ -2834,13 +2865,13 @@
     } else {
       ASSERT(context->IsValue());
       CHECK_ALIVE(VisitForValue(stmt->expression()));
-      HValue* return_value = environment()->Pop();
+      HValue* return_value = Pop();
       current_block()->AddLeaveInlined(return_value,
                                        function_return(),
                                        function_state()->drop_extra());
     }
-    set_current_block(NULL);
   }
+  set_current_block(NULL);
 }
 
 
@@ -3075,7 +3106,6 @@
     }
   }
 
-
   AddSimulate(osr_entry_id);
   AddInstruction(new(zone()) HOsrEntry(osr_entry_id));
   HContext* context = new(zone()) HContext;
@@ -3256,10 +3286,8 @@
   CHECK_ALIVE(VisitForValue(stmt->enumerable()));
   HValue* enumerable = Top();  // Leave enumerable at the top.
 
-  HValue* context = environment()->LookupContext();
-
   HInstruction* map = AddInstruction(new(zone()) HForInPrepareMap(
-      context, enumerable));
+      environment()->LookupContext(), enumerable));
   AddSimulate(stmt->PrepareId());
 
   HInstruction* array = AddInstruction(
@@ -3336,9 +3364,11 @@
     set_current_block(body_exit);
 
     HValue* current_index = Pop();
-    PushAndAdd(
-        new(zone()) HAdd(context, current_index, graph()->GetConstant1()));
-
+    HInstruction* new_index = new(zone()) HAdd(environment()->LookupContext(),
+                                               current_index,
+                                               graph()->GetConstant1());
+    new_index->AssumeRepresentation(Representation::Integer32());
+    PushAndAdd(new_index);
     body_exit = current_block();
   }
 
@@ -5002,7 +5032,7 @@
         PrintF("Trying to inline the polymorphic call to %s\n",
                *name->ToCString());
       }
-      if (FLAG_polymorphic_inlining && TryInline(expr)) {
+      if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
         // Trying to inline will signal that we should bailout from the
         // entire compilation by setting stack overflow on the visitor.
         if (HasStackOverflow()) return;
@@ -5072,19 +5102,18 @@
 }
 
 
-bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
+bool HGraphBuilder::TryInline(CallKind call_kind,
+                              Handle<JSFunction> target,
+                              ZoneList<Expression*>* arguments,
+                              HValue* receiver,
+                              int ast_id,
+                              int return_id,
+                              ReturnHandlingFlag return_handling) {
   if (!FLAG_use_inlining) return false;
 
-  // The function call we are inlining is a method call if the call
-  // is a property call.
-  CallKind call_kind = (expr->expression()->AsProperty() == NULL)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-
   // Precondition: call is monomorphic and we have found a target with the
   // appropriate arity.
   Handle<JSFunction> caller = info()->closure();
-  Handle<JSFunction> target = expr->target();
   Handle<SharedFunctionInfo> target_shared(target->shared());
 
   // Do a quick check on source code length to avoid parsing large
@@ -5132,7 +5161,7 @@
       TraceInline(target, caller, "inline depth limit reached");
       return false;
     }
-    if (!env->outer()->is_arguments_adaptor()) {
+    if (env->outer()->frame_type() == JS_FUNCTION) {
       current_level++;
     }
     env = env->outer();
@@ -5240,16 +5269,17 @@
       isolate());
   // The function state is new-allocated because we need to delete it
   // in two different places.
-  FunctionState* target_state =
-      new FunctionState(this, &target_info, &target_oracle, drop_extra);
+  FunctionState* target_state = new FunctionState(
+      this, &target_info, &target_oracle, return_handling);
 
   HConstant* undefined = graph()->GetConstantUndefined();
   HEnvironment* inner_env =
       environment()->CopyForInlining(target,
-                                     expr->arguments()->length(),
+                                     arguments->length(),
                                      function,
                                      undefined,
-                                     call_kind);
+                                     call_kind,
+                                     function_state()->is_construct());
 #ifdef V8_TARGET_ARCH_IA32
   // IA32 only, overwrite the caller's context in the deoptimization
   // environment with the correct one.
@@ -5263,12 +5293,13 @@
 #endif
   HBasicBlock* body_entry = CreateBasicBlock(inner_env);
   current_block()->Goto(body_entry);
-  body_entry->SetJoinId(expr->ReturnId());
+  body_entry->SetJoinId(return_id);
   set_current_block(body_entry);
   AddInstruction(new(zone()) HEnterInlined(target,
-                                           expr->arguments()->length(),
+                                           arguments->length(),
                                            function,
-                                           call_kind));
+                                           call_kind,
+                                           function_state()->is_construct()));
   VisitDeclarations(target_info.scope()->declarations());
   VisitStatements(function->body());
   if (HasStackOverflow()) {
@@ -5287,32 +5318,27 @@
   TraceInline(target, caller, NULL);
 
   if (current_block() != NULL) {
-    // Add a return of undefined if control can fall off the body.  In a
-    // test context, undefined is false.
-    if (inlined_test_context() == NULL) {
+    // Add default return value (i.e. undefined for normal calls or the newly
+    // allocated receiver for construct calls) if control can fall off the
+    // body.  In a test context, undefined is false and any JSObject is true.
+    if (call_context()->IsValue()) {
       ASSERT(function_return() != NULL);
-      ASSERT(call_context()->IsEffect() || call_context()->IsValue());
-      if (call_context()->IsEffect()) {
-        current_block()->Goto(function_return(), drop_extra);
-      } else {
-        current_block()->AddLeaveInlined(undefined,
-                                         function_return(),
-                                         drop_extra);
-      }
+      HValue* return_value = function_state()->is_construct()
+          ? receiver
+          : undefined;
+      current_block()->AddLeaveInlined(return_value,
+                                       function_return(),
+                                       function_state()->drop_extra());
+    } else if (call_context()->IsEffect()) {
+      ASSERT(function_return() != NULL);
+      current_block()->Goto(function_return(), function_state()->drop_extra());
     } else {
-      // The graph builder assumes control can reach both branches of a
-      // test, so we materialize the undefined value and test it rather than
-      // simply jumping to the false target.
-      //
-      // TODO(3168478): refactor to avoid this.
       ASSERT(call_context()->IsTest());
-      HBasicBlock* empty_true = graph()->CreateBasicBlock();
-      HBasicBlock* empty_false = graph()->CreateBasicBlock();
-      HBranch* test = new(zone()) HBranch(undefined, empty_true, empty_false);
-      current_block()->Finish(test);
-
-      empty_true->Goto(inlined_test_context()->if_true(), drop_extra);
-      empty_false->Goto(inlined_test_context()->if_false(), drop_extra);
+      ASSERT(inlined_test_context() != NULL);
+      HBasicBlock* target = function_state()->is_construct()
+          ? inlined_test_context()->if_true()
+          : inlined_test_context()->if_false();
+      current_block()->Goto(target, function_state()->drop_extra());
     }
   }
 
@@ -5328,12 +5354,12 @@
 
     // Forward to the real test context.
     if (if_true->HasPredecessor()) {
-      if_true->SetJoinId(expr->id());
+      if_true->SetJoinId(ast_id);
       HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
       if_true->Goto(true_target, function_state()->drop_extra());
     }
     if (if_false->HasPredecessor()) {
-      if_false->SetJoinId(expr->id());
+      if_false->SetJoinId(ast_id);
       HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
       if_false->Goto(false_target, function_state()->drop_extra());
     }
@@ -5341,7 +5367,7 @@
     return true;
 
   } else if (function_return()->HasPredecessor()) {
-    function_return()->SetJoinId(expr->id());
+    function_return()->SetJoinId(ast_id);
     set_current_block(function_return());
   } else {
     set_current_block(NULL);
@@ -5351,6 +5377,34 @@
 }
 
 
+bool HGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) {
+  // The function call we are inlining is a method call if the call
+  // is a property call.
+  CallKind call_kind = (expr->expression()->AsProperty() == NULL)
+      ? CALL_AS_FUNCTION
+      : CALL_AS_METHOD;
+
+  return TryInline(call_kind,
+                   expr->target(),
+                   expr->arguments(),
+                   NULL,
+                   expr->id(),
+                   expr->ReturnId(),
+                   drop_extra ? DROP_EXTRA_ON_RETURN : NORMAL_RETURN);
+}
+
+
+bool HGraphBuilder::TryInlineConstruct(CallNew* expr, HValue* receiver) {
+  return TryInline(CALL_AS_FUNCTION,
+                   expr->target(),
+                   expr->arguments(),
+                   receiver,
+                   expr->id(),
+                   expr->ReturnId(),
+                   CONSTRUCT_CALL_RETURN);
+}
+
+
 bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
   if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
   BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
@@ -5680,7 +5734,7 @@
       } else {
         AddCheckConstantFunction(expr, receiver, receiver_map, true);
 
-        if (TryInline(expr)) return;
+        if (TryInlineCall(expr)) return;
         call = PreProcessCall(
             new(zone()) HCallConstantFunction(expr->target(),
                                               argument_count));
@@ -5744,7 +5798,7 @@
           }
           return;
         }
-        if (TryInline(expr)) return;
+        if (TryInlineCall(expr)) return;
         call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
                                                            argument_count));
       } else {
@@ -5780,7 +5834,7 @@
         return;
       }
 
-      if (TryInline(expr, true)) {   // Drop function from environment.
+      if (TryInlineCall(expr, true)) {   // Drop function from environment.
         return;
       } else {
         call = PreProcessCall(new(zone()) HInvokeFunction(context,
@@ -5810,25 +5864,64 @@
 }
 
 
+// Checks whether allocation using the given constructor can be inlined.
+static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
+  return constructor->has_initial_map() &&
+      constructor->initial_map()->instance_type() == JS_OBJECT_TYPE;
+}
+
+
 void HGraphBuilder::VisitCallNew(CallNew* expr) {
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
-  // The constructor function is also used as the receiver argument to the
-  // JS construct call builtin.
-  HValue* constructor = NULL;
-  CHECK_ALIVE(constructor = VisitArgument(expr->expression()));
-  CHECK_ALIVE(VisitArgumentList(expr->arguments()));
-
+  expr->RecordTypeFeedback(oracle());
+  int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
   HValue* context = environment()->LookupContext();
 
-  // The constructor is both an operand to the instruction and an argument
-  // to the construct call.
-  int arg_count = expr->arguments()->length() + 1;  // Plus constructor.
-  HCallNew* call = new(zone()) HCallNew(context, constructor, arg_count);
-  call->set_position(expr->position());
-  Drop(arg_count);
-  return ast_context()->ReturnInstruction(call, expr->id());
+  if (FLAG_inline_construct &&
+      expr->IsMonomorphic() &&
+      IsAllocationInlineable(expr->target())) {
+    // The constructor function is on the stack in the unoptimized code
+    // during evaluation of the arguments.
+    CHECK_ALIVE(VisitForValue(expr->expression()));
+    HValue* function = Top();
+    CHECK_ALIVE(VisitExpressions(expr->arguments()));
+    Handle<JSFunction> constructor = expr->target();
+    AddInstruction(new(zone()) HCheckFunction(function, constructor));
+
+    // Replace the constructor function with a newly allocated receiver.
+    HInstruction* receiver = new(zone()) HAllocateObject(context, constructor);
+    // Index of the receiver from the top of the expression stack.
+    const int receiver_index = argument_count - 1;
+    AddInstruction(receiver);
+    ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
+    environment()->SetExpressionStackAt(receiver_index, receiver);
+
+    if (TryInlineConstruct(expr, receiver)) return;
+
+    // TODO(mstarzinger): For now we remove the previous HAllocateObject and
+    // add HPushArgument for the arguments in case inlining failed.  What we
+    // actually should do is emit HInvokeFunction on the constructor instead
+    // of using HCallNew as a fallback.
+    receiver->DeleteAndReplaceWith(NULL);
+    environment()->SetExpressionStackAt(receiver_index, function);
+    HInstruction* call = PreProcessCall(
+        new(zone()) HCallNew(context, function, argument_count));
+    call->set_position(expr->position());
+    return ast_context()->ReturnInstruction(call, expr->id());
+  } else {
+    // The constructor function is both an operand to the instruction and an
+    // argument to the construct call.
+    HValue* constructor = NULL;
+    CHECK_ALIVE(constructor = VisitArgument(expr->expression()));
+    CHECK_ALIVE(VisitArgumentList(expr->arguments()));
+    HInstruction* call =
+        new(zone()) HCallNew(context, constructor, argument_count);
+    Drop(argument_count);
+    call->set_position(expr->position());
+    return ast_context()->ReturnInstruction(call, expr->id());
+  }
 }
 
 
@@ -6953,10 +7046,11 @@
 void HGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 0);
   if (function_state()->outer() != NULL) {
-    // We are generating graph for inlined function. Currently
-    // constructor inlining is not supported and we can just return
-    // false from %_IsConstructCall().
-    return ast_context()->ReturnValue(graph()->GetConstantFalse());
+    // We are generating graph for inlined function.
+    HValue* value = function_state()->is_construct()
+        ? graph()->GetConstantTrue()
+        : graph()->GetConstantFalse();
+    return ast_context()->ReturnValue(value);
   } else {
     return ast_context()->ReturnControl(new(zone()) HIsConstructCallAndBranch,
                                         call->id());
@@ -7344,14 +7438,14 @@
     : closure_(closure),
       values_(0),
       assigned_variables_(4),
+      frame_type_(JS_FUNCTION),
       parameter_count_(0),
       specials_count_(1),
       local_count_(0),
       outer_(outer),
       pop_count_(0),
       push_count_(0),
-      ast_id_(AstNode::kNoNumber),
-      arguments_adaptor_(false) {
+      ast_id_(AstNode::kNoNumber) {
   Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
 }
 
@@ -7359,31 +7453,32 @@
 HEnvironment::HEnvironment(const HEnvironment* other)
     : values_(0),
       assigned_variables_(0),
+      frame_type_(JS_FUNCTION),
       parameter_count_(0),
       specials_count_(1),
       local_count_(0),
       outer_(NULL),
       pop_count_(0),
       push_count_(0),
-      ast_id_(other->ast_id()),
-      arguments_adaptor_(false) {
+      ast_id_(other->ast_id()) {
   Initialize(other);
 }
 
 
 HEnvironment::HEnvironment(HEnvironment* outer,
                            Handle<JSFunction> closure,
+                           FrameType frame_type,
                            int arguments)
     : closure_(closure),
       values_(arguments),
       assigned_variables_(0),
+      frame_type_(frame_type),
       parameter_count_(arguments),
       local_count_(0),
       outer_(outer),
       pop_count_(0),
       push_count_(0),
-      ast_id_(AstNode::kNoNumber),
-      arguments_adaptor_(true) {
+      ast_id_(AstNode::kNoNumber) {
 }
 
 
@@ -7404,13 +7499,13 @@
   closure_ = other->closure();
   values_.AddAll(other->values_);
   assigned_variables_.AddAll(other->assigned_variables_);
+  frame_type_ = other->frame_type_;
   parameter_count_ = other->parameter_count_;
   local_count_ = other->local_count_;
   if (other->outer_ != NULL) outer_ = other->outer_->Copy();  // Deep copy.
   pop_count_ = other->pop_count_;
   push_count_ = other->push_count_;
   ast_id_ = other->ast_id_;
-  arguments_adaptor_ = other->arguments_adaptor_;
 }
 
 
@@ -7511,13 +7606,28 @@
 }
 
 
+HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
+                                                  Handle<JSFunction> target,
+                                                  FrameType frame_type,
+                                                  int arguments) const {
+  HEnvironment* new_env = new(closure()->GetIsolate()->zone())
+      HEnvironment(outer, target, frame_type, arguments + 1);
+  for (int i = 0; i <= arguments; ++i) {  // Include receiver.
+    new_env->Push(ExpressionStackAt(arguments - i));
+  }
+  new_env->ClearHistory();
+  return new_env;
+}
+
+
 HEnvironment* HEnvironment::CopyForInlining(
     Handle<JSFunction> target,
     int arguments,
     FunctionLiteral* function,
     HConstant* undefined,
-    CallKind call_kind) const {
-  ASSERT(!is_arguments_adaptor());
+    CallKind call_kind,
+    bool is_construct) const {
+  ASSERT(frame_type() == JS_FUNCTION);
 
   Zone* zone = closure()->GetIsolate()->zone();
 
@@ -7528,13 +7638,16 @@
   outer->Drop(arguments + 1);  // Including receiver.
   outer->ClearHistory();
 
+  if (is_construct) {
+    // Create artificial constructor stub environment.  The receiver should
+    // actually be the constructor function, but we pass the newly allocated
+    // object instead; DoComputeConstructStubFrame() relies on that.
+    outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
+  }
+
   if (arity != arguments) {
     // Create artificial arguments adaptation environment.
-    outer = new(zone) HEnvironment(outer, target, arguments + 1);
-    for (int i = 0; i <= arguments; ++i) {  // Include receiver.
-      outer->Push(ExpressionStackAt(arguments - i));
-    }
-    outer->ClearHistory();
+    outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
   }
 
   HEnvironment* inner =
diff --git a/src/hydrogen.h b/src/hydrogen.h
index 65aa346..50c76cb 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -361,19 +361,19 @@
 Zone* HBasicBlock::zone() { return graph_->zone(); }
 
 
+// Type of stack frame an environment might refer to.
+enum FrameType { JS_FUNCTION, JS_CONSTRUCT, ARGUMENTS_ADAPTOR };
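A small illustrative sketch (not part of the patch) of how these frame types nest once CopyForInlining() above has inlined a construct call; the array and its name are hypothetical, only the ordering is taken from the code:

    // Environments created for an inlined 'new F(...)' whose call-site
    // argument count differs from F's declared arity, listed from the
    // caller inwards.
    static const FrameType kInlinedConstructChain[] = {
      JS_FUNCTION,        // the caller's own environment
      JS_CONSTRUCT,       // construct stub environment; its receiver slot
                          // holds the newly allocated object
      ARGUMENTS_ADAPTOR,  // present only when arity != argument count
      JS_FUNCTION         // the inlined constructor body itself
    };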
+
+
 class HEnvironment: public ZoneObject {
  public:
   HEnvironment(HEnvironment* outer,
                Scope* scope,
                Handle<JSFunction> closure);
 
-  bool is_arguments_adaptor() const {
-    return arguments_adaptor_;
-  }
-
   HEnvironment* DiscardInlined(bool drop_extra) {
-    HEnvironment* outer = outer_->is_arguments_adaptor() ?
-        outer_->outer_ : outer_;
+    HEnvironment* outer = outer_;
+    while (outer->frame_type() != JS_FUNCTION) outer = outer->outer_;
     if (drop_extra) outer->Drop(1);
     return outer;
   }
@@ -384,6 +384,7 @@
   const ZoneList<int>* assigned_variables() const {
     return &assigned_variables_;
   }
+  FrameType frame_type() const { return frame_type_; }
   int parameter_count() const { return parameter_count_; }
   int specials_count() const { return specials_count_; }
   int local_count() const { return local_count_; }
@@ -469,7 +470,8 @@
                                 int arguments,
                                 FunctionLiteral* function,
                                 HConstant* undefined,
-                                CallKind call_kind) const;
+                                CallKind call_kind,
+                                bool is_construct) const;
 
   void AddIncomingEdge(HBasicBlock* block, HEnvironment* other);
 
@@ -490,9 +492,17 @@
  private:
   explicit HEnvironment(const HEnvironment* other);
 
-  // Create an argument adaptor environment.
-  HEnvironment(HEnvironment* outer, Handle<JSFunction> closure, int arguments);
+  HEnvironment(HEnvironment* outer,
+               Handle<JSFunction> closure,
+               FrameType frame_type,
+               int arguments);
 
+  // Create an artificial stub environment (e.g. for argument adaptor or
+  // constructor stub).
+  HEnvironment* CreateStubEnvironment(HEnvironment* outer,
+                                      Handle<JSFunction> target,
+                                      FrameType frame_type,
+                                      int arguments) const;
 
   // True if index is included in the expression stack part of the environment.
   bool HasExpressionAt(int index) const;
@@ -515,6 +525,7 @@
   // Value array [parameters] [specials] [locals] [temporaries].
   ZoneList<HValue*> values_;
   ZoneList<int> assigned_variables_;
+  FrameType frame_type_;
   int parameter_count_;
   int specials_count_;
   int local_count_;
@@ -522,7 +533,6 @@
   int pop_count_;
   int push_count_;
   int ast_id_;
-  bool arguments_adaptor_;
 };
 
 
@@ -650,18 +660,26 @@
 };
 
 
+enum ReturnHandlingFlag {
+  NORMAL_RETURN,
+  DROP_EXTRA_ON_RETURN,
+  CONSTRUCT_CALL_RETURN
+};
+
+
 class FunctionState {
  public:
   FunctionState(HGraphBuilder* owner,
                 CompilationInfo* info,
                 TypeFeedbackOracle* oracle,
-                bool drop_extra);
+                ReturnHandlingFlag return_handling);
   ~FunctionState();
 
   CompilationInfo* compilation_info() { return compilation_info_; }
   TypeFeedbackOracle* oracle() { return oracle_; }
   AstContext* call_context() { return call_context_; }
-  bool drop_extra() { return drop_extra_; }
+  bool drop_extra() { return return_handling_ == DROP_EXTRA_ON_RETURN; }
+  bool is_construct() { return return_handling_ == CONSTRUCT_CALL_RETURN; }
   HBasicBlock* function_return() { return function_return_; }
   TestContext* test_context() { return test_context_; }
   void ClearInlinedTestContext() {
@@ -681,11 +699,13 @@
   // inlined. NULL when not inlining.
   AstContext* call_context_;
 
-  // Indicate if we have to drop an extra value from the environment on
-  // return from inlined functions.
-  bool drop_extra_;
+  // Indicate whether we have to perform special handling on return from
+  // inlined functions.
+  // - DROP_EXTRA_ON_RETURN: Drop an extra value from the environment.
+  // - CONSTRUCT_CALL_RETURN: Either use allocated receiver or return value.
+  ReturnHandlingFlag return_handling_;
 
-  // When inlining in an effect of value context, this is the return block.
+  // When inlining in an effect or value context, this is the return block.
   // It is NULL otherwise.  When inlining in a test context, there are a
   // pair of return blocks in the context.  When not inlining, there is no
   // local return point.
@@ -825,7 +845,6 @@
   CompilationInfo* info() const {
     return function_state()->compilation_info();
   }
-
   AstContext* call_context() const {
     return function_state()->call_context();
   }
@@ -922,7 +941,7 @@
 
   // Remove the arguments from the bailout environment and emit instructions
   // to push them as outgoing parameters.
-  template <int V> HInstruction* PreProcessCall(HCall<V>* call);
+  template <class Instruction> HInstruction* PreProcessCall(Instruction* call);
 
   void TraceRepresentation(Token::Value op,
                            TypeInfo info,
@@ -954,11 +973,20 @@
   // Try to optimize fun.apply(receiver, arguments) pattern.
   bool TryCallApply(Call* expr);
 
-  bool TryInline(Call* expr, bool drop_extra = false);
+  bool TryInline(CallKind call_kind,
+                 Handle<JSFunction> target,
+                 ZoneList<Expression*>* arguments,
+                 HValue* receiver,
+                 int ast_id,
+                 int return_id,
+                 ReturnHandlingFlag return_handling);
+
+  bool TryInlineCall(Call* expr, bool drop_extra = false);
+  bool TryInlineConstruct(CallNew* expr, HValue* receiver);
   bool TryInlineBuiltinMethodCall(Call* expr,
-                                HValue* receiver,
-                                Handle<Map> receiver_map,
-                                CheckType check_type);
+                                  HValue* receiver,
+                                  Handle<Map> receiver_map,
+                                  CheckType check_type);
   bool TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra);
 
   // If --trace-inlining, print a line of the inlining trace.  Inlining
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index fdf21e5..a5d42cf 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -324,6 +324,11 @@
                         NullCallWrapper(), CALL_AS_METHOD);
     }
 
+    // Store offset of return address for deoptimizer.
+    if (!is_api_function && !count_constructions) {
+      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+    }
+
     // Restore context from the frame.
     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
 
@@ -1639,7 +1644,9 @@
   __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
   __ call(edx);
 
+  // Store offset of return address for deoptimizer.
   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
   // Leave frame and return.
   LeaveArgumentsAdaptorFrame(masm);
   __ ret(0);
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 2d078cd..864e76c 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -6823,7 +6823,7 @@
   // not equal to the name and kProbes-th slot is not used (its name is the
   // undefined value), it guarantees the hash table doesn't contain the
   // property. It's true even if some slots represent deleted properties
-  // (their names are the null value).
+  // (their names are the hole value).
   for (int i = 0; i < kInlinedProbes; i++) {
     // Compute the masked index: (hash + i + i * i) & mask.
     Register index = r0;
@@ -6849,11 +6849,17 @@
     __ cmp(entity_name, Handle<String>(name));
     __ j(equal, miss);
 
+    Label the_hole;
+    // Check for the hole and skip.
+    __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
+    __ j(equal, &the_hole, Label::kNear);
+
     // Check if the entry name is not a symbol.
     __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
     __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
               kIsSymbolMask);
     __ j(zero, miss);
+    __ bind(&the_hole);
   }
 
   StringDictionaryLookupStub stub(properties,
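The stub change above makes the inlined negative lookup skip dictionary slots whose key is the hole (a deleted entry) instead of falling through to the non-symbol check. A standalone sketch of that probe loop under simplified assumptions (plain integers instead of tagged heap values; the sentinel constants and the Entry type are invented for illustration):

    #include <cstddef>
    #include <cstdint>

    // Simplified stand-ins for the dictionary's sentinel keys.
    static const uintptr_t kUndefinedSentinel = 0;  // slot was never used
    static const uintptr_t kHoleSentinel = 1;       // deleted entry

    struct Entry {
      uintptr_t key;
      bool key_is_symbol;
    };

    // Returns true when the probes prove 'name' is absent (negative lookup
    // succeeds); false when the stub would fall back to the slow path.
    bool NegativeLookup(const Entry* entries, size_t capacity, size_t hash,
                        uintptr_t name, int inlined_probes) {
      size_t mask = capacity - 1;  // capacity is a power of two
      for (int i = 0; i < inlined_probes; ++i) {
        // Compute the masked index: (hash + i + i * i) & mask.
        const Entry& e = entries[(hash + i + i * i) & mask];
        if (e.key == kUndefinedSentinel) return true;  // free slot: not present
        if (e.key == name) return false;               // found the name: miss
        if (e.key == kHoleSentinel) continue;          // deleted entry: skip it
        if (!e.key_is_symbol) return false;            // non-symbol key: give up
      }
      return false;  // inconclusive after the inlined probes
    }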
diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc
index 0c552d7..67291c3 100644
--- a/src/ia32/deoptimizer-ia32.cc
+++ b/src/ia32/deoptimizer-ia32.cc
@@ -467,7 +467,6 @@
   }
 
   unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
-  unsigned input_frame_size = input_->GetFrameSize();
   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
 
   // Allocate and store the output frame description.
@@ -489,16 +488,13 @@
   // Compute the incoming parameter translation.
   int parameter_count = height;
   unsigned output_offset = output_frame_size;
-  unsigned input_offset = input_frame_size;
   for (int i = 0; i < parameter_count; ++i) {
     output_offset -= kPointerSize;
     DoTranslateCommand(iterator, frame_index, output_offset);
   }
-  input_offset -= (parameter_count * kPointerSize);
 
   // Read caller's PC from the previous frame.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   intptr_t callers_pc = output_[frame_index - 1]->GetPc();
   output_frame->SetFrameSlot(output_offset, callers_pc);
   if (FLAG_trace_deopt) {
@@ -508,7 +504,6 @@
 
   // Read caller's FP from the previous frame, and set this frame's FP.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   intptr_t value = output_[frame_index - 1]->GetFp();
   output_frame->SetFrameSlot(output_offset, value);
   intptr_t fp_value = top_address + output_offset;
@@ -520,7 +515,6 @@
 
   // A marker value is used in place of the context.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   intptr_t context = reinterpret_cast<intptr_t>(
       Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   output_frame->SetFrameSlot(output_offset, context);
@@ -531,7 +525,6 @@
 
   // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   value = reinterpret_cast<intptr_t>(function);
   output_frame->SetFrameSlot(output_offset, value);
   if (FLAG_trace_deopt) {
@@ -541,7 +534,6 @@
 
   // Number of incoming arguments.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
   output_frame->SetFrameSlot(output_offset, value);
   if (FLAG_trace_deopt) {
@@ -561,6 +553,110 @@
 }
 
 
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+                                              int frame_index) {
+  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+  unsigned height = iterator->Next();
+  unsigned height_in_bytes = height * kPointerSize;
+  if (FLAG_trace_deopt) {
+    PrintF("  translating construct stub => height=%d\n", height_in_bytes);
+  }
+
+  unsigned fixed_frame_size = 6 * kPointerSize;
+  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+  // Allocate and store the output frame description.
+  FrameDescription* output_frame =
+      new(output_frame_size) FrameDescription(output_frame_size, function);
+  output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+  // A construct stub frame cannot be the topmost or the bottommost frame.
+  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+  ASSERT(output_[frame_index] == NULL);
+  output_[frame_index] = output_frame;
+
+  // The top address of the frame is computed from the previous
+  // frame's top and this frame's size.
+  uint32_t top_address;
+  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+  output_frame->SetTop(top_address);
+
+  // Compute the incoming parameter translation.
+  int parameter_count = height;
+  unsigned output_offset = output_frame_size;
+  for (int i = 0; i < parameter_count; ++i) {
+    output_offset -= kPointerSize;
+    DoTranslateCommand(iterator, frame_index, output_offset);
+  }
+
+  // Read caller's PC from the previous frame.
+  output_offset -= kPointerSize;
+  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+  output_frame->SetFrameSlot(output_offset, callers_pc);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+           top_address + output_offset, output_offset, callers_pc);
+  }
+
+  // Read caller's FP from the previous frame, and set this frame's FP.
+  output_offset -= kPointerSize;
+  intptr_t value = output_[frame_index - 1]->GetFp();
+  output_frame->SetFrameSlot(output_offset, value);
+  intptr_t fp_value = top_address + output_offset;
+  output_frame->SetFp(fp_value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+           fp_value, output_offset, value);
+  }
+
+  // The context can be read from the previous frame.
+  output_offset -= kPointerSize;
+  value = output_[frame_index - 1]->GetContext();
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; context\n",
+           top_address + output_offset, output_offset, value);
+  }
+
+  // A marker value is used in place of the function.
+  output_offset -= kPointerSize;
+  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
+           top_address + output_offset, output_offset, value);
+  }
+
+  // Number of incoming arguments.
+  output_offset -= kPointerSize;
+  value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+           top_address + output_offset, output_offset, value, height - 1);
+  }
+
+  // The newly allocated object was passed as receiver in the artificial
+  // constructor stub environment created by HEnvironment::CopyForInlining().
+  output_offset -= kPointerSize;
+  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
+           top_address + output_offset, output_offset, value);
+  }
+
+  ASSERT(0 == output_offset);
+
+  Builtins* builtins = isolate_->builtins();
+  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+  uint32_t pc = reinterpret_cast<uint32_t>(
+      construct_stub->instruction_start() +
+      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+  output_frame->SetPc(pc);
+}
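For reference, the fixed 6 * kPointerSize portion written above consists of the following word-sized slots; the order is taken from the stores in this function, the slot names are descriptive only:

    // Fixed slots of the synthesized construct stub frame, from the highest
    // address (just below the translated arguments) down to the frame's top.
    enum ConstructStubFrameSlot {
      kCallerPcSlot = 0,    // caller's return address
      kCallerFpSlot,        // caller's frame pointer; this frame's FP points here
      kContextSlot,         // context, copied from the previous output frame
      kFunctionMarkerSlot,  // Smi marker StackFrame::CONSTRUCT instead of a function
      kArgcSlot,            // number of incoming arguments as a Smi
      kReceiverSlot,        // the newly allocated object passed as the receiver
      kFixedSlotCount       // == 6 == fixed_frame_size / kPointerSize
    };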
+
+
 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
                                    int frame_index) {
   int node_id = iterator->Next();
@@ -672,6 +768,7 @@
     value = reinterpret_cast<uint32_t>(function->context());
   }
   output_frame->SetFrameSlot(output_offset, value);
+  output_frame->SetContext(value);
   if (is_topmost) output_frame->SetRegister(esi.code(), value);
   if (FLAG_trace_deopt) {
     PrintF("    0x%08x: [top + %d] <- 0x%08x ; context\n",
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index ee14625..0b7c7fd 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -2400,6 +2400,7 @@
 
   CallConstructStub stub(flags);
   __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(eax);
 }
 
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index fec3308..d8c3972 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -394,10 +394,18 @@
 
   WriteTranslation(environment->outer(), translation);
   int closure_id = DefineDeoptimizationLiteral(environment->closure());
-  if (environment->is_arguments_adaptor()) {
-    translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
-  } else {
-    translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+  switch (environment->frame_type()) {
+    case JS_FUNCTION:
+      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+      break;
+    case JS_CONSTRUCT:
+      translation->BeginConstructStubFrame(closure_id, translation_size);
+      break;
+    case ARGUMENTS_ADAPTOR:
+      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+      break;
+    default:
+      UNREACHABLE();
   }
   for (int i = 0; i < translation_size; ++i) {
     LOperand* value = environment->values()->at(i);
@@ -550,7 +558,7 @@
     int jsframe_count = 0;
     for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
       ++frame_count;
-      if (!e->is_arguments_adaptor()) {
+      if (e->frame_type() == JS_FUNCTION) {
         ++jsframe_count;
       }
     }
@@ -4194,6 +4202,43 @@
 }
 
 
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+  class DeferredAllocateObject: public LDeferredCode {
+   public:
+    DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+        : LDeferredCode(codegen), instr_(instr) { }
+    virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+    virtual LInstruction* instr() { return instr_; }
+   private:
+    LAllocateObject* instr_;
+  };
+
+  DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+  // TODO(mstarzinger): Implement inlined version instead of jumping to
+  // deferred runtime call.
+  __ jmp(deferred->entry());
+
+  __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+  Register result = ToRegister(instr->result());
+  Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+
+  // TODO(3095996): Get rid of this. For now, we need to make the
+  // result register contain a valid pointer because it is already
+  // contained in the register pointer map.
+  __ Set(result, Immediate(0));
+
+  PushSafepointRegistersScope scope(this);
+  __ PushHeapObject(constructor);
+  CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr, instr->context());
+  __ StoreToSafepointRegisterSlot(result, eax);
+}
+
+
 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
   ASSERT(ToRegister(instr->context()).is(esi));
   Heap* heap = isolate()->heap();
diff --git a/src/ia32/lithium-codegen-ia32.h b/src/ia32/lithium-codegen-ia32.h
index d86d48c..baf2036 100644
--- a/src/ia32/lithium-codegen-ia32.h
+++ b/src/ia32/lithium-codegen-ia32.h
@@ -107,6 +107,7 @@
   void DoDeferredStackCheck(LStackCheck* instr);
   void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
   void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
+  void DoDeferredAllocateObject(LAllocateObject* instr);
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
 
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index 32704b0..16dc261 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -1005,11 +1005,12 @@
   LEnvironment* outer =
       CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
   int ast_id = hydrogen_env->ast_id();
-  ASSERT(ast_id != AstNode::kNoNumber || hydrogen_env->is_arguments_adaptor());
+  ASSERT(ast_id != AstNode::kNoNumber ||
+         hydrogen_env->frame_type() != JS_FUNCTION);
   int value_count = hydrogen_env->length();
   LEnvironment* result =
       new(zone()) LEnvironment(hydrogen_env->closure(),
-                               hydrogen_env->is_arguments_adaptor(),
+                               hydrogen_env->frame_type(),
                                ast_id,
                                hydrogen_env->parameter_count(),
                                argument_count_,
@@ -1031,7 +1032,7 @@
     result->AddValue(op, value->representation());
   }
 
-  if (!hydrogen_env->is_arguments_adaptor()) {
+  if (hydrogen_env->frame_type() == JS_FUNCTION) {
     *argument_index_accumulator = argument_index;
   }
 
@@ -2213,6 +2214,13 @@
 }
 
 
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+  LOperand* context = UseFixed(instr->context(), esi);
+  LAllocateObject* result = new(zone()) LAllocateObject(context);
+  return AssignPointerMap(DefineAsRegister(result));
+}
+
+
 LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
   LOperand* context = UseFixed(instr->context(), esi);
   return MarkAsCall(
@@ -2385,7 +2393,8 @@
                                                instr->arguments_count(),
                                                instr->function(),
                                                undefined,
-                                               instr->call_kind());
+                                               instr->call_kind(),
+                                               instr->is_construct());
   current_block_->UpdateEnvironment(inner);
   chunk_->AddInlinedClosure(instr->closure());
   return NULL;
diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h
index b879fc1..647ba6a 100644
--- a/src/ia32/lithium-ia32.h
+++ b/src/ia32/lithium-ia32.h
@@ -43,6 +43,7 @@
 #define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)    \
   V(AccessArgumentsAt)                          \
   V(AddI)                                       \
+  V(AllocateObject)                             \
   V(ApplyArguments)                             \
   V(ArgumentsElements)                          \
   V(ArgumentsLength)                            \
@@ -1995,6 +1996,19 @@
 };
 
 
+class LAllocateObject: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LAllocateObject(LOperand* context) {
+    inputs_[0] = context;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+  DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+
+  LOperand* context() { return inputs_[0]; }
+};
+
+
 class LFastLiteral: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LFastLiteral(LOperand* context) {
diff --git a/src/lithium.h b/src/lithium.h
index 474e555..ec72695 100644
--- a/src/lithium.h
+++ b/src/lithium.h
@@ -438,14 +438,14 @@
 class LEnvironment: public ZoneObject {
  public:
   LEnvironment(Handle<JSFunction> closure,
-               bool is_arguments_adaptor,
+               FrameType frame_type,
                int ast_id,
                int parameter_count,
                int argument_count,
                int value_count,
                LEnvironment* outer)
       : closure_(closure),
-        is_arguments_adaptor_(is_arguments_adaptor),
+        frame_type_(frame_type),
         arguments_stack_height_(argument_count),
         deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
         translation_index_(-1),
@@ -459,6 +459,7 @@
         outer_(outer) { }
 
   Handle<JSFunction> closure() const { return closure_; }
+  FrameType frame_type() const { return frame_type_; }
   int arguments_stack_height() const { return arguments_stack_height_; }
   int deoptimization_index() const { return deoptimization_index_; }
   int translation_index() const { return translation_index_; }
@@ -503,11 +504,9 @@
 
   void PrintTo(StringStream* stream);
 
-  bool is_arguments_adaptor() const { return is_arguments_adaptor_; }
-
  private:
   Handle<JSFunction> closure_;
-  bool is_arguments_adaptor_;
+  FrameType frame_type_;
   int arguments_stack_height_;
   int deoptimization_index_;
   int translation_index_;
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index de6fb95..6f79225 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -7070,7 +7070,7 @@
   // not equal to the name and kProbes-th slot is not used (its name is the
   // undefined value), it guarantees the hash table doesn't contain the
   // property. It's true even if some slots represent deleted properties
-  // (their names are the null value).
+  // (their names are the hole value).
   for (int i = 0; i < kInlinedProbes; i++) {
     // scratch0 points to properties hash.
     // Compute the masked index: (hash + i + i * i) & mask.
@@ -7099,9 +7099,15 @@
     __ Branch(done, eq, entity_name, Operand(tmp));
 
     if (i != kInlinedProbes - 1) {
+      // Load the hole ready for use below:
+      __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
+
       // Stop if found the property.
       __ Branch(miss, eq, entity_name, Operand(Handle<String>(name)));
 
+      Label the_hole;
+      __ Branch(&the_hole, eq, entity_name, Operand(tmp));
+
       // Check if the entry name is not a symbol.
       __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
       __ lbu(entity_name,
@@ -7109,6 +7115,8 @@
       __ And(scratch0, entity_name, Operand(kIsSymbolMask));
       __ Branch(miss, eq, scratch0, Operand(zero_reg));
 
+      __ bind(&the_hole);
+
       // Restore the properties.
       __ lw(properties,
             FieldMemOperand(receiver, JSObject::kPropertiesOffset));
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index c8239e3..d6e7a44 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -120,7 +120,7 @@
 
 
 int FullCodeGenerator::self_optimization_header_size() {
-  return 0;  // TODO(jkummerow): determine correct value.
+  return 11 * Instruction::kInstrSize;
 }
 
 
@@ -164,7 +164,7 @@
       Handle<Code> compile_stub(
           isolate()->builtins()->builtin(Builtins::kLazyRecompile));
       __ Jump(compile_stub, RelocInfo::CODE_TARGET, eq, a3, Operand(zero_reg));
-      ASSERT(masm_->pc_offset() == self_optimization_header_size());
+      ASSERT_EQ(masm_->pc_offset(), self_optimization_header_size());
     }
   }
 
@@ -2444,6 +2444,7 @@
 
   CallConstructStub stub(flags);
   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(v0);
 }
 
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index 252e8f4..f600904 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -4222,6 +4222,44 @@
 }
 
 
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+  class DeferredAllocateObject: public LDeferredCode {
+   public:
+    DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+        : LDeferredCode(codegen), instr_(instr) { }
+    virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+    virtual LInstruction* instr() { return instr_; }
+   private:
+    LAllocateObject* instr_;
+  };
+
+  DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+  // TODO(mstarzinger): Implement inlined version instead of jumping to
+  // deferred runtime call.
+  __ jmp(deferred->entry());
+
+  __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+  Register result = ToRegister(instr->result());
+  Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+
+  // TODO(3095996): Get rid of this. For now, we need to make the
+  // result register contain a valid pointer because it is already
+  // contained in the register pointer map.
+  __ mov(result, zero_reg);
+
+  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+  __ LoadHeapObject(a0, constructor);
+  __ push(a0);
+  CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
+  __ StoreToSafepointRegisterSlot(v0, result);
+}
+
+
 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
   Heap* heap = isolate()->heap();
   ElementsKind boilerplate_elements_kind =
diff --git a/src/mips/lithium-codegen-mips.h b/src/mips/lithium-codegen-mips.h
index 513992c..9e5b983 100644
--- a/src/mips/lithium-codegen-mips.h
+++ b/src/mips/lithium-codegen-mips.h
@@ -112,6 +112,7 @@
   void DoDeferredStackCheck(LStackCheck* instr);
   void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
   void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
+  void DoDeferredAllocateObject(LAllocateObject* instr);
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
 
diff --git a/src/mips/lithium-mips.cc b/src/mips/lithium-mips.cc
index c534abc..0d78845 100644
--- a/src/mips/lithium-mips.cc
+++ b/src/mips/lithium-mips.cc
@@ -2098,6 +2098,12 @@
 }
 
 
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+  LAllocateObject* result = new LAllocateObject();
+  return AssignPointerMap(DefineAsRegister(result));
+}
+
+
 LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
   return MarkAsCall(DefineFixed(new LFastLiteral, v0), instr);
 }
diff --git a/src/mips/lithium-mips.h b/src/mips/lithium-mips.h
index f4c3c21..f41689e 100644
--- a/src/mips/lithium-mips.h
+++ b/src/mips/lithium-mips.h
@@ -49,6 +49,7 @@
 #define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)    \
   V(AccessArgumentsAt)                          \
   V(AddI)                                       \
+  V(AllocateObject)                             \
   V(ApplyArguments)                             \
   V(ArgumentsElements)                          \
   V(ArgumentsLength)                            \
@@ -1917,6 +1918,13 @@
 };
 
 
+class LAllocateObject: public LTemplateInstruction<1, 0, 0> {
+ public:
+  DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+  DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+};
+
+
 class LFastLiteral: public LTemplateInstruction<1, 0, 0> {
  public:
   DECLARE_CONCRETE_INSTRUCTION(FastLiteral, "fast-literal")
diff --git a/src/objects-inl.h b/src/objects-inl.h
index bc6217b..176be9b 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -1988,7 +1988,8 @@
 
 
 bool DescriptorArray::IsProperty(int descriptor_number) {
-  return IsRealProperty(GetType(descriptor_number));
+  Entry entry(this, descriptor_number);
+  return IsPropertyDescriptor(&entry);
 }
 
 
diff --git a/src/objects.cc b/src/objects.cc
index 8941151..538986d 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -8257,9 +8257,15 @@
           break;
         }
 
-        case Translation::ARGUMENTS_ADAPTOR_FRAME: {
+        case Translation::ARGUMENTS_ADAPTOR_FRAME:
+        case Translation::CONSTRUCT_STUB_FRAME: {
+          int function_id = iterator.Next();
+          JSFunction* function =
+              JSFunction::cast(LiteralArray()->get(function_id));
           unsigned height = iterator.Next();
-          PrintF(out, "{arguments adaptor, height=%d}", height);
+          PrintF(out, "{function=");
+          function->PrintName(out);
+          PrintF(out, ", height=%u}", height);
           break;
         }
 
diff --git a/src/objects.h b/src/objects.h
index be75fab..4e02d6f 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -2589,6 +2589,20 @@
   static const int kMaxNumberOfDescriptors = 1024 + 512;
 
  private:
+  // An entry in a DescriptorArray, represented as an (array, index) pair.
+  class Entry {
+   public:
+    inline explicit Entry(DescriptorArray* descs, int index) :
+        descs_(descs), index_(index) { }
+
+    inline PropertyType type() { return descs_->GetType(index_); }
+    inline Object* GetCallbackObject() { return descs_->GetValue(index_); }
+
+   private:
+    DescriptorArray* descs_;
+    int index_;
+  };
+
   // Conversion from descriptor number to array indices.
   static int ToKeyIndex(int descriptor_number) {
     return descriptor_number+kFirstIndex;
@@ -7919,6 +7933,10 @@
     }
   }
 
+  bool ContainsAccessor() {
+    return IsJSAccessor(getter()) || IsJSAccessor(setter());
+  }
+
 #ifdef OBJECT_PRINT
   void AccessorPairPrint(FILE* out = stdout);
 #endif
@@ -7931,6 +7949,15 @@
   static const int kSize = kSetterOffset + kPointerSize;
 
  private:
+  // Strangely enough, in addition to functions and harmony proxies, the spec
+  // requires us to consider undefined as a kind of accessor, too:
+  //    var obj = {};
+  //    Object.defineProperty(obj, "foo", {get: undefined});
+  //    assertTrue("foo" in obj);
+  bool IsJSAccessor(Object* obj) {
+    return obj->IsSpecFunction() || obj->IsUndefined();
+  }
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(AccessorPair);
 };
 
diff --git a/src/profile-generator.cc b/src/profile-generator.cc
index 156fbc7..dd2f701 100644
--- a/src/profile-generator.cc
+++ b/src/profile-generator.cc
@@ -971,7 +971,7 @@
                      int retainers_count) {
   snapshot_ = snapshot;
   type_ = type;
-  painted_ = kUnpainted;
+  painted_ = false;
   name_ = name;
   self_size_ = self_size;
   retained_size_ = 0;
@@ -1013,56 +1013,15 @@
 }
 
 
-int HeapEntry::RetainedSize(bool exact) {
-  if (exact && (retained_size_ & kExactRetainedSizeTag) == 0) {
-    CalculateExactRetainedSize();
-  }
-  return retained_size_ & (~kExactRetainedSizeTag);
-}
-
-
 Handle<HeapObject> HeapEntry::GetHeapObject() {
   return snapshot_->collection()->FindHeapObjectById(id());
 }
 
 
-template<class Visitor>
-void HeapEntry::ApplyAndPaintAllReachable(Visitor* visitor) {
-  List<HeapEntry*> list(10);
-  list.Add(this);
-  this->paint_reachable();
-  visitor->Apply(this);
-  while (!list.is_empty()) {
-    HeapEntry* entry = list.RemoveLast();
-    Vector<HeapGraphEdge> children = entry->children();
-    for (int i = 0; i < children.length(); ++i) {
-      if (children[i].type() == HeapGraphEdge::kShortcut) continue;
-      HeapEntry* child = children[i].to();
-      if (!child->painted_reachable()) {
-        list.Add(child);
-        child->paint_reachable();
-        visitor->Apply(child);
-      }
-    }
-  }
-}
-
-
-class NullClass {
- public:
-  void Apply(HeapEntry* entry) { }
-};
-
-void HeapEntry::PaintAllReachable() {
-  NullClass null;
-  ApplyAndPaintAllReachable(&null);
-}
-
-
 void HeapEntry::Print(
     const char* prefix, const char* edge_name, int max_depth, int indent) {
   OS::Print("%6d %7d @%6llu %*c %s%s: ",
-            self_size(), RetainedSize(false), id(),
+            self_size(), retained_size(), id(),
             indent, ' ', prefix, edge_name);
   if (type() != kString) {
     OS::Print("%s %.40s\n", TypeAsString(), name_);
@@ -1146,60 +1105,6 @@
 }
 
 
-class RetainedSizeCalculator {
- public:
-  RetainedSizeCalculator()
-      : retained_size_(0) {
-  }
-
-  int retained_size() const { return retained_size_; }
-
-  void Apply(HeapEntry** entry_ptr) {
-    if ((*entry_ptr)->painted_reachable()) {
-      retained_size_ += (*entry_ptr)->self_size();
-    }
-  }
-
- private:
-  int retained_size_;
-};
-
-
-void HeapEntry::CalculateExactRetainedSize() {
-  // To calculate retained size, first we paint all reachable nodes in
-  // one color, then we paint (or re-paint) all nodes reachable from
-  // other nodes with a different color. Then we sum up self sizes of
-  // nodes painted with the first color.
-  snapshot()->ClearPaint();
-  PaintAllReachable();
-
-  List<HeapEntry*> list(10);
-  HeapEntry* root = snapshot()->root();
-  if (this != root) {
-    list.Add(root);
-    root->paint_reachable_from_others();
-  }
-  while (!list.is_empty()) {
-    HeapEntry* curr = list.RemoveLast();
-    Vector<HeapGraphEdge> children = curr->children();
-    for (int i = 0; i < children.length(); ++i) {
-      if (children[i].type() == HeapGraphEdge::kShortcut) continue;
-      HeapEntry* child = children[i].to();
-      if (child != this && child->not_painted_reachable_from_others()) {
-        list.Add(child);
-        child->paint_reachable_from_others();
-      }
-    }
-  }
-
-  RetainedSizeCalculator ret_size_calc;
-  snapshot()->IterateEntries(&ret_size_calc);
-  retained_size_ = ret_size_calc.retained_size();
-  ASSERT((retained_size_ & kExactRetainedSizeTag) == 0);
-  retained_size_ |= kExactRetainedSizeTag;
-}
-
-
 // It is very important to keep objects that form a heap snapshot
 // as small as possible.
 namespace {  // Avoid littering the global namespace.
@@ -3189,7 +3094,7 @@
   if (!FillReferences()) return false;
 
   if (!SetEntriesDominators()) return false;
-  if (!ApproximateRetainedSizes()) return false;
+  if (!CalculateRetainedSizes()) return false;
 
   progress_counter_ = progress_total_;
   if (!ProgressReport(true)) return false;
@@ -3247,7 +3152,7 @@
   int current_entry = 0;
   List<HeapEntry*> nodes_to_visit;
   nodes_to_visit.Add(snapshot_->root());
-  snapshot_->root()->paint_reachable();
+  snapshot_->root()->paint();
   while (!nodes_to_visit.is_empty()) {
     HeapEntry* entry = nodes_to_visit.last();
     Vector<HeapGraphEdge> children = entry->children();
@@ -3255,9 +3160,9 @@
     for (int i = 0; i < children.length(); ++i) {
       if (children[i].type() == HeapGraphEdge::kShortcut) continue;
       HeapEntry* child = children[i].to();
-      if (!child->painted_reachable()) {
+      if (!child->painted()) {
         nodes_to_visit.Add(child);
-        child->paint_reachable();
+        child->paint();
         has_new_edges = true;
       }
     }
@@ -3293,15 +3198,13 @@
   for (int i = 0; i < root_index; ++i) (*dominators)[i] = kNoDominator;
   (*dominators)[root_index] = root_index;
 
-  // The affected array is used to mark those entries that may
-  // be affected because of dominators change among their retainers.
-  ScopedVector<bool> affected(entries_length);
-  for (int i = 0; i < entries_length; ++i) affected[i] = false;
+  // The painted flag is used to mark entries that need to be recalculated
+  // because a dominator changed among their retainers.
+  for (int i = 0; i < entries_length; ++i) entries[i]->clear_paint();
+
+  // Mark the root's direct children as affected.
   Vector<HeapGraphEdge> children = entries[root_index]->children();
-  for (int i = 0; i < children.length(); ++i) {
-    // Mark the root direct children as affected.
-    affected[children[i].to()->ordered_index()] = true;
-  }
+  for (int i = 0; i < children.length(); ++i) children[i].to()->paint();
 
   bool changed = true;
   while (changed) {
@@ -3310,10 +3213,11 @@
       // If dominator of the entry has already been set to root,
       // then it can't propagate any further.
       if ((*dominators)[i] == root_index) continue;
-      if (!affected[i]) continue;
-      affected[i] = false;
+      HeapEntry* entry = entries[i];
+      if (!entry->painted()) continue;
+      entry->clear_paint();
       int new_idom_index = kNoDominator;
-      Vector<HeapGraphEdge*> rets = entries[i]->retainers();
+      Vector<HeapGraphEdge*> rets = entry->retainers();
       for (int j = 0; j < rets.length(); ++j) {
         if (rets[j]->type() == HeapGraphEdge::kShortcut) continue;
         int ret_index = rets[j]->From()->ordered_index();
@@ -3331,9 +3235,7 @@
         (*dominators)[i] = new_idom_index;
         changed = true;
         Vector<HeapGraphEdge> children = entries[i]->children();
-        for (int j = 0; j < children.length(); ++j) {
-          affected[children[j].to()->ordered_index()] = true;
-        }
+        for (int j = 0; j < children.length(); ++j) children[j].to()->paint();
       }
     }
   }
@@ -3355,7 +3257,7 @@
 }
 
 
-bool HeapSnapshotGenerator::ApproximateRetainedSizes() {
+bool HeapSnapshotGenerator::CalculateRetainedSizes() {
   // As for the dominators tree we only know parent nodes, not
   // children, to sum up total sizes we "bubble" node's self size
   // adding it to all of its parents.
@@ -3607,7 +3509,7 @@
       GetStringId(entry->name()),
       entry->id(),
       entry->self_size(),
-      entry->RetainedSize(false),
+      entry->retained_size(),
       GetNodeId(entry->dominator()),
       children.length());
   USE(result);
diff --git a/src/profile-generator.h b/src/profile-generator.h
index 13c6b2d..22a0473 100644
--- a/src/profile-generator.h
+++ b/src/profile-generator.h
@@ -558,22 +558,9 @@
     ASSERT(entry != NULL);
     dominator_ = entry;
   }
-
-  void clear_paint() { painted_ = kUnpainted; }
-  bool painted_reachable() { return painted_ == kPainted; }
-  void paint_reachable() {
-    ASSERT(painted_ == kUnpainted);
-    painted_ = kPainted;
-  }
-  bool not_painted_reachable_from_others() {
-    return painted_ != kPaintedReachableFromOthers;
-  }
-  void paint_reachable_from_others() {
-    painted_ = kPaintedReachableFromOthers;
-  }
-  template<class Visitor>
-  void ApplyAndPaintAllReachable(Visitor* visitor);
-  void PaintAllReachable();
+  void clear_paint() { painted_ = false; }
+  bool painted() { return painted_; }
+  void paint() { painted_ = true; }
 
   void SetIndexedReference(HeapGraphEdge::Type type,
                            int child_index,
@@ -588,7 +575,6 @@
   void SetUnidirElementReference(int child_index, int index, HeapEntry* entry);
 
   int EntrySize() { return EntriesSize(1, children_count_, retainers_count_); }
-  int RetainedSize(bool exact);
 
   void Print(
       const char* prefix, const char* edge_name, int max_depth, int indent);
@@ -606,12 +592,11 @@
   HeapGraphEdge** retainers_arr() {
     return reinterpret_cast<HeapGraphEdge**>(children_arr() + children_count_);
   }
-  void CalculateExactRetainedSize();
   const char* TypeAsString();
 
-  unsigned painted_: 2;
+  unsigned painted_: 1;
   unsigned type_: 4;
-  int children_count_: 26;
+  int children_count_: 27;
   int retainers_count_;
   int self_size_;
   union {
@@ -626,13 +611,6 @@
   } id_;  // This is to avoid extra padding of 64-bit value.
   const char* name_;
 
-  // Paints used for exact retained sizes calculation.
-  static const unsigned kUnpainted = 0;
-  static const unsigned kPainted = 1;
-  static const unsigned kPaintedReachableFromOthers = 2;
-
-  static const int kExactRetainedSizeTag = 1;
-
   DISALLOW_COPY_AND_ASSIGN(HeapEntry);
 };
 
@@ -1099,9 +1077,9 @@
   bool GenerateSnapshot();
 
  private:
-  bool ApproximateRetainedSizes();
   bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
                           Vector<int>* dominators);
+  bool CalculateRetainedSizes();
   bool CountEntriesAndReferences();
   bool FillReferences();
   void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
diff --git a/src/property-details.h b/src/property-details.h
index 81f521a..c79aa96 100644
--- a/src/property-details.h
+++ b/src/property-details.h
@@ -73,26 +73,6 @@
 };
 
 
-inline bool IsRealProperty(PropertyType type) {
-  switch (type) {
-    case NORMAL:
-    case FIELD:
-    case CONSTANT_FUNCTION:
-    case CALLBACKS:
-    case HANDLER:
-    case INTERCEPTOR:
-      return true;
-    case MAP_TRANSITION:
-    case ELEMENTS_TRANSITION:
-    case CONSTANT_TRANSITION:
-    case NULL_DESCRIPTOR:
-      return false;
-  }
-  UNREACHABLE();  // keep the compiler happy
-  return false;
-}
-
-
 // PropertyDetails captures type and attributes for a property.
 // They are used both in property dictionaries and instance descriptors.
 class PropertyDetails BASE_EMBEDDED {
diff --git a/src/property.h b/src/property.h
index d5efb7f..9235c32 100644
--- a/src/property.h
+++ b/src/property.h
@@ -164,6 +164,35 @@
 };
 
 
+template <class T>
+bool IsPropertyDescriptor(T* desc) {
+  switch (desc->type()) {
+    case NORMAL:
+    case FIELD:
+    case CONSTANT_FUNCTION:
+    case HANDLER:
+    case INTERCEPTOR:
+      return true;
+    case CALLBACKS: {
+      Object* callback_object = desc->GetCallbackObject();
+      // Non-JavaScript (i.e. native) accessors are always a property;
+      // otherwise either the getter or the setter must be an accessor.  Put
+      // another way: if we only see map transitions and holes in a pair, this
+      // is not a property.
+      return (!callback_object->IsAccessorPair() ||
+              AccessorPair::cast(callback_object)->ContainsAccessor());
+    }
+    case MAP_TRANSITION:
+    case ELEMENTS_TRANSITION:
+    case CONSTANT_TRANSITION:
+    case NULL_DESCRIPTOR:
+      return false;
+  }
+  UNREACHABLE();  // keep the compiler happy
+  return false;
+}
+
+
 class LookupResult BASE_EMBEDDED {
  public:
   explicit LookupResult(Isolate* isolate)
@@ -261,10 +290,9 @@
   bool IsFound() { return lookup_type_ != NOT_FOUND; }
   bool IsHandler() { return lookup_type_ == HANDLER_TYPE; }
 
-  // Is the result is a property excluding transitions and the null
-  // descriptor?
+  // Is the result a property, excluding transitions and the null descriptor?
   bool IsProperty() {
-    return IsFound() && IsRealProperty(GetPropertyDetails().type());
+    return IsFound() && IsPropertyDescriptor(this);
   }
 
   bool IsCacheable() { return cacheable_; }
diff --git a/src/runtime.cc b/src/runtime.cc
index b377e6e..fdbe1f5 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -882,14 +882,6 @@
 }
 
 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_IsConstructCall) {
-  NoHandleAllocation ha;
-  ASSERT(args.length() == 0);
-  JavaScriptFrameIterator it(isolate);
-  return isolate->heap()->ToBoolean(it.frame()->IsConstructor());
-}
-
-
 // Recursively traverses hidden prototypes if property is not found
 static void GetOwnPropertyImplementation(JSObject* obj,
                                          String* name,
@@ -10640,6 +10632,7 @@
           frame, inlined_jsframe_index, isolate);
     }
     has_adapted_arguments_ = frame_->has_adapted_arguments();
+    is_bottommost_ = inlined_jsframe_index == 0;
     is_optimized_ = frame_->is_optimized();
   }
 
@@ -10677,6 +10670,11 @@
         ? deoptimized_frame_->GetSourcePosition()
         : frame_->LookupCode()->SourcePosition(frame_->pc());
   }
+  bool IsConstructor() {
+    return is_optimized_ && !is_bottommost_
+        ? deoptimized_frame_->HasConstructStub()
+        : frame_->IsConstructor();
+  }
 
   // To inspect all the provided arguments the frame might need to be
   // replaced with the arguments frame.
@@ -10692,6 +10690,7 @@
   DeoptimizedFrameInfo* deoptimized_frame_;
   Isolate* isolate_;
   bool is_optimized_;
+  bool is_bottommost_;
   bool has_adapted_arguments_;
 
   DISALLOW_COPY_AND_ASSIGN(FrameInspector);
@@ -10721,6 +10720,16 @@
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_IsConstructCall) {
+  NoHandleAllocation ha;
+  ASSERT(args.length() == 0);
+  JavaScriptFrameIterator it(isolate);
+  JavaScriptFrame* frame = it.frame();
+  FrameInspector frame_inspector(frame, frame->GetInlineCount() - 1, isolate);
+  return isolate->heap()->ToBoolean(frame_inspector.IsConstructor());
+}
+
+
 // Return an array with frame details
 // args[0]: number: break id
 // args[1]: number: frame index
@@ -10785,9 +10794,8 @@
   // Find source position in unoptimized code.
   int position = frame_inspector.GetSourcePosition();
 
-  // Check for constructor frame. Inlined frames cannot be construct calls.
-  bool inlined_frame = is_optimized && inlined_jsframe_index != 0;
-  bool constructor = !inlined_frame && it.frame()->IsConstructor();
+  // Check for constructor frame.
+  bool constructor = frame_inspector.IsConstructor();
 
   // Get scope info and read from it for local variable information.
   Handle<JSFunction> function(JSFunction::cast(frame_inspector.GetFunction()));
diff --git a/src/type-info.cc b/src/type-info.cc
index 2fb4e2a..81cbff3 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -256,6 +256,11 @@
 }
 
 
+Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(CallNew* expr) {
+  return Handle<JSFunction>::cast(GetInfo(expr->id()));
+}
+
+
 bool TypeFeedbackOracle::LoadIsBuiltin(Property* expr, Builtins::Name id) {
   return *GetInfo(expr->id()) ==
       isolate_->builtins()->builtin(id);
diff --git a/src/type-info.h b/src/type-info.h
index 9b8b431..19a309b 100644
--- a/src/type-info.h
+++ b/src/type-info.h
@@ -267,6 +267,7 @@
   Handle<JSObject> GetPrototypeForPrimitiveCheck(CheckType check);
 
   Handle<JSFunction> GetCallTarget(Call* expr);
+  Handle<JSFunction> GetCallNewTarget(CallNew* expr);
 
   bool LoadIsBuiltin(Property* expr, Builtins::Name id);
 
diff --git a/src/v8.cc b/src/v8.cc
index e4b37b1..003c75c 100644
--- a/src/v8.cc
+++ b/src/v8.cc
@@ -146,6 +146,12 @@
 }
 
 
+void V8::SetReturnAddressLocationResolver(
+      ReturnAddressLocationResolver resolver) {
+  StackFrame::SetReturnAddressLocationResolver(resolver);
+}
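A minimal sketch of how an embedder's return-address rewriting profiler might hook into this; the lookup helper and its bookkeeping are hypothetical, only the resolver signature and the SetReturnAddressLocationResolver entry point come from this change:

    #include <stdint.h>
    #include <v8.h>

    // Hypothetical profiler-side lookup: maps a stack slot that the profiler
    // patched to the location where it stashed the original return address.
    // Returns 0 when the slot was never rewritten.
    static uintptr_t LookupStashedReturnAddress(uintptr_t return_addr_location) {
      // ... consult the profiler's bookkeeping here ...
      return 0;
    }

    static uintptr_t ResolveReturnAddressLocation(uintptr_t return_addr_location) {
      uintptr_t stashed = LookupStashedReturnAddress(return_addr_location);
      // Hand V8 either the profiler's copy or the untouched location.
      return stashed != 0 ? stashed : return_addr_location;
    }

    int main() {
      v8::V8::SetReturnAddressLocationResolver(ResolveReturnAddressLocation);
      // ... create contexts, run scripts, sample stacks as usual ...
      return 0;
    }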
+
+
 // Used by JavaScript APIs
 uint32_t V8::Random(Context* context) {
   ASSERT(context->IsGlobalContext());
diff --git a/src/v8.h b/src/v8.h
index adfdb3e..699c5a0 100644
--- a/src/v8.h
+++ b/src/v8.h
@@ -95,6 +95,9 @@
   // Allows an entropy source to be provided for use in random number
   // generation.
   static void SetEntropySource(EntropySource source);
+  // Support for return-address rewriting profilers.
+  static void SetReturnAddressLocationResolver(
+      ReturnAddressLocationResolver resolver);
   // Random number generation support. Not cryptographically safe.
   static uint32_t Random(Context* context);
   // We use random numbers internally in memory allocation and in the
diff --git a/src/version.cc b/src/version.cc
index f0af68d..fbd6d93 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     9
-#define BUILD_NUMBER      11
+#define BUILD_NUMBER      12
 #define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 2ea68b3..abf9b56 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -329,6 +329,14 @@
                         NullCallWrapper(), CALL_AS_METHOD);
     }
 
+    // Store offset of return address for deoptimizer.
+    // TODO(849): Once Generate_StringConstructCode doesn't reuse this
+    // generator, we can drop the third condition below!
+    if (!is_api_function && !count_constructions &&
+        masm->isolate()->heap()->construct_stub_deopt_pc_offset() == 0) {
+      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+    }
+
     // Restore context from the frame.
     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
 
@@ -1538,7 +1546,9 @@
   __ bind(&invoke);
   __ call(rdx);
 
+  // Store offset of return address for deoptimizer.
   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
   // Leave frame and return.
   LeaveArgumentsAdaptorFrame(masm);
   __ ret(0);
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 61404fa..5587609 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -5753,7 +5753,7 @@
   // not equal to the name and kProbes-th slot is not used (its name is the
   // undefined value), it guarantees the hash table doesn't contain the
   // property. It's true even if some slots represent deleted properties
-  // (their names are the null value).
+  // (their names are the hole value).
   for (int i = 0; i < kInlinedProbes; i++) {
     // r0 points to properties hash.
     // Compute the masked index: (hash + i + i * i) & mask.
@@ -5782,11 +5782,18 @@
     __ Cmp(entity_name, Handle<String>(name));
     __ j(equal, miss);
 
+    Label the_hole;
+    // Check for the hole and skip.
+    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
+    __ j(equal, &the_hole, Label::kNear);
+
     // Check if the entry name is not a symbol.
     __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
     __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset),
              Immediate(kIsSymbolMask));
     __ j(zero, miss);
+
+    __ bind(&the_hole);
   }
 
   StringDictionaryLookupStub stub(properties,
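For reference, a hedged C++ rendering of the probe sequence generated above; the types and names below are illustrative, not V8's. Only the probing order and the new hole-skipping behaviour mirror the generated code.

    #include <string>
    #include <vector>

    enum EntryKind { kFreeSlot, kDeletedHole, kSymbolKey, kStringKey };
    struct Entry { EntryKind kind; std::string name; };

    // Returns true when the inlined probes prove the property is absent.
    bool ProvablyAbsent(const std::vector<Entry>& table, size_t hash,
                        const std::string& name, int inlined_probes) {
      size_t mask = table.size() - 1;
      for (int i = 0; i < inlined_probes; ++i) {
        const Entry& e = table[(hash + i + i * i) & mask];
        if (e.kind == kFreeSlot) return true;      // free slot: name is absent
        if (e.kind != kDeletedHole) {
          if (e.name == name) return false;        // name is present: go to miss
          if (e.kind != kSymbolKey) return false;  // non-symbol key: cannot decide
        }
        // A deleted entry (the hole) is simply skipped; before this fix it was
        // treated like an ordinary key and could wrongly send the lookup to miss.
      }
      return false;  // inconclusive: fall back to the full lookup stub
    }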
diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc
index efa9888..2adf587 100644
--- a/src/x64/deoptimizer-x64.cc
+++ b/src/x64/deoptimizer-x64.cc
@@ -347,7 +347,6 @@
   }
 
   unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
-  unsigned input_frame_size = input_->GetFrameSize();
   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
 
   // Allocate and store the output frame description.
@@ -369,16 +368,13 @@
   // Compute the incoming parameter translation.
   int parameter_count = height;
   unsigned output_offset = output_frame_size;
-  unsigned input_offset = input_frame_size;
   for (int i = 0; i < parameter_count; ++i) {
     output_offset -= kPointerSize;
     DoTranslateCommand(iterator, frame_index, output_offset);
   }
-  input_offset -= (parameter_count * kPointerSize);
 
   // Read caller's PC from the previous frame.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   intptr_t callers_pc = output_[frame_index - 1]->GetPc();
   output_frame->SetFrameSlot(output_offset, callers_pc);
   if (FLAG_trace_deopt) {
@@ -389,7 +385,6 @@
 
   // Read caller's FP from the previous frame, and set this frame's FP.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   intptr_t value = output_[frame_index - 1]->GetFp();
   output_frame->SetFrameSlot(output_offset, value);
   intptr_t fp_value = top_address + output_offset;
@@ -402,7 +397,6 @@
 
   // A marker value is used in place of the context.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   intptr_t context = reinterpret_cast<intptr_t>(
       Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   output_frame->SetFrameSlot(output_offset, context);
@@ -414,7 +408,6 @@
 
   // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   value = reinterpret_cast<intptr_t>(function);
   output_frame->SetFrameSlot(output_offset, value);
   if (FLAG_trace_deopt) {
@@ -425,7 +418,6 @@
 
   // Number of incoming arguments.
   output_offset -= kPointerSize;
-  input_offset -= kPointerSize;
   value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
   output_frame->SetFrameSlot(output_offset, value);
   if (FLAG_trace_deopt) {
@@ -446,6 +438,116 @@
 }
 
 
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+                                              int frame_index) {
+  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+  unsigned height = iterator->Next();
+  unsigned height_in_bytes = height * kPointerSize;
+  if (FLAG_trace_deopt) {
+    PrintF("  translating construct stub => height=%d\n", height_in_bytes);
+  }
+
+  unsigned fixed_frame_size = 6 * kPointerSize;
+  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+  // Allocate and store the output frame description.
+  FrameDescription* output_frame =
+      new(output_frame_size) FrameDescription(output_frame_size, function);
+  output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+  // Construct stub cannot be topmost or bottommost.
+  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+  ASSERT(output_[frame_index] == NULL);
+  output_[frame_index] = output_frame;
+
+  // The top address of the frame is computed from the previous
+  // frame's top and this frame's size.
+  intptr_t top_address;
+  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+  output_frame->SetTop(top_address);
+
+  // Compute the incoming parameter translation.
+  int parameter_count = height;
+  unsigned output_offset = output_frame_size;
+  for (int i = 0; i < parameter_count; ++i) {
+    output_offset -= kPointerSize;
+    DoTranslateCommand(iterator, frame_index, output_offset);
+  }
+
+  // Read caller's PC from the previous frame.
+  output_offset -= kPointerSize;
+  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+  output_frame->SetFrameSlot(output_offset, callers_pc);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; caller's pc\n",
+           top_address + output_offset, output_offset, callers_pc);
+  }
+
+  // Read caller's FP from the previous frame, and set this frame's FP.
+  output_offset -= kPointerSize;
+  intptr_t value = output_[frame_index - 1]->GetFp();
+  output_frame->SetFrameSlot(output_offset, value);
+  intptr_t fp_value = top_address + output_offset;
+  output_frame->SetFp(fp_value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; caller's fp\n",
+           fp_value, output_offset, value);
+  }
+
+  // The context is read from the previous frame.
+  output_offset -= kPointerSize;
+  value = output_[frame_index - 1]->GetContext();
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; context\n",
+           top_address + output_offset, output_offset, value);
+  }
+
+  // A marker value is used in place of the function.
+  output_offset -= kPointerSize;
+  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; function (construct sentinel)\n",
+           top_address + output_offset, output_offset, value);
+  }
+
+  // Number of incoming arguments.
+  output_offset -= kPointerSize;
+  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; argc (%d)\n",
+           top_address + output_offset, output_offset, value, height - 1);
+  }
+
+  // The newly allocated object was passed as receiver in the artificial
+  // constructor stub environment created by HEnvironment::CopyForInlining().
+  output_offset -= kPointerSize;
+  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+  output_frame->SetFrameSlot(output_offset, value);
+  if (FLAG_trace_deopt) {
+    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+           V8PRIxPTR " ; allocated receiver\n",
+           top_address + output_offset, output_offset, value);
+  }
+
+  ASSERT(0 == output_offset);
+
+  Builtins* builtins = isolate_->builtins();
+  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+  intptr_t pc = reinterpret_cast<intptr_t>(
+      construct_stub->instruction_start() +
+      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+  output_frame->SetPc(pc);
+}
+
+
 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
                                    int frame_index) {
   int node_id = iterator->Next();
@@ -555,6 +657,7 @@
     value = reinterpret_cast<intptr_t>(function->context());
   }
   output_frame->SetFrameSlot(output_offset, value);
+  output_frame->SetContext(value);
   if (is_topmost) output_frame->SetRegister(rsi.code(), value);
   if (FLAG_trace_deopt) {
     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
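The fixed part of the frame synthesized by DoComputeConstructStubFrame above is 6 * kPointerSize. A compile-only sketch (names illustrative) of those six slots, ordered from the slot nearest the caller down to the top of the synthesized frame:

    // Sketch of the six fixed slots filled in DoComputeConstructStubFrame.
    enum ConstructStubFrameSlot {
      kCallerPc,          // return address into the caller's frame
      kCallerFp,          // saved frame pointer of the caller
      kContext,           // copied from the previous output frame
      kConstructMarker,   // Smi StackFrame::CONSTRUCT in place of the function
      kArgumentCount,     // Smi: height - 1
      kAllocatedReceiver  // the object allocated for the inlined "new"
      // 6 slots in total == fixed_frame_size / kPointerSize
    };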
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 9c03053..a9bf9a8 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -2280,6 +2280,7 @@
 
   CallConstructStub stub(flags);
   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(rax);
 }
 
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 2151cf4..cf2c1b0 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -368,10 +368,18 @@
 
   WriteTranslation(environment->outer(), translation);
   int closure_id = DefineDeoptimizationLiteral(environment->closure());
-  if (environment->is_arguments_adaptor()) {
-    translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
-  } else {
-    translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+  switch (environment->frame_type()) {
+    case JS_FUNCTION:
+      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+      break;
+    case JS_CONSTRUCT:
+      translation->BeginConstructStubFrame(closure_id, translation_size);
+      break;
+    case ARGUMENTS_ADAPTOR:
+      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+      break;
+    default:
+      UNREACHABLE();
   }
   for (int i = 0; i < translation_size; ++i) {
     LOperand* value = environment->values()->at(i);
@@ -511,7 +519,7 @@
     int jsframe_count = 0;
     for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
       ++frame_count;
-      if (!e->is_arguments_adaptor()) {
+      if (e->frame_type() == JS_FUNCTION) {
         ++jsframe_count;
       }
     }
@@ -3925,6 +3933,43 @@
 }
 
 
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+  class DeferredAllocateObject: public LDeferredCode {
+   public:
+    DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+        : LDeferredCode(codegen), instr_(instr) { }
+    virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+    virtual LInstruction* instr() { return instr_; }
+   private:
+    LAllocateObject* instr_;
+  };
+
+  DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+  // TODO(mstarzinger): Implement inlined version instead of jumping to
+  // deferred runtime call.
+  __ jmp(deferred->entry());
+
+  __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+  Register result = ToRegister(instr->result());
+  Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+
+  // TODO(3095996): Get rid of this. For now, we need to make the
+  // result register contain a valid pointer because it is already
+  // contained in the register pointer map.
+  __ Set(result, 0);
+
+  PushSafepointRegistersScope scope(this);
+  __ PushHeapObject(constructor);
+  CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
+  __ StoreToSafepointRegisterSlot(result, rax);
+}
+
+
 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
   Heap* heap = isolate()->heap();
   ElementsKind boilerplate_elements_kind =
diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h
index 2890c53..1a9275b 100644
--- a/src/x64/lithium-codegen-x64.h
+++ b/src/x64/lithium-codegen-x64.h
@@ -99,6 +99,7 @@
   void DoDeferredStackCheck(LStackCheck* instr);
   void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
   void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
+  void DoDeferredAllocateObject(LAllocateObject* instr);
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
 
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index d373e19..65d57e0 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -990,10 +990,11 @@
   LEnvironment* outer =
       CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
   int ast_id = hydrogen_env->ast_id();
-  ASSERT(ast_id != AstNode::kNoNumber || hydrogen_env->is_arguments_adaptor());
+  ASSERT(ast_id != AstNode::kNoNumber ||
+         hydrogen_env->frame_type() != JS_FUNCTION);
   int value_count = hydrogen_env->length();
   LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
-                                          hydrogen_env->is_arguments_adaptor(),
+                                          hydrogen_env->frame_type(),
                                           ast_id,
                                           hydrogen_env->parameter_count(),
                                           argument_count_,
@@ -1015,7 +1016,7 @@
     result->AddValue(op, value->representation());
   }
 
-  if (!hydrogen_env->is_arguments_adaptor()) {
+  if (hydrogen_env->frame_type() == JS_FUNCTION) {
     *argument_index_accumulator = argument_index;
   }
 
@@ -2094,6 +2095,12 @@
 }
 
 
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+  LAllocateObject* result = new LAllocateObject();
+  return AssignPointerMap(DefineAsRegister(result));
+}
+
+
 LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
   return MarkAsCall(DefineFixed(new LFastLiteral, rax), instr);
 }
@@ -2246,7 +2253,8 @@
                                                instr->arguments_count(),
                                                instr->function(),
                                                undefined,
-                                               instr->call_kind());
+                                               instr->call_kind(),
+                                               instr->is_construct());
   current_block_->UpdateEnvironment(inner);
   chunk_->AddInlinedClosure(instr->closure());
   return NULL;
diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h
index b91cbc4..b8a8c8a 100644
--- a/src/x64/lithium-x64.h
+++ b/src/x64/lithium-x64.h
@@ -49,6 +49,7 @@
 #define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)    \
   V(AccessArgumentsAt)                          \
   V(AddI)                                       \
+  V(AllocateObject)                             \
   V(ApplyArguments)                             \
   V(ArgumentsElements)                          \
   V(ArgumentsLength)                            \
@@ -1910,6 +1911,13 @@
 };
 
 
+class LAllocateObject: public LTemplateInstruction<1, 0, 0> {
+ public:
+  DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+  DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+};
+
+
 class LFastLiteral: public LTemplateInstruction<1, 0, 0> {
  public:
   DECLARE_CONCRETE_INSTRUCTION(FastLiteral, "fast-literal")
diff --git a/test/cctest/test-heap-profiler.cc b/test/cctest/test-heap-profiler.cc
index a6f04b3..f707850 100644
--- a/test/cctest/test-heap-profiler.cc
+++ b/test/cctest/test-heap-profiler.cc
@@ -18,14 +18,30 @@
       : has_A2(false), has_B2(false), has_C2(false) {
   }
 
-  void Apply(i::HeapEntry** entry_ptr) {
-    if (IsReachableNodeWithName(*entry_ptr, "A2")) has_A2 = true;
-    if (IsReachableNodeWithName(*entry_ptr, "B2")) has_B2 = true;
-    if (IsReachableNodeWithName(*entry_ptr, "C2")) has_C2 = true;
+  void CheckEntry(i::HeapEntry* entry) {
+    if (strcmp(entry->name(), "A2") == 0) has_A2 = true;
+    if (strcmp(entry->name(), "B2") == 0) has_B2 = true;
+    if (strcmp(entry->name(), "C2") == 0) has_C2 = true;
   }
 
-  static bool IsReachableNodeWithName(i::HeapEntry* entry, const char* name) {
-    return strcmp(name, entry->name()) == 0 && entry->painted_reachable();
+  void CheckAllReachables(i::HeapEntry* root) {
+    i::List<i::HeapEntry*> list(10);
+    list.Add(root);
+    root->paint();
+    CheckEntry(root);
+    while (!list.is_empty()) {
+      i::HeapEntry* entry = list.RemoveLast();
+      i::Vector<i::HeapGraphEdge> children = entry->children();
+      for (int i = 0; i < children.length(); ++i) {
+        if (children[i].type() == i::HeapGraphEdge::kShortcut) continue;
+        i::HeapEntry* child = children[i].to();
+        if (!child->painted()) {
+          list.Add(child);
+          child->paint();
+          CheckEntry(child);
+        }
+      }
+    }
   }
 
   bool has_A2;
@@ -90,10 +106,6 @@
       const_cast<i::HeapSnapshot*>(
           reinterpret_cast<const i::HeapSnapshot*>(snapshot_env2));
   const v8::HeapGraphNode* global_env2 = GetGlobalObject(snapshot_env2);
-  // Paint all nodes reachable from global object.
-  i_snapshot_env2->ClearPaint();
-  const_cast<i::HeapEntry*>(
-      reinterpret_cast<const i::HeapEntry*>(global_env2))->PaintAllReachable();
 
   // Verify, that JS global object of env2 has '..2' properties.
   const v8::HeapGraphNode* a2_node =
@@ -105,8 +117,11 @@
       NULL, GetProperty(global_env2, v8::HeapGraphEdge::kShortcut, "b2_2"));
   CHECK_NE(NULL, GetProperty(global_env2, v8::HeapGraphEdge::kShortcut, "c2"));
 
+  // Paint all nodes reachable from global object.
   NamedEntriesDetector det;
-  i_snapshot_env2->IterateEntries(&det);
+  i_snapshot_env2->ClearPaint();
+  det.CheckAllReachables(const_cast<i::HeapEntry*>(
+      reinterpret_cast<const i::HeapEntry*>(global_env2)));
   CHECK(det.has_A2);
   CHECK(det.has_B2);
   CHECK(det.has_C2);
@@ -136,14 +151,10 @@
       GetProperty(x, v8::HeapGraphEdge::kProperty, "b");
   CHECK_NE(NULL, x2);
 
-  // Test approximate sizes.
-  CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize(false));
-  CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize(false));
-  CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize(false));
-  // Test exact sizes.
-  CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize(true));
-  CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize(true));
-  CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize(true));
+  // Test sizes.
+  CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize());
+  CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize());
+  CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize());
 }
 
 
diff --git a/test/mjsunit/compiler/inline-construct.js b/test/mjsunit/compiler/inline-construct.js
new file mode 100644
index 0000000..16e19a7
--- /dev/null
+++ b/test/mjsunit/compiler/inline-construct.js
@@ -0,0 +1,170 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --inline-construct
+
+// Test inlining of constructor calls.
+
+function TestInlinedConstructor(closure) {
+  var result;
+  var counter = { value:0 };
+  result = closure(11, 12, counter);
+  assertEquals(23, result);
+  assertEquals(1, counter.value);
+  result = closure(23, 19, counter);
+  assertEquals(42, result);
+  assertEquals(2, counter.value);
+  %OptimizeFunctionOnNextCall(closure);
+  result = closure(1, 42, counter);
+  assertEquals(43, result);
+  assertEquals(3, counter.value);
+  result = closure("foo", "bar", counter)
+  assertEquals("foobar", result)
+  assertEquals(4, counter.value);
+}
+
+
+// Test constructor returning nothing in all contexts.
+function c1(a, b, counter) {
+  this.x = a + b;
+  counter.value++;
+}
+function c1_value_context(a, b, counter) {
+  var obj = new c1(a, b, counter);
+  return obj.x;
+}
+function c1_test_context(a, b, counter) {
+  if (!new c1(a, b, counter)) {
+    assertUnreachable("should not happen");
+  }
+  return a + b;
+}
+function c1_effect_context(a, b, counter) {
+  new c1(a, b, counter);
+  return a + b;
+}
+TestInlinedConstructor(c1_value_context);
+TestInlinedConstructor(c1_test_context);
+TestInlinedConstructor(c1_effect_context);
+
+
+// Test constructor returning an object in all contexts.
+function c2(a, b, counter) {
+  var obj = new Object();
+  obj.x = a + b;
+  counter.value++;
+  return obj;
+}
+function c2_value_context(a, b, counter) {
+  var obj = new c2(a, b, counter);
+  return obj.x;
+}
+function c2_test_context(a, b, counter) {
+  if (!new c2(a, b, counter)) {
+    assertUnreachable("should not happen");
+  }
+  return a + b;
+}
+function c2_effect_context(a, b, counter) {
+  new c2(a, b, counter);
+  return a + b;
+}
+TestInlinedConstructor(c2_value_context);
+TestInlinedConstructor(c2_test_context);
+TestInlinedConstructor(c2_effect_context);
+
+
+// Test constructor returning a primitive value in all contexts.
+function c3(a, b, counter) {
+  this.x = a + b;
+  counter.value++;
+  return "not an object";
+}
+function c3_value_context(a, b, counter) {
+  var obj = new c3(a, b, counter);
+  return obj.x;
+}
+function c3_test_context(a, b, counter) {
+  if (!new c3(a, b, counter)) {
+    assertUnreachable("should not happen");
+  }
+  return a + b;
+}
+function c3_effect_context(a, b, counter) {
+  new c3(a, b, counter);
+  return a + b;
+}
+TestInlinedConstructor(c3_value_context);
+TestInlinedConstructor(c3_test_context);
+TestInlinedConstructor(c3_effect_context);
+
+
+// Test constructor called with too many arguments.
+function c_too_many(a, b) {
+  this.x = a + b;
+}
+function f_too_many(a, b, c) {
+  var obj = new c_too_many(a, b, c);
+  return obj.x;
+}
+assertEquals(23, f_too_many(11, 12, 1));
+assertEquals(42, f_too_many(23, 19, 1));
+%OptimizeFunctionOnNextCall(f_too_many);
+assertEquals(43, f_too_many(1, 42, 1));
+assertEquals("foobar", f_too_many("foo", "bar", "baz"))
+
+
+// Test constructor called with too few arguments.
+function c_too_few(a, b) {
+  assertSame(undefined, b);
+  this.x = a + 1;
+}
+function f_too_few(a) {
+  var obj = new c_too_few(a);
+  return obj.x;
+}
+assertEquals(12, f_too_few(11));
+assertEquals(24, f_too_few(23));
+%OptimizeFunctionOnNextCall(f_too_few);
+assertEquals(2, f_too_few(1));
+assertEquals("foo1", f_too_few("foo"))
+
+
+// Test constructor that cannot be inlined.
+function c_unsupported_syntax(a, b, counter) {
+  try {
+    this.x = a + b;
+    counter.value++;
+  } catch(e) {
+    throw new Error();
+  }
+}
+function f_unsupported_syntax(a, b, counter) {
+  var obj = new c_unsupported_syntax(a, b, counter);
+  return obj.x;
+}
+TestInlinedConstructor(f_unsupported_syntax);
diff --git a/test/mjsunit/compiler/optimized-for-in.js b/test/mjsunit/compiler/optimized-for-in.js
index 8b16101..cb8c66d 100644
--- a/test/mjsunit/compiler/optimized-for-in.js
+++ b/test/mjsunit/compiler/optimized-for-in.js
@@ -25,7 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Flags: --allow-natives-syntax
+// Flags: --optimize-for-in --allow-natives-syntax
 
 // Test for-in support in Crankshaft.  For simplicity this test assumes certain
 // fixed iteration order for properties and will have to be adjusted if V8
@@ -247,13 +247,15 @@
 function osr_inner(t, limit) {
   var r = 1;
   for (var x in t) {
-    for (var i = 0; i < t[x].length; i++) {
-      r += t[x][i];
-      if (i === limit) {
-        %OptimizeFunctionOnNextCall(osr_inner, "osr");
+    if (t.hasOwnProperty(x)) {
+      for (var i = 0; i < t[x].length; i++) {
+        r += t[x][i];
+        if (i === limit) {
+          %OptimizeFunctionOnNextCall(osr_inner, "osr");
+        }
       }
+      r += x;
     }
-    r += x;
   }
   return r;
 }
@@ -290,7 +292,7 @@
     arr[i] = i + 1;
   }
   arr.push(":");  // Force deopt at the end of the loop.
-  assertEquals("211:x", osr_inner({x: arr}, (arr.length / 2) | 0));
+  assertEquals("211:x1234567891011121314151617181920:y", osr_inner({x: arr, y: arr}, (arr.length / 2) | 0));
   assertEquals("7x456y", osr_outer({x: [1,2,3], y: [4,5,6]}, "x"));
   assertEquals("101234567", osr_outer_and_deopt([1,2,3,4,5,6,7,8], "5"));
 }
diff --git a/test/mjsunit/debug-evaluate-locals-optimized-double.js b/test/mjsunit/debug-evaluate-locals-optimized-double.js
index 7178661..cf25c0c 100644
--- a/test/mjsunit/debug-evaluate-locals-optimized-double.js
+++ b/test/mjsunit/debug-evaluate-locals-optimized-double.js
@@ -25,7 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Flags: --expose-debug-as debug --allow-natives-syntax
+// Flags: --expose-debug-as debug --expose-gc --allow-natives-syntax --inline-construct
 // Get the Debug object exposed from the debug context global object.
 Debug = debug.Debug
 
@@ -140,7 +140,13 @@
         }
 
         // Check for construct call.
-        assertEquals(testingConstructCall && i == 4, frame.isConstructCall());
+        if (i == 4) {
+          assertEquals(testingConstructCall, frame.isConstructCall());
+        } else if (i == 2) {
+          assertTrue(frame.isConstructCall());
+        } else {
+          assertFalse(frame.isConstructCall());
+        }
 
         // When function f is optimized (1 means YES, see runtime.cc) we
         // expect an optimized frame for f with g1, g2 and g3 inlined.
@@ -204,7 +210,7 @@
   var b3 = input[i].b;
   a3 = a3 + a3 / 100;
   b3 = b3 + b3 / 100;
-  g2(i - 1, a3, b3);
+  new g2(i - 1, a3, b3);
 };
 
 function f(i, x4, y4) {
@@ -222,8 +228,11 @@
 new f(input.length - 1, 11.11, 12.12);
 new f(input.length - 1, 11.11, 12.12, "");
 
-// Make sure that the debug event listener vas invoked.
+// Make sure that the debug event listener was invoked.
 assertFalse(exception, "exception in listener " + exception)
 assertTrue(listenerComplete);
 
+// Throw away type information for next run.
+gc();
+
 Debug.setListener(null);
diff --git a/test/mjsunit/debug-evaluate-locals-optimized.js b/test/mjsunit/debug-evaluate-locals-optimized.js
index 485f752..c88a683 100644
--- a/test/mjsunit/debug-evaluate-locals-optimized.js
+++ b/test/mjsunit/debug-evaluate-locals-optimized.js
@@ -25,7 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Flags: --expose-debug-as debug --allow-natives-syntax
+// Flags: --expose-debug-as debug --expose-gc --allow-natives-syntax --inline-construct
 // Get the Debug object exposed from the debug context global object.
 Debug = debug.Debug
 
@@ -130,7 +130,13 @@
         }
 
         // Check for construct call.
-        assertEquals(testingConstructCall && i == 4, frame.isConstructCall());
+        if (i == 4) {
+          assertEquals(testingConstructCall, frame.isConstructCall());
+        } else if (i == 2) {
+          assertTrue(frame.isConstructCall());
+        } else {
+          assertFalse(frame.isConstructCall());
+        }
 
         // When function f is optimized (1 means YES, see runtime.cc) we
         // expect an optimized frame for f with g1, g2 and g3 inlined.
@@ -185,7 +191,7 @@
 function g1(i, x3, y3, z3) {
   var a3 = expected[i].locals.a3;
   var b3 = expected[i].locals.b3;
-  g2(i - 1, a3, b3);
+  new g2(i - 1, a3, b3);
 }
 
 function f(i, x4, y4) {
@@ -201,8 +207,11 @@
 new f(expected.length - 1, 11, 12);
 new f(expected.length - 1, 11, 12, 0);
 
-// Make sure that the debug event listener vas invoked.
+// Make sure that the debug event listener was invoked.
 assertFalse(exception, "exception in listener " + exception)
 assertTrue(listenerComplete);
 
+// Throw away type information for next run.
+gc();
+
 Debug.setListener(null);
diff --git a/test/mjsunit/regress/regress-1229.js b/test/mjsunit/regress/regress-1229.js
index 3d166d5..a52e92a 100644
--- a/test/mjsunit/regress/regress-1229.js
+++ b/test/mjsunit/regress/regress-1229.js
@@ -126,19 +126,39 @@
 // Check that %_IsConstructCall returns correct value when inlined
 var NON_CONSTRUCT_MARKER = {};
 var CONSTRUCT_MARKER = {};
-function baz(x) {
+function baz1(x) {
   return (!%_IsConstructCall()) ? NON_CONSTRUCT_MARKER : CONSTRUCT_MARKER;
 }
 
-function bar(x, y, z) {
-  var non_construct = baz(0); /* baz should be inlined */
+function bar1(x, y, z) {
+  var non_construct = baz1(0); /* baz should be inlined */
   assertSame(non_construct, NON_CONSTRUCT_MARKER);
-  var non_construct = baz(); /* baz should be inlined */
+  var non_construct = baz1(); /* baz should be inlined */
   assertSame(non_construct, NON_CONSTRUCT_MARKER);
-  var non_construct = baz(0, 0); /* baz should be inlined */
+  var non_construct = baz1(0, 0); /* baz should be inlined */
   assertSame(non_construct, NON_CONSTRUCT_MARKER);
-  var construct = new baz(0);
+  var construct = new baz1(0);
+  assertSame(construct, CONSTRUCT_MARKER);
+  var construct = new baz1(0, 0);
   assertSame(construct, CONSTRUCT_MARKER);
 }
 
-invoke(bar, [1, 2, 3]);
+function baz2(x) {
+  return (!%IsConstructCall()) ? NON_CONSTRUCT_MARKER : CONSTRUCT_MARKER;
+}
+
+function bar2(x, y, z) {
+  var non_construct = baz2(0); /* baz should be inlined */
+  assertSame(non_construct, NON_CONSTRUCT_MARKER);
+  var non_construct = baz2(); /* baz should be inlined */
+  assertSame(non_construct, NON_CONSTRUCT_MARKER);
+  var non_construct = baz2(0, 0); /* baz should be inlined */
+  assertSame(non_construct, NON_CONSTRUCT_MARKER);
+  var construct = new baz2(0);
+  assertSame(construct, CONSTRUCT_MARKER);
+  var construct = new baz2(0, 0);
+  assertSame(construct, CONSTRUCT_MARKER);
+}
+
+invoke(bar1, [1, 2, 3]);
+invoke(bar2, [1, 2, 3]);
diff --git a/test/mjsunit/regress/regress-1853.js b/test/mjsunit/regress/regress-1853.js
new file mode 100644
index 0000000..f80bade
--- /dev/null
+++ b/test/mjsunit/regress/regress-1853.js
@@ -0,0 +1,116 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// Test whether scripts compiled after setting the break point are
+// updated correctly.
+
+Debug = debug.Debug;
+
+var break_count = 0;
+var test_break_1 = false;
+var test_break_2 = false;
+
+function sendCommand(state, cmd) {
+  // Get the debug command processor in paused state.
+  var dcp = state.debugCommandProcessor(false);
+  var request = JSON.stringify(cmd);
+  var response = dcp.processDebugJSONRequest(request);
+  return JSON.parse(response);
+}
+
+function setBreakPointByName(state) {
+  sendCommand(state, {
+    seq: 0,
+    type: "request",
+    command: "setbreakpoint",
+    arguments: {
+      type: "script",
+      target: "testScriptOne",
+      line: 2
+    }
+  });
+}
+
+function setBreakPointByRegExp(state) {
+  sendCommand(state, {
+    seq: 0,
+    type: "request",
+    command: "setbreakpoint",
+    arguments: {
+      type: "scriptRegExp",
+      target: "Scrip.Two",
+      line: 2
+    }
+  });
+}
+
+function listener(event, exec_state, event_data, data) {
+  try {
+    if (event == Debug.DebugEvent.Break) {
+      switch (break_count) {
+        case 0:
+          // Set break points before the code has been compiled.
+          setBreakPointByName(exec_state);
+          setBreakPointByRegExp(exec_state);
+          break;
+        case 1:
+          // Set the flag to prove that we hit the first break point.
+          test_break_1 = true;
+          break;
+        case 2:
+          // Set the flag to prove that we hit the second break point.
+          test_break_2 = true;
+          break;
+      }
+      break_count++;
+    }
+  } catch (e) {
+    print(e);
+  }
+}
+
+Debug.setListener(listener);
+debugger;
+
+eval('function test1() {                \n' +
+     '  assertFalse(test_break_1);      \n' +
+     '  assertTrue(test_break_1);       \n' +
+     '}                                 \n' +
+     '//@ sourceURL=testScriptOne');
+
+eval('function test2() {                \n' +
+     '  assertFalse(test_break_2);      \n' +
+     '  assertTrue(test_break_2);       \n' +
+     '}                                 \n' +
+     '//@ sourceURL=testScriptTwo');
+
+test1();
+test2();
+assertEquals(3, break_count);
+