Update V8 to r5214 as required by WebKit r65072.
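
Notable changes in this roll:
- x64 RelocInfo: drop JS_RETURN/DEBUG_BREAK_SLOT handling from apply(),
  remove JS_RETURN from kApplyMask, and tighten the call-site assertions.
- x64 assembler/disassembler: fix the operand order of movd/movq when
  moving from an XMM register to a GP register or memory.
- RegExp literals now clone the boilerplate object on each evaluation,
  with a new inline runtime function IsRegExpEquivalent (both codegens).
- KeyedLoadIC: load out-of-object properties from the properties array
  instead of taking the slow path.
- MacroAssembler: add non-allocating Try* variants of stub and runtime
  calls, Push/PopHandleScope, and EnterApiExitFrame; implement
  ApiGetterEntryStub so callback property loads call API getters directly.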

Change-Id: I387277a00cc0949597c0f69a8e4f2da60213c8f2
diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h
index 01c60aa..c8abd22 100644
--- a/src/x64/assembler-x64-inl.h
+++ b/src/x64/assembler-x64-inl.h
@@ -201,14 +201,6 @@
     Memory::Address_at(pc_) += static_cast<int32_t>(delta);
   } else if (IsCodeTarget(rmode_)) {
     Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
-  } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
-    // Special handling of js_return when a break point is set (call
-    // instruction has been inserted).
-    Memory::int32_at(pc_ + 1) -= static_cast<int32_t>(delta);  // relocate entry
-  } else if (rmode_ == DEBUG_BREAK_SLOT && IsPatchedDebugBreakSlotSequence()) {
-    // Special handling of debug break slot when a break point is set (call
-    // instruction has been inserted).
-    Memory::int32_at(pc_ + 1) -= static_cast<int32_t>(delta);  // relocate entry
   }
 }
 
@@ -303,33 +295,34 @@
 
 
 Address RelocInfo::call_address() {
-  ASSERT(IsPatchedReturnSequence());
+  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
+         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
   return Memory::Address_at(
       pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
 }
 
 
 void RelocInfo::set_call_address(Address target) {
-  ASSERT(IsPatchedReturnSequence());
+  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
+         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
   Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
       target;
 }
 
 
 Object* RelocInfo::call_object() {
-  ASSERT(IsPatchedReturnSequence());
   return *call_object_address();
 }
 
 
 void RelocInfo::set_call_object(Object* target) {
-  ASSERT(IsPatchedReturnSequence());
   *call_object_address() = target;
 }
 
 
 Object** RelocInfo::call_object_address() {
-  ASSERT(IsPatchedReturnSequence());
+  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
+         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
   return reinterpret_cast<Object**>(
       pc_ + Assembler::kPatchReturnSequenceAddressOffset);
 }
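
Note: the compound predicate asserted above now appears verbatim in three
accessors; a hypothetical helper (not part of this patch) that captures it:

    static bool IsPatchedCallSite(RelocInfo* rinfo) {
      return (RelocInfo::IsJSReturn(rinfo->rmode()) &&
              rinfo->IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
              rinfo->IsPatchedDebugBreakSlotSequence());
    }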
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index c66666a..d90655b 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -1496,12 +1496,8 @@
 
 
 void Assembler::movq(Register dst, ExternalReference ref) {
-  EnsureSpace ensure_space(this);
-  last_pc_ = pc_;
-  emit_rex_64(dst);
-  emit(0xB8 | dst.low_bits());
-  emitq(reinterpret_cast<uintptr_t>(ref.address()),
-        RelocInfo::EXTERNAL_REFERENCE);
+  int64_t value = reinterpret_cast<int64_t>(ref.address());
+  movq(dst, value, RelocInfo::EXTERNAL_REFERENCE);
 }
 
 
@@ -2529,10 +2525,10 @@
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit(0x66);
-  emit_optional_rex_32(dst, src);
+  emit_optional_rex_32(src, dst);
   emit(0x0F);
   emit(0x7E);
-  emit_sse_operand(dst, src);
+  emit_sse_operand(src, dst);
 }
 
 
@@ -2551,10 +2547,10 @@
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit(0x66);
-  emit_rex_64(dst, src);
+  emit_rex_64(src, dst);
   emit(0x0F);
   emit(0x7E);
-  emit_sse_operand(dst, src);
+  emit_sse_operand(src, dst);
 }
 
 
@@ -2945,8 +2941,7 @@
 
 
 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
-                                  1 << RelocInfo::INTERNAL_REFERENCE |
-                                  1 << RelocInfo::JS_RETURN;
+                                  1 << RelocInfo::INTERNAL_REFERENCE;
 
 
 bool RelocInfo::IsCodedSpecially() {
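
Note on the movd/movq fix above: with the 0x66 prefix, opcode 0F 7E encodes
"movd/movq r/m, xmm", so the XMM register occupies the reg field of the
ModR/M byte and the GP register or memory operand the r/m field. Expected
encoding, assuming the standard x64 opcode tables:

    movd eax, xmm0  =>  66 0F 7E C0
    // ModR/M 0xC0: mod=11, reg=000 (xmm0), r/m=000 (eax)

Hence REX and the SSE operand are emitted as (src, dst) rather than
(dst, src).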
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 213db2c..7082af7 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -509,7 +509,6 @@
   void push(Immediate value);
   void push(Register src);
   void push(const Operand& src);
-  void push(Label* label, RelocInfo::Mode relocation_mode);
 
   void pop(Register dst);
   void pop(const Operand& dst);
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index 35c1a3d..b6256fa 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -4813,6 +4813,30 @@
 }
 
 
+class DeferredAllocateInNewSpace: public DeferredCode {
+ public:
+  DeferredAllocateInNewSpace(int size, Register target)
+    : size_(size), target_(target) {
+    ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
+    set_comment("[ DeferredAllocateInNewSpace");
+  }
+  void Generate();
+
+ private:
+  int size_;
+  Register target_;
+};
+
+
+void DeferredAllocateInNewSpace::Generate() {
+  __ Push(Smi::FromInt(size_));
+  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+  if (!target_.is(rax)) {
+    __ movq(target_, rax);
+  }
+}
+
+
 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
   Comment cmnt(masm_, "[ RegExp Literal");
 
@@ -4842,10 +4866,33 @@
   __ CompareRoot(boilerplate.reg(), Heap::kUndefinedValueRootIndex);
   deferred->Branch(equal);
   deferred->BindExit();
-  literals.Unuse();
 
-  // Push the boilerplate object.
+  // The boilerplate register now contains the RegExp object.
+
+  Result tmp = allocator()->Allocate();
+  ASSERT(tmp.is_valid());
+
+  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+
+  DeferredAllocateInNewSpace* allocate_fallback =
+      new DeferredAllocateInNewSpace(size, literals.reg());
   frame_->Push(&boilerplate);
+  frame_->SpillTop();
+  __ AllocateInNewSpace(size,
+                        literals.reg(),
+                        tmp.reg(),
+                        no_reg,
+                        allocate_fallback->entry_label(),
+                        TAG_OBJECT);
+  allocate_fallback->BindExit();
+  boilerplate = frame_->Pop();
+  // Copy from boilerplate to clone and return clone.
+
+  for (int i = 0; i < size; i += kPointerSize) {
+    __ movq(tmp.reg(), FieldOperand(boilerplate.reg(), i));
+    __ movq(FieldOperand(literals.reg(), i), tmp.reg());
+  }
+  frame_->Push(&literals);
 }
 
 
@@ -7014,6 +7061,40 @@
 }
 
 
+void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
+  ASSERT_EQ(2, args->length());
+  Load(args->at(0));
+  Load(args->at(1));
+  Result right_res = frame_->Pop();
+  Result left_res = frame_->Pop();
+  right_res.ToRegister();
+  left_res.ToRegister();
+  Result tmp_res = allocator()->Allocate();
+  ASSERT(tmp_res.is_valid());
+  Register right = right_res.reg();
+  Register left = left_res.reg();
+  Register tmp = tmp_res.reg();
+  right_res.Unuse();
+  left_res.Unuse();
+  tmp_res.Unuse();
+  __ cmpq(left, right);
+  destination()->true_target()->Branch(equal);
+  // Fail if either is a non-HeapObject.
+  Condition either_smi =
+      masm()->CheckEitherSmi(left, right, tmp);
+  destination()->false_target()->Branch(either_smi);
+  __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
+  __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
+          Immediate(JS_REGEXP_TYPE));
+  destination()->false_target()->Branch(not_equal);
+  __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
+  destination()->false_target()->Branch(not_equal);
+  __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
+  __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
+  destination()->Split(equal);
+}
+
+
 void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
   if (CheckForInlineRuntimeCall(node)) {
     return;
@@ -8071,6 +8152,18 @@
     result = allocator()->Allocate();
     ASSERT(result.is_valid() && receiver.is_valid() && value.is_valid());
 
+    // Cannot use r12 for the receiver: r12 is encoded specially in the
+    // r/m field of a ModR/M byte (it forces a SIB byte), which would
+    // change the distance between the call and the fixup location.
+    if (receiver.reg().is(r12)) {
+      frame()->Spill(receiver.reg());  // It will be overwritten with result.
+      // Swap the receiver and result registers.
+      __ movq(result.reg(), receiver.reg());
+      Result temp = receiver;
+      receiver = result;
+      result = temp;
+    }
+
     // Check that the receiver is a heap object.
     Condition is_smi = __ CheckSmi(receiver.reg());
     slow.Branch(is_smi, &value, &receiver);
@@ -10890,7 +10983,48 @@
 
 
 void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
-  UNREACHABLE();
+  Label empty_result;
+  Label prologue;
+  Label promote_scheduled_exception;
+  __ EnterApiExitFrame(ExitFrame::MODE_NORMAL, kStackSpace, 0);
+  ASSERT_EQ(kArgc, 4);
+#ifdef _WIN64
+  // All the parameters should be set up by the caller.
+#else
+  // Set the 2nd parameter register rsi with the AccessorInfo pointer
+  // passed by the caller in rdx.
+  __ movq(rsi, rdx);
+  // The 1st parameter register rdi already holds the property name.
+#endif
+  // Call the API function.
+  __ movq(rax,
+          reinterpret_cast<int64_t>(fun()->address()),
+          RelocInfo::RUNTIME_ENTRY);
+  __ call(rax);
+  // Check if the function scheduled an exception.
+  ExternalReference scheduled_exception_address =
+      ExternalReference::scheduled_exception_address();
+  __ movq(rsi, scheduled_exception_address);
+  __ Cmp(Operand(rsi, 0), Factory::the_hole_value());
+  __ j(not_equal, &promote_scheduled_exception);
+#ifdef _WIN64
+  // rax holds a pointer to the v8::Handle; unpack it.
+  __ movq(rax, Operand(rax, 0));
+#endif
+  // Check if the result handle holds 0.
+  __ testq(rax, rax);
+  __ j(zero, &empty_result);
+  // It was non-zero.  Dereference to get the result value.
+  __ movq(rax, Operand(rax, 0));
+  __ bind(&prologue);
+  __ LeaveExitFrame(ExitFrame::MODE_NORMAL);
+  __ ret(0);
+  __ bind(&promote_scheduled_exception);
+  __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+  __ bind(&empty_result);
+  // It was zero; the result is undefined.
+  __ Move(rax, Factory::undefined_value());
+  __ jmp(&prologue);
 }
 
 
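
Note: with the change above, evaluating a RegExp literal returns a fresh
shallow copy of the boilerplate rather than the shared object. A minimal
sketch of the copy the emitted code performs (accessors hypothetical):

    // Every slot is a tagged word (pointer or smi), so copying raw words
    // preserves both kinds of value.
    int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    for (int i = 0; i < size; i += kPointerSize) {
      clone->set_word(i, boilerplate->word(i));  // hypothetical accessors
    }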
diff --git a/src/x64/codegen-x64.h b/src/x64/codegen-x64.h
index f694dde..2806f56 100644
--- a/src/x64/codegen-x64.h
+++ b/src/x64/codegen-x64.h
@@ -677,6 +677,8 @@
   void GenerateMathCos(ZoneList<Expression*>* args);
   void GenerateMathSqrt(ZoneList<Expression*>* args);
 
+  void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);
+
 // Simple condition analysis.
   enum ConditionAnalysis {
     ALWAYS_TRUE,
diff --git a/src/x64/debug-x64.cc b/src/x64/debug-x64.cc
index 9659254..2aa77e7 100644
--- a/src/x64/debug-x64.cc
+++ b/src/x64/debug-x64.cc
@@ -213,9 +213,10 @@
 #undef __
 
 
-void Debug::SetUpFrameDropperFrame(StackFrame* bottom_js_frame,
-                                   Handle<Code> code) {
+Object** Debug::SetUpFrameDropperFrame(StackFrame* bottom_js_frame,
+                                       Handle<Code> code) {
   UNREACHABLE();
+  return NULL;
 }
 const int Debug::kFrameDropperFrameSize = -1;
 
diff --git a/src/x64/disasm-x64.cc b/src/x64/disasm-x64.cc
index 06a8c79..7c9dfc1 100644
--- a/src/x64/disasm-x64.cc
+++ b/src/x64/disasm-x64.cc
@@ -1019,10 +1019,10 @@
                        NameOfXMMRegister(regop));
         current += PrintRightOperand(current);
       } else if (opcode == 0x7E) {
-        AppendToBuffer("mov%c %s,",
-                       rex_w() ? 'q' : 'd',
-                       NameOfCPURegister(regop));
-        current += PrintRightXMMOperand(current);
+        AppendToBuffer("mov%c ",
+                       rex_w() ? 'q' : 'd');
+        current += PrintRightOperand(current);
+        AppendToBuffer(", %s", NameOfXMMRegister(regop));
       } else {
         const char* mnemonic = "?";
         if (opcode == 0x57) {
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 0b3b7c4..4d74735 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -207,7 +207,7 @@
     Label check_exit_codesize;
     masm_->bind(&check_exit_codesize);
 #endif
-    CodeGenerator::RecordPositions(masm_, function()->end_position());
+    CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
     __ RecordJSReturn();
     // Do not use the leave instruction here because it is too short to
     // patch with the code required by the debugger.
@@ -1199,26 +1199,54 @@
 
 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
   Comment cmnt(masm_, "[ RegExpLiteral");
-  Label done;
+  Label materialized;
   // Registers will be used as follows:
   // rdi = JS function.
-  // rbx = literals array.
-  // rax = regexp literal.
+  // rcx = literals array.
+  // rbx = regexp literal.
+  // rax = regexp literal clone.
   __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-  __ movq(rbx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
+  __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
   int literal_offset =
     FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
-  __ movq(rax, FieldOperand(rbx, literal_offset));
-  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
-  __ j(not_equal, &done);
+  __ movq(rbx, FieldOperand(rcx, literal_offset));
+  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
+  __ j(not_equal, &materialized);
+
   // Create regexp literal using runtime function
   // Result will be in rax.
-  __ push(rbx);
+  __ push(rcx);
   __ Push(Smi::FromInt(expr->literal_index()));
   __ Push(expr->pattern());
   __ Push(expr->flags());
   __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
-  __ bind(&done);
+  __ movq(rbx, rax);
+
+  __ bind(&materialized);
+  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+  Label allocated, runtime_allocate;
+  __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
+  __ jmp(&allocated);
+
+  __ bind(&runtime_allocate);
+  __ push(rbx);
+  __ Push(Smi::FromInt(size));
+  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+  __ pop(rbx);
+
+  __ bind(&allocated);
+  // Copy the content into the newly allocated memory.
+  // (Unroll copy loop once for better throughput).
+  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
+    __ movq(rdx, FieldOperand(rbx, i));
+    __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
+    __ movq(FieldOperand(rax, i), rdx);
+    __ movq(FieldOperand(rax, i + kPointerSize), rcx);
+  }
+  if ((size % (2 * kPointerSize)) != 0) {
+    __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
+    __ movq(FieldOperand(rax, size - kPointerSize), rdx);
+  }
   Apply(context_, rax);
 }
 
@@ -2644,6 +2672,44 @@
 }
 
 
+void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
+  ASSERT_EQ(2, args->length());
+
+  Register right = rax;
+  Register left = rbx;
+  Register tmp = rcx;
+
+  VisitForValue(args->at(0), kStack);
+  VisitForValue(args->at(1), kAccumulator);
+  __ pop(left);
+
+  Label done, fail, ok;
+  __ cmpq(left, right);
+  __ j(equal, &ok);
+  // Fail if either is a non-HeapObject.
+  Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
+  __ j(either_smi, &fail);
+  __ j(zero, &fail);
+  __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
+  __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
+          Immediate(JS_REGEXP_TYPE));
+  __ j(not_equal, &fail);
+  __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
+  __ j(not_equal, &fail);
+  __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
+  __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
+  __ j(equal, &ok);
+  __ bind(&fail);
+  __ Move(rax, Factory::false_value());
+  __ jmp(&done);
+  __ bind(&ok);
+  __ Move(rax, Factory::true_value());
+  __ bind(&done);
+
+  Apply(context_, rax);
+}
+
+
 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
   Handle<String> name = expr->name();
   if (name->length() > 0 && name->Get(0) == '_') {
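
Note on EmitIsRegExpEquivalent above: CheckEitherSmi returns the zero
condition, so "j(either_smi, &fail)" and the following "j(zero, &fail)"
test the same flag; the second jump is unreachable but harmless. The copy
loop in VisitRegExpLiteral is unrolled by two, with the trailing "if"
covering the odd remaining word when size / kPointerSize is odd.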
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index b6957b2..a8971f5 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -599,7 +599,7 @@
   //  -- rdx    : receiver
   //  -- rsp[0] : return address
   // -----------------------------------
-  Label slow, check_string, index_smi, index_string;
+  Label slow, check_string, index_smi, index_string, property_array_property;
   Label check_pixel_array, probe_dictionary, check_number_dictionary;
 
   // Check that the key is a smi.
@@ -692,15 +692,14 @@
   __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, kPointerSize));
   __ j(not_equal, &slow);
 
-  // Get field offset which is a 32-bit integer and check that it is
-  // an in-object property.
+  // Get field offset, which is a 32-bit integer.
   ExternalReference cache_field_offsets
       = ExternalReference::keyed_lookup_cache_field_offsets();
   __ movq(kScratchRegister, cache_field_offsets);
   __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
   __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
   __ subq(rdi, rcx);
-  __ j(above_equal, &slow);
+  __ j(above_equal, &property_array_property);
 
   // Load in-object property.
   __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
@@ -709,6 +708,14 @@
   __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
   __ ret(0);
 
+  // Load property array property.
+  __ bind(&property_array_property);
+  __ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
+  __ movq(rax, FieldOperand(rax, rdi, times_pointer_size,
+                            FixedArray::kHeaderSize));
+  __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
+  __ ret(0);
+
   // Do a quick inline probe of the receiver's dictionary, if it
   // exists.
   __ bind(&probe_dictionary);
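
Note on the KeyedLoadIC change above: a cached field offset at or beyond
the in-object property count now falls through to the properties backing
store instead of the slow case. A rough sketch of the decision (names
hypothetical):

    int index = field_offset - in_object_properties;
    if (index < 0) {
      // In-object: slots sit at the end of the object, indexed backwards
      // from the instance size.
      result = object_words[instance_size_in_words + index];
    } else {
      // Out-of-object: load from the FixedArray backing store.
      result = properties->get(index);
    }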
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index a5634a7..bab0199 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -336,12 +336,32 @@
 }
 
 
+Object* MacroAssembler::TryCallStub(CodeStub* stub) {
+  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
+  Object* result = stub->TryGetCode();
+  if (!result->IsFailure()) {
+    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
+  }
+  return result;
+}
+
+
 void MacroAssembler::TailCallStub(CodeStub* stub) {
   ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
   Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
 }
 
 
+Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
+  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
+  Object* result = stub->TryGetCode();
+  if (!result->IsFailure()) {
+    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
+  }
+  return result;
+}
+
+
 void MacroAssembler::StubReturn(int argc) {
   ASSERT(argc >= 1 && generating_stub());
   ret((argc - 1) * kPointerSize);
@@ -361,6 +381,12 @@
 }
 
 
+Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
+                                       int num_arguments) {
+  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
+}
+
+
 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
   // If the expected number of arguments of the runtime function is
   // constant, we check that the actual number of arguments match the
@@ -381,6 +407,26 @@
 }
 
 
+Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
+                                       int num_arguments) {
+  if (f->nargs >= 0 && f->nargs != num_arguments) {
+    IllegalOperation(num_arguments);
+    // Since we did not call the stub, there was no allocation failure.
+    // Return some non-failure object.
+    return Heap::undefined_value();
+  }
+
+  // TODO(1236192): Most runtime routines don't need the number of
+  // arguments passed in because it is constant. At some point we
+  // should remove this need and make the runtime routine entry code
+  // smarter.
+  Set(rax, num_arguments);
+  movq(rbx, ExternalReference(f));
+  CEntryStub ces(f->result_size);
+  return TryCallStub(&ces);
+}
+
+
 void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                            int num_arguments) {
   Set(rax, num_arguments);
@@ -417,6 +463,87 @@
 }
 
 
+static int Offset(ExternalReference ref0, ExternalReference ref1) {
+  int64_t offset = (ref0.address() - ref1.address());
+  // Check that the offset fits into an int.
+  ASSERT(static_cast<int>(offset) == offset);
+  return static_cast<int>(offset);
+}
+
+
+void MacroAssembler::PushHandleScope(Register scratch) {
+  ExternalReference extensions_address =
+      ExternalReference::handle_scope_extensions_address();
+  const int kExtensionsOffset = 0;
+  const int kNextOffset = Offset(
+      ExternalReference::handle_scope_next_address(),
+      extensions_address);
+  const int kLimitOffset = Offset(
+      ExternalReference::handle_scope_limit_address(),
+      extensions_address);
+
+  // Push the number of extensions, smi-tagged so the gc will ignore it.
+  movq(kScratchRegister, extensions_address);
+  movq(scratch, Operand(kScratchRegister, kExtensionsOffset));
+  movq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0));
+  Integer32ToSmi(scratch, scratch);
+  push(scratch);
+  // Push the next and limit pointers, which are word aligned and hence
+  // automatically smi tagged.
+  push(Operand(kScratchRegister, kNextOffset));
+  push(Operand(kScratchRegister, kLimitOffset));
+}
+
+
+Object* MacroAssembler::PopHandleScopeHelper(Register saved,
+                                             Register scratch,
+                                             bool gc_allowed) {
+  ExternalReference extensions_address =
+      ExternalReference::handle_scope_extensions_address();
+  const int kExtensionsOffset = 0;
+  const int kNextOffset = Offset(
+      ExternalReference::handle_scope_next_address(),
+      extensions_address);
+  const int kLimitOffset = Offset(
+      ExternalReference::handle_scope_limit_address(),
+      extensions_address);
+
+  Object* result = NULL;
+  Label write_back;
+  movq(kScratchRegister, extensions_address);
+  cmpq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0));
+  j(equal, &write_back);
+  push(saved);
+  if (gc_allowed) {
+    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
+  } else {
+    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
+    if (result->IsFailure()) return result;
+  }
+  pop(saved);
+  movq(kScratchRegister, extensions_address);
+
+  bind(&write_back);
+  pop(Operand(kScratchRegister, kLimitOffset));
+  pop(Operand(kScratchRegister, kNextOffset));
+  pop(scratch);
+  SmiToInteger32(scratch, scratch);
+  movq(Operand(kScratchRegister, kExtensionsOffset), scratch);
+
+  return result;
+}
+
+
+void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
+  PopHandleScopeHelper(saved, scratch, true);
+}
+
+
+Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
+  return PopHandleScopeHelper(saved, scratch, false);
+}
+
+
 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                              int result_size) {
   // Set the entry point and jump to the C entry runtime stub.
@@ -755,13 +882,21 @@
 }
 
 
-Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
+Condition MacroAssembler::CheckEitherSmi(Register first,
+                                         Register second,
+                                         Register scratch) {
   if (first.is(second)) {
     return CheckSmi(first);
   }
-  movl(kScratchRegister, first);
-  andl(kScratchRegister, second);
-  testb(kScratchRegister, Immediate(kSmiTagMask));
+  if (scratch.is(second)) {
+    andl(scratch, first);
+  } else {
+    if (!scratch.is(first)) {
+      movl(scratch, first);
+    }
+    andl(scratch, second);
+  }
+  testb(scratch, Immediate(kSmiTagMask));
   return zero;
 }
 
@@ -2208,7 +2343,8 @@
 }
 
 
-void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
+void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode,
+                                            bool save_rax) {
   // Setup the frame structure on the stack.
   // All constants are relative to the frame pointer of the exit frame.
   ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
@@ -2226,18 +2362,19 @@
   // Save the frame pointer and the context in top.
   ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
   ExternalReference context_address(Top::k_context_address);
-  movq(r14, rax);  // Backup rax before we use it.
+  if (save_rax) {
+    movq(r14, rax);  // Back up rax before we use it.
+  }
 
   movq(rax, rbp);
   store_rax(c_entry_fp_address);
   movq(rax, rsi);
   store_rax(context_address);
+}
 
-  // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
-  // so it must be retained across the C-call.
-  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
-  lea(r12, Operand(rbp, r14, times_pointer_size, offset));
-
+void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode,
+                                            int result_size,
+                                            int argc) {
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Save the state of all registers to the stack from the memory
   // location. This is needed to allow nested break points.
@@ -2258,7 +2395,7 @@
   // Reserve space for the Arguments object.  The Windows 64-bit ABI
   // requires us to pass this structure as a pointer to its location on
   // the stack.  The structure contains 2 values.
-  int argument_stack_space = 2 * kPointerSize;
+  int argument_stack_space = argc * kPointerSize;
   // We also need backing space for 4 parameters, even though
   // we only pass one or two parameter, and it is in a register.
   int argument_mirror_space = 4 * kPointerSize;
@@ -2280,6 +2417,33 @@
 }
 
 
+void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
+  EnterExitFramePrologue(mode, true);
+
+  // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
+  // so it must be retained across the C-call.
+  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
+  lea(r12, Operand(rbp, r14, times_pointer_size, offset));
+
+  EnterExitFrameEpilogue(mode, result_size, 2);
+}
+
+
+void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
+                                       int stack_space,
+                                       int argc,
+                                       int result_size) {
+  EnterExitFramePrologue(mode, false);
+
+  // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
+  // so it must be retained across the C-call.
+  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
+  lea(r12, Operand(rbp, (stack_space * kPointerSize) + offset));
+
+  EnterExitFrameEpilogue(mode, result_size, argc);
+}
+
+
 void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
   // Registers:
   // r12 : argv
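
Note on the CheckEitherSmi change above: V8 smis carry tag bit 0 == 0 and
heap objects bit 0 == 1, so ((first & second) & kSmiTagMask) == 0 exactly
when at least one value is a smi. The new scratch parameter lets callers
keep kScratchRegister free. Equivalent C++ sketch:

    // Returns true iff at least one of a, b is a smi (assumes kSmiTag == 0
    // and kSmiTagMask == 1, as in V8).
    static inline bool EitherSmi(uintptr_t a, uintptr_t b) {
      const uintptr_t kSmiTagMask = 1;
      return ((a & b) & kSmiTagMask) == 0;
    }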
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 64f35e1..a294ad6 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -163,6 +163,11 @@
   // to the first argument in register rsi.
   void EnterExitFrame(ExitFrame::Mode mode, int result_size = 1);
 
+  void EnterApiExitFrame(ExitFrame::Mode mode,
+                         int stack_space,
+                         int argc,
+                         int result_size = 1);
+
   // Leave the current exit frame. Expects/provides the return value in
   // register rax:rdx (untouched) and the pointer to the first
   // argument in register rsi.
@@ -279,7 +284,9 @@
   Condition CheckBothPositiveSmi(Register first, Register second);
 
   // Are either value a tagged smi.
-  Condition CheckEitherSmi(Register first, Register second);
+  Condition CheckEitherSmi(Register first,
+                           Register second,
+                           Register scratch = kScratchRegister);
 
   // Is the value the minimum smi value (since we are using
   // two's complement numbers, negating the value is known to yield
@@ -719,18 +726,36 @@
   // Call a code stub.
   void CallStub(CodeStub* stub);
 
+  // Call a code stub and return the code object called.  Try to generate
+  // the code if necessary.  Do not perform a GC but instead return a retry
+  // after GC failure.
+  Object* TryCallStub(CodeStub* stub);
+
   // Tail call a code stub (jump).
   void TailCallStub(CodeStub* stub);
 
+  // Tail call a code stub (jump) and return the code object called.  Try to
+  // generate the code if necessary.  Do not perform a GC but instead return
+  // a retry after GC failure.
+  Object* TryTailCallStub(CodeStub* stub);
+
   // Return from a code stub after popping its arguments.
   void StubReturn(int argc);
 
   // Call a runtime routine.
   void CallRuntime(Runtime::Function* f, int num_arguments);
 
+  // Call a runtime function, returning the CodeStub object called.
+  // Try to generate the stub code if necessary.  Do not perform a GC
+  // but instead return a retry after GC failure.
+  Object* TryCallRuntime(Runtime::Function* f, int num_arguments);
+
   // Convenience function: Same as above, but takes the fid instead.
   void CallRuntime(Runtime::FunctionId id, int num_arguments);
 
+  // Convenience function: Same as above, but takes the fid instead.
+  Object* TryCallRuntime(Runtime::FunctionId id, int num_arguments);
+
   // Convenience function: call an external reference.
   void CallExternalReference(const ExternalReference& ext,
                              int num_arguments);
@@ -747,6 +772,16 @@
                        int num_arguments,
                        int result_size);
 
+  void PushHandleScope(Register scratch);
+
+  // Pops a handle scope using the specified scratch register and
+  // ensuring that saved register is left unchanged.
+  void PopHandleScope(Register saved, Register scratch);
+
+  // As PopHandleScope, but does not perform a GC.  Instead, returns a
+  // retry after GC failure object if GC is necessary.
+  Object* TryPopHandleScope(Register saved, Register scratch);
+
   // Jump to a runtime routine.
   void JumpToExternalReference(const ExternalReference& ext, int result_size);
 
@@ -835,6 +870,9 @@
   void EnterFrame(StackFrame::Type type);
   void LeaveFrame(StackFrame::Type type);
 
+  void EnterExitFramePrologue(ExitFrame::Mode mode, bool save_rax);
+  void EnterExitFrameEpilogue(ExitFrame::Mode mode, int result_size, int argc);
+
   // Allocation support helpers.
   // Loads the top of new-space into the result register.
   // If flags contains RESULT_CONTAINS_TOP then result_end is valid and
@@ -848,6 +886,13 @@
   // Update allocation top with value in result_end register.
   // If scratch is valid, it contains the address of the allocation top.
   void UpdateAllocationTopHelper(Register result_end, Register scratch);
+
+  // Helper for PopHandleScope.  Allowed to perform a GC and returns
+  // NULL if gc_allowed.  Does not perform a GC if !gc_allowed, and
+  // possibly returns a failure object indicating an allocation failure.
+  Object* PopHandleScopeHelper(Register saved,
+                               Register scratch,
+                               bool gc_allowed);
 };
 
 
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 2a918f1..4c15715 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -820,9 +820,11 @@
 }
 
 
-void CallStubCompiler::GenerateMissBranch() {
-  Handle<Code> ic = ComputeCallMiss(arguments().immediate(), kind_);
-  __ Jump(ic, RelocInfo::CODE_TARGET);
+Object* CallStubCompiler::GenerateMissBranch() {
+  Object* obj = StubCache::ComputeCallMiss(arguments().immediate(), kind_);
+  if (obj->IsFailure()) return obj;
+  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
+  return obj;
 }
 
 
@@ -975,7 +977,8 @@
 
   // Handle call cache miss.
   __ bind(&miss_in_smi_check);
-  GenerateMissBranch();
+  Object* obj = GenerateMissBranch();
+  if (obj->IsFailure()) return obj;
 
   // Return the generated code.
   return GetCode(function);
@@ -1029,7 +1032,8 @@
 
   // Handle call cache miss.
   __ bind(&miss);
-  GenerateMissBranch();
+  Object* obj = GenerateMissBranch();
+  if (obj->IsFailure()) return obj;
 
   // Return the generated code.
   return GetCode(FIELD, name);
@@ -1186,8 +1190,8 @@
   }
 
   __ bind(&miss);
-
-  GenerateMissBranch();
+  Object* obj = GenerateMissBranch();
+  if (obj->IsFailure()) return obj;
 
   // Return the generated code.
   return GetCode(function);
@@ -1270,8 +1274,8 @@
                                argc + 1,
                                1);
   __ bind(&miss);
-
-  GenerateMissBranch();
+  Object* obj = GenerateMissBranch();
+  if (obj->IsFailure()) return obj;
 
   // Return the generated code.
   return GetCode(function);
@@ -1357,7 +1361,8 @@
 
   // Handle load cache miss.
   __ bind(&miss);
-  GenerateMissBranch();
+  Object* obj = GenerateMissBranch();
+  if (obj->IsFailure()) return obj;
 
   // Return the generated code.
   return GetCode(INTERCEPTOR, name);
@@ -1442,7 +1447,8 @@
   // Handle call cache miss.
   __ bind(&miss);
   __ IncrementCounter(&Counters::call_global_inline_miss, 1);
-  GenerateMissBranch();
+  Object* obj = GenerateMissBranch();
+  if (obj->IsFailure()) return obj;
 
   // Return the generated code.
   return GetCode(NORMAL, name);
@@ -2211,23 +2217,80 @@
 
   // Check that the maps haven't changed.
   Register reg =
-      CheckPrototypes(object, receiver, holder,
-                      scratch1, scratch2, scratch3, name, miss);
+      CheckPrototypes(object, receiver, holder, scratch1,
+                      scratch2, scratch3, name, miss);
 
-  // Push the arguments on the JS stack of the caller.
-  __ pop(scratch2);  // remove return address
+  Handle<AccessorInfo> callback_handle(callback);
+
+  __ EnterInternalFrame();
+  __ PushHandleScope(scratch2);
+  // Push the stack address where the list of arguments ends.
+  __ movq(scratch2, rsp);
+  __ subq(scratch2, Immediate(2 * kPointerSize));
+  __ push(scratch2);
   __ push(receiver);  // receiver
   __ push(reg);  // holder
-  __ Move(reg, Handle<AccessorInfo>(callback));  // callback data
-  __ push(reg);
-  __ push(FieldOperand(reg, AccessorInfo::kDataOffset));
+  if (Heap::InNewSpace(callback_handle->data())) {
+    __ Move(scratch2, callback_handle);
+    __ push(FieldOperand(scratch2, AccessorInfo::kDataOffset));  // data
+  } else {
+    __ Push(Handle<Object>(callback_handle->data()));
+  }
   __ push(name_reg);  // name
-  __ push(scratch2);  // restore return address
+  // Save a pointer to where we pushed the arguments pointer.
+  // This will be passed as the const AccessorInfo& to the C++ callback.
 
-  // Do tail-call to the runtime system.
-  ExternalReference load_callback_property =
-      ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
-  __ TailCallExternalReference(load_callback_property, 5, 1);
+#ifdef _WIN64
+  // Win64 uses the first register, rcx, for a pointer to the returned value.
+  Register accessor_info_arg = r8;
+  Register name_arg = rdx;
+#else
+  Register accessor_info_arg = rdx;  // temporary, copied to rsi by the stub.
+  Register name_arg = rdi;
+#endif
+
+  __ movq(accessor_info_arg, rsp);
+  __ addq(accessor_info_arg, Immediate(4 * kPointerSize));
+  __ movq(name_arg, rsp);
+
+  // Do the call through the API.
+  ASSERT_EQ(5, ApiGetterEntryStub::kStackSpace);
+  Address getter_address = v8::ToCData<Address>(callback->getter());
+  ApiFunction fun(getter_address);
+  ApiGetterEntryStub stub(callback_handle, &fun);
+#ifdef _WIN64
+  // We need to prepare a slot for the result handle on the stack and
+  // put a pointer to it into the 1st arg register.
+  __ push(Immediate(0));
+  __ movq(rcx, rsp);
+#endif
+  // Emitting a stub call may try to allocate (if the code is not
+  // already generated).  Do not allow the assembler to perform a
+  // garbage collection but instead return the allocation failure
+  // object.
+  Object* result = masm()->TryCallStub(&stub);
+  if (result->IsFailure()) {
+    *failure = Failure::cast(result);
+    return false;
+  }
+#ifdef _WIN64
+  // Discard allocated slot.
+  __ addq(rsp, Immediate(kPointerSize));
+#endif
+
+  // We need to avoid using rax since that now holds the result.
+  Register tmp = scratch2.is(rax) ? reg : scratch2;
+  // Emitting PopHandleScope may try to allocate.  Do not allow the
+  // assembler to perform a garbage collection but instead return a
+  // failure object.
+  result = masm()->TryPopHandleScope(rax, tmp);
+  if (result->IsFailure()) {
+    *failure = Failure::cast(result);
+    return false;
+  }
+  __ LeaveInternalFrame();
+
+  __ ret(0);
 
   return true;
 }
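
Note on GenerateMissBranch above: computing the call-miss stub can itself
fail to allocate, so the helper now returns an Object* and every call site
propagates failures instead of assuming a Handle<Code>:

    Object* obj = GenerateMissBranch();
    if (obj->IsFailure()) return obj;  // propagate retry-after-GC failure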