Revert "Revert "Upgrade to 5.0.71.48"" DO NOT MERGE

This reverts commit f2e3994fa5148cc3d9946666f0b0596290192b0e and
updates the x64 makefile so that the re-applied upgrade no longer
breaks the x64 build.
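
One embedder-visible change in the isolate.cc part of this roll worth
calling out: Isolate::MayAccess now forwards the AccessCheckInfo data
value to the simple AccessCheckCallback as a third argument. A minimal
sketch of an embedder callback consuming that argument follows; the
function name is illustrative and the template setter used to install
it is assumed, not shown in this diff:

    #include "v8.h"

    // Illustrative only: deny access unless the data value installed
    // alongside the access-check callback (e.g. via
    // ObjectTemplate::SetAccessCheckCallback) is the boolean true.
    bool ExampleAccessCheck(v8::Local<v8::Context> accessing_context,
                            v8::Local<v8::Object> accessed_object,
                            v8::Local<v8::Value> data) {
      return !data.IsEmpty() && data->IsTrue();
    }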

FPIIM-449

Change-Id: Ib83e35bfbae6af627451c926a9650ec57c045605
(cherry picked from commit 109988c7ccb6f3fd1a58574fa3dfb88beaef6632)
diff --git a/src/isolate.cc b/src/isolate.cc
index 4e42b43..8116f14 100644
--- a/src/isolate.cc
+++ b/src/isolate.cc
@@ -390,8 +390,9 @@
       }
       DCHECK(cursor + 4 <= elements->length());
 
-      Handle<Code> code = frames[i].code();
-      Handle<Smi> offset(Smi::FromInt(frames[i].offset()), this);
+      Handle<AbstractCode> abstract_code = frames[i].abstract_code();
+
+      Handle<Smi> offset(Smi::FromInt(frames[i].code_offset()), this);
       // The stack trace API should not expose receivers and function
       // objects on frames deeper than the top-most one with a strict
       // mode function.  The number of sloppy frames is stored as
@@ -405,7 +406,7 @@
       }
       elements->set(cursor++, *recv);
       elements->set(cursor++, *fun);
-      elements->set(cursor++, *code);
+      elements->set(cursor++, *abstract_code);
       elements->set(cursor++, *offset);
       frames_seen++;
     }
@@ -594,9 +595,9 @@
   if (maybe_code->IsSmi()) {
     return Smi::cast(maybe_code)->value();
   } else {
-    Code* code = Code::cast(maybe_code);
-    Address pc = code->address() + Smi::cast(elements->get(index + 3))->value();
-    return code->SourcePosition(pc);
+    AbstractCode* abstract_code = AbstractCode::cast(maybe_code);
+    int code_offset = Smi::cast(elements->get(index + 3))->value();
+    return abstract_code->SourcePosition(code_offset);
   }
 }
 
@@ -661,7 +662,8 @@
       // Filter frames from other security contexts.
       if (!(options & StackTrace::kExposeFramesAcrossSecurityOrigins) &&
           !this->context()->HasSameSecurityTokenAs(fun->context())) continue;
-      int position = frames[i].code()->SourcePosition(frames[i].pc());
+      int position =
+          frames[i].abstract_code()->SourcePosition(frames[i].code_offset());
       Handle<JSObject> stack_frame =
           helper.NewStackFrameObject(fun, position, frames[i].is_constructor());
 
@@ -780,14 +782,6 @@
 }
 
 
-bool Isolate::IsInternallyUsedPropertyName(Handle<Object> name) {
-  if (name->IsSymbol()) {
-    return Handle<Symbol>::cast(name)->is_private();
-  }
-  return name.is_identical_to(factory()->hidden_string());
-}
-
-
 bool Isolate::MayAccess(Handle<Context> accessing_context,
                         Handle<JSObject> receiver) {
   DCHECK(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded());
@@ -826,11 +820,11 @@
     if (!access_check_info) return false;
     Object* fun_obj = access_check_info->callback();
     callback = v8::ToCData<v8::AccessCheckCallback>(fun_obj);
+    data = handle(access_check_info->data(), this);
     if (!callback) {
       fun_obj = access_check_info->named_callback();
       named_callback = v8::ToCData<v8::NamedSecurityCallback>(fun_obj);
       if (!named_callback) return false;
-      data = handle(access_check_info->data(), this);
     }
   }
 
@@ -841,7 +835,7 @@
     VMState<EXTERNAL> state(this);
     if (callback) {
       return callback(v8::Utils::ToLocal(accessing_context),
-                      v8::Utils::ToLocal(receiver));
+                      v8::Utils::ToLocal(receiver), v8::Utils::ToLocal(data));
     }
     Handle<Object> key = factory()->undefined_value();
     return named_callback(v8::Utils::ToLocal(receiver), v8::Utils::ToLocal(key),
@@ -1100,34 +1094,63 @@
     if (frame->is_optimized() && catchable_by_js) {
       OptimizedFrame* js_frame = static_cast<OptimizedFrame*>(frame);
       int stack_slots = 0;  // Will contain stack slot count of frame.
-      offset = js_frame->LookupExceptionHandlerInTable(&stack_slots, NULL);
+      offset = js_frame->LookupExceptionHandlerInTable(&stack_slots, nullptr);
       if (offset >= 0) {
         // Compute the stack pointer from the frame pointer. This ensures that
         // argument slots on the stack are dropped as returning would.
-        Address return_sp = frame->fp() -
-                            StandardFrameConstants::kFixedFrameSizeFromFp -
+        Address return_sp = frame->fp() +
+                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                             stack_slots * kPointerSize;
 
         // Gather information from the frame.
         code = frame->LookupCode();
+        if (code->marked_for_deoptimization()) {
+          // If the target code is lazy deoptimized, we jump to the original
+          // return address, but we make a note that we are throwing, so that
+          // the deoptimizer can do the right thing.
+          offset = static_cast<int>(frame->pc() - code->entry());
+          set_deoptimizer_lazy_throw(true);
+        }
         handler_sp = return_sp;
         handler_fp = frame->fp();
         break;
       }
     }
 
+    // For interpreted frames we perform a range lookup in the handler table.
+    if (frame->is_interpreted() && catchable_by_js) {
+      InterpretedFrame* js_frame = static_cast<InterpretedFrame*>(frame);
+      int context_reg = 0;  // Will contain register index holding context.
+      offset = js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
+      if (offset >= 0) {
+        // Patch the bytecode offset in the interpreted frame to reflect the
+        // position of the exception handler. The special builtin below will
+        // take care of continuing to dispatch at that position. Also restore
+        // the correct context for the handler from the interpreter register.
+        context = Context::cast(js_frame->GetInterpreterRegister(context_reg));
+        js_frame->PatchBytecodeOffset(static_cast<int>(offset));
+        offset = 0;
+
+        // Gather information from the frame.
+        code = *builtins()->InterpreterEnterBytecodeDispatch();
+        handler_sp = frame->sp();
+        handler_fp = frame->fp();
+        break;
+      }
+    }
+
     // For JavaScript frames we perform a range lookup in the handler table.
     if (frame->is_java_script() && catchable_by_js) {
       JavaScriptFrame* js_frame = static_cast<JavaScriptFrame*>(frame);
-      int stack_slots = 0;  // Will contain operand stack depth of handler.
-      offset = js_frame->LookupExceptionHandlerInTable(&stack_slots, NULL);
+      int stack_depth = 0;  // Will contain operand stack depth of handler.
+      offset = js_frame->LookupExceptionHandlerInTable(&stack_depth, nullptr);
       if (offset >= 0) {
         // Compute the stack pointer from the frame pointer. This ensures that
         // operand stack slots are dropped for nested statements. Also restore
         // correct context for the handler which is pushed within the try-block.
         Address return_sp = frame->fp() -
                             StandardFrameConstants::kFixedFrameSizeFromFp -
-                            stack_slots * kPointerSize;
+                            stack_depth * kPointerSize;
         STATIC_ASSERT(TryBlockConstant::kElementCount == 1);
         context = Context::cast(Memory::Object_at(return_sp - kPointerSize));
 
@@ -1175,10 +1198,8 @@
     // For JavaScript frames we perform a lookup in the handler table.
     if (frame->is_java_script()) {
       JavaScriptFrame* js_frame = static_cast<JavaScriptFrame*>(frame);
-      int stack_slots = 0;  // The computed stack slot count is not used.
       HandlerTable::CatchPrediction prediction;
-      if (js_frame->LookupExceptionHandlerInTable(&stack_slots, &prediction) >
-          0) {
+      if (js_frame->LookupExceptionHandlerInTable(nullptr, &prediction) > 0) {
         // We are conservative with our prediction: try-finally is considered
         // to always rethrow, to meet the expectation of the debugger.
         if (prediction == HandlerTable::CAUGHT) return CAUGHT_BY_JAVASCRIPT;
@@ -1263,7 +1284,9 @@
     HandleScope scope(this);
     // Find code position if recorded in relocation info.
     JavaScriptFrame* frame = it.frame();
-    int pos = frame->LookupCode()->SourcePosition(frame->pc());
+    Code* code = frame->LookupCode();
+    int offset = static_cast<int>(frame->pc() - code->instruction_start());
+    int pos = frame->LookupCode()->SourcePosition(offset);
     Handle<Object> pos_obj(Smi::FromInt(pos), this);
     // Fetch function and receiver.
     Handle<JSFunction> fun(frame->function());
@@ -1298,7 +1321,7 @@
       List<FrameSummary> frames(FLAG_max_inlining_levels + 1);
       it.frame()->Summarize(&frames);
       FrameSummary& summary = frames.last();
-      int pos = summary.code()->SourcePosition(summary.pc());
+      int pos = summary.abstract_code()->SourcePosition(summary.code_offset());
       *target = MessageLocation(casted_script, pos, pos + 1, handle(fun));
       return true;
     }
@@ -1586,8 +1609,7 @@
   if (PredictExceptionCatcher() != CAUGHT_BY_JAVASCRIPT) return undefined;
   for (JavaScriptFrameIterator it(this); !it.done(); it.Advance()) {
     JavaScriptFrame* frame = it.frame();
-    int stack_slots = 0;  // The computed stack slot count is not used.
-    if (frame->LookupExceptionHandlerInTable(&stack_slots, NULL) > 0) {
+    if (frame->LookupExceptionHandlerInTable(nullptr, nullptr) > 0) {
       // Throwing inside a Promise only leads to a reject if not caught by an
       // inner try-catch or try-finally.
       if (frame->function() == *promise_function) {
@@ -1732,7 +1754,6 @@
 #define TRACE_ISOLATE(tag)
 #endif
 
-
 Isolate::Isolate(bool enable_serializer)
     : embedder_data_(),
       entry_stack_(NULL),
@@ -1748,6 +1769,7 @@
       stub_cache_(NULL),
       code_aging_helper_(NULL),
       deoptimizer_data_(NULL),
+      deoptimizer_lazy_throw_(false),
       materialized_object_store_(NULL),
       capture_stack_trace_for_uncaught_exceptions_(false),
       stack_trace_for_uncaught_exceptions_frame_limit_(0),
@@ -1903,9 +1925,6 @@
   Sampler* sampler = logger_->sampler();
   if (sampler && sampler->IsActive()) sampler->Stop();
 
-  delete interpreter_;
-  interpreter_ = NULL;
-
   delete deoptimizer_data_;
   deoptimizer_data_ = NULL;
   builtins_.TearDown();
@@ -1919,13 +1938,17 @@
   delete basic_block_profiler_;
   basic_block_profiler_ = NULL;
 
+  delete heap_profiler_;
+  heap_profiler_ = NULL;
+
   heap_.TearDown();
   logger_->TearDown();
 
+  delete interpreter_;
+  interpreter_ = NULL;
+
   cancelable_task_manager()->CancelAndWait();
 
-  delete heap_profiler_;
-  heap_profiler_ = NULL;
   delete cpu_profiler_;
   cpu_profiler_ = NULL;
 
@@ -2376,6 +2399,11 @@
   turbo_statistics_ = nullptr;
   delete hstatistics_;
   hstatistics_ = nullptr;
+  if (FLAG_runtime_call_stats) {
+    OFStream os(stdout);
+    counters()->runtime_call_stats()->Print(os);
+    counters()->runtime_call_stats()->Reset();
+  }
 }
 
 
@@ -2483,8 +2511,37 @@
   return cell_reports_intact;
 }
 
+bool Isolate::IsArraySpeciesLookupChainIntact() {
+  if (!FLAG_harmony_species) return true;
+  // Note: It would be nice to have debug checks to make sure that the
+  // species protector is accurate, but this would be hard to do for most of
+  // what the protector stands for:
+  // - You'd need to traverse the heap to check that no Array instance has
+  //   a constructor property or a modified __proto__
+  // - To check that Array[Symbol.species] == Array, JS code has to execute,
+  //   but JS cannot be invoked in callstack overflow situations
+  // All that could be checked reliably is that
+  // Array.prototype.constructor == Array. Given that limitation, no check is
+  // done here. Instead, there are mjsunit tests harmony/array-species* which
+  // ensure that behavior is correct in various invalid protector cases.
+
+  PropertyCell* species_cell = heap()->species_protector();
+  return species_cell->value()->IsSmi() &&
+         Smi::cast(species_cell->value())->value() == kArrayProtectorValid;
+}
+
+void Isolate::InvalidateArraySpeciesProtector() {
+  if (!FLAG_harmony_species) return;
+  DCHECK(factory()->species_protector()->value()->IsSmi());
+  DCHECK(IsArraySpeciesLookupChainIntact());
+  PropertyCell::SetValueWithInvalidation(
+      factory()->species_protector(),
+      handle(Smi::FromInt(kArrayProtectorInvalid), this));
+  DCHECK(!IsArraySpeciesLookupChainIntact());
+}
 
 void Isolate::UpdateArrayProtectorOnSetElement(Handle<JSObject> object) {
+  DisallowHeapAllocation no_gc;
   if (IsFastArrayConstructorPrototypeChainIntact() &&
       object->map()->is_prototype_map()) {
     Object* context = heap()->native_contexts_list();
@@ -2494,6 +2551,7 @@
               *object ||
           current_context->get(Context::INITIAL_ARRAY_PROTOTYPE_INDEX) ==
               *object) {
+        CountUsage(v8::Isolate::UseCounterFeature::kArrayProtectorDirtied);
         PropertyCell::SetValueWithInvalidation(
             factory()->array_protector(),
             handle(Smi::FromInt(kArrayProtectorInvalid), this));
@@ -2580,6 +2638,31 @@
 }
 
 
+void Isolate::AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback) {
+  for (int i = 0; i < before_call_entered_callbacks_.length(); i++) {
+    if (callback == before_call_entered_callbacks_.at(i)) return;
+  }
+  before_call_entered_callbacks_.Add(callback);
+}
+
+
+void Isolate::RemoveBeforeCallEnteredCallback(
+    BeforeCallEnteredCallback callback) {
+  for (int i = 0; i < before_call_entered_callbacks_.length(); i++) {
+    if (callback == before_call_entered_callbacks_.at(i)) {
+      before_call_entered_callbacks_.Remove(i);
+    }
+  }
+}
+
+
+void Isolate::FireBeforeCallEnteredCallback() {
+  for (int i = 0; i < before_call_entered_callbacks_.length(); i++) {
+    before_call_entered_callbacks_.at(i)(reinterpret_cast<v8::Isolate*>(this));
+  }
+}
+
+
 void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) {
   for (int i = 0; i < call_completed_callbacks_.length(); i++) {
     if (callback == call_completed_callbacks_.at(i)) return;
@@ -2605,10 +2688,10 @@
   if (!handle_scope_implementer()->CallDepthIsZero()) return;
   if (run_microtasks) RunMicrotasks();
   // Fire callbacks.  Increase call depth to prevent recursive callbacks.
-  v8::Isolate::SuppressMicrotaskExecutionScope suppress(
-      reinterpret_cast<v8::Isolate*>(this));
+  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this);
+  v8::Isolate::SuppressMicrotaskExecutionScope suppress(isolate);
   for (int i = 0; i < call_completed_callbacks_.length(); i++) {
-    call_completed_callbacks_.at(i)();
+    call_completed_callbacks_.at(i)(isolate);
   }
 }