Upgrade V8 to 5.1.281.57 DO NOT MERGE
FPIIM-449
Change-Id: Id981b686b4d587ac31697662eb98bb34be42ad90
(cherry picked from commit 3b9bc31999c9787eb726ecdbfd5796bfdec32a18)
diff --git a/src/frames.cc b/src/frames.cc
index 50a2e21..0e57429 100644
--- a/src/frames.cc
+++ b/src/frames.cc
@@ -134,12 +134,10 @@
#undef FRAME_TYPE_CASE
}
-
// -------------------------------------------------------------------------
-
-JavaScriptFrameIterator::JavaScriptFrameIterator(
- Isolate* isolate, StackFrame::Id id)
+JavaScriptFrameIterator::JavaScriptFrameIterator(Isolate* isolate,
+ StackFrame::Id id)
: iterator_(isolate) {
while (!done()) {
Advance();
@@ -216,9 +214,9 @@
// we check only that kMarkerOffset is within the stack bounds and do
// compile time check that kContextOffset slot is pushed on the stack before
// kMarkerOffset.
- STATIC_ASSERT(StandardFrameConstants::kMarkerOffset <
+ STATIC_ASSERT(StandardFrameConstants::kFunctionOffset <
StandardFrameConstants::kContextOffset);
- Address frame_marker = fp + StandardFrameConstants::kMarkerOffset;
+ Address frame_marker = fp + StandardFrameConstants::kFunctionOffset;
if (IsValidStackAddress(frame_marker)) {
type = StackFrame::ComputeType(this, &state);
top_frame_type_ = type;
@@ -403,11 +401,29 @@
return_address_location_resolver_ = resolver;
}
+static bool IsInterpreterFramePc(Isolate* isolate, Address pc) {
+ Code* interpreter_entry_trampoline =
+ isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
+ Code* interpreter_bytecode_dispatch =
+ isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
+
+ return (pc >= interpreter_entry_trampoline->instruction_start() &&
+ pc < interpreter_entry_trampoline->instruction_end()) ||
+ (pc >= interpreter_bytecode_dispatch->instruction_start() &&
+ pc < interpreter_bytecode_dispatch->instruction_end());
+}
StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
State* state) {
DCHECK(state->fp != NULL);
+#if defined(USE_SIMULATOR)
+ MSAN_MEMORY_IS_INITIALIZED(
+ state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
+ kPointerSize);
+#endif
+ Object* marker = Memory::Object_at(
+ state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
if (!iterator->can_access_heap_objects_) {
// TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
// means that we are being called from the profiler, which can interrupt
@@ -416,66 +432,81 @@
// reliable.
#if defined(USE_SIMULATOR)
MSAN_MEMORY_IS_INITIALIZED(
- state->fp + StandardFrameConstants::kContextOffset, kPointerSize);
- MSAN_MEMORY_IS_INITIALIZED(
- state->fp + StandardFrameConstants::kMarkerOffset, kPointerSize);
+ state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
#endif
- if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
- // An adapter frame has a special SMI constant for the context and
- // is not distinguished through the marker.
- return ARGUMENTS_ADAPTOR;
- }
- Object* marker =
- Memory::Object_at(state->fp + StandardFrameConstants::kMarkerOffset);
- if (marker->IsSmi()) {
- return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
- } else {
- return JAVA_SCRIPT;
- }
- }
-
- // Look up the code object to figure out the type of the stack frame.
- Code* code_obj = GetContainingCode(iterator->isolate(), *(state->pc_address));
-
- Object* marker =
- Memory::Object_at(state->fp + StandardFrameConstants::kMarkerOffset);
- if (code_obj != nullptr) {
- switch (code_obj->kind()) {
- case Code::FUNCTION:
+ Object* maybe_function =
+ Memory::Object_at(state->fp + StandardFrameConstants::kFunctionOffset);
+ if (!marker->IsSmi()) {
+ if (maybe_function->IsSmi()) {
+ return NONE;
+ } else if (FLAG_ignition && IsInterpreterFramePc(iterator->isolate(),
+ *(state->pc_address))) {
+ return INTERPRETED;
+ } else {
return JAVA_SCRIPT;
- case Code::OPTIMIZED_FUNCTION:
- return OPTIMIZED;
- case Code::WASM_FUNCTION:
- return STUB;
- case Code::BUILTIN:
- if (!marker->IsSmi()) {
- if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
- // An adapter frame has a special SMI constant for the context and
- // is not distinguished through the marker.
- return ARGUMENTS_ADAPTOR;
- } else {
- // The interpreter entry trampoline has a non-SMI marker.
- DCHECK(code_obj->is_interpreter_entry_trampoline() ||
- code_obj->is_interpreter_enter_bytecode_dispatch());
- return INTERPRETED;
- }
- }
- break; // Marker encodes the frame type.
- case Code::HANDLER:
- if (!marker->IsSmi()) {
- // Only hydrogen code stub handlers can have a non-SMI marker.
- DCHECK(code_obj->is_hydrogen_stub());
+ }
+ }
+ } else {
+ // Look up the code object to figure out the type of the stack frame.
+ Code* code_obj =
+ GetContainingCode(iterator->isolate(), *(state->pc_address));
+ if (code_obj != nullptr) {
+ if (code_obj->is_interpreter_entry_trampoline() ||
+ code_obj->is_interpreter_enter_bytecode_dispatch()) {
+ return INTERPRETED;
+ }
+ switch (code_obj->kind()) {
+ case Code::BUILTIN:
+ if (marker->IsSmi()) break;
+ // We treat frames for BUILTIN Code objects as OptimizedFrame for now
+ // (all the builtins with JavaScript linkage are actually generated
+ // with TurboFan currently, so this is sound).
return OPTIMIZED;
- }
- break; // Marker encodes the frame type.
- default:
- break; // Marker encodes the frame type.
+ case Code::FUNCTION:
+ return JAVA_SCRIPT;
+ case Code::OPTIMIZED_FUNCTION:
+ return OPTIMIZED;
+ case Code::WASM_FUNCTION:
+ return WASM;
+ case Code::WASM_TO_JS_FUNCTION:
+ return WASM_TO_JS;
+ case Code::JS_TO_WASM_FUNCTION:
+ return JS_TO_WASM;
+ default:
+ // All other types should have an explicit marker
+ break;
+ }
+ } else {
+ return NONE;
}
}
- // Didn't find a code object, or the code kind wasn't specific enough.
- // The marker should encode the frame type.
- return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
+ DCHECK(marker->IsSmi());
+ StackFrame::Type candidate =
+ static_cast<StackFrame::Type>(Smi::cast(marker)->value());
+ switch (candidate) {
+ case ENTRY:
+ case ENTRY_CONSTRUCT:
+ case EXIT:
+ case STUB:
+ case STUB_FAILURE_TRAMPOLINE:
+ case INTERNAL:
+ case CONSTRUCT:
+ case ARGUMENTS_ADAPTOR:
+ return candidate;
+ case JS_TO_WASM:
+ case WASM_TO_JS:
+ case WASM:
+ case JAVA_SCRIPT:
+ case OPTIMIZED:
+ case INTERPRETED:
+ default:
+ // Unoptimized and optimized JavaScript frames, including
+ // interpreted frames, should never have a StackFrame::Type
+ // marker. If we find one, we're likely being called from the
+ // profiler in a bogus stack frame.
+ return NONE;
+ }
}
@@ -493,16 +524,7 @@
Address StackFrame::UnpaddedFP() const {
-#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
- if (!is_optimized()) return fp();
- int32_t alignment_state = Memory::int32_at(
- fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);
-
- return (alignment_state == kAlignmentPaddingPushed) ?
- (fp() + kPointerSize) : fp();
-#else
return fp();
-#endif
}
@@ -572,7 +594,7 @@
Address ExitFrame::GetCallerStackPointer() const {
- return fp() + ExitFrameConstants::kCallerSPDisplacement;
+ return fp() + ExitFrameConstants::kCallerSPOffset;
}
@@ -648,13 +670,50 @@
SafepointEntry safepoint_entry;
Code* code = StackFrame::GetSafepointData(
isolate(), pc(), &safepoint_entry, &stack_slots);
- unsigned slot_space =
- stack_slots * kPointerSize - StandardFrameConstants::kFixedFrameSize;
+ unsigned slot_space = stack_slots * kPointerSize;
- // Visit the outgoing parameters.
+ // Determine the fixed header and spill slot area size.
+ int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
+ Object* marker =
+ Memory::Object_at(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
+ if (marker->IsSmi()) {
+ StackFrame::Type candidate =
+ static_cast<StackFrame::Type>(Smi::cast(marker)->value());
+ switch (candidate) {
+ case ENTRY:
+ case ENTRY_CONSTRUCT:
+ case EXIT:
+ case STUB_FAILURE_TRAMPOLINE:
+ case ARGUMENTS_ADAPTOR:
+ case STUB:
+ case INTERNAL:
+ case CONSTRUCT:
+ case JS_TO_WASM:
+ case WASM_TO_JS:
+ case WASM:
+ frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
+ break;
+ case JAVA_SCRIPT:
+ case OPTIMIZED:
+ case INTERPRETED:
+ // These frame types have a context, but they are actually stored
+ // in the place on the stack that one finds the frame type.
+ UNREACHABLE();
+ break;
+ case NONE:
+ case NUMBER_OF_TYPES:
+ case MANUAL:
+ UNREACHABLE();
+ break;
+ }
+ }
+ slot_space -=
+ (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);
+
+ Object** frame_header_base = &Memory::Object_at(fp() - frame_header_size);
+ Object** frame_header_limit = &Memory::Object_at(fp());
Object** parameters_base = &Memory::Object_at(sp());
- Object** parameters_limit = &Memory::Object_at(
- fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space);
+ Object** parameters_limit = frame_header_base - slot_space / kPointerSize;
// Visit the parameters that may be on top of the saved registers.
if (safepoint_entry.argument_count() > 0) {
@@ -690,7 +749,10 @@
safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;
// Visit the rest of the parameters.
- v->VisitPointers(parameters_base, parameters_limit);
+ if (!is_js_to_wasm() && !is_wasm()) {
+ // Non-WASM frames have tagged values as parameters.
+ v->VisitPointers(parameters_base, parameters_limit);
+ }
// Visit pointer spill slots and locals.
for (unsigned index = 0; index < stack_slots; index++) {
@@ -704,12 +766,11 @@
// Visit the return address in the callee and incoming arguments.
IteratePc(v, pc_address(), constant_pool_address(), code);
- // Visit the context in stub frame and JavaScript frame.
- // Visit the function in JavaScript frame.
- Object** fixed_base = &Memory::Object_at(
- fp() + StandardFrameConstants::kMarkerOffset);
- Object** fixed_limit = &Memory::Object_at(fp());
- v->VisitPointers(fixed_base, fixed_limit);
+ if (!is_wasm() && !is_wasm_to_js()) {
+ // Visit the context in stub frame and JavaScript frame.
+ // Visit the function in JavaScript frame.
+ v->VisitPointers(frame_header_base, frame_header_limit);
+ }
}
@@ -724,7 +785,7 @@
Address StubFrame::GetCallerStackPointer() const {
- return fp() + ExitFrameConstants::kCallerSPDisplacement;
+ return fp() + ExitFrameConstants::kCallerSPOffset;
}
@@ -893,6 +954,15 @@
}
}
+namespace {
+
+bool CannotDeoptFromAsmCode(Code* code, JSFunction* function) {
+ return code->is_turbofanned() && function->shared()->asm_function() &&
+ !FLAG_turbo_asm_deoptimization;
+}
+
+} // namespace
+
FrameSummary::FrameSummary(Object* receiver, JSFunction* function,
AbstractCode* abstract_code, int code_offset,
bool is_constructor)
@@ -900,7 +970,11 @@
function_(function),
abstract_code_(abstract_code),
code_offset_(code_offset),
- is_constructor_(is_constructor) {}
+ is_constructor_(is_constructor) {
+ DCHECK(abstract_code->IsBytecodeArray() ||
+ Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION ||
+ CannotDeoptFromAsmCode(Code::cast(abstract_code), function));
+}
void FrameSummary::Print() {
PrintF("receiver: ");
@@ -912,7 +986,10 @@
if (abstract_code_->IsCode()) {
Code* code = abstract_code_->GetCode();
if (code->kind() == Code::FUNCTION) PrintF(" UNOPT ");
- if (code->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT ");
+ if (code->kind() == Code::OPTIMIZED_FUNCTION) {
+ DCHECK(CannotDeoptFromAsmCode(code, *function()));
+ PrintF(" ASM ");
+ }
} else {
PrintF(" BYTECODE ");
}
@@ -926,8 +1003,9 @@
// Delegate to JS frame in absence of turbofan deoptimization.
// TODO(turbofan): Revisit once we support deoptimization across the board.
- if (LookupCode()->is_turbofanned() && function()->shared()->asm_function() &&
- !FLAG_turbo_asm_deoptimization) {
+ Code* code = LookupCode();
+ if (code->kind() == Code::BUILTIN ||
+ CannotDeoptFromAsmCode(code, function())) {
return JavaScriptFrame::Summarize(frames);
}
@@ -952,7 +1030,7 @@
if (frame_opcode == Translation::JS_FRAME ||
frame_opcode == Translation::INTERPRETED_FRAME) {
jsframe_count--;
- BailoutId const ast_id = BailoutId(it.Next());
+ BailoutId const bailout_id = BailoutId(it.Next());
SharedFunctionInfo* const shared_info =
SharedFunctionInfo::cast(literal_array->get(it.Next()));
it.Next(); // Skip height.
@@ -999,14 +1077,14 @@
DeoptimizationOutputData* const output_data =
DeoptimizationOutputData::cast(code->deoptimization_data());
unsigned const entry =
- Deoptimizer::GetOutputInfo(output_data, ast_id, shared_info);
+ Deoptimizer::GetOutputInfo(output_data, bailout_id, shared_info);
code_offset = FullCodeGenerator::PcField::decode(entry);
abstract_code = AbstractCode::cast(code);
} else {
- // TODO(rmcilroy): Modify FrameSummary to enable us to summarize
- // based on the BytecodeArray and bytecode offset.
DCHECK_EQ(frame_opcode, Translation::INTERPRETED_FRAME);
- code_offset = 0;
+      // BailoutId points to the next bytecode in the bytecode array. Subtract
+      // 1 to get the end of the current bytecode.
+      code_offset = bailout_id.ToInt() - 1;
abstract_code = AbstractCode::cast(shared_info->bytecode_array());
}
FrameSummary summary(receiver, function, abstract_code, code_offset,
@@ -1030,7 +1108,6 @@
int OptimizedFrame::LookupExceptionHandlerInTable(
int* stack_slots, HandlerTable::CatchPrediction* prediction) {
Code* code = LookupCode();
- DCHECK(code->is_optimized_code());
HandlerTable* table = HandlerTable::cast(code->handler_table());
int pc_offset = static_cast<int>(pc() - code->entry());
if (stack_slots) *stack_slots = code->stack_slots();
@@ -1069,8 +1146,9 @@
// Delegate to JS frame in absence of turbofan deoptimization.
// TODO(turbofan): Revisit once we support deoptimization across the board.
- if (LookupCode()->is_turbofanned() && function()->shared()->asm_function() &&
- !FLAG_turbo_asm_deoptimization) {
+ Code* code = LookupCode();
+ if (code->kind() == Code::BUILTIN ||
+ CannotDeoptFromAsmCode(code, function())) {
return JavaScriptFrame::GetFunctions(functions);
}
@@ -1222,6 +1300,20 @@
accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
}
+void WasmFrame::Print(StringStream* accumulator, PrintMode mode,
+ int index) const {
+ accumulator->Add("wasm frame");
+}
+
+Code* WasmFrame::unchecked_code() const {
+ return static_cast<Code*>(isolate()->FindCodeObject(pc()));
+}
+
+void WasmFrame::Iterate(ObjectVisitor* v) const { IterateCompiledFrame(v); }
+
+Address WasmFrame::GetCallerStackPointer() const {
+ return fp() + ExitFrameConstants::kCallerSPOffset;
+}
namespace {
@@ -1437,10 +1529,10 @@
void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
Object** base = &Memory::Object_at(sp());
- Object** limit = &Memory::Object_at(fp() +
- kFirstRegisterParameterFrameOffset);
+ Object** limit = &Memory::Object_at(
+ fp() + StubFailureTrampolineFrameConstants::kFixedHeaderBottomOffset);
v->VisitPointers(base, limit);
- base = &Memory::Object_at(fp() + StandardFrameConstants::kMarkerOffset);
+ base = &Memory::Object_at(fp() + StandardFrameConstants::kFunctionOffset);
const int offset = StandardFrameConstants::kLastObjectOffset;
limit = &Memory::Object_at(fp() + offset) + 1;
v->VisitPointers(base, limit);