Upgrade V8 to version 4.9.385.28
https://chromium.googlesource.com/v8/v8/+/4.9.385.28
FPIIM-449
Change-Id: I4b2e74289d4bf3667f2f3dc8aa2e541f63e26eb4
diff --git a/src/heap/objects-visiting.cc b/src/heap/objects-visiting.cc
index 20d92de..315c897 100644
--- a/src/heap/objects-visiting.cc
+++ b/src/heap/objects-visiting.cc
@@ -2,14 +2,21 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "src/v8.h"
-
#include "src/heap/objects-visiting.h"
+#include "src/heap/mark-compact-inl.h"
+#include "src/heap/objects-visiting-inl.h"
+
namespace v8 {
namespace internal {
+StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
+ return GetVisitorId(map->instance_type(), map->instance_size(),
+ FLAG_unbox_double_fields && !map->HasFastPointerLayout());
+}
+
+
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
int instance_type, int instance_size, bool has_unboxed_fields) {
if (instance_type < FIRST_NONSTRING_TYPE) {
@@ -42,6 +49,9 @@
case BYTE_ARRAY_TYPE:
return kVisitByteArray;
+ case BYTECODE_ARRAY_TYPE:
+ return kVisitBytecodeArray;
+
case FREE_SPACE_TYPE:
return kVisitFreeSpace;
@@ -51,9 +61,6 @@
case FIXED_DOUBLE_ARRAY_TYPE:
return kVisitFixedDoubleArray;
- case CONSTANT_POOL_ARRAY_TYPE:
- return kVisitConstantPoolArray;
-
case ODDBALL_TYPE:
return kVisitOddball;
@@ -72,13 +79,8 @@
case WEAK_CELL_TYPE:
return kVisitWeakCell;
- case JS_SET_TYPE:
- return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
- JSSet::kSize, has_unboxed_fields);
-
- case JS_MAP_TYPE:
- return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
- JSMap::kSize, has_unboxed_fields);
+ case TRANSITION_ARRAY_TYPE:
+ return kVisitTransitionArray;
case JS_WEAK_MAP_TYPE:
case JS_WEAK_SET_TYPE:
@@ -92,31 +94,14 @@
case JS_PROXY_TYPE:
return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
- JSProxy::kSize, has_unboxed_fields);
-
- case JS_FUNCTION_PROXY_TYPE:
- return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
- JSFunctionProxy::kSize, has_unboxed_fields);
-
- case FOREIGN_TYPE:
- return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
- Foreign::kSize, has_unboxed_fields);
+ instance_size, has_unboxed_fields);
case SYMBOL_TYPE:
return kVisitSymbol;
- case FILLER_TYPE:
- return kVisitDataObjectGeneric;
-
case JS_ARRAY_BUFFER_TYPE:
return kVisitJSArrayBuffer;
- case JS_TYPED_ARRAY_TYPE:
- return kVisitJSTypedArray;
-
- case JS_DATA_VIEW_TYPE:
- return kVisitJSDataView;
-
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_GENERATOR_OBJECT_TYPE:
@@ -126,25 +111,31 @@
case JS_ARRAY_TYPE:
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
- case JS_BUILTINS_OBJECT_TYPE:
case JS_MESSAGE_OBJECT_TYPE:
+ case JS_TYPED_ARRAY_TYPE:
+ case JS_DATA_VIEW_TYPE:
+ case JS_SET_TYPE:
+ case JS_MAP_TYPE:
case JS_SET_ITERATOR_TYPE:
case JS_MAP_ITERATOR_TYPE:
+ case JS_ITERATOR_RESULT_TYPE:
+ case JS_PROMISE_TYPE:
+ case JS_BOUND_FUNCTION_TYPE:
return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
instance_size, has_unboxed_fields);
case JS_FUNCTION_TYPE:
return kVisitJSFunction;
+ case FILLER_TYPE:
+ if (instance_size == kPointerSize) return kVisitDataObjectGeneric;
+ // Fall through.
+ case FOREIGN_TYPE:
case HEAP_NUMBER_TYPE:
case MUTABLE_HEAP_NUMBER_TYPE:
-#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size) \
- case EXTERNAL_##TYPE##_ARRAY_TYPE:
-
- TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
+ case SIMD128_VALUE_TYPE:
return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
instance_size, has_unboxed_fields);
-#undef EXTERNAL_ARRAY_CASE
case FIXED_UINT8_ARRAY_TYPE:
case FIXED_INT8_ARRAY_TYPE:
@@ -197,9 +188,11 @@
T* tail = NULL;
MarkCompactCollector* collector = heap->mark_compact_collector();
bool record_slots = MustRecordSlots(heap);
+
while (list != undefined) {
// Check whether to keep the candidate in the list.
T* candidate = reinterpret_cast<T*>(list);
+
Object* retained = retainer->RetainAs(list);
if (retained != NULL) {
if (head == undefined) {
@@ -212,7 +205,7 @@
if (record_slots) {
Object** next_slot =
HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
- collector->RecordSlot(next_slot, next_slot, retained);
+ collector->RecordSlot(tail, next_slot, retained);
}
}
// Retained object is new tail.
@@ -220,9 +213,9 @@
candidate = reinterpret_cast<T*>(retained);
tail = candidate;
-
// tail is a live object, visit it.
WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
+
} else {
WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
}
@@ -232,9 +225,7 @@
}
// Terminate the list if there is one or more elements.
- if (tail != NULL) {
- WeakListVisitor<T>::SetWeakNext(tail, undefined);
- }
+ if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
return head;
}
@@ -253,7 +244,7 @@
template <>
struct WeakListVisitor<JSFunction> {
static void SetWeakNext(JSFunction* function, Object* next) {
- function->set_next_function_link(next);
+ function->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
}
static Object* WeakNext(JSFunction* function) {
@@ -271,7 +262,7 @@
template <>
struct WeakListVisitor<Code> {
static void SetWeakNext(Code* code, Object* next) {
- code->set_next_code_link(next);
+ code->set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
}
static Object* WeakNext(Code* code) { return code->next_code_link(); }
@@ -287,7 +278,7 @@
template <>
struct WeakListVisitor<Context> {
static void SetWeakNext(Context* context, Object* next) {
- context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WRITE_BARRIER);
+ context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
}
static Object* WeakNext(Context* context) {
@@ -303,8 +294,21 @@
// Process the three weak lists linked off the context.
DoWeakList<JSFunction>(heap, context, retainer,
Context::OPTIMIZED_FUNCTIONS_LIST);
- DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
- DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
+
+ if (heap->gc_state() == Heap::MARK_COMPACT) {
+ // Record the slots of the weak entries in the native context.
+ MarkCompactCollector* collector = heap->mark_compact_collector();
+ for (int idx = Context::FIRST_WEAK_SLOT;
+ idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
+ Object** slot = Context::cast(context)->RawFieldOfElementAt(idx);
+ collector->RecordSlot(context, slot, *slot);
+ }
+  // Code objects are always allocated in
+  // Code space, so we do not have to visit
+  // them during scavenges.
+ DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
+ DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
+ }
}
template <class T>
@@ -320,8 +324,7 @@
// Record the updated slot if necessary.
Object** head_slot =
HeapObject::RawField(context, FixedArray::SizeFor(index));
- heap->mark_compact_collector()->RecordSlot(head_slot, head_slot,
- list_head);
+ heap->mark_compact_collector()->RecordSlot(context, head_slot, list_head);
}
}
@@ -335,53 +338,9 @@
template <>
-struct WeakListVisitor<JSArrayBufferView> {
- static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
- obj->set_weak_next(next);
- }
-
- static Object* WeakNext(JSArrayBufferView* obj) { return obj->weak_next(); }
-
- static int WeakNextOffset() { return JSArrayBufferView::kWeakNextOffset; }
-
- static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}
-
- static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
-};
-
-
-template <>
-struct WeakListVisitor<JSArrayBuffer> {
- static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
- obj->set_weak_next(next);
- }
-
- static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); }
-
- static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; }
-
- static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
- WeakObjectRetainer* retainer) {
- Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
- heap, array_buffer->weak_first_view(), retainer);
- array_buffer->set_weak_first_view(typed_array_obj);
- if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
- Object** slot = HeapObject::RawField(array_buffer,
- JSArrayBuffer::kWeakFirstViewOffset);
- heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
- }
- }
-
- static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
- Runtime::FreeArrayBuffer(heap->isolate(), phantom);
- }
-};
-
-
-template <>
struct WeakListVisitor<AllocationSite> {
static void SetWeakNext(AllocationSite* obj, Object* next) {
- obj->set_weak_next(next);
+ obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
}
static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }
@@ -394,23 +353,10 @@
};
-template Object* VisitWeakList<Code>(Heap* heap, Object* list,
- WeakObjectRetainer* retainer);
-
-
-template Object* VisitWeakList<JSFunction>(Heap* heap, Object* list,
- WeakObjectRetainer* retainer);
-
-
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
WeakObjectRetainer* retainer);
-
-template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
- WeakObjectRetainer* retainer);
-
-
template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
WeakObjectRetainer* retainer);
-}
-} // namespace v8::internal
+} // namespace internal
+} // namespace v8