Upgrade to V8 3.3
Merge V8 at 3.3.10.39
This was a simple merge; only the makefiles required updates.
Bug: 5688872
Change-Id: I14703f418235f5ce6013b9b3e2e502407a9f6dfd
diff --git a/src/heap.cc b/src/heap.cc
index 2b6c11f..f82c83c 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -33,8 +33,8 @@
#include "codegen.h"
#include "compilation-cache.h"
#include "debug.h"
-#include "heap-profiler.h"
#include "global-handles.h"
+#include "heap-profiler.h"
#include "liveobjectlist-inl.h"
#include "mark-compact.h"
#include "natives.h"
@@ -69,11 +69,11 @@
: isolate_(NULL),
// semispace_size_ should be a power of 2 and old_generation_size_ should be
// a multiple of Page::kPageSize.
-#if defined(ANDROID)
+#if 0//defined(ANDROID)
reserved_semispace_size_(2*MB),
max_semispace_size_(2*MB),
initial_semispace_size_(128*KB),
- max_old_generation_size_(192*MB),
+ max_old_generation_size_(512*MB),
max_executable_size_(max_old_generation_size_),
code_range_size_(0),
#elif defined(V8_TARGET_ARCH_X64)
@@ -96,6 +96,7 @@
// Will be 4 * reserved_semispace_size_ to ensure that young
// generation can be aligned to its size.
survived_since_last_expansion_(0),
+ sweep_generation_(0),
always_allocate_scope_depth_(0),
linear_allocation_scope_depth_(0),
contexts_disposed_(0),
@@ -736,7 +737,7 @@
if (collector == MARK_COMPACTOR) {
// Perform mark-sweep with optional compaction.
MarkCompact(tracer);
-
+ sweep_generation_++;
bool high_survival_rate_during_scavenges = IsHighSurvivalRate() &&
IsStableOrIncreasingSurvivalTrend();
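Note: sweep_generation_ (declared in the earlier hunk) is bumped once per mark-sweep, giving callers a cheap way to tell whether a full collection ran between two points. A minimal sketch with stand-in types, not V8's real Heap:

    class ToyHeap {
     public:
      // A full mark-sweep/compact cycle advances the generation counter.
      void MarkCompact() { ++sweep_generation_; }
      unsigned sweep_generation() const { return sweep_generation_; }
     private:
      unsigned sweep_generation_ = 0;
    };

    // True if at least one full GC completed since the snapshot was taken.
    bool FullGCHappenedSince(const ToyHeap& heap, unsigned snapshot) {
      return heap.sweep_generation() != snapshot;
    }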
@@ -771,11 +772,10 @@
isolate_->counters()->objs_since_last_young()->Set(0);
- if (collector == MARK_COMPACTOR) {
- DisableAssertNoAllocation allow_allocation;
+ { DisableAssertNoAllocation allow_allocation;
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
next_gc_likely_to_collect_more =
- isolate_->global_handles()->PostGarbageCollectionProcessing();
+ isolate_->global_handles()->PostGarbageCollectionProcessing(collector);
}
// Update relocatables.
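Note: the old code ran global-handle post-processing only for MARK_COMPACTOR; it now runs after scavenges too, with the collector passed down so the callee can stay cheap on the young-generation path. A hedged sketch of that shape (GarbageCollector and its values match V8; the two Process* helpers are hypothetical):

    enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };

    bool ProcessIndependentWeakHandles();  // hypothetical helper
    bool ProcessAllWeakHandles();          // hypothetical helper

    // Scavenges only touch handles that can die in new space; full GCs
    // process everything. Returns whether another GC may free more.
    bool PostGarbageCollectionProcessing(GarbageCollector collector) {
      return collector == SCAVENGER ? ProcessIndependentWeakHandles()
                                    : ProcessAllWeakHandles();
    }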
@@ -935,6 +935,12 @@
}
+static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
+ return heap->InNewSpace(*p) &&
+ !HeapObject::cast(*p)->map_word().IsForwardingAddress();
+}
+
+
void Heap::Scavenge() {
#ifdef DEBUG
if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
@@ -1029,6 +1035,11 @@
scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+ isolate_->global_handles()->IdentifyWeakIndependentHandles(
+ &IsUnscavengedHeapObject);
+ isolate_->global_handles()->IterateWeakIndependentRoots(&scavenge_visitor);
+ new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+
UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
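Note: this is the heart of weak independent handles. After the strong-root pass, a new-space object whose map word is still a plain map pointer was never copied, so an independent handle pointing at it may have its weak callback scheduled; the surviving independent roots are then copied by the second DoScavenge pass. A self-contained sketch of the test with stand-in types (V8 tags forwarding addresses differently; the toy tag below is illustrative):

    #include <cstdint>

    struct MapWord {
      uintptr_t value;
      // Toy encoding: low bit set means "forwarding address installed".
      bool IsForwardingAddress() const { return (value & 1) != 0; }
    };

    struct ToyObject {
      MapWord map_word;
    };

    // Unscavenged: lives in new space and was never reached by a copy pass.
    bool IsUnscavenged(bool in_new_space, const ToyObject* obj) {
      return in_new_space && !obj->map_word.IsForwardingAddress();
    }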
@@ -1282,6 +1293,10 @@
&ObjectEvacuationStrategy<POINTER_OBJECT>::
template VisitSpecialized<SharedFunctionInfo::kSize>);
+ table_.Register(kVisitJSRegExp,
+ &ObjectEvacuationStrategy<POINTER_OBJECT>::
+ Visit);
+
table_.Register(kVisitJSFunction,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
template VisitSpecialized<JSFunction::kSize>);
@@ -1348,7 +1363,7 @@
#if defined(ENABLE_LOGGING_AND_PROFILING)
Isolate* isolate = heap->isolate();
if (isolate->logger()->is_logging() ||
- isolate->cpu_profiler()->is_profiling()) {
+ CpuProfiler::is_profiling(isolate)) {
if (target->IsSharedFunctionInfo()) {
PROFILE(isolate, SharedFunctionInfoMoveEvent(
source->address(), target->address()));
@@ -1523,8 +1538,8 @@
return;
}
- if (isolate()->logger()->is_logging() ||
- isolate()->cpu_profiler()->is_profiling() ||
+ if (isolate()->logger()->is_logging() ||
+ CpuProfiler::is_profiling(isolate()) ||
(isolate()->heap_profiler() != NULL &&
isolate()->heap_profiler()->is_profiling())) {
// If one of the isolates is doing scavenge at this moment of time
@@ -1593,7 +1608,7 @@
map->set_instance_size(instance_size);
map->set_inobject_properties(0);
map->set_pre_allocated_property_fields(0);
- map->set_instance_descriptors(empty_descriptor_array());
+ map->init_instance_descriptors();
map->set_code_cache(empty_fixed_array());
map->set_prototype_transitions(empty_fixed_array());
map->set_unused_property_fields(0);
@@ -1686,15 +1701,15 @@
set_empty_descriptor_array(DescriptorArray::cast(obj));
// Fix the instance_descriptors for the existing maps.
- meta_map()->set_instance_descriptors(empty_descriptor_array());
+ meta_map()->init_instance_descriptors();
meta_map()->set_code_cache(empty_fixed_array());
meta_map()->set_prototype_transitions(empty_fixed_array());
- fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
+ fixed_array_map()->init_instance_descriptors();
fixed_array_map()->set_code_cache(empty_fixed_array());
fixed_array_map()->set_prototype_transitions(empty_fixed_array());
- oddball_map()->set_instance_descriptors(empty_descriptor_array());
+ oddball_map()->init_instance_descriptors();
oddball_map()->set_code_cache(empty_fixed_array());
oddball_map()->set_prototype_transitions(empty_fixed_array());
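Note: upstream replaced direct stores of the canonical empty descriptor array with init_instance_descriptors(); plausibly the slot now starts from a cheap sentinel, which also lets bootstrap code initialize maps before the empty array exists. Illustrative sketch only, not V8's actual map layout:

    #include <cassert>

    struct DescriptorArray;

    class ToyMap {
     public:
      // Bootstrap-safe: no shared empty array needed, just a sentinel.
      void init_instance_descriptors() { descriptors_ = nullptr; }
      void set_instance_descriptors(DescriptorArray* d) {
        assert(d != nullptr);  // real descriptors only
        descriptors_ = d;
      }
      bool has_descriptors() const { return descriptors_ != nullptr; }
     private:
      DescriptorArray* descriptors_ = nullptr;
    };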
@@ -1720,10 +1735,10 @@
}
set_heap_number_map(Map::cast(obj));
- { MaybeObject* maybe_obj = AllocateMap(PROXY_TYPE, Proxy::kSize);
+ { MaybeObject* maybe_obj = AllocateMap(FOREIGN_TYPE, Foreign::kSize);
if (!maybe_obj->ToObject(&obj)) return false;
}
- set_proxy_map(Map::cast(obj));
+ set_foreign_map(Map::cast(obj));
for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
const StringTypeTable& entry = string_type_table[i];
@@ -1805,6 +1820,12 @@
}
set_external_float_array_map(Map::cast(obj));
+ { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_DOUBLE_ARRAY_TYPE,
+ ExternalArray::kAlignedSize);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_external_double_array_map(Map::cast(obj));
+
{ MaybeObject* maybe_obj = AllocateMap(CODE_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
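Note: the new EXTERNAL_DOUBLE_ARRAY map allocation follows the MaybeObject idiom used throughout this file: every allocation returns either an object or a retry-after-GC failure, and callers must unwrap before use. Self-contained sketch with stand-in types (V8 encodes failures as tagged values; nullptr stands in here):

    struct Object;

    struct MaybeObject {
      Object* obj;  // non-null on success; nullptr models a Failure
      bool ToObject(Object** out) {
        if (obj == nullptr) return false;  // propagate the failure
        *out = obj;
        return true;
      }
    };

    MaybeObject AllocateSomething() { return MaybeObject{nullptr}; }  // toy

    bool Example(Object** result) {
      MaybeObject maybe = AllocateSomething();
      if (!maybe.ToObject(result)) return false;  // caller retries after GC
      return true;
    }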
@@ -2102,12 +2123,12 @@
}
hidden_symbol_ = String::cast(obj);
- // Allocate the proxy for __proto__.
+ // Allocate the foreign for __proto__.
{ MaybeObject* maybe_obj =
- AllocateProxy((Address) &Accessors::ObjectPrototype);
+ AllocateForeign((Address) &Accessors::ObjectPrototype);
if (!maybe_obj->ToObject(&obj)) return false;
}
- set_prototype_accessors(Proxy::cast(obj));
+ set_prototype_accessors(Foreign::cast(obj));
// Allocate the code_stubs dictionary. The initial size is set to avoid
// expanding the dictionary during bootstrapping.
@@ -2293,6 +2314,8 @@
return kExternalUnsignedIntArrayMapRootIndex;
case kExternalFloatArray:
return kExternalFloatArrayMapRootIndex;
+ case kExternalDoubleArray:
+ return kExternalDoubleArrayMapRootIndex;
case kExternalPixelArray:
return kExternalPixelArrayMapRootIndex;
default:
@@ -2323,16 +2346,16 @@
}
-MaybeObject* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) {
- // Statically ensure that it is safe to allocate proxies in paged spaces.
- STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize);
+MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) {
+ // Statically ensure that it is safe to allocate foreigns in paged spaces.
+ STATIC_ASSERT(Foreign::kSize <= Page::kMaxHeapObjectSize);
AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
Object* result;
- { MaybeObject* maybe_result = Allocate(proxy_map(), space);
+ { MaybeObject* maybe_result = Allocate(foreign_map(), space);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- Proxy::cast(result)->set_proxy(proxy);
+ Foreign::cast(result)->set_address(address);
return result;
}
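Note: the Proxy -> Foreign rename frees the Proxy name for the harmony JSProxy type added further down; a Foreign just boxes a raw address (for example a C++ accessor entry point) as a heap object. Usage fragment mirroring the __proto__ hunk above (identifiers from the diff; surrounding plumbing assumed):

    Object* obj;
    { MaybeObject* maybe_obj =
          heap->AllocateForeign((Address) &Accessors::ObjectPrototype);
      if (!maybe_obj->ToObject(&obj)) return false;
    }
    Foreign* accessor = Foreign::cast(obj);  // wraps the raw address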
@@ -2370,6 +2393,7 @@
share->set_num_literals(0);
share->set_end_position(0);
share->set_function_token_position(0);
+ share->set_es5_native(false);
return result;
}
@@ -2792,6 +2816,7 @@
code->set_check_type(RECEIVER_MAP_CHECK);
}
code->set_deoptimization_data(empty_fixed_array());
+ code->set_next_code_flushing_candidate(undefined_value());
// Allow self references to created code object by patching the handle to
// point to the newly allocated Code object.
if (!self_reference.is_null()) {
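Note: next_code_flushing_candidate threads Code objects into an intrusive singly-linked list terminated by undefined, so the collector can gather flushing candidates without side allocations. Self-contained sketch with stand-in types (nullptr plays undefined's role):

    struct Code {
      Code* next_code_flushing_candidate = nullptr;  // nullptr == undefined
    };

    struct FlushingCandidateList {
      Code* head = nullptr;
      // O(1) push; the link lives inside the candidate itself.
      void Add(Code* code) {
        code->next_code_flushing_candidate = head;
        head = code;
      }
    };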
@@ -3204,6 +3229,26 @@
}
+MaybeObject* Heap::AllocateJSProxy(Object* handler, Object* prototype) {
+ // Allocate map.
+ // TODO(rossberg): Once we optimize proxies, think about a scheme to share
+ // maps. Will probably depend on the identity of the handler object, too.
+ Map* map;
+ MaybeObject* maybe_map_obj = AllocateMap(JS_PROXY_TYPE, JSProxy::kSize);
+ if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
+ map->set_prototype(prototype);
+ map->set_pre_allocated_property_fields(1);
+ map->set_inobject_properties(1);
+
+ // Allocate the proxy object.
+ Object* result;
+ MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ JSProxy::cast(result)->set_handler(handler);
+ return result;
+}
+
+
MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
ASSERT(constructor->has_initial_map());
Map* map = constructor->initial_map();
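Note: AllocateJSProxy is the heap half of harmony proxies: one fresh map per proxy for now (sharing is an explicit TODO above), the prototype recorded on the map, and the handler in the object's single slot. Stand-in sketch of that shape, not V8's real layout:

    struct ProxyMap {
      const void* prototype = nullptr;  // identity data lives on the map
    };

    struct ToyProxy {
      ProxyMap* map;
      const void* handler;  // behavior lives in the object's one slot
    };

    ToyProxy MakeToyProxy(ProxyMap* map, const void* handler,
                          const void* prototype) {
      map->prototype = prototype;
      return ToyProxy{map, handler};
    }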
@@ -3267,7 +3312,7 @@
// Setup the global object as a normalized object.
global->set_map(new_map);
- global->map()->set_instance_descriptors(empty_descriptor_array());
+ global->map()->clear_instance_descriptors();
global->set_properties(dictionary);
// Make sure result is a global object with properties in dictionary.
@@ -4139,6 +4184,26 @@
}
+MaybeObject* Heap::LookupAsciiSymbol(Handle<SeqAsciiString> string,
+ int from,
+ int length) {
+ Object* symbol = NULL;
+ Object* new_table;
+ { MaybeObject* maybe_new_table =
+ symbol_table()->LookupSubStringAsciiSymbol(string,
+ from,
+ length,
+ &symbol);
+ if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
+ }
+ // Can't use set_symbol_table because SymbolTable::cast knows that
+ // SymbolTable is a singleton and checks for identity.
+ roots_[kSymbolTableRootIndex] = new_table;
+ ASSERT(symbol != NULL);
+ return symbol;
+}
+
+
MaybeObject* Heap::LookupTwoByteSymbol(Vector<const uc16> string) {
Object* symbol = NULL;
Object* new_table;
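Note: the new overload interns a slice of an existing sequential ASCII string by hashing (string, from, length) directly, so no substring is materialized just to probe the table. Hedged sketch with a standard container (std::unordered_set cannot hash a slice in place, so this toy copies up front; the shape of the API is the point):

    #include <string>
    #include <unordered_set>

    // Returns the interned copy of s[from, from + len).
    const std::string* InternSlice(std::unordered_set<std::string>& table,
                                   const std::string& s,
                                   std::size_t from, std::size_t len) {
      auto it = table.insert(s.substr(from, len)).first;
      return &*it;
    }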
@@ -4461,7 +4526,8 @@
void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
v->Synchronize("symbol_table");
- if (mode != VISIT_ALL_IN_SCAVENGE) {
+ if (mode != VISIT_ALL_IN_SCAVENGE &&
+ mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
// Scavenge collections have special processing for this.
external_string_table_.Iterate(v);
}
@@ -4497,16 +4563,24 @@
// Iterate over the builtin code objects and code stubs in the
// heap. Note that it is not necessary to iterate over code objects
// on scavenge collections.
- if (mode != VISIT_ALL_IN_SCAVENGE) {
+ if (mode != VISIT_ALL_IN_SCAVENGE &&
+ mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
isolate_->builtins()->IterateBuiltins(v);
}
v->Synchronize("builtins");
// Iterate over global handles.
- if (mode == VISIT_ONLY_STRONG) {
- isolate_->global_handles()->IterateStrongRoots(v);
- } else {
- isolate_->global_handles()->IterateAllRoots(v);
+ switch (mode) {
+ case VISIT_ONLY_STRONG:
+ isolate_->global_handles()->IterateStrongRoots(v);
+ break;
+ case VISIT_ALL_IN_SCAVENGE:
+ isolate_->global_handles()->IterateStrongAndDependentRoots(v);
+ break;
+ case VISIT_ALL_IN_SWEEP_NEWSPACE:
+ case VISIT_ALL:
+ isolate_->global_handles()->IterateAllRoots(v);
+ break;
}
v->Synchronize("globalhandles");