Merge "Assume all x86/x86_64 hosts support at least sse4.x/popcount."
diff --git a/build/art.go b/build/art.go
index 6c9aa89..19b39cd 100644
--- a/build/art.go
+++ b/build/art.go
@@ -153,6 +153,11 @@
cflags = append(cflags, "-DART_BASE_ADDRESS_MIN_DELTA="+minDelta)
cflags = append(cflags, "-DART_BASE_ADDRESS_MAX_DELTA="+maxDelta)
+ if len(ctx.AConfig().SanitizeHost()) > 0 && !envFalse(ctx, "ART_ENABLE_ADDRESS_SANITIZER") {
+ // We enable full sanitization on the host by default.
+ cflags = append(cflags, "-DART_ENABLE_ADDRESS_SANITIZER=1")
+ }
+
return cflags
}
diff --git a/compiler/dex/inline_method_analyser.cc b/compiler/dex/inline_method_analyser.cc
index 2572291..e5ff7fc 100644
--- a/compiler/dex/inline_method_analyser.cc
+++ b/compiler/dex/inline_method_analyser.cc
@@ -145,9 +145,8 @@
DCHECK_EQ(invoke_direct->VRegC_35c(),
method->GetCodeItem()->registers_size_ - method->GetCodeItem()->ins_size_);
uint32_t method_index = invoke_direct->VRegB_35c();
- PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
- ArtMethod* target_method =
- method->GetDexCache()->GetResolvedMethod(method_index, pointer_size);
+ ArtMethod* target_method = Runtime::Current()->GetClassLinker()->LookupResolvedMethod(
+ method_index, method->GetDexCache(), method->GetClassLoader());
if (kIsDebugBuild && target_method != nullptr) {
CHECK(!target_method->IsStatic());
CHECK(target_method->IsConstructor());
diff --git a/compiler/driver/compiler_driver_test.cc b/compiler/driver/compiler_driver_test.cc
index b4ad325..5d1d972 100644
--- a/compiler/driver/compiler_driver_test.cc
+++ b/compiler/driver/compiler_driver_test.cc
@@ -118,10 +118,12 @@
EXPECT_TRUE(type != nullptr) << "type_idx=" << i
<< " " << dex.GetTypeDescriptor(dex.GetTypeId(dex::TypeIndex(i)));
}
- EXPECT_EQ(dex.NumMethodIds(), dex_cache->NumResolvedMethods());
+ EXPECT_TRUE(dex_cache->StaticMethodSize() == dex_cache->NumResolvedMethods()
+ || dex.NumMethodIds() == dex_cache->NumResolvedMethods());
auto* cl = Runtime::Current()->GetClassLinker();
auto pointer_size = cl->GetImagePointerSize();
for (size_t i = 0; i < dex_cache->NumResolvedMethods(); i++) {
+ // FIXME: This is outdated for hash-based method array.
ArtMethod* method = dex_cache->GetResolvedMethod(i, pointer_size);
EXPECT_TRUE(method != nullptr) << "method_idx=" << i
<< " " << dex.GetMethodDeclaringClassDescriptor(dex.GetMethodId(i))
@@ -133,6 +135,7 @@
EXPECT_TRUE(dex_cache->StaticArtFieldSize() == dex_cache->NumResolvedFields()
|| dex.NumFieldIds() == dex_cache->NumResolvedFields());
for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
+ // FIXME: This is outdated for hash-based field array.
ArtField* field = dex_cache->GetResolvedField(i, cl->GetImagePointerSize());
EXPECT_TRUE(field != nullptr) << "field_idx=" << i
<< " " << dex.GetFieldDeclaringClassDescriptor(dex.GetFieldId(i))
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index f92bf95..51730cf 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -1023,41 +1023,58 @@
Runtime* runtime = Runtime::Current();
ClassLinker* class_linker = runtime->GetClassLinker();
- ArtMethod* resolution_method = runtime->GetResolutionMethod();
const DexFile& dex_file = *dex_cache->GetDexFile();
// Prune methods.
- ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
- for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
- ArtMethod* method =
- mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
- DCHECK(method != nullptr) << "Expected resolution method instead of null method";
+ mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
+ dex::TypeIndex last_class_idx; // Initialized to invalid index.
+ ObjPtr<mirror::Class> last_class = nullptr;
+ for (size_t i = 0, num = dex_cache->GetDexFile()->NumMethodIds(); i != num; ++i) {
+ uint32_t slot_idx = dex_cache->MethodSlotIndex(i);
+ auto pair =
+ mirror::DexCache::GetNativePairPtrSize(resolved_methods, slot_idx, target_ptr_size_);
+ uint32_t stored_index = pair.index;
+ ArtMethod* method = pair.object;
+ if (method != nullptr && i > stored_index) {
+ continue; // Already checked.
+ }
// Check if the referenced class is in the image. Note that we want to check the referenced
// class rather than the declaring class to preserve the semantics, i.e. using a MethodId
// results in resolving the referenced class and that can for example throw OOME.
- ObjPtr<mirror::Class> referencing_class = class_linker->LookupResolvedType(
- dex_file,
- dex_file.GetMethodId(i).class_idx_,
- dex_cache,
- class_loader);
- // Copied methods may be held live by a class which was not an image class but have a
- // declaring class which is an image class. Set it to the resolution method to be safe and
- // prevent dangling pointers.
- if (method->IsCopied() || !KeepClass(referencing_class)) {
- mirror::DexCache::SetElementPtrSize(resolved_methods,
- i,
- resolution_method,
- target_ptr_size_);
- } else if (kIsDebugBuild) {
- // Check that the class is still in the classes table.
- ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
- CHECK(class_linker->ClassInClassTable(referencing_class)) << "Class "
- << Class::PrettyClass(referencing_class) << " not in class linker table";
+ const DexFile::MethodId& method_id = dex_file.GetMethodId(i);
+ if (method_id.class_idx_ != last_class_idx) {
+ last_class_idx = method_id.class_idx_;
+ last_class = class_linker->LookupResolvedType(
+ dex_file, last_class_idx, dex_cache, class_loader);
+ if (last_class != nullptr && !KeepClass(last_class)) {
+ last_class = nullptr;
+ }
+ }
+ if (method == nullptr || i < stored_index) {
+ if (last_class != nullptr) {
+ const char* name = dex_file.StringDataByIdx(method_id.name_idx_);
+ Signature signature = dex_file.GetMethodSignature(method_id);
+ if (last_class->IsInterface()) {
+ method = last_class->FindInterfaceMethod(name, signature, target_ptr_size_);
+ } else {
+ method = last_class->FindClassMethod(name, signature, target_ptr_size_);
+ }
+ if (method != nullptr) {
+ // If the referenced class is in the image, the defining class must also be there.
+ DCHECK(KeepClass(method->GetDeclaringClass()));
+ dex_cache->SetResolvedMethod(i, method, target_ptr_size_);
+ }
+ }
+ } else {
+ DCHECK_EQ(i, stored_index);
+ if (last_class == nullptr) {
+ dex_cache->ClearResolvedMethod(stored_index, target_ptr_size_);
+ }
}
}
// Prune fields and make the contents of the field array deterministic.
mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
- dex::TypeIndex last_class_idx; // Initialized to invalid index.
- ObjPtr<mirror::Class> last_class = nullptr;
+ last_class_idx = dex::TypeIndex(); // Initialized to invalid index.
+ last_class = nullptr;
for (size_t i = 0, end = dex_file.NumFieldIds(); i < end; ++i) {
uint32_t slot_idx = dex_cache->FieldSlotIndex(i);
auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
@@ -2401,17 +2418,19 @@
orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
fixup_visitor);
}
- ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* orig_methods = orig_dex_cache->GetResolvedMethods();
if (orig_methods != nullptr) {
copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodsOffset(),
NativeLocationInImage(orig_methods),
PointerSize::k64);
- ArtMethod** copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
+ mirror::MethodDexCacheType* copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
- ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_);
+ mirror::MethodDexCachePair orig_pair =
+ mirror::DexCache::GetNativePairPtrSize(orig_methods, i, target_ptr_size_);
// NativeLocationInImage also handles runtime methods since these have relocation info.
- ArtMethod* copy = NativeLocationInImage(orig);
- mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
+ mirror::MethodDexCachePair copy_pair(NativeLocationInImage(orig_pair.object),
+ orig_pair.index);
+ mirror::DexCache::SetNativePairPtrSize(copy_methods, i, copy_pair, target_ptr_size_);
}
}
mirror::FieldDexCacheType* orig_fields = orig_dex_cache->GetResolvedFields();
@@ -2552,7 +2571,8 @@
CopyReference(copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());
- ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
+ mirror::MethodDexCacheType* orig_resolved_methods =
+ orig->GetDexCacheResolvedMethods(target_ptr_size_);
copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);
// OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc
index f8bb417..4d258af 100644
--- a/compiler/oat_writer.cc
+++ b/compiler/oat_writer.cc
@@ -1116,6 +1116,7 @@
const std::vector<const DexFile*>* dex_files)
: OatDexMethodVisitor(writer, offset),
pointer_size_(GetInstructionSetPointerSize(writer_->compiler_driver_->GetInstructionSet())),
+ class_loader_(writer->HasImage() ? writer->image_writer_->GetClassLoader() : nullptr),
dex_files_(dex_files),
class_linker_(Runtime::Current()->GetClassLinker()) {}
@@ -1131,10 +1132,7 @@
if (!IsImageClass()) {
return true;
}
- ScopedObjectAccessUnchecked soa(Thread::Current());
- StackHandleScope<1> hs(soa.Self());
- Handle<mirror::DexCache> dex_cache = hs.NewHandle(
- class_linker_->FindDexCache(Thread::Current(), *dex_file));
+ ObjPtr<mirror::DexCache> dex_cache = class_linker_->FindDexCache(Thread::Current(), *dex_file);
const DexFile::ClassDef& class_def = dex_file->GetClassDef(class_def_index);
mirror::Class* klass = dex_cache->GetResolvedType(class_def.class_idx_);
if (klass != nullptr) {
@@ -1182,36 +1180,36 @@
++method_offsets_index_;
}
- // Unchecked as we hold mutator_lock_ on entry.
- ScopedObjectAccessUnchecked soa(Thread::Current());
- StackHandleScope<1> hs(soa.Self());
- Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker_->FindDexCache(
- Thread::Current(), *dex_file_)));
+ Thread* self = Thread::Current();
+ ObjPtr<mirror::DexCache> dex_cache = class_linker_->FindDexCache(self, *dex_file_);
ArtMethod* method;
if (writer_->HasBootImage()) {
const InvokeType invoke_type = it.GetMethodInvokeType(
dex_file_->GetClassDef(class_def_index_));
+ // Unchecked as we hold mutator_lock_ on entry.
+ ScopedObjectAccessUnchecked soa(self);
+ StackHandleScope<1> hs(self);
method = class_linker_->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
*dex_file_,
it.GetMemberIndex(),
- dex_cache,
+ hs.NewHandle(dex_cache),
ScopedNullHandle<mirror::ClassLoader>(),
nullptr,
invoke_type);
if (method == nullptr) {
LOG(FATAL_WITHOUT_ABORT) << "Unexpected failure to resolve a method: "
<< dex_file_->PrettyMethod(it.GetMemberIndex(), true);
- soa.Self()->AssertPendingException();
- mirror::Throwable* exc = soa.Self()->GetException();
+ self->AssertPendingException();
+ mirror::Throwable* exc = self->GetException();
std::string dump = exc->Dump();
LOG(FATAL) << dump;
UNREACHABLE();
}
} else {
- // Should already have been resolved by the compiler, just peek into the dex cache.
+ // Should already have been resolved by the compiler.
// It may not be resolved if the class failed to verify, in this case, don't set the
- // entrypoint. This is not fatal since the dex cache will contain a resolution method.
- method = dex_cache->GetResolvedMethod(it.GetMemberIndex(), pointer_size_);
+ // entrypoint. This is not fatal since we shall use a resolution method.
+ method = class_linker_->LookupResolvedMethod(it.GetMemberIndex(), dex_cache, class_loader_);
}
if (method != nullptr &&
compiled_method != nullptr &&
@@ -1252,6 +1250,7 @@
private:
const PointerSize pointer_size_;
+ ObjPtr<mirror::ClassLoader> class_loader_;
const std::vector<const DexFile*>* dex_files_;
ClassLinker* const class_linker_;
std::vector<std::pair<ArtMethod*, ArtMethod*>> methods_to_process_;
@@ -1471,7 +1470,8 @@
ObjPtr<mirror::DexCache> dex_cache =
(dex_file_ == ref.dex_file) ? dex_cache_ : class_linker_->FindDexCache(
Thread::Current(), *ref.dex_file);
- ArtMethod* method = dex_cache->GetResolvedMethod(ref.dex_method_index, pointer_size_);
+ ArtMethod* method =
+ class_linker_->LookupResolvedMethod(ref.dex_method_index, dex_cache, class_loader_);
CHECK(method != nullptr);
return method;
}
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index ecbf52b..f172e16 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -525,7 +525,7 @@
// Use a null loader. We should probably use the compiling method's class loader,
// but then we would need to pass it to RTPVisitor just for this debug check. Since
// the method is from the String class, the null loader is good enough.
- Handle<mirror::ClassLoader> loader;
+ Handle<mirror::ClassLoader> loader(hs.NewHandle<mirror::ClassLoader>(nullptr));
ArtMethod* method = cl->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
dex_file, invoke->GetDexMethodIndex(), dex_cache, loader, nullptr, kDirect);
DCHECK(method != nullptr);
diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc
index ea74f29..dadea76 100644
--- a/dex2oat/dex2oat.cc
+++ b/dex2oat/dex2oat.cc
@@ -1522,15 +1522,13 @@
// Verification results are null since we don't know if we will need them yet as the compler
// filter may change.
- // This needs to be done before PrepareRuntimeOptions since the callbacks are passed to the
- // runtime.
callbacks_.reset(new QuickCompilerCallbacks(
IsBootImage() ?
CompilerCallbacks::CallbackMode::kCompileBootImage :
CompilerCallbacks::CallbackMode::kCompileApp));
RuntimeArgumentMap runtime_options;
- if (!PrepareRuntimeOptions(&runtime_options)) {
+ if (!PrepareRuntimeOptions(&runtime_options, callbacks_.get())) {
return dex2oat::ReturnCode::kOther;
}
@@ -2464,7 +2462,8 @@
}
}
- bool PrepareRuntimeOptions(RuntimeArgumentMap* runtime_options) {
+ bool PrepareRuntimeOptions(RuntimeArgumentMap* runtime_options,
+ QuickCompilerCallbacks* callbacks) {
RuntimeOptions raw_options;
if (boot_image_filename_.empty()) {
std::string boot_class_path = "-Xbootclasspath:";
@@ -2482,7 +2481,7 @@
raw_options.push_back(std::make_pair(runtime_args_[i], nullptr));
}
- raw_options.push_back(std::make_pair("compilercallbacks", callbacks_.get()));
+ raw_options.push_back(std::make_pair("compilercallbacks", callbacks));
raw_options.push_back(
std::make_pair("imageinstructionset", GetInstructionSetString(instruction_set_)));
@@ -2554,7 +2553,6 @@
runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
}
}
- runtime_->GetClassLinker()->FixupDexCaches(runtime_->GetResolutionMethod());
// Initialize maps for unstarted runtime. This needs to be here, as running clinits needs this
// set up.
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 0a95d49..99168c9 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -2233,16 +2233,15 @@
if (num_methods != 0u) {
os << "Methods (size=" << num_methods << "):\n";
ScopedIndentation indent2(&vios_);
- auto* resolved_methods = dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
for (size_t i = 0, length = dex_cache->NumResolvedMethods(); i < length; ++i) {
- auto* elem = mirror::DexCache::GetElementPtrSize(resolved_methods,
- i,
- image_pointer_size);
+ ArtMethod* elem = mirror::DexCache::GetNativePairPtrSize(
+ resolved_methods, i, image_pointer_size).object;
size_t run = 0;
for (size_t j = i + 1;
- j != length && elem == mirror::DexCache::GetElementPtrSize(resolved_methods,
- j,
- image_pointer_size);
+ j != length &&
+ elem == mirror::DexCache::GetNativePairPtrSize(
+ resolved_methods, j, image_pointer_size).object;
++j) {
++run;
}
@@ -2270,7 +2269,7 @@
ScopedIndentation indent2(&vios_);
auto* resolved_fields = dex_cache->GetResolvedFields();
for (size_t i = 0, length = dex_cache->NumResolvedFields(); i < length; ++i) {
- auto* elem = mirror::DexCache::GetNativePairPtrSize(
+ ArtField* elem = mirror::DexCache::GetNativePairPtrSize(
resolved_fields, i, image_pointer_size).object;
size_t run = 0;
for (size_t j = i + 1;
diff --git a/patchoat/patchoat.cc b/patchoat/patchoat.cc
index a93969f..1ee2fbd 100644
--- a/patchoat/patchoat.cc
+++ b/patchoat/patchoat.cc
@@ -535,17 +535,18 @@
orig_dex_cache->FixupResolvedTypes(RelocatedCopyOf(orig_types),
RelocatedPointerVisitor(this));
}
- ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
- ArtMethod** relocated_methods = RelocatedAddressOfPointer(orig_methods);
+ mirror::MethodDexCacheType* orig_methods = orig_dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* relocated_methods = RelocatedAddressOfPointer(orig_methods);
copy_dex_cache->SetField64<false>(
mirror::DexCache::ResolvedMethodsOffset(),
static_cast<int64_t>(reinterpret_cast<uintptr_t>(relocated_methods)));
if (orig_methods != nullptr) {
- ArtMethod** copy_methods = RelocatedCopyOf(orig_methods);
+ mirror::MethodDexCacheType* copy_methods = RelocatedCopyOf(orig_methods);
for (size_t j = 0, num = orig_dex_cache->NumResolvedMethods(); j != num; ++j) {
- ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, j, pointer_size);
- ArtMethod* copy = RelocatedAddressOfPointer(orig);
- mirror::DexCache::SetElementPtrSize(copy_methods, j, copy, pointer_size);
+ mirror::MethodDexCachePair orig =
+ mirror::DexCache::GetNativePairPtrSize(orig_methods, j, pointer_size);
+ mirror::MethodDexCachePair copy(RelocatedAddressOfPointer(orig.object), orig.index);
+ mirror::DexCache::SetNativePairPtrSize(copy_methods, j, copy, pointer_size);
}
}
mirror::FieldDexCacheType* orig_fields = orig_dex_cache->GetResolvedFields();
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 0de5905..375768e 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -1585,31 +1585,98 @@
*
* Note that this stub writes to r0, r4, and r12.
*/
+ .extern artLookupResolvedMethod
ENTRY art_quick_imt_conflict_trampoline
- ldr r4, [sp, #0] // Load referrer
- ldr r4, [r4, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_32] // Load dex cache methods array
- ldr r12, [r4, r12, lsl #POINTER_SIZE_SHIFT] // Load interface method
- ldr r0, [r0, #ART_METHOD_JNI_OFFSET_32] // Load ImtConflictTable
- ldr r4, [r0] // Load first entry in ImtConflictTable.
+ push {r1-r2}
+ .cfi_adjust_cfa_offset (2 * 4)
+ .cfi_rel_offset r1, 0
+ .cfi_rel_offset r2, 4
+ ldr r4, [sp, #(2 * 4)] // Load referrer.
+ ubfx r1, r12, #0, #METHOD_DEX_CACHE_HASH_BITS // Calculate DexCache method slot index.
+ ldr r4, [r4, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_32] // Load dex cache methods array
+ add r4, r4, r1, lsl #(POINTER_SIZE_SHIFT + 1) // Load DexCache method slot address.
+ ldr r2, [r0, #ART_METHOD_JNI_OFFSET_32] // Load ImtConflictTable
+
+// FIXME: Configure the build to use the faster code when appropriate.
+// Currently we fall back to the slower version.
+#if HAS_ATOMIC_LDRD
+ ldrd r0, r1, [r4]
+#else
+ push {r3}
+ .cfi_adjust_cfa_offset 4
+ .cfi_rel_offset r3, 0
+.Limt_conflict_trampoline_retry_load:
+ ldrexd r0, r1, [r4]
+ strexd r3, r0, r1, [r4]
+ cmp r3, #0
+ bne .Limt_conflict_trampoline_retry_load
+ pop {r3}
+ .cfi_adjust_cfa_offset -4
+ .cfi_restore r3
+#endif
+
+ ldr r4, [r2] // Load first entry in ImtConflictTable.
+ cmp r1, r12 // Compare method index to see if we had a DexCache method hit.
+ bne .Limt_conflict_trampoline_dex_cache_miss
.Limt_table_iterate:
- cmp r4, r12
+ cmp r4, r0
// Branch if found. Benchmarks have shown doing a branch here is better.
- beq .Limt_table_found
+ beq .Limt_table_found
// If the entry is null, the interface method is not in the ImtConflictTable.
- cbz r4, .Lconflict_trampoline
+ cbz r4, .Lconflict_trampoline
// Iterate over the entries of the ImtConflictTable.
- ldr r4, [r0, #(2 * __SIZEOF_POINTER__)]!
+ ldr r4, [r2, #(2 * __SIZEOF_POINTER__)]!
b .Limt_table_iterate
.Limt_table_found:
// We successfully hit an entry in the table. Load the target method
// and jump to it.
- ldr r0, [r0, #__SIZEOF_POINTER__]
- ldr pc, [r0, #ART_METHOD_QUICK_CODE_OFFSET_32]
+ ldr r0, [r2, #__SIZEOF_POINTER__]
+ .cfi_remember_state
+ pop {r1-r2}
+ .cfi_adjust_cfa_offset -(2 * 4)
+ .cfi_restore r1
+ .cfi_restore r2
+ ldr pc, [r0, #ART_METHOD_QUICK_CODE_OFFSET_32]
+ .cfi_restore_state
.Lconflict_trampoline:
// Call the runtime stub to populate the ImtConflictTable and jump to the
// resolved method.
- mov r0, r12 // Load interface method
+ .cfi_remember_state
+ pop {r1-r2}
+ .cfi_adjust_cfa_offset -(2 * 4)
+ .cfi_restore r1
+ .cfi_restore r2
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
+ .cfi_restore_state
+.Limt_conflict_trampoline_dex_cache_miss:
+ // We're not creating a proper runtime method frame here,
+ // artLookupResolvedMethod() is not allowed to walk the stack.
+
+ // Save ImtConflictTable (r2), remaining arg (r3), first entry (r4), return address (lr).
+ push {r2-r4, lr}
+ .cfi_adjust_cfa_offset (4 * 4)
+ .cfi_rel_offset r3, 4
+ .cfi_rel_offset lr, 12
+ // Save FPR args.
+ vpush {d0-d7}
+ .cfi_adjust_cfa_offset (8 * 8)
+
+ mov r0, ip // Pass method index.
+ ldr r1, [sp, #(8 * 8 + 6 * 4)] // Pass referrer.
+ bl artLookupResolvedMethod // (uint32_t method_index, ArtMethod* referrer)
+
+ // Restore FPR args.
+ vpop {d0-d7}
+ .cfi_adjust_cfa_offset -(8 * 8)
+ // Restore ImtConflictTable (r2), remaining arg (r3), first entry (r4), return address (lr).
+ pop {r2-r4, lr}
+ .cfi_adjust_cfa_offset -(4 * 4)
+ .cfi_restore r3
+ .cfi_restore lr
+
+ cmp r0, #0 // If the method wasn't resolved,
+ beq .Lconflict_trampoline // skip the lookup and go to artInvokeInterfaceTrampoline().
+ b .Limt_table_iterate
END art_quick_imt_conflict_trampoline
.extern artQuickResolutionTrampoline
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index e097a33..d15f5b8 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -2052,17 +2052,28 @@
* x0 is the conflict ArtMethod.
* xIP1 is a hidden argument that holds the target interface method's dex method index.
*
- * Note that this stub writes to xIP0, xIP1, and x0.
+ * Note that this stub writes to xIP0, xIP1, x13-x15, and x0.
*/
- .extern artInvokeInterfaceTrampoline
+ .extern artLookupResolvedMethod
ENTRY art_quick_imt_conflict_trampoline
ldr xIP0, [sp, #0] // Load referrer
+ ubfx x15, xIP1, #0, #METHOD_DEX_CACHE_HASH_BITS // Calculate DexCache method slot index.
ldr xIP0, [xIP0, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_64] // Load dex cache methods array
- ldr xIP0, [xIP0, xIP1, lsl #POINTER_SIZE_SHIFT] // Load interface method
+ add xIP0, xIP0, x15, lsl #(POINTER_SIZE_SHIFT + 1) // Load DexCache method slot address.
+
+ // Relaxed atomic load x14:x15 from the dex cache slot.
+.Limt_conflict_trampoline_retry_load:
+ ldxp x14, x15, [xIP0]
+ stxp w13, x14, x15, [xIP0]
+ cbnz w13, .Limt_conflict_trampoline_retry_load
+
+ cmp x15, xIP1 // Compare method index to see if we had a DexCache method hit.
+ bne .Limt_conflict_trampoline_dex_cache_miss
+.Limt_conflict_trampoline_have_interface_method:
ldr xIP1, [x0, #ART_METHOD_JNI_OFFSET_64] // Load ImtConflictTable
ldr x0, [xIP1] // Load first entry in ImtConflictTable.
.Limt_table_iterate:
- cmp x0, xIP0
+ cmp x0, x14
// Branch if found. Benchmarks have shown doing a branch here is better.
beq .Limt_table_found
// If the entry is null, the interface method is not in the ImtConflictTable.
@@ -2079,8 +2090,46 @@
.Lconflict_trampoline:
// Call the runtime stub to populate the ImtConflictTable and jump to the
// resolved method.
- mov x0, xIP0 // Load interface method
+ mov x0, x14 // Load interface method
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
+.Limt_conflict_trampoline_dex_cache_miss:
+ // We're not creating a proper runtime method frame here,
+ // artLookupResolvedMethod() is not allowed to walk the stack.
+
+ // Save GPR args and return address, allocate space for FPR args, align stack.
+ SAVE_TWO_REGS_INCREASE_FRAME x0, x1, (8 * 8 + 8 * 8 + 8 + 8)
+ SAVE_TWO_REGS x2, x3, 16
+ SAVE_TWO_REGS x4, x5, 32
+ SAVE_TWO_REGS x6, x7, 48
+ SAVE_REG xLR, (8 * 8 + 8 * 8 + 8)
+
+ // Save FPR args.
+ stp d0, d1, [sp, #64]
+ stp d2, d3, [sp, #80]
+ stp d4, d5, [sp, #96]
+ stp d6, d7, [sp, #112]
+
+ mov x0, xIP1 // Pass method index.
+ ldr x1, [sp, #(8 * 8 + 8 * 8 + 8 + 8)] // Pass referrer.
+ bl artLookupResolvedMethod // (uint32_t method_index, ArtMethod* referrer)
+ mov x14, x0 // Move the interface method to x14 where the loop above expects it.
+
+ // Restore FPR args.
+ ldp d0, d1, [sp, #64]
+ ldp d2, d3, [sp, #80]
+ ldp d4, d5, [sp, #96]
+ ldp d6, d7, [sp, #112]
+
+ // Restore GPR args and return address.
+ RESTORE_REG xLR, (8 * 8 + 8 * 8 + 8)
+ RESTORE_TWO_REGS x2, x3, 16
+ RESTORE_TWO_REGS x4, x5, 32
+ RESTORE_TWO_REGS x6, x7, 48
+ RESTORE_TWO_REGS_DECREASE_FRAME x0, x1, (8 * 8 + 8 * 8 + 8 + 8)
+
+ // If the method wasn't resolved, skip the lookup and go to artInvokeInterfaceTrampoline().
+ cbz x14, .Lconflict_trampoline
+ b .Limt_conflict_trampoline_have_interface_method
END art_quick_imt_conflict_trampoline
ENTRY art_quick_resolution_trampoline
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index d9abaa0..974e876 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -2066,6 +2066,10 @@
* Note that this stub writes to a0, t7 and t8.
*/
ENTRY art_quick_imt_conflict_trampoline
+// FIXME: The DexCache method array has been changed to hash-based cache with eviction.
+// We need a relaxed atomic load of a 64-bit location to try and load the method
+// and call artQuickResolutionTrampoline() if the index does not match.
+#if 0
lw $t8, 0($sp) # Load referrer.
lw $t8, ART_METHOD_DEX_CACHE_METHODS_OFFSET_32($t8) # Load dex cache methods array.
sll $t7, $t7, POINTER_SIZE_SHIFT # Calculate offset.
@@ -2095,6 +2099,9 @@
.Lconflict_trampoline:
# Call the runtime stub to populate the ImtConflictTable and jump to the resolved method.
move $a0, $t7 # Load interface method.
+#else
+ move $a0, $zero
+#endif
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
END art_quick_imt_conflict_trampoline
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index fcbed0e..bcb315f 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -1989,6 +1989,10 @@
* Mote that this stub writes to a0, t0 and t1.
*/
ENTRY art_quick_imt_conflict_trampoline
+// FIXME: The DexCache method array has been changed to hash-based cache with eviction.
+// We need a relaxed atomic load of a 128-bit location to try and load the method
+// and call artQuickResolutionTrampoline() if the index does not match.
+#if 0
ld $t1, 0($sp) # Load referrer.
ld $t1, ART_METHOD_DEX_CACHE_METHODS_OFFSET_64($t1) # Load dex cache methods array.
dsll $t0, $t0, POINTER_SIZE_SHIFT # Calculate offset.
@@ -2017,6 +2021,9 @@
.Lconflict_trampoline:
# Call the runtime stub to populate the ImtConflictTable and jump to the resolved method.
move $a0, $t0 # Load interface method.
+#else
+ move $a0, $zero
+#endif
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
END art_quick_imt_conflict_trampoline
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 031b36b..48d2de9 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1780,35 +1780,90 @@
*/
DEFINE_FUNCTION art_quick_imt_conflict_trampoline
PUSH EDI
- movl 8(%esp), %edi // Load referrer
- movl ART_METHOD_DEX_CACHE_METHODS_OFFSET_32(%edi), %edi // Load dex cache methods array
+ PUSH ESI
+ PUSH EDX
+ movl 16(%esp), %edi // Load referrer.
+ movl ART_METHOD_DEX_CACHE_METHODS_OFFSET_32(%edi), %edi // Load dex cache methods array.
pushl ART_METHOD_JNI_OFFSET_32(%eax) // Push ImtConflictTable.
CFI_ADJUST_CFA_OFFSET(4)
- movd %xmm7, %eax // get target method index stored in xmm7
- movl 0(%edi, %eax, __SIZEOF_POINTER__), %edi // Load interface method
- popl %eax // Pop ImtConflictTable.
+ movd %xmm7, %eax // Get target method index stored in xmm7.
+ movl %eax, %esi // Remember method index in ESI.
+ andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax // Calculate DexCache method slot index.
+ leal 0(%edi, %eax, 2 * __SIZEOF_POINTER__), %edi // Load DexCache method slot address.
+ mov %ecx, %edx // Make EDX:EAX == ECX:EBX so that LOCK CMPXCHG8B makes no changes.
+ mov %ebx, %eax // (The actual value does not matter.)
+ lock cmpxchg8b (%edi) // Relaxed atomic load EDX:EAX from the dex cache slot.
+ popl %edi // Pop ImtConflictTable.
CFI_ADJUST_CFA_OFFSET(-4)
+ cmp %edx, %esi // Compare method index to see if we had a DexCache method hit.
+ jne .Limt_conflict_trampoline_dex_cache_miss
.Limt_table_iterate:
- cmpl %edi, 0(%eax)
+ cmpl %eax, 0(%edi)
jne .Limt_table_next_entry
// We successfully hit an entry in the table. Load the target method
// and jump to it.
+ movl __SIZEOF_POINTER__(%edi), %eax
+ CFI_REMEMBER_STATE
+ POP EDX
+ POP ESI
POP EDI
- movl __SIZEOF_POINTER__(%eax), %eax
jmp *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)
+ CFI_RESTORE_STATE
.Limt_table_next_entry:
// If the entry is null, the interface method is not in the ImtConflictTable.
- cmpl LITERAL(0), 0(%eax)
+ cmpl LITERAL(0), 0(%edi)
jz .Lconflict_trampoline
// Iterate over the entries of the ImtConflictTable.
- addl LITERAL(2 * __SIZEOF_POINTER__), %eax
+ addl LITERAL(2 * __SIZEOF_POINTER__), %edi
jmp .Limt_table_iterate
.Lconflict_trampoline:
// Call the runtime stub to populate the ImtConflictTable and jump to the
// resolved method.
- movl %edi, %eax // Load interface method
+ CFI_REMEMBER_STATE
+ POP EDX
+ POP ESI
POP EDI
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
+ CFI_RESTORE_STATE
+.Limt_conflict_trampoline_dex_cache_miss:
+ // We're not creating a proper runtime method frame here,
+ // artLookupResolvedMethod() is not allowed to walk the stack.
+
+ // Save core register args; EDX is already saved.
+ PUSH ebx
+ PUSH ecx
+
+ // Save FPR args.
+ subl MACRO_LITERAL(32), %esp
+ CFI_ADJUST_CFA_OFFSET(32)
+ movsd %xmm0, 0(%esp)
+ movsd %xmm1, 8(%esp)
+ movsd %xmm2, 16(%esp)
+ movsd %xmm3, 24(%esp)
+
+ pushl 32+8+16(%esp) // Pass referrer.
+ CFI_ADJUST_CFA_OFFSET(4)
+ pushl %esi // Pass method index.
+ CFI_ADJUST_CFA_OFFSET(4)
+ call SYMBOL(artLookupResolvedMethod) // (uint32_t method_index, ArtMethod* referrer)
+ addl LITERAL(8), %esp // Pop arguments.
+ CFI_ADJUST_CFA_OFFSET(-8)
+
+ // Restore FPR args.
+ movsd 0(%esp), %xmm0
+ movsd 8(%esp), %xmm1
+ movsd 16(%esp), %xmm2
+ movsd 24(%esp), %xmm3
+ addl MACRO_LITERAL(32), %esp
+ CFI_ADJUST_CFA_OFFSET(-32)
+
+ // Restore core register args.
+ POP ecx
+ POP ebx
+
+ cmp LITERAL(0), %eax // If the method wasn't resolved,
+ je .Lconflict_trampoline // skip the lookup and go to artInvokeInterfaceTrampoline().
+ jmp .Limt_table_iterate
END_FUNCTION art_quick_imt_conflict_trampoline
DEFINE_FUNCTION art_quick_resolution_trampoline
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index ad06873..0a9199e 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -1641,17 +1641,29 @@
int3
int3
#else
- movq __SIZEOF_POINTER__(%rsp), %r10 // Load referrer
- movq ART_METHOD_DEX_CACHE_METHODS_OFFSET_64(%r10), %r10 // Load dex cache methods array
- movq 0(%r10, %rax, __SIZEOF_POINTER__), %r10 // Load interface method
+ movq __SIZEOF_POINTER__(%rsp), %r10 // Load referrer.
+ movq ART_METHOD_DEX_CACHE_METHODS_OFFSET_64(%r10), %r10 // Load dex cache methods array.
+ mov %eax, %r11d // Remember method index in R11.
+ andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax // Calculate DexCache method slot index.
+ shll LITERAL(1), %eax // Multiply by 2 as entries have size 2 * __SIZEOF_POINTER__.
+ leaq 0(%r10, %rax, __SIZEOF_POINTER__), %r10 // Load DexCache method slot address.
+ PUSH rdx // Preserve RDX as we need to clobber it by LOCK CMPXCHG16B.
+ mov %rcx, %rdx // Make RDX:RAX == RCX:RBX so that LOCK CMPXCHG16B makes no changes.
+ mov %rbx, %rax // (The actual value does not matter.)
+ lock cmpxchg16b (%r10) // Relaxed atomic load RDX:RAX from the dex cache slot.
movq ART_METHOD_JNI_OFFSET_64(%rdi), %rdi // Load ImtConflictTable
+ cmp %rdx, %r11 // Compare method index to see if we had a DexCache method hit.
+ jne .Limt_conflict_trampoline_dex_cache_miss
.Limt_table_iterate:
- cmpq %r10, 0(%rdi)
+ cmpq %rax, 0(%rdi)
jne .Limt_table_next_entry
// We successfully hit an entry in the table. Load the target method
// and jump to it.
movq __SIZEOF_POINTER__(%rdi), %rdi
+ CFI_REMEMBER_STATE
+ POP rdx
jmp *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi)
+ CFI_RESTORE_STATE
.Limt_table_next_entry:
// If the entry is null, the interface method is not in the ImtConflictTable.
cmpq LITERAL(0), 0(%rdi)
@@ -1662,8 +1674,66 @@
.Lconflict_trampoline:
// Call the runtime stub to populate the ImtConflictTable and jump to the
// resolved method.
- movq %r10, %rdi // Load interface method
+ CFI_REMEMBER_STATE
+ POP rdx
+ movq %rax, %rdi // Load interface method
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
+ CFI_RESTORE_STATE
+.Limt_conflict_trampoline_dex_cache_miss:
+    // We're not creating a proper runtime method frame here;
+    // artLookupResolvedMethod() is not allowed to walk the stack.
+
+ // Save GPR args and ImtConflictTable; RDX is already saved.
+ PUSH r9 // Quick arg 5.
+ PUSH r8 // Quick arg 4.
+ PUSH rsi // Quick arg 1.
+ PUSH rcx // Quick arg 3.
+ PUSH rdi // ImtConflictTable
+ // Save FPR args and callee-saves, align stack to 16B.
+ subq MACRO_LITERAL(12 * 8 + 8), %rsp
+ CFI_ADJUST_CFA_OFFSET(12 * 8 + 8)
+ movq %xmm0, 0(%rsp)
+ movq %xmm1, 8(%rsp)
+ movq %xmm2, 16(%rsp)
+ movq %xmm3, 24(%rsp)
+ movq %xmm4, 32(%rsp)
+ movq %xmm5, 40(%rsp)
+ movq %xmm6, 48(%rsp)
+ movq %xmm7, 56(%rsp)
+ movq %xmm12, 64(%rsp) // XMM12-15 are callee-save in ART compiled code ABI
+ movq %xmm13, 72(%rsp) // but caller-save in native ABI.
+ movq %xmm14, 80(%rsp)
+ movq %xmm15, 88(%rsp)
+
+ movq %r11, %rdi // Pass method index.
+ movq 12 * 8 + 8 + 6 * 8 + 8(%rsp), %rsi // Pass referrer.
+ call SYMBOL(artLookupResolvedMethod) // (uint32_t method_index, ArtMethod* referrer)
+
+ // Restore FPRs.
+ movq 0(%rsp), %xmm0
+ movq 8(%rsp), %xmm1
+ movq 16(%rsp), %xmm2
+ movq 24(%rsp), %xmm3
+ movq 32(%rsp), %xmm4
+ movq 40(%rsp), %xmm5
+ movq 48(%rsp), %xmm6
+ movq 56(%rsp), %xmm7
+ movq 64(%rsp), %xmm12
+ movq 72(%rsp), %xmm13
+ movq 80(%rsp), %xmm14
+ movq 88(%rsp), %xmm15
+ addq MACRO_LITERAL(12 * 8 + 8), %rsp
+ CFI_ADJUST_CFA_OFFSET(-(12 * 8 + 8))
+ // Restore ImtConflictTable and GPR args.
+ POP rdi
+ POP rcx
+ POP rsi
+ POP r8
+ POP r9
+
+ cmp LITERAL(0), %rax // If the method wasn't resolved,
+ je .Lconflict_trampoline // skip the lookup and go to artInvokeInterfaceTrampoline().
+ jmp .Limt_table_iterate
#endif // __APPLE__
END_FUNCTION art_quick_imt_conflict_trampoline
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index 40d7e5c..4300544 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -102,20 +102,21 @@
return GetDexMethodIndexUnchecked();
}
-inline ArtMethod** ArtMethod::GetDexCacheResolvedMethods(PointerSize pointer_size) {
- return GetNativePointer<ArtMethod**>(DexCacheResolvedMethodsOffset(pointer_size),
- pointer_size);
+inline mirror::MethodDexCacheType* ArtMethod::GetDexCacheResolvedMethods(PointerSize pointer_size) {
+ return GetNativePointer<mirror::MethodDexCacheType*>(DexCacheResolvedMethodsOffset(pointer_size),
+ pointer_size);
}
inline ArtMethod* ArtMethod::GetDexCacheResolvedMethod(uint16_t method_index,
PointerSize pointer_size) {
// NOTE: Unchecked, i.e. not throwing AIOOB. We don't even know the length here
// without accessing the DexCache and we don't want to do that in release build.
- DCHECK_LT(method_index,
- GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
- ArtMethod* method = mirror::DexCache::GetElementPtrSize(GetDexCacheResolvedMethods(pointer_size),
- method_index,
- pointer_size);
+ DCHECK_LT(method_index, GetInterfaceMethodIfProxy(pointer_size)->GetDexFile()->NumMethodIds());
+ uint32_t slot_idx = method_index % mirror::DexCache::kDexCacheMethodCacheSize;
+ DCHECK_LT(slot_idx, GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
+ mirror::MethodDexCachePair pair = mirror::DexCache::GetNativePairPtrSize(
+ GetDexCacheResolvedMethods(pointer_size), slot_idx, pointer_size);
+ ArtMethod* method = pair.GetObjectForIndex(method_index);
if (LIKELY(method != nullptr)) {
auto* declaring_class = method->GetDeclaringClass();
if (LIKELY(declaring_class == nullptr || !declaring_class->IsErroneous())) {
@@ -130,29 +131,29 @@
PointerSize pointer_size) {
// NOTE: Unchecked, i.e. not throwing AIOOB. We don't even know the length here
// without accessing the DexCache and we don't want to do that in release build.
- DCHECK_LT(method_index,
- GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
+ DCHECK_LT(method_index, GetInterfaceMethodIfProxy(pointer_size)->GetDexFile()->NumMethodIds());
DCHECK(new_method == nullptr || new_method->GetDeclaringClass() != nullptr);
- mirror::DexCache::SetElementPtrSize(GetDexCacheResolvedMethods(pointer_size),
- method_index,
- new_method,
- pointer_size);
+ uint32_t slot_idx = method_index % mirror::DexCache::kDexCacheMethodCacheSize;
+ DCHECK_LT(slot_idx, GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
+ mirror::MethodDexCachePair pair(new_method, method_index);
+ mirror::DexCache::SetNativePairPtrSize(
+ GetDexCacheResolvedMethods(pointer_size), slot_idx, pair, pointer_size);
}
inline bool ArtMethod::HasDexCacheResolvedMethods(PointerSize pointer_size) {
return GetDexCacheResolvedMethods(pointer_size) != nullptr;
}
-inline bool ArtMethod::HasSameDexCacheResolvedMethods(ArtMethod** other_cache,
- PointerSize pointer_size) {
- return GetDexCacheResolvedMethods(pointer_size) == other_cache;
-}
-
inline bool ArtMethod::HasSameDexCacheResolvedMethods(ArtMethod* other, PointerSize pointer_size) {
return GetDexCacheResolvedMethods(pointer_size) ==
other->GetDexCacheResolvedMethods(pointer_size);
}
+inline bool ArtMethod::HasSameDexCacheResolvedMethods(mirror::MethodDexCacheType* other_cache,
+ PointerSize pointer_size) {
+ return GetDexCacheResolvedMethods(pointer_size) == other_cache;
+}
+
inline mirror::Class* ArtMethod::GetClassFromTypeIndex(dex::TypeIndex type_idx, bool resolve) {
// TODO: Refactor this function into two functions, Resolve...() and Lookup...()
// so that we can properly annotate it with no-suspension possible / suspension possible.
@@ -381,17 +382,21 @@
if (LIKELY(!IsProxyMethod())) {
return this;
}
- ArtMethod* interface_method = mirror::DexCache::GetElementPtrSize(
- GetDexCacheResolvedMethods(pointer_size),
- GetDexMethodIndex(),
- pointer_size);
- DCHECK(interface_method != nullptr);
- DCHECK_EQ(interface_method,
- Runtime::Current()->GetClassLinker()->FindMethodForProxy(GetDeclaringClass(), this));
+ uint32_t method_index = GetDexMethodIndex();
+ uint32_t slot_idx = method_index % mirror::DexCache::kDexCacheMethodCacheSize;
+ mirror::MethodDexCachePair pair = mirror::DexCache::GetNativePairPtrSize(
+ GetDexCacheResolvedMethods(pointer_size), slot_idx, pointer_size);
+ ArtMethod* interface_method = pair.GetObjectForIndex(method_index);
+ if (LIKELY(interface_method != nullptr)) {
+ DCHECK_EQ(interface_method, Runtime::Current()->GetClassLinker()->FindMethodForProxy(this));
+ } else {
+ interface_method = Runtime::Current()->GetClassLinker()->FindMethodForProxy(this);
+ DCHECK(interface_method != nullptr);
+ }
return interface_method;
}
-inline void ArtMethod::SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods,
+inline void ArtMethod::SetDexCacheResolvedMethods(mirror::MethodDexCacheType* new_dex_cache_methods,
PointerSize pointer_size) {
SetNativePointer(DexCacheResolvedMethodsOffset(pointer_size),
new_dex_cache_methods,
@@ -462,14 +467,8 @@
if (UNLIKELY(klass->IsProxyClass())) {
// For normal methods, dex cache shortcuts will be visited through the declaring class.
// However, for proxies we need to keep the interface method alive, so we visit its roots.
- ArtMethod* interface_method = mirror::DexCache::GetElementPtrSize(
- GetDexCacheResolvedMethods(pointer_size),
- GetDexMethodIndex(),
- pointer_size);
+ ArtMethod* interface_method = GetInterfaceMethodIfProxy(pointer_size);
DCHECK(interface_method != nullptr);
- DCHECK_EQ(interface_method,
- Runtime::Current()->GetClassLinker()->FindMethodForProxy<kReadBarrierOption>(
- klass, this));
interface_method->VisitRoots(visitor, pointer_size);
}
}
@@ -483,8 +482,8 @@
if (old_class != new_class) {
SetDeclaringClass(new_class);
}
- ArtMethod** old_methods = GetDexCacheResolvedMethods(pointer_size);
- ArtMethod** new_methods = visitor(old_methods);
+ mirror::MethodDexCacheType* old_methods = GetDexCacheResolvedMethods(pointer_size);
+ mirror::MethodDexCacheType* new_methods = visitor(old_methods);
if (old_methods != new_methods) {
SetDexCacheResolvedMethods(new_methods, pointer_size);
}
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index ef9c457..d8984e8 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -216,11 +216,8 @@
} else {
// Method didn't override superclass method so search interfaces
if (IsProxyMethod()) {
- result = mirror::DexCache::GetElementPtrSize(GetDexCacheResolvedMethods(pointer_size),
- GetDexMethodIndex(),
- pointer_size);
- CHECK_EQ(result,
- Runtime::Current()->GetClassLinker()->FindMethodForProxy(GetDeclaringClass(), this));
+ result = GetInterfaceMethodIfProxy(pointer_size);
+ DCHECK(result != nullptr);
} else {
mirror::IfTable* iftable = GetDeclaringClass()->GetIfTable();
for (size_t i = 0; i < iftable->Count() && result == nullptr; i++) {
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 4b3e8ef..511ac83 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -53,6 +53,10 @@
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;
+
+template <typename T> struct NativeDexCachePair;
+using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
+using MethodDexCacheType = std::atomic<MethodDexCachePair>;
} // namespace mirror
class ArtMethod FINAL {
@@ -352,7 +356,7 @@
dex_method_index_ = new_idx;
}
- ALWAYS_INLINE ArtMethod** GetDexCacheResolvedMethods(PointerSize pointer_size)
+ ALWAYS_INLINE mirror::MethodDexCacheType* GetDexCacheResolvedMethods(PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index,
PointerSize pointer_size)
@@ -362,13 +366,14 @@
ArtMethod* new_method,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ALWAYS_INLINE void SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods,
+ ALWAYS_INLINE void SetDexCacheResolvedMethods(mirror::MethodDexCacheType* new_dex_cache_methods,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
bool HasDexCacheResolvedMethods(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_);
bool HasSameDexCacheResolvedMethods(ArtMethod* other, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- bool HasSameDexCacheResolvedMethods(ArtMethod** other_cache, PointerSize pointer_size)
+ bool HasSameDexCacheResolvedMethods(mirror::MethodDexCacheType* other_cache,
+ PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
// Get the Class* from the type index into this method's dex cache.
@@ -714,7 +719,7 @@
// Must be the last fields in the method.
struct PtrSizedFields {
// Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
- ArtMethod** dex_cache_resolved_methods_;
+ mirror::MethodDexCacheType* dex_cache_resolved_methods_;
// Pointer to JNI function registered to this method, or a function to resolve the JNI function,
// or the profiling data for non-native methods, or an ImtConflictTable, or the
diff --git a/runtime/class_linker-inl.h b/runtime/class_linker-inl.h
index d29db15..9a73697 100644
--- a/runtime/class_linker-inl.h
+++ b/runtime/class_linker-inl.h
@@ -156,6 +156,29 @@
});
}
+inline ArtMethod* ClassLinker::LookupResolvedMethod(uint32_t method_idx,
+ ObjPtr<mirror::DexCache> dex_cache,
+ ObjPtr<mirror::ClassLoader> class_loader) {
+ PointerSize pointer_size = image_pointer_size_;
+ ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, pointer_size);
+ if (resolved == nullptr) {
+ const DexFile& dex_file = *dex_cache->GetDexFile();
+ const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
+ ObjPtr<mirror::Class> klass = LookupResolvedType(method_id.class_idx_, dex_cache, class_loader);
+ if (klass != nullptr) {
+ if (klass->IsInterface()) {
+ resolved = klass->FindInterfaceMethod(dex_cache, method_idx, pointer_size);
+ } else {
+ resolved = klass->FindClassMethod(dex_cache, method_idx, pointer_size);
+ }
+ if (resolved != nullptr) {
+ dex_cache->SetResolvedMethod(method_idx, resolved, pointer_size);
+ }
+ }
+ }
+ return resolved;
+}
+
template <InvokeType type, ClassLinker::ResolveMode kResolveMode>
inline ArtMethod* ClassLinker::GetResolvedMethod(uint32_t method_idx, ArtMethod* referrer) {
DCHECK(referrer != nullptr);
@@ -164,9 +187,10 @@
// However, we delay the GetInterfaceMethodIfProxy() until needed.
DCHECK(!referrer->IsProxyMethod() || referrer->IsConstructor());
ArtMethod* resolved_method = referrer->GetDexCacheResolvedMethod(method_idx, image_pointer_size_);
- if (resolved_method == nullptr || resolved_method->IsRuntimeMethod()) {
+ if (resolved_method == nullptr) {
return nullptr;
}
+ DCHECK(!resolved_method->IsRuntimeMethod());
if (kResolveMode == ResolveMode::kCheckICCEAndIAE) {
referrer = referrer->GetInterfaceMethodIfProxy(image_pointer_size_);
// Check if the invoke type matches the class type.
@@ -203,7 +227,8 @@
DCHECK(!referrer->IsProxyMethod() || referrer->IsConstructor());
Thread::PoisonObjectPointersIfDebug();
ArtMethod* resolved_method = referrer->GetDexCacheResolvedMethod(method_idx, image_pointer_size_);
- if (UNLIKELY(resolved_method == nullptr || resolved_method->IsRuntimeMethod())) {
+ DCHECK(resolved_method == nullptr || !resolved_method->IsRuntimeMethod());
+ if (UNLIKELY(resolved_method == nullptr)) {
referrer = referrer->GetInterfaceMethodIfProxy(image_pointer_size_);
ObjPtr<mirror::Class> declaring_class = referrer->GetDeclaringClass();
StackHandleScope<2> hs(self);
@@ -287,35 +312,6 @@
return klass.Ptr();
}
-template<ReadBarrierOption kReadBarrierOption>
-ArtMethod* ClassLinker::FindMethodForProxy(ObjPtr<mirror::Class> proxy_class,
- ArtMethod* proxy_method) {
- DCHECK(proxy_class->IsProxyClass());
- DCHECK(proxy_method->IsProxyMethod());
- {
- Thread* const self = Thread::Current();
- ReaderMutexLock mu(self, *Locks::dex_lock_);
- // Locate the dex cache of the original interface/Object
- for (const DexCacheData& data : dex_caches_) {
- if (!self->IsJWeakCleared(data.weak_root) &&
- proxy_method->HasSameDexCacheResolvedMethods(data.resolved_methods,
- image_pointer_size_)) {
- ObjPtr<mirror::DexCache> dex_cache =
- ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
- if (dex_cache != nullptr) {
- ArtMethod* resolved_method = dex_cache->GetResolvedMethod(
- proxy_method->GetDexMethodIndex(), image_pointer_size_);
- CHECK(resolved_method != nullptr);
- return resolved_method;
- }
- }
- }
- }
- LOG(FATAL) << "Didn't find dex cache for " << proxy_class->PrettyClass() << " "
- << proxy_method->PrettyMethod();
- UNREACHABLE();
-}
-
} // namespace art
#endif // ART_RUNTIME_CLASS_LINKER_INL_H_
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index a227d18..6133dd7 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1114,7 +1114,8 @@
virtual void Visit(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
const bool is_copied = method->IsCopied();
- ArtMethod** resolved_methods = method->GetDexCacheResolvedMethods(kRuntimePointerSize);
+ mirror::MethodDexCacheType* resolved_methods =
+ method->GetDexCacheResolvedMethods(kRuntimePointerSize);
if (resolved_methods != nullptr) {
bool in_image_space = false;
if (kIsDebugBuild || is_copied) {
@@ -1284,6 +1285,25 @@
}
}
+template <typename T>
+static void CopyNativeDexCachePairs(std::atomic<mirror::NativeDexCachePair<T>>* src,
+ size_t count,
+ std::atomic<mirror::NativeDexCachePair<T>>* dst,
+ PointerSize pointer_size) {
+ DCHECK_NE(count, 0u);
+ DCHECK(mirror::DexCache::GetNativePairPtrSize(src, 0, pointer_size).object != nullptr ||
+ mirror::DexCache::GetNativePairPtrSize(src, 0, pointer_size).index != 0u);
+ for (size_t i = 0; i < count; ++i) {
+ DCHECK_EQ(mirror::DexCache::GetNativePairPtrSize(dst, i, pointer_size).index, 0u);
+ DCHECK(mirror::DexCache::GetNativePairPtrSize(dst, i, pointer_size).object == nullptr);
+ mirror::NativeDexCachePair<T> source =
+ mirror::DexCache::GetNativePairPtrSize(src, i, pointer_size);
+ if (source.index != 0u || source.object != nullptr) {
+ mirror::DexCache::SetNativePairPtrSize(dst, i, source, pointer_size);
+ }
+ }
+}
+
// new_class_set is the set of classes that were read from the class table section in the image.
// If there was no class table section, it is null.
// Note: using a class here to avoid having to make ClassLinker internals public.
@@ -1363,7 +1383,10 @@
if (dex_file->NumTypeIds() < num_types) {
num_types = dex_file->NumTypeIds();
}
- const size_t num_methods = dex_file->NumMethodIds();
+ size_t num_methods = mirror::DexCache::kDexCacheMethodCacheSize;
+ if (dex_file->NumMethodIds() < num_methods) {
+ num_methods = dex_file->NumMethodIds();
+ }
size_t num_fields = mirror::DexCache::kDexCacheFieldCacheSize;
if (dex_file->NumFieldIds() < num_fields) {
num_fields = dex_file->NumFieldIds();
@@ -1396,37 +1419,18 @@
dex_cache->SetResolvedTypes(types);
}
if (num_methods != 0u) {
- ArtMethod** const methods = reinterpret_cast<ArtMethod**>(
- raw_arrays + layout.MethodsOffset());
- ArtMethod** const image_resolved_methods = dex_cache->GetResolvedMethods();
- for (size_t j = 0; kIsDebugBuild && j < num_methods; ++j) {
- DCHECK(methods[j] == nullptr);
- }
- CopyNonNull(image_resolved_methods,
- num_methods,
- methods,
- [] (const ArtMethod* method) {
- return method == nullptr;
- });
+ mirror::MethodDexCacheType* const image_resolved_methods =
+ dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* const methods =
+ reinterpret_cast<mirror::MethodDexCacheType*>(raw_arrays + layout.MethodsOffset());
+ CopyNativeDexCachePairs(image_resolved_methods, num_methods, methods, image_pointer_size);
dex_cache->SetResolvedMethods(methods);
}
if (num_fields != 0u) {
mirror::FieldDexCacheType* const image_resolved_fields = dex_cache->GetResolvedFields();
mirror::FieldDexCacheType* const fields =
reinterpret_cast<mirror::FieldDexCacheType*>(raw_arrays + layout.FieldsOffset());
- for (size_t j = 0; j < num_fields; ++j) {
- DCHECK_EQ(mirror::DexCache::GetNativePairPtrSize(fields, j, image_pointer_size).index,
- 0u);
- DCHECK(mirror::DexCache::GetNativePairPtrSize(fields, j, image_pointer_size).object ==
- nullptr);
- mirror::DexCache::SetNativePairPtrSize(
- fields,
- j,
- mirror::DexCache::GetNativePairPtrSize(image_resolved_fields,
- j,
- image_pointer_size),
- image_pointer_size);
- }
+ CopyNativeDexCachePairs(image_resolved_fields, num_fields, fields, image_pointer_size);
dex_cache->SetResolvedFields(fields);
}
if (num_method_types != 0u) {
@@ -1663,13 +1667,13 @@
heap->VisitObjects(visitor);
}
- static void CheckPointerArray(gc::Heap* heap,
- ClassLinker* class_linker,
- ArtMethod** arr,
- size_t size)
+ static void CheckArtMethodDexCacheArray(gc::Heap* heap,
+ ClassLinker* class_linker,
+ mirror::MethodDexCacheType* arr,
+ size_t size)
REQUIRES_SHARED(Locks::mutator_lock_) {
ImageSanityChecks isc(heap, class_linker);
- isc.SanityCheckArtMethodPointerArray(arr, size);
+ isc.SanityCheckArtMethodDexCacheArray(arr, size);
}
private:
@@ -1724,7 +1728,7 @@
}
}
- void SanityCheckArtMethodPointerArray(ArtMethod** arr, size_t size)
+ void SanityCheckArtMethodDexCacheArray(mirror::MethodDexCacheType* arr, size_t size)
REQUIRES_SHARED(Locks::mutator_lock_) {
CHECK_EQ(arr != nullptr, size != 0u);
if (arr != nullptr) {
@@ -1740,7 +1744,8 @@
CHECK(contains);
}
for (size_t j = 0; j < size; ++j) {
- ArtMethod* method = mirror::DexCache::GetElementPtrSize(arr, j, pointer_size_);
+ auto pair = mirror::DexCache::GetNativePairPtrSize(arr, j, pointer_size_);
+ ArtMethod* method = pair.object;
// expected_class == null means we are a dex cache.
if (method != nullptr) {
SanityCheckArtMethod(method, nullptr);
@@ -1851,10 +1856,10 @@
}
} else {
if (kSanityCheckObjects) {
- ImageSanityChecks::CheckPointerArray(heap,
- this,
- dex_cache->GetResolvedMethods(),
- dex_cache->NumResolvedMethods());
+ ImageSanityChecks::CheckArtMethodDexCacheArray(heap,
+ this,
+ dex_cache->GetResolvedMethods(),
+ dex_cache->NumResolvedMethods());
}
// Register dex files, keep track of existing ones that are conflicts.
AppendToBootClassPath(*dex_file.get(), dex_cache);
@@ -3722,20 +3727,6 @@
return DexCacheData();
}
-void ClassLinker::FixupDexCaches(ArtMethod* resolution_method) {
- Thread* const self = Thread::Current();
- ReaderMutexLock mu(self, *Locks::dex_lock_);
- for (const DexCacheData& data : dex_caches_) {
- if (!self->IsJWeakCleared(data.weak_root)) {
- ObjPtr<mirror::DexCache> dex_cache = ObjPtr<mirror::DexCache>::DownCast(
- self->DecodeJObject(data.weak_root));
- if (dex_cache != nullptr) {
- dex_cache->Fixup(resolution_method, image_pointer_size_);
- }
- }
- }
-}
-
mirror::Class* ClassLinker::CreatePrimitiveClass(Thread* self, Primitive::Type type) {
ObjPtr<mirror::Class> klass =
AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
@@ -6886,7 +6877,8 @@
// Check that there are no stale methods are in the dex cache array.
auto* resolved_methods = klass_->GetDexCache()->GetResolvedMethods();
for (size_t i = 0, count = klass_->GetDexCache()->NumResolvedMethods(); i < count; ++i) {
- auto* m = mirror::DexCache::GetElementPtrSize(resolved_methods, i, pointer_size);
+ auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_methods, i, pointer_size);
+ ArtMethod* m = pair.object;
CHECK(move_table_.find(m) == move_table_.end() ||
// The original versions of copied methods will still be present so allow those too.
// Note that if the first check passes this might fail to GetDeclaringClass().
@@ -7949,7 +7941,8 @@
PointerSize pointer_size = image_pointer_size_;
ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, pointer_size);
Thread::PoisonObjectPointersIfDebug();
- bool valid_dex_cache_method = resolved != nullptr && !resolved->IsRuntimeMethod();
+ DCHECK(resolved == nullptr || !resolved->IsRuntimeMethod());
+ bool valid_dex_cache_method = resolved != nullptr;
if (kResolveMode == ResolveMode::kNoChecks && valid_dex_cache_method) {
// We have a valid method from the DexCache and no checks to perform.
DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
@@ -8045,7 +8038,8 @@
Handle<mirror::ClassLoader> class_loader) {
ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, image_pointer_size_);
Thread::PoisonObjectPointersIfDebug();
- if (resolved != nullptr && !resolved->IsRuntimeMethod()) {
+ if (resolved != nullptr) {
+ DCHECK(!resolved->IsRuntimeMethod());
DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
return resolved;
}
@@ -9065,6 +9059,53 @@
ifcount * mirror::IfTable::kMax));
}
+ArtMethod* ClassLinker::FindMethodForProxy(ArtMethod* proxy_method) {
+ DCHECK(proxy_method->IsProxyMethod());
+ {
+ uint32_t method_index = proxy_method->GetDexMethodIndex();
+ PointerSize pointer_size = image_pointer_size_;
+ Thread* const self = Thread::Current();
+ ReaderMutexLock mu(self, *Locks::dex_lock_);
+ // Locate the dex cache of the original interface/Object
+ for (const DexCacheData& data : dex_caches_) {
+ if (!self->IsJWeakCleared(data.weak_root) &&
+ proxy_method->HasSameDexCacheResolvedMethods(data.resolved_methods, pointer_size)) {
+ ObjPtr<mirror::DexCache> dex_cache =
+ ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
+ if (dex_cache != nullptr) {
+          // Look up the method. Instead of going through LookupResolvedMethod()
+ // and thus LookupResolvedType(), use the ClassTable from the DexCacheData.
+ ArtMethod* resolved_method = dex_cache->GetResolvedMethod(method_index, pointer_size);
+ if (resolved_method == nullptr) {
+ const DexFile::MethodId& method_id = data.dex_file->GetMethodId(method_index);
+ ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(method_id.class_idx_);
+ if (klass == nullptr) {
+ const char* descriptor = data.dex_file->StringByTypeIdx(method_id.class_idx_);
+ klass = data.class_table->Lookup(descriptor, ComputeModifiedUtf8Hash(descriptor));
+ DCHECK(klass != nullptr);
+ dex_cache->SetResolvedType(method_id.class_idx_, klass);
+ }
+ if (klass->IsInterface()) {
+ resolved_method = klass->FindInterfaceMethod(dex_cache, method_index, pointer_size);
+ } else {
+ DCHECK(
+ klass == WellKnownClasses::ToClass(WellKnownClasses::java_lang_reflect_Proxy) ||
+ klass == WellKnownClasses::ToClass(WellKnownClasses::java_lang_Object));
+ resolved_method = klass->FindClassMethod(dex_cache, method_index, pointer_size);
+ }
+ CHECK(resolved_method != nullptr);
+ dex_cache->SetResolvedMethod(method_index, resolved_method, pointer_size);
+ }
+ return resolved_method;
+ }
+ }
+ }
+ }
+ // Note: Do not use proxy_method->PrettyMethod() as it can call back here.
+ LOG(FATAL) << "Didn't find dex cache for " << proxy_method->GetDeclaringClass()->PrettyClass();
+ UNREACHABLE();
+}
+
// Instantiate ResolveMethod.
template ArtMethod* ClassLinker::ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
const DexFile& dex_file,
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 3cf59f0..4a99c66 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -55,6 +55,9 @@
class MethodType;
template<class T> class ObjectArray;
class StackTraceElement;
+ template <typename T> struct NativeDexCachePair;
+ using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
+ using MethodDexCacheType = std::atomic<MethodDexCachePair>;
} // namespace mirror
class ClassTable;
@@ -287,6 +290,12 @@
kCheckICCEAndIAE
};
+ // Look up a previously resolved method with the given index.
+ ArtMethod* LookupResolvedMethod(uint32_t method_idx,
+ ObjPtr<mirror::DexCache> dex_cache,
+ ObjPtr<mirror::ClassLoader> class_loader)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
// Resolve a method with a given ID from the DexFile, storing the
// result in DexCache. The ClassLinker and ClassLoader are used as
// in ResolveType. What is unique is the method type argument which
@@ -423,9 +432,6 @@
ClassTable* FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache)
REQUIRES(!Locks::dex_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
- void FixupDexCaches(ArtMethod* resolution_method)
- REQUIRES(!Locks::dex_lock_)
- REQUIRES_SHARED(Locks::mutator_lock_);
LengthPrefixedArray<ArtField>* AllocArtFieldArray(Thread* self,
LinearAlloc* allocator,
@@ -475,8 +481,7 @@
REQUIRES_SHARED(Locks::mutator_lock_);
std::string GetDescriptorForProxy(ObjPtr<mirror::Class> proxy_class)
REQUIRES_SHARED(Locks::mutator_lock_);
- template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- ArtMethod* FindMethodForProxy(ObjPtr<mirror::Class> proxy_class, ArtMethod* proxy_method)
+ ArtMethod* FindMethodForProxy(ArtMethod* proxy_method)
REQUIRES(!Locks::dex_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -692,7 +697,7 @@
// jweak decode that triggers read barriers (and mark them alive unnecessarily and mess with
// class unloading.)
const DexFile* dex_file;
- ArtMethod** resolved_methods;
+ mirror::MethodDexCacheType* resolved_methods;
// Identify the associated class loader's class table. This is used to make sure that
// the Java call to native DexCache.setResolvedType() inserts the resolved type in that
// class table. It is also used to make sure we don't register the same dex cache with
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 98d7c7c..39d77f0 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -440,14 +440,6 @@
}
TestRootVisitor visitor;
class_linker_->VisitRoots(&visitor, kVisitRootFlagAllRoots);
- // Verify the dex cache has resolution methods in all resolved method slots
- ObjPtr<mirror::DexCache> dex_cache = class_linker_->FindDexCache(Thread::Current(), dex);
- auto* resolved_methods = dex_cache->GetResolvedMethods();
- for (size_t i = 0, num_methods = dex_cache->NumResolvedMethods(); i != num_methods; ++i) {
- EXPECT_TRUE(
- mirror::DexCache::GetElementPtrSize(resolved_methods, i, kRuntimePointerSize) != nullptr)
- << dex.GetLocation() << " i=" << i;
- }
}
class TestRootVisitor : public SingleRootVisitor {
diff --git a/runtime/common_runtime_test.cc b/runtime/common_runtime_test.cc
index a425224..8dda04e 100644
--- a/runtime/common_runtime_test.cc
+++ b/runtime/common_runtime_test.cc
@@ -425,7 +425,6 @@
PostRuntimeCreate();
runtime_.reset(Runtime::Current());
class_linker_ = runtime_->GetClassLinker();
- class_linker_->FixupDexCaches(runtime_->GetResolutionMethod());
// Runtime::Create acquired the mutator_lock_ that is normally given away when we
// Runtime::Start, give it away now and then switch to a more managable ScopedObjectAccess.
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h
index 828148a..a6c5d6c 100644
--- a/runtime/entrypoints/entrypoint_utils-inl.h
+++ b/runtime/entrypoints/entrypoint_utils-inl.h
@@ -84,7 +84,8 @@
const DexFile* dex_file = dex_cache->GetDexFile();
const DexFile::MethodId& method_id = dex_file->GetMethodId(method_index);
ArtMethod* inlined_method = caller->GetDexCacheResolvedMethod(method_index, kRuntimePointerSize);
- if (inlined_method != nullptr && !inlined_method->IsRuntimeMethod()) {
+ if (inlined_method != nullptr) {
+ DCHECK(!inlined_method->IsRuntimeMethod());
return inlined_method;
}
const char* descriptor = dex_file->StringByTypeIdx(method_id.class_idx_);
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 6abf7c5..3061365 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -2461,6 +2461,21 @@
return artInvokeCommon<kVirtual, true>(method_idx, this_object, self, sp);
}
+// Helper function for art_quick_imt_conflict_trampoline to look up the interface method.
+extern "C" ArtMethod* artLookupResolvedMethod(uint32_t method_index, ArtMethod* referrer)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ ScopedAssertNoThreadSuspension ants(__FUNCTION__);
+ DCHECK(!referrer->IsProxyMethod());
+ ArtMethod* result = Runtime::Current()->GetClassLinker()->LookupResolvedMethod(
+ method_index, referrer->GetDexCache(), referrer->GetClassLoader());
+ DCHECK(result == nullptr ||
+ result->GetDeclaringClass()->IsInterface() ||
+ result->GetDeclaringClass() ==
+ WellKnownClasses::ToClass(WellKnownClasses::java_lang_Object))
+ << result->PrettyMethod();
+ return result;
+}
+
// Determine target of interface dispatch. The interface method and this object are known non-null.
// The interface method is the method returned by the dex cache in the conflict trampoline.
extern "C" TwoWordReturn artInvokeInterfaceTrampoline(ArtMethod* interface_method,
@@ -2468,7 +2483,6 @@
Thread* self,
ArtMethod** sp)
REQUIRES_SHARED(Locks::mutator_lock_) {
- CHECK(interface_method != nullptr);
ObjPtr<mirror::Object> this_object(raw_this_object);
ScopedQuickEntrypointChecks sqec(self);
StackHandleScope<1> hs(self);
@@ -2478,7 +2492,8 @@
ArtMethod* method = nullptr;
ImTable* imt = cls->GetImt(kRuntimePointerSize);
- if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
+ if (LIKELY(interface_method != nullptr)) {
+ DCHECK_NE(interface_method->GetDexMethodIndex(), DexFile::kDexNoIndex);
// If the interface method is already resolved, look whether we have a match in the
// ImtConflictTable.
ArtMethod* conflict_method = imt->Get(ImTable::GetImtIndex(interface_method),
@@ -2505,9 +2520,7 @@
return GetTwoWordFailureValue(); // Failure.
}
} else {
- // The interface method is unresolved, so look it up in the dex file of the caller.
- DCHECK_EQ(interface_method, Runtime::Current()->GetResolutionMethod());
-
+ // The interface method is unresolved, so resolve it in the dex file of the caller.
// Fetch the dex_method_idx of the target interface method from the caller.
uint32_t dex_method_idx;
uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index 3ae382e..fe0d35f 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -1268,17 +1268,19 @@
}
dex_cache->FixupResolvedTypes<kWithoutReadBarrier>(new_types, fixup_adapter);
}
- ArtMethod** methods = dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* methods = dex_cache->GetResolvedMethods();
if (methods != nullptr) {
- ArtMethod** new_methods = fixup_adapter.ForwardObject(methods);
+ mirror::MethodDexCacheType* new_methods = fixup_adapter.ForwardObject(methods);
if (methods != new_methods) {
dex_cache->SetResolvedMethods(new_methods);
}
for (size_t j = 0, num = dex_cache->NumResolvedMethods(); j != num; ++j) {
- ArtMethod* orig = mirror::DexCache::GetElementPtrSize(new_methods, j, pointer_size);
+ auto pair = mirror::DexCache::GetNativePairPtrSize(new_methods, j, pointer_size);
+ ArtMethod* orig = pair.object;
ArtMethod* copy = fixup_adapter.ForwardObject(orig);
if (orig != copy) {
- mirror::DexCache::SetElementPtrSize(new_methods, j, copy, pointer_size);
+ pair.object = copy;
+ mirror::DexCache::SetNativePairPtrSize(new_methods, j, pair, pointer_size);
}
}
}
diff --git a/runtime/generated/asm_support_gen.h b/runtime/generated/asm_support_gen.h
index 06e4704..acfd889 100644
--- a/runtime/generated/asm_support_gen.h
+++ b/runtime/generated/asm_support_gen.h
@@ -78,6 +78,10 @@
DEFINE_CHECK_EQ(static_cast<int32_t>(STRING_DEX_CACHE_HASH_BITS), (static_cast<int32_t>(art::LeastSignificantBit(art::mirror::DexCache::kDexCacheStringCacheSize))))
#define STRING_DEX_CACHE_ELEMENT_SIZE 8
DEFINE_CHECK_EQ(static_cast<int32_t>(STRING_DEX_CACHE_ELEMENT_SIZE), (static_cast<int32_t>(sizeof(art::mirror::StringDexCachePair))))
+#define METHOD_DEX_CACHE_SIZE_MINUS_ONE 1023
+DEFINE_CHECK_EQ(static_cast<int32_t>(METHOD_DEX_CACHE_SIZE_MINUS_ONE), (static_cast<int32_t>(art::mirror::DexCache::kDexCacheMethodCacheSize - 1)))
+#define METHOD_DEX_CACHE_HASH_BITS 10
+DEFINE_CHECK_EQ(static_cast<int32_t>(METHOD_DEX_CACHE_HASH_BITS), (static_cast<int32_t>(art::LeastSignificantBit(art::mirror::DexCache::kDexCacheMethodCacheSize))))
#define CARD_TABLE_CARD_SHIFT 0xa
DEFINE_CHECK_EQ(static_cast<size_t>(CARD_TABLE_CARD_SHIFT), (static_cast<size_t>(art::gc::accounting::CardTable::kCardShift)))
#define MIN_LARGE_OBJECT_THRESHOLD 0x3000
diff --git a/runtime/image.cc b/runtime/image.cc
index ac36d7c..7d0a709 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -26,7 +26,7 @@
namespace art {
const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-const uint8_t ImageHeader::kImageVersion[] = { '0', '4', '5', '\0' }; // Fix DexCache fields.
+const uint8_t ImageHeader::kImageVersion[] = { '0', '4', '6', '\0' }; // Hash-based methods array.
ImageHeader::ImageHeader(uint32_t image_begin,
uint32_t image_size,
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 18e22ef..fdb14f1 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -208,24 +208,38 @@
}
}
+inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) {
+ DCHECK_LT(method_idx, GetDexFile()->NumMethodIds());
+ const uint32_t slot_idx = method_idx % kDexCacheMethodCacheSize;
+ DCHECK_LT(slot_idx, NumResolvedMethods());
+ return slot_idx;
+}
+
inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
- DCHECK_LT(method_idx, NumResolvedMethods()); // NOTE: Unchecked, i.e. not throwing AIOOB.
- ArtMethod* method = GetElementPtrSize<ArtMethod*>(GetResolvedMethods(), method_idx, ptr_size);
- // Hide resolution trampoline methods from the caller
- if (method != nullptr && method->IsRuntimeMethod()) {
- DCHECK_EQ(method, Runtime::Current()->GetResolutionMethod());
- return nullptr;
- }
- return method;
+ auto pair = GetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), ptr_size);
+ return pair.GetObjectForIndex(method_idx);
}
inline void DexCache::SetResolvedMethod(uint32_t method_idx,
ArtMethod* method,
PointerSize ptr_size) {
DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
- DCHECK_LT(method_idx, NumResolvedMethods()); // NOTE: Unchecked, i.e. not throwing AIOOB.
- SetElementPtrSize(GetResolvedMethods(), method_idx, method, ptr_size);
+ DCHECK(method != nullptr);
+ MethodDexCachePair pair(method, method_idx);
+ SetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), pair, ptr_size);
+}
+
+inline void DexCache::ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
+ DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
+ uint32_t slot_idx = MethodSlotIndex(method_idx);
+ auto* resolved_methods = GetResolvedMethods();
+ // This is racy but should only be called from the single-threaded ImageWriter.
+ DCHECK(Runtime::Current()->IsAotCompiler());
+ if (GetNativePairPtrSize(resolved_methods, slot_idx, ptr_size).index == method_idx) {
+ MethodDexCachePair cleared(nullptr, MethodDexCachePair::InvalidIndexForSlot(slot_idx));
+ SetNativePairPtrSize(resolved_methods, slot_idx, cleared, ptr_size);
+ }
}
template <typename PtrType>
diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc
index 96e3475..7b18a4c 100644
--- a/runtime/mirror/dex_cache.cc
+++ b/runtime/mirror/dex_cache.cc
@@ -61,14 +61,14 @@
: reinterpret_cast<uint8_t*>(linear_alloc->Alloc(self, layout.Size()));
}
- mirror::StringDexCacheType* strings = (dex_file->NumStringIds() == 0u) ? nullptr :
- reinterpret_cast<mirror::StringDexCacheType*>(raw_arrays + layout.StringsOffset());
- mirror::TypeDexCacheType* types = (dex_file->NumTypeIds() == 0u) ? nullptr :
- reinterpret_cast<mirror::TypeDexCacheType*>(raw_arrays + layout.TypesOffset());
- ArtMethod** methods = (dex_file->NumMethodIds() == 0u) ? nullptr :
- reinterpret_cast<ArtMethod**>(raw_arrays + layout.MethodsOffset());
- mirror::FieldDexCacheType* fields = (dex_file->NumFieldIds() == 0u) ? nullptr :
- reinterpret_cast<mirror::FieldDexCacheType*>(raw_arrays + layout.FieldsOffset());
+ StringDexCacheType* strings = (dex_file->NumStringIds() == 0u) ? nullptr :
+ reinterpret_cast<StringDexCacheType*>(raw_arrays + layout.StringsOffset());
+ TypeDexCacheType* types = (dex_file->NumTypeIds() == 0u) ? nullptr :
+ reinterpret_cast<TypeDexCacheType*>(raw_arrays + layout.TypesOffset());
+ MethodDexCacheType* methods = (dex_file->NumMethodIds() == 0u) ? nullptr :
+ reinterpret_cast<MethodDexCacheType*>(raw_arrays + layout.MethodsOffset());
+ FieldDexCacheType* fields = (dex_file->NumFieldIds() == 0u) ? nullptr :
+ reinterpret_cast<FieldDexCacheType*>(raw_arrays + layout.FieldsOffset());
size_t num_strings = kDexCacheStringCacheSize;
if (dex_file->NumStringIds() < num_strings) {
@@ -82,6 +82,10 @@
if (dex_file->NumFieldIds() < num_fields) {
num_fields = dex_file->NumFieldIds();
}
+ size_t num_methods = kDexCacheMethodCacheSize;
+ if (dex_file->NumMethodIds() < num_methods) {
+ num_methods = dex_file->NumMethodIds();
+ }
// Note that we allocate the method type dex caches regardless of this flag,
// and we make sure here that they're not used by the runtime. This is in the
@@ -105,7 +109,7 @@
GcRoot<mirror::CallSite>* call_sites = (dex_file->NumCallSiteIds() == 0)
? nullptr
- : reinterpret_cast<GcRoot<mirror::CallSite>*>(raw_arrays + layout.CallSitesOffset());
+ : reinterpret_cast<GcRoot<CallSite>*>(raw_arrays + layout.CallSitesOffset());
DCHECK_ALIGNED(raw_arrays, alignof(StringDexCacheType)) <<
"Expected raw_arrays to align to StringDexCacheType.";
@@ -125,8 +129,9 @@
CHECK_EQ(types[i].load(std::memory_order_relaxed).index, 0u);
CHECK(types[i].load(std::memory_order_relaxed).object.IsNull());
}
- for (size_t i = 0; i < dex_file->NumMethodIds(); ++i) {
- CHECK(GetElementPtrSize(methods, i, image_pointer_size) == nullptr);
+ for (size_t i = 0; i < num_methods; ++i) {
+ CHECK_EQ(GetNativePairPtrSize(methods, i, image_pointer_size).index, 0u);
+ CHECK(GetNativePairPtrSize(methods, i, image_pointer_size).object == nullptr);
}
for (size_t i = 0; i < num_fields; ++i) {
CHECK_EQ(GetNativePairPtrSize(fields, i, image_pointer_size).index, 0u);
@@ -149,6 +154,9 @@
if (fields != nullptr) {
mirror::FieldDexCachePair::Initialize(fields, image_pointer_size);
}
+ if (methods != nullptr) {
+ mirror::MethodDexCachePair::Initialize(methods, image_pointer_size);
+ }
if (method_types != nullptr) {
mirror::MethodTypeDexCachePair::Initialize(method_types);
}
@@ -159,14 +167,13 @@
types,
num_types,
methods,
- dex_file->NumMethodIds(),
+ num_methods,
fields,
num_fields,
method_types,
num_method_types,
call_sites,
- dex_file->NumCallSiteIds(),
- image_pointer_size);
+ dex_file->NumCallSiteIds());
}
void DexCache::Init(const DexFile* dex_file,
@@ -175,15 +182,14 @@
uint32_t num_strings,
TypeDexCacheType* resolved_types,
uint32_t num_resolved_types,
- ArtMethod** resolved_methods,
+ MethodDexCacheType* resolved_methods,
uint32_t num_resolved_methods,
FieldDexCacheType* resolved_fields,
uint32_t num_resolved_fields,
MethodTypeDexCacheType* resolved_method_types,
uint32_t num_resolved_method_types,
GcRoot<CallSite>* resolved_call_sites,
- uint32_t num_resolved_call_sites,
- PointerSize pointer_size) {
+ uint32_t num_resolved_call_sites) {
CHECK(dex_file != nullptr);
CHECK(location != nullptr);
CHECK_EQ(num_strings != 0u, strings != nullptr);
@@ -207,24 +213,6 @@
SetField32<false>(NumResolvedFieldsOffset(), num_resolved_fields);
SetField32<false>(NumResolvedMethodTypesOffset(), num_resolved_method_types);
SetField32<false>(NumResolvedCallSitesOffset(), num_resolved_call_sites);
-
- Runtime* const runtime = Runtime::Current();
- if (runtime->HasResolutionMethod()) {
- // Initialize the resolve methods array to contain trampolines for resolution.
- Fixup(runtime->GetResolutionMethod(), pointer_size);
- }
-}
-
-void DexCache::Fixup(ArtMethod* trampoline, PointerSize pointer_size) {
- // Fixup the resolve methods array to contain trampoline for resolution.
- CHECK(trampoline != nullptr);
- CHECK(trampoline->IsRuntimeMethod());
- auto* resolved_methods = GetResolvedMethods();
- for (size_t i = 0, length = NumResolvedMethods(); i < length; i++) {
- if (GetElementPtrSize<ArtMethod*>(resolved_methods, i, pointer_size) == nullptr) {
- SetElementPtrSize(resolved_methods, i, trampoline, pointer_size);
- }
- }
}
void DexCache::SetLocation(ObjPtr<mirror::String> location) {
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index cf570b8..7fd5dd1 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -129,6 +129,9 @@
using FieldDexCachePair = NativeDexCachePair<ArtField>;
using FieldDexCacheType = std::atomic<FieldDexCachePair>;
+using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
+using MethodDexCacheType = std::atomic<MethodDexCachePair>;
+
using MethodTypeDexCachePair = DexCachePair<MethodType>;
using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
@@ -153,6 +156,11 @@
static_assert(IsPowerOfTwo(kDexCacheFieldCacheSize),
"Field dex cache size is not a power of 2.");
+ // Size of method dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
+ static constexpr size_t kDexCacheMethodCacheSize = 1024;
+ static_assert(IsPowerOfTwo(kDexCacheMethodCacheSize),
+ "Method dex cache size is not a power of 2.");
+
// Size of method type dex cache. Needs to be a power of 2 for entrypoint assumptions
// to hold.
static constexpr size_t kDexCacheMethodTypeCacheSize = 1024;
@@ -171,6 +179,10 @@
return kDexCacheFieldCacheSize;
}
+ static constexpr size_t StaticMethodSize() {
+ return kDexCacheMethodCacheSize;
+ }
+
static constexpr size_t StaticMethodTypeSize() {
return kDexCacheMethodTypeCacheSize;
}
@@ -189,9 +201,6 @@
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::dex_lock_);
- void Fixup(ArtMethod* trampoline, PointerSize pointer_size)
- REQUIRES_SHARED(Locks::mutator_lock_);
-
template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -284,6 +293,8 @@
ArtMethod* resolved,
PointerSize ptr_size)
REQUIRES_SHARED(Locks::mutator_lock_);
+ ALWAYS_INLINE void ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
+ REQUIRES_SHARED(Locks::mutator_lock_);
// Pointer sized variant, used for patching.
ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, PointerSize ptr_size)
@@ -328,11 +339,11 @@
SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
}
- ArtMethod** GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
- return GetFieldPtr<ArtMethod**>(ResolvedMethodsOffset());
+ MethodDexCacheType* GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
+ return GetFieldPtr<MethodDexCacheType*>(ResolvedMethodsOffset());
}
- void SetResolvedMethods(ArtMethod** resolved_methods)
+ void SetResolvedMethods(MethodDexCacheType* resolved_methods)
ALWAYS_INLINE
REQUIRES_SHARED(Locks::mutator_lock_) {
SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods);
@@ -429,6 +440,7 @@
uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
uint32_t FieldSlotIndex(uint32_t field_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+ uint32_t MethodSlotIndex(uint32_t method_idx) REQUIRES_SHARED(Locks::mutator_lock_);
uint32_t MethodTypeSlotIndex(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);
private:
@@ -438,15 +450,14 @@
uint32_t num_strings,
TypeDexCacheType* resolved_types,
uint32_t num_resolved_types,
- ArtMethod** resolved_methods,
+ MethodDexCacheType* resolved_methods,
uint32_t num_resolved_methods,
FieldDexCacheType* resolved_fields,
uint32_t num_resolved_fields,
MethodTypeDexCacheType* resolved_method_types,
uint32_t num_resolved_method_types,
GcRoot<CallSite>* resolved_call_sites,
- uint32_t num_resolved_call_sites,
- PointerSize pointer_size)
+ uint32_t num_resolved_call_sites)
REQUIRES_SHARED(Locks::mutator_lock_);
// std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations,
@@ -471,7 +482,7 @@
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
// Due to lack of 16-byte atomics support, we use hand-crafted routines.
-#if defined(__aarch64__)
+#if defined(__aarch64__)
// 16-byte atomics are supported on aarch64.
ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B(
std::atomic<ConversionPair64>* target) {
diff --git a/runtime/mirror/dex_cache_test.cc b/runtime/mirror/dex_cache_test.cc
index 194d9bc..d2b9240 100644
--- a/runtime/mirror/dex_cache_test.cc
+++ b/runtime/mirror/dex_cache_test.cc
@@ -54,7 +54,8 @@
|| java_lang_dex_file_->NumStringIds() == dex_cache->NumStrings());
EXPECT_TRUE(dex_cache->StaticTypeSize() == dex_cache->NumResolvedTypes()
|| java_lang_dex_file_->NumTypeIds() == dex_cache->NumResolvedTypes());
- EXPECT_EQ(java_lang_dex_file_->NumMethodIds(), dex_cache->NumResolvedMethods());
+ EXPECT_TRUE(dex_cache->StaticMethodSize() == dex_cache->NumResolvedMethods()
+ || java_lang_dex_file_->NumMethodIds() == dex_cache->NumResolvedMethods());
EXPECT_TRUE(dex_cache->StaticArtFieldSize() == dex_cache->NumResolvedFields()
|| java_lang_dex_file_->NumFieldIds() == dex_cache->NumResolvedFields());
EXPECT_TRUE(dex_cache->StaticMethodTypeSize() == dex_cache->NumResolvedMethodTypes()
diff --git a/runtime/native/dalvik_system_VMRuntime.cc b/runtime/native/dalvik_system_VMRuntime.cc
index e6e55a2..020612b 100644
--- a/runtime/native/dalvik_system_VMRuntime.cc
+++ b/runtime/native/dalvik_system_VMRuntime.cc
@@ -298,15 +298,16 @@
// Based on ClassLinker::ResolveString.
static void PreloadDexCachesResolveString(
- Handle<mirror::DexCache> dex_cache, dex::StringIndex string_idx, StringTable& strings)
+ ObjPtr<mirror::DexCache> dex_cache, dex::StringIndex string_idx, StringTable& strings)
REQUIRES_SHARED(Locks::mutator_lock_) {
- ObjPtr<mirror::String> string = dex_cache->GetResolvedString(string_idx);
- if (string != nullptr) {
- return;
+ uint32_t slot_idx = dex_cache->StringSlotIndex(string_idx);
+ auto pair = dex_cache->GetStrings()[slot_idx].load(std::memory_order_relaxed);
+ if (!pair.object.IsNull()) {
+ return; // The entry already contains some String.
}
const DexFile* dex_file = dex_cache->GetDexFile();
const char* utf8 = dex_file->StringDataByIdx(string_idx);
- string = strings[utf8];
+ ObjPtr<mirror::String> string = strings[utf8];
if (string == nullptr) {
return;
}
@@ -319,18 +320,17 @@
ObjPtr<mirror::DexCache> dex_cache,
dex::TypeIndex type_idx)
REQUIRES_SHARED(Locks::mutator_lock_) {
- ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(type_idx);
- if (klass != nullptr) {
- return;
+ uint32_t slot_idx = dex_cache->TypeSlotIndex(type_idx);
+ auto pair = dex_cache->GetResolvedTypes()[slot_idx].load(std::memory_order_relaxed);
+ if (!pair.object.IsNull()) {
+ return; // The entry already contains some Class.
}
const DexFile* dex_file = dex_cache->GetDexFile();
const char* class_name = dex_file->StringByTypeIdx(type_idx);
ClassLinker* linker = Runtime::Current()->GetClassLinker();
- if (class_name[1] == '\0') {
- klass = linker->FindPrimitiveClass(class_name[0]);
- } else {
- klass = linker->LookupClass(self, class_name, nullptr);
- }
+ ObjPtr<mirror::Class> klass = (class_name[1] == '\0')
+ ? linker->FindPrimitiveClass(class_name[0])
+ : linker->LookupClass(self, class_name, nullptr);
if (klass == nullptr) {
return;
}
@@ -345,26 +345,27 @@
}
// Based on ClassLinker::ResolveField.
-static void PreloadDexCachesResolveField(Handle<mirror::DexCache> dex_cache, uint32_t field_idx,
+static void PreloadDexCachesResolveField(ObjPtr<mirror::DexCache> dex_cache,
+ uint32_t field_idx,
bool is_static)
REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtField* field = dex_cache->GetResolvedField(field_idx, kRuntimePointerSize);
- if (field != nullptr) {
- return;
+ uint32_t slot_idx = dex_cache->FieldSlotIndex(field_idx);
+ auto pair = mirror::DexCache::GetNativePairPtrSize(dex_cache->GetResolvedFields(),
+ slot_idx,
+ kRuntimePointerSize);
+ if (pair.object != nullptr) {
+ return; // The entry already contains some ArtField.
}
const DexFile* dex_file = dex_cache->GetDexFile();
const DexFile::FieldId& field_id = dex_file->GetFieldId(field_idx);
- Thread* const self = Thread::Current();
- StackHandleScope<1> hs(self);
- Handle<mirror::Class> klass(hs.NewHandle(dex_cache->GetResolvedType(field_id.class_idx_)));
+ ObjPtr<mirror::Class> klass =
+ ClassLinker::LookupResolvedType(field_id.class_idx_, dex_cache, nullptr);
if (klass == nullptr) {
return;
}
- if (is_static) {
- field = mirror::Class::FindStaticField(self, klass.Get(), dex_cache.Get(), field_idx);
- } else {
- field = klass->FindInstanceField(dex_cache.Get(), field_idx);
- }
+ ArtField* field = is_static
+ ? mirror::Class::FindStaticField(Thread::Current(), klass, dex_cache, field_idx)
+ : klass->FindInstanceField(dex_cache, field_idx);
if (field == nullptr) {
return;
}
@@ -372,24 +373,25 @@
}
// Based on ClassLinker::ResolveMethod.
-static void PreloadDexCachesResolveMethod(Handle<mirror::DexCache> dex_cache, uint32_t method_idx)
+static void PreloadDexCachesResolveMethod(ObjPtr<mirror::DexCache> dex_cache, uint32_t method_idx)
REQUIRES_SHARED(Locks::mutator_lock_) {
- ArtMethod* method = dex_cache->GetResolvedMethod(method_idx, kRuntimePointerSize);
- if (method != nullptr) {
- return;
+ uint32_t slot_idx = dex_cache->MethodSlotIndex(method_idx);
+ auto pair = mirror::DexCache::GetNativePairPtrSize(dex_cache->GetResolvedMethods(),
+ slot_idx,
+ kRuntimePointerSize);
+ if (pair.object != nullptr) {
+ return; // The entry already contains some ArtMethod.
}
const DexFile* dex_file = dex_cache->GetDexFile();
const DexFile::MethodId& method_id = dex_file->GetMethodId(method_idx);
ObjPtr<mirror::Class> klass =
- ClassLinker::LookupResolvedType(method_id.class_idx_, dex_cache.Get(), nullptr);
+ ClassLinker::LookupResolvedType(method_id.class_idx_, dex_cache, nullptr);
if (klass == nullptr) {
return;
}
- if (klass->IsInterface()) {
- method = klass->FindInterfaceMethod(dex_cache.Get(), method_idx, kRuntimePointerSize);
- } else {
- method = klass->FindClassMethod(dex_cache.Get(), method_idx, kRuntimePointerSize);
- }
+ ArtMethod* method = klass->IsInterface()
+ ? klass->FindInterfaceMethod(dex_cache, method_idx, kRuntimePointerSize)
+ : klass->FindClassMethod(dex_cache, method_idx, kRuntimePointerSize);
if (method == nullptr) {
return;
}
@@ -451,27 +453,31 @@
}
ObjPtr<mirror::DexCache> const dex_cache = class_linker->FindDexCache(self, *dex_file);
DCHECK(dex_cache != nullptr); // Boot class path dex caches are never unloaded.
- for (size_t j = 0; j < dex_cache->NumStrings(); j++) {
- ObjPtr<mirror::String> string = dex_cache->GetResolvedString(dex::StringIndex(j));
- if (string != nullptr) {
+ for (size_t j = 0, num_strings = dex_cache->NumStrings(); j < num_strings; ++j) {
+ auto pair = dex_cache->GetStrings()[j].load(std::memory_order_relaxed);
+ if (!pair.object.IsNull()) {
filled->num_strings++;
}
}
- for (size_t j = 0; j < dex_cache->NumResolvedTypes(); j++) {
- ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(dex::TypeIndex(j));
- if (klass != nullptr) {
+ for (size_t j = 0, num_types = dex_cache->NumResolvedTypes(); j < num_types; ++j) {
+ auto pair = dex_cache->GetResolvedTypes()[j].load(std::memory_order_relaxed);
+ if (!pair.object.IsNull()) {
filled->num_types++;
}
}
- for (size_t j = 0; j < dex_cache->NumResolvedFields(); j++) {
- ArtField* field = dex_cache->GetResolvedField(j, class_linker->GetImagePointerSize());
- if (field != nullptr) {
+ for (size_t j = 0, num_fields = dex_cache->NumResolvedFields(); j < num_fields; ++j) {
+ auto pair = mirror::DexCache::GetNativePairPtrSize(dex_cache->GetResolvedFields(),
+ j,
+ kRuntimePointerSize);
+ if (pair.object != nullptr) {
filled->num_fields++;
}
}
- for (size_t j = 0; j < dex_cache->NumResolvedMethods(); j++) {
- ArtMethod* method = dex_cache->GetResolvedMethod(j, kRuntimePointerSize);
- if (method != nullptr) {
+ for (size_t j = 0, num_methods = dex_cache->NumResolvedMethods(); j < num_methods; ++j) {
+ auto pair = mirror::DexCache::GetNativePairPtrSize(dex_cache->GetResolvedMethods(),
+ j,
+ kRuntimePointerSize);
+ if (pair.object != nullptr) {
filled->num_methods++;
}
}
@@ -511,8 +517,7 @@
for (size_t i = 0; i < boot_class_path.size(); i++) {
const DexFile* dex_file = boot_class_path[i];
CHECK(dex_file != nullptr);
- StackHandleScope<1> hs(soa.Self());
- Handle<mirror::DexCache> dex_cache(hs.NewHandle(linker->RegisterDexFile(*dex_file, nullptr)));
+ ObjPtr<mirror::DexCache> dex_cache = linker->RegisterDexFile(*dex_file, nullptr);
CHECK(dex_cache != nullptr); // Boot class path dex caches are never unloaded.
if (kPreloadDexCachesStrings) {
for (size_t j = 0; j < dex_cache->NumStrings(); j++) {
@@ -522,7 +527,7 @@
if (kPreloadDexCachesTypes) {
for (size_t j = 0; j < dex_cache->NumResolvedTypes(); j++) {
- PreloadDexCachesResolveType(soa.Self(), dex_cache.Get(), dex::TypeIndex(j));
+ PreloadDexCachesResolveType(soa.Self(), dex_cache, dex::TypeIndex(j));
}
}
diff --git a/runtime/openjdkjvmti/ti_thread.cc b/runtime/openjdkjvmti/ti_thread.cc
index f16b419..9acea2a 100644
--- a/runtime/openjdkjvmti/ti_thread.cc
+++ b/runtime/openjdkjvmti/ti_thread.cc
@@ -701,7 +701,7 @@
jvmtiError ThreadUtil::SuspendOther(art::Thread* self,
jthread target_jthread,
- art::Thread* target) {
+ const art::Thread* target) {
// Loop since we need to bail out and try again if we would end up getting suspended while holding
// the user_code_suspension_lock_ due to a SuspendReason::kForUserCode. In this situation we
// release the lock, wait to get resumed and try again.
@@ -729,12 +729,12 @@
if (state == art::ThreadState::kTerminated || state == art::ThreadState::kStarting) {
return ERR(THREAD_NOT_ALIVE);
}
- target = art::Runtime::Current()->GetThreadList()->SuspendThreadByPeer(
+ art::Thread* ret_target = art::Runtime::Current()->GetThreadList()->SuspendThreadByPeer(
target_jthread,
/* request_suspension */ true,
art::SuspendReason::kForUserCode,
&timeout);
- if (target == nullptr && !timeout) {
+ if (ret_target == nullptr && !timeout) {
// TODO It would be good to get more information about why exactly the thread failed to
// suspend.
return ERR(INTERNAL);
diff --git a/runtime/openjdkjvmti/ti_thread.h b/runtime/openjdkjvmti/ti_thread.h
index d07dc06..0f7e837 100644
--- a/runtime/openjdkjvmti/ti_thread.h
+++ b/runtime/openjdkjvmti/ti_thread.h
@@ -98,7 +98,9 @@
// cause the thread to wake up if the thread is suspended for the debugger or gc or something.
static jvmtiError SuspendSelf(art::Thread* self)
REQUIRES(!art::Locks::mutator_lock_, !art::Locks::user_code_suspension_lock_);
- static jvmtiError SuspendOther(art::Thread* self, jthread target_jthread, art::Thread* target)
+ static jvmtiError SuspendOther(art::Thread* self,
+ jthread target_jthread,
+ const art::Thread* target)
REQUIRES(!art::Locks::mutator_lock_, !art::Locks::user_code_suspension_lock_);
static art::ArtField* context_class_loader_;
diff --git a/runtime/utils/dex_cache_arrays_layout-inl.h b/runtime/utils/dex_cache_arrays_layout-inl.h
index 72f63c6..9d4e9fb 100644
--- a/runtime/utils/dex_cache_arrays_layout-inl.h
+++ b/runtime/utils/dex_cache_arrays_layout-inl.h
@@ -64,7 +64,7 @@
"Expecting alignof(StringDexCacheType) == 8");
static_assert(alignof(mirror::MethodTypeDexCacheType) == 8,
"Expecting alignof(MethodTypeDexCacheType) == 8");
- // This is the same as alignof(FieldDexCacheType) for the given pointer size.
+ // This is the same as alignof({Field,Method}DexCacheType) for the given pointer size.
return 2u * static_cast<size_t>(pointer_size);
}
@@ -84,7 +84,7 @@
if (num_elements < cache_size) {
cache_size = num_elements;
}
- return ArraySize(PointerSize::k64, cache_size);
+ return PairArraySize(GcRootAsPointerSize<mirror::Class>(), cache_size);
}
inline size_t DexCacheArraysLayout::TypesAlignment() const {
@@ -96,11 +96,15 @@
}
inline size_t DexCacheArraysLayout::MethodsSize(size_t num_elements) const {
- return ArraySize(pointer_size_, num_elements);
+ size_t cache_size = mirror::DexCache::kDexCacheMethodCacheSize;
+ if (num_elements < cache_size) {
+ cache_size = num_elements;
+ }
+ return PairArraySize(pointer_size_, cache_size);
}
inline size_t DexCacheArraysLayout::MethodsAlignment() const {
- return static_cast<size_t>(pointer_size_);
+ return 2u * static_cast<size_t>(pointer_size_);
}
inline size_t DexCacheArraysLayout::StringOffset(uint32_t string_idx) const {
@@ -113,7 +117,7 @@
if (num_elements < cache_size) {
cache_size = num_elements;
}
- return ArraySize(PointerSize::k64, cache_size);
+ return PairArraySize(GcRootAsPointerSize<mirror::String>(), cache_size);
}
inline size_t DexCacheArraysLayout::StringsAlignment() const {
@@ -132,7 +136,7 @@
if (num_elements < cache_size) {
cache_size = num_elements;
}
- return 2u * static_cast<size_t>(pointer_size_) * cache_size;
+ return PairArraySize(pointer_size_, cache_size);
}
inline size_t DexCacheArraysLayout::FieldsAlignment() const {
@@ -170,6 +174,10 @@
return static_cast<size_t>(element_size) * num_elements;
}
+inline size_t DexCacheArraysLayout::PairArraySize(PointerSize element_size, uint32_t num_elements) {
+ return 2u * static_cast<size_t>(element_size) * num_elements;
+}
+
} // namespace art
#endif // ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_
diff --git a/runtime/utils/dex_cache_arrays_layout.h b/runtime/utils/dex_cache_arrays_layout.h
index 377a374..fc04159 100644
--- a/runtime/utils/dex_cache_arrays_layout.h
+++ b/runtime/utils/dex_cache_arrays_layout.h
@@ -130,6 +130,7 @@
static size_t ElementOffset(PointerSize element_size, uint32_t idx);
static size_t ArraySize(PointerSize element_size, uint32_t num_elements);
+ static size_t PairArraySize(PointerSize element_size, uint32_t num_elements);
};
} // namespace art
diff --git a/test/497-inlining-and-class-loader/clear_dex_cache.cc b/test/497-inlining-and-class-loader/clear_dex_cache.cc
index 9ba05bc..c113042 100644
--- a/test/497-inlining-and-class-loader/clear_dex_cache.cc
+++ b/test/497-inlining-and-class-loader/clear_dex_cache.cc
@@ -34,22 +34,32 @@
ScopedObjectAccess soa(Thread::Current());
mirror::DexCache* dex_cache = soa.Decode<mirror::Class>(cls)->GetDexCache();
size_t num_methods = dex_cache->NumResolvedMethods();
- ArtMethod** methods = dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* methods = dex_cache->GetResolvedMethods();
CHECK_EQ(num_methods != 0u, methods != nullptr);
if (num_methods == 0u) {
return nullptr;
}
jarray array;
if (sizeof(void*) == 4) {
- array = env->NewIntArray(num_methods);
+ array = env->NewIntArray(2u * num_methods);
} else {
- array = env->NewLongArray(num_methods);
+ array = env->NewLongArray(2u * num_methods);
}
CHECK(array != nullptr);
- mirror::PointerArray* pointer_array = soa.Decode<mirror::PointerArray>(array).Ptr();
+ ObjPtr<mirror::Array> decoded_array = soa.Decode<mirror::Array>(array);
for (size_t i = 0; i != num_methods; ++i) {
- ArtMethod* method = mirror::DexCache::GetElementPtrSize(methods, i, kRuntimePointerSize);
- pointer_array->SetElementPtrSize(i, method, kRuntimePointerSize);
+ auto pair = mirror::DexCache::GetNativePairPtrSize(methods, i, kRuntimePointerSize);
+ uint32_t index = pair.index;
+ ArtMethod* method = pair.object;
+ if (sizeof(void*) == 4) {
+ ObjPtr<mirror::IntArray> int_array = down_cast<mirror::IntArray*>(decoded_array.Ptr());
+ int_array->Set(2u * i, index);
+ int_array->Set(2u * i + 1u, static_cast<jint>(reinterpret_cast<uintptr_t>(method)));
+ } else {
+ ObjPtr<mirror::LongArray> long_array = down_cast<mirror::LongArray*>(decoded_array.Ptr());
+ long_array->Set(2u * i, index);
+ long_array->Set(2u * i + 1u, reinterpret_cast64<jlong>(method));
+ }
}
return array;
}
@@ -59,14 +69,26 @@
ScopedObjectAccess soa(Thread::Current());
mirror::DexCache* dex_cache = soa.Decode<mirror::Class>(cls)->GetDexCache();
size_t num_methods = dex_cache->NumResolvedMethods();
- ArtMethod** methods = soa.Decode<mirror::Class>(cls)->GetDexCache()->GetResolvedMethods();
+ mirror::MethodDexCacheType* methods =
+ soa.Decode<mirror::Class>(cls)->GetDexCache()->GetResolvedMethods();
CHECK_EQ(num_methods != 0u, methods != nullptr);
- ObjPtr<mirror::PointerArray> old = soa.Decode<mirror::PointerArray>(old_cache);
+ ObjPtr<mirror::Array> old = soa.Decode<mirror::Array>(old_cache);
CHECK_EQ(methods != nullptr, old != nullptr);
CHECK_EQ(num_methods, static_cast<size_t>(old->GetLength()));
for (size_t i = 0; i != num_methods; ++i) {
- ArtMethod* method = old->GetElementPtrSize<ArtMethod*>(i, kRuntimePointerSize);
- mirror::DexCache::SetElementPtrSize(methods, i, method, kRuntimePointerSize);
+ uint32_t index;
+ ArtMethod* method;
+ if (sizeof(void*) == 4) {
+ ObjPtr<mirror::IntArray> int_array = down_cast<mirror::IntArray*>(old.Ptr());
+ index = static_cast<uint32_t>(int_array->Get(2u * i));
+ method = reinterpret_cast<ArtMethod*>(static_cast<uint32_t>(int_array->Get(2u * i + 1u)));
+ } else {
+ ObjPtr<mirror::LongArray> long_array = down_cast<mirror::LongArray*>(old.Ptr());
+ index = dchecked_integral_cast<uint32_t>(long_array->Get(2u * i));
+ method = reinterpret_cast64<ArtMethod*>(long_array->Get(2u * i + 1u));
+ }
+ mirror::MethodDexCachePair pair(method, index);
+ mirror::DexCache::SetNativePairPtrSize(methods, i, pair, kRuntimePointerSize);
}
}
diff --git a/test/924-threads/src/art/Test924.java b/test/924-threads/src/art/Test924.java
index 84b7c62..b73eb30 100644
--- a/test/924-threads/src/art/Test924.java
+++ b/test/924-threads/src/art/Test924.java
@@ -164,8 +164,10 @@
do {
Thread.yield();
} while (t.getState() != Thread.State.BLOCKED);
- Thread.sleep(10);
- printThreadState(t);
+ // Since internal thread suspension (For GC or other cases) can happen at any time and changes
+ // the thread state, we just have it print the majority thread state across 11 calls over 55
+ // milliseconds.
+ printMajorityThreadState(t, 11, 5);
}
// Sleeping.
@@ -357,10 +359,32 @@
STATE_KEYS.addAll(STATE_NAMES.keySet());
Collections.sort(STATE_KEYS);
}
-
- private static void printThreadState(Thread t) {
- int state = getThreadState(t);
+ // Call getThreadState 'votes' times waiting 'wait' millis between calls and print the most common
+ // result.
+ private static void printMajorityThreadState(Thread t, int votes, int wait) throws Exception {
+ Map<Integer, Integer> states = new HashMap<>();
+ for (int i = 0; i < votes; i++) {
+ int cur_state = getThreadState(t);
+ states.put(cur_state, states.getOrDefault(cur_state, 0) + 1);
+ Thread.sleep(wait); // Wait a little bit.
+ }
+ int best_state = -1;
+ int highest_count = 0;
+ for (Map.Entry<Integer, Integer> e : states.entrySet()) {
+ if (e.getValue() > highest_count) {
+ highest_count = e.getValue();
+ best_state = e.getKey();
+ }
+ }
+ printThreadState(best_state);
+ }
+
+ private static void printThreadState(Thread t) {
+ printThreadState(getThreadState(t));
+ }
+
+ private static void printThreadState(int state) {
StringBuilder sb = new StringBuilder();
for (Integer i : STATE_KEYS) {
diff --git a/test/testrunner/run_build_test_target.py b/test/testrunner/run_build_test_target.py
index 37b3559..b1274c9 100755
--- a/test/testrunner/run_build_test_target.py
+++ b/test/testrunner/run_build_test_target.py
@@ -100,8 +100,6 @@
run_test_command += ['-b']
run_test_command += ['--host']
run_test_command += ['--verbose']
- run_test_command += ['--timeout']
- run_test_command += ['14100'] # 235 minutes (The go/ab timeout is 14500)
sys.stdout.write(str(run_test_command) + '\n')
sys.stdout.flush()
diff --git a/test/testrunner/testrunner.py b/test/testrunner/testrunner.py
index 017c19b..68e1856 100755
--- a/test/testrunner/testrunner.py
+++ b/test/testrunner/testrunner.py
@@ -50,7 +50,6 @@
import json
import multiprocessing
import os
-import operator
import re
import subprocess
import sys
@@ -76,7 +75,9 @@
OPTIMIZING_COMPILER_TYPES = set()
JVMTI_TYPES = set()
ADDRESS_SIZES_TARGET = {'host': set(), 'target': set()}
-TIME_STATS = {}
+# timeout for individual tests.
+# TODO: make it adjustable per tests and for buildbots
+timeout = 3000 # 50 minutes
# DISABLED_TEST_CONTAINER holds information about the disabled tests. It is a map
# that has key as the test name (like 001-HelloWorld), and value as set of
@@ -127,11 +128,6 @@
gdb_arg = ''
stop_testrunner = False
-# timeout for individual tests.
-# TODO: make it adjustable per tests and for buildbots
-test_timeout = 3000 # 50 minutes
-timeout = sys.maxsize
-
def gather_test_info():
"""The method gathers test information about the test to be run which includes
generating the list of total tests from the art/test directory and the list
@@ -510,13 +506,12 @@
test_skipped = True
else:
test_skipped = False
- start_recording_time(test_name)
if gdb:
proc = subprocess.Popen(command.split(), stderr=subprocess.STDOUT, universal_newlines=True)
else:
proc = subprocess.Popen(command.split(), stderr=subprocess.STDOUT, stdout = subprocess.PIPE,
universal_newlines=True)
- script_output = proc.communicate(timeout=test_timeout)[0]
+ script_output = proc.communicate(timeout=timeout)[0]
test_passed = not proc.wait()
if not test_skipped:
@@ -534,16 +529,15 @@
else:
print_test_info(test_name, '')
except subprocess.TimeoutExpired as e:
- failed_tests.append((test_name, 'Timed out in %d seconds' % test_timeout))
+ failed_tests.append((test_name, 'Timed out in %d seconds' % timeout))
print_test_info(test_name, 'TIMEOUT', 'Timed out in %d seconds\n%s' % (
- test_timeout, command))
+ timeout, command))
except Exception as e:
failed_tests.append((test_name, str(e)))
print_test_info(test_name, 'FAIL',
('%s\n%s\n\n') % (command, str(e)))
finally:
semaphore.release()
- stop_recording_time(test_name)
def print_test_info(test_name, result, failed_test_info=""):
@@ -735,7 +729,6 @@
sys.stdout.flush()
def print_analysis():
- print_mutex.acquire()
if not verbose:
# Without --verbose, the testrunner erases passing test info. It
# does that by overriding the printed text with white spaces all across
@@ -769,7 +762,6 @@
print_text(COLOR_ERROR + '----------' + COLOR_NORMAL + '\n')
for failed_test in sorted([test_info[0] for test_info in failed_tests]):
print_text(('%s\n' % (failed_test)))
- print_mutex.release()
def parse_test_name(test_name):
@@ -867,16 +859,12 @@
global build
global gdb
global gdb_arg
- global test_timeout
global timeout
parser = argparse.ArgumentParser(description="Runs all or a subset of the ART test suite.")
parser.add_argument('-t', '--test', dest='test', help='name of the test')
parser.add_argument('-j', type=int, dest='n_thread')
- parser.add_argument('--timeout', default=timeout, type=int, dest='timeout',
- help='timeout the testrunner')
- parser.add_argument('--test-timeout', default=test_timeout, type=int, dest='test_timeout',
- help='timeout for individual tests')
+ parser.add_argument('--timeout', default=timeout, type=int, dest='timeout')
for variant in TOTAL_VARIANTS_SET:
flag = '--' + variant
flag_dest = variant.replace('-', '_')
@@ -998,58 +986,28 @@
gdb = True
if options['gdb_arg']:
gdb_arg = options['gdb_arg']
-
timeout = options['timeout']
- test_timeout = options['test_timeout']
return test
-def start_recording_time(key):
- """To begin recording time for the event associated with the key.
- """
- TIME_STATS[key] = -(time.time())
-
-def stop_recording_time(key):
- """To stop timer for the event associated with the key.
- """
- TIME_STATS[key] = time.time() + TIME_STATS[key]
-
-def print_time_info():
- """Print time information for different invocation.
- """
- print_mutex.acquire()
- print_text('\nTIME INFO\n')
- for key in TIME_STATS:
- # Handle unfinised jobs.
- if TIME_STATS[key] < 0:
- TIME_STATS[key] = time.time() + TIME_STATS[key]
-
- info_list = sorted(TIME_STATS.items(), key=operator.itemgetter(1), reverse=True)
- for time_info_tuple in info_list:
- print_text('%s : %.2f sec\n' % (time_info_tuple[0], time_info_tuple[1]))
- print_mutex.release()
-
def main():
- start_time = time.time()
gather_test_info()
user_requested_test = parse_option()
setup_test_env()
if build:
build_targets = ''
if 'host' in TARGET_TYPES:
- build_targets += ' test-art-host-run-test-dependencies'
+ build_targets += 'test-art-host-run-test-dependencies'
if 'target' in TARGET_TYPES:
- build_targets += ' test-art-target-run-test-dependencies'
+ build_targets += 'test-art-target-run-test-dependencies'
build_command = 'make'
build_command += ' -j'
build_command += ' -C ' + env.ANDROID_BUILD_TOP
build_command += ' ' + build_targets
# Add 'dist' to avoid Jack issues b/36169180.
build_command += ' dist'
- start_recording_time(build_command)
if subprocess.call(build_command.split()):
sys.exit(1)
- stop_recording_time(build_command)
if user_requested_test:
test_runner_thread = threading.Thread(target=run_tests, args=(user_requested_test,))
else:
@@ -1058,15 +1016,8 @@
try:
test_runner_thread.start()
while threading.active_count() > 1:
- if (time.time() - start_time > timeout):
- # to ensure that the run ends before the go/ab bots
- # time out the invocation.
- print_text("FAILED: timeout reached")
- print_time_info()
- print_analysis()
- sys.exit(1)
- time.sleep(1)
-
+ time.sleep(0.1)
+ print_analysis()
except Exception as e:
print_analysis()
print_text(str(e))
diff --git a/tools/ahat/src/ObjectsHandler.java b/tools/ahat/src/ObjectsHandler.java
index 86d48f1..fd226c2 100644
--- a/tools/ahat/src/ObjectsHandler.java
+++ b/tools/ahat/src/ObjectsHandler.java
@@ -43,13 +43,7 @@
Site site = mSnapshot.getSite(id, depth);
List<AhatInstance> insts = new ArrayList<AhatInstance>();
- for (AhatInstance inst : site.getObjects()) {
- if ((heapName == null || inst.getHeap().getName().equals(heapName))
- && (className == null || inst.getClassName().equals(className))) {
- insts.add(inst);
- }
- }
-
+ site.getObjects(heapName, className, insts);
Collections.sort(insts, Sort.defaultInstanceCompare(mSnapshot));
doc.title("Objects");
diff --git a/tools/ahat/src/Summarizer.java b/tools/ahat/src/Summarizer.java
index 016eab4..3e9da31 100644
--- a/tools/ahat/src/Summarizer.java
+++ b/tools/ahat/src/Summarizer.java
@@ -60,14 +60,7 @@
formatted.append("root ");
}
- // Annotate classes as classes.
- DocString linkText = new DocString();
- if (inst.isClassObj()) {
- linkText.append("class ");
- }
-
- linkText.append(inst.toString());
-
+ DocString linkText = DocString.text(inst.toString());
if (inst.isPlaceHolder()) {
// Don't make links to placeholder objects.
formatted.append(linkText);
diff --git a/tools/ahat/src/dominators/DominatorsComputation.java b/tools/ahat/src/dominators/DominatorsComputation.java
new file mode 100644
index 0000000..9a2a272
--- /dev/null
+++ b/tools/ahat/src/dominators/DominatorsComputation.java
@@ -0,0 +1,260 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.ahat.dominators;
+
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Deque;
+import java.util.List;
+import java.util.Queue;
+
+/**
+ * Generic DominatorsComputation.
+ *
+ * To use the dominators computation, have your graph nodes implement the
+ * DominatorsComputation.Node interface, then call
+ * DominatorsComputation.computeDominators on the single root node.
+ */
+public class DominatorsComputation {
+ /**
+ * Interface for a directed graph to perform the dominators computation on.
+ */
+ public interface Node {
+ /**
+ * Associate the given dominator state with this node.
+ */
+ void setDominatorsComputationState(Object state);
+
+ /**
+ * Get the most recent dominator state associated with this node using
+ * setDominatorsComputationState. If setDominatorsComputationState has not
+ * yet been called, this should return null.
+ */
+ Object getDominatorsComputationState();
+
+ /**
+ * Return a collection of nodes referenced from this node, for the
+ * purposes of computing dominators.
+ */
+ Iterable<? extends Node> getReferencesForDominators();
+
+ /**
+ * Update this node's dominator based on the results of the dominators
+ * computation.
+ */
+ void setDominator(Node dominator);
+ }
+
+ // NodeS is information associated with a particular node for the
+ // purposes of computing dominators.
+ // By convention we use the suffix 'S' to name instances of NodeS.
+ private static class NodeS {
+ // The node that this NodeS is associated with.
+ public Node node;
+
+ // Unique identifier for this node, in increasing order based on the order
+ // this node was visited in a depth first search from the root. In
+ // particular, given nodes A and B, if A.id > B.id, then A cannot be a
+ // dominator of B.
+ public long id;
+
+ // Upper bound on the id of this node's dominator.
+ // The true immediate dominator of this node must have id <= domid.
+ // This upper bound is slowly tightened as part of the dominators
+ // computation.
+ public long domid;
+
+ // The current candidate dominator for this node.
+ // Invariant: (domid < domS.id) implies this node is on the queue of
+ // nodes to be revisited.
+ public NodeS domS;
+
+ // A node with a reference to this node that is one step closer to the
+ // root than this node.
+ // Invariant: srcS.id < this.id
+ public NodeS srcS;
+
+ // The set of nodes X reachable by 'this' on a path of nodes from the
+ // root with increasing ids (possibly excluding X) that this node does not
+ // dominate (this.id > X.domid).
+ // We can use a List instead of a Set for this because we guarantee that
+ // we don't add the same node more than once to the list (see below).
+ public List<NodeS> undom = new ArrayList<NodeS>();
+
+ // The largest id of the node X for which we did X.undom.add(this).
+ // This is an optimization to avoid adding duplicate node entries to the
+ // undom set.
+ //
+ // The first time we see this node, we reach it through a path of nodes
+ // with IDs 0,...,a,this. These form our src chain to the root.
+ //
+ // The next time we see this node, we reach it through a path of nodes
+ // with IDS 0,...,b,c,...,d,this. Where all 0,...,b < a and all c,...,d > a.
+ //
+ // The next time we see this node, we reach it through a path of nodes
+ // with IDS 0,...,e,f,...,g,this. With all 0,...,e < d and all f,...,g > d.
+ // And so on.
+ //
+ // The first time we see this node, we set undomid to a.id. Nodes 0,...,a
+ // will be added as undom in the 'revisit' phase of the node.
+ //
+ // The next times we see this node, we mark a+,...,d as undom and
+ // change undomid to d. And so on.
+ public long undomid;
+ }
+
+ private static class Link {
+ public NodeS srcS;
+ public Node dst;
+
+ public Link(NodeS srcS, Node dst) {
+ this.srcS = srcS;
+ this.dst = dst;
+ }
+ }
+
+ /**
+ * Compute the dominator tree rooted at the given node.
+ * There must not be any incoming references to the root node.
+ */
+ public static void computeDominators(Node root) {
+ long id = 0;
+
+ // List of all nodes seen. We keep track of this here to update all the
+ // dominators once we are done.
+ List<NodeS> nodes = new ArrayList<NodeS>();
+
+ // The set of nodes N such that N.domid < N.domS.id. These nodes need
+ // to be revisited because their dominator is clearly wrong.
+ // Use a Queue instead of a Set because performance will be better. We
+ // avoid adding nodes already on the queue by checking whether it was
+ // already true that N.domid < N.domS.id, in which case the node is
+ // already on the queue.
+ Queue<NodeS> revisit = new ArrayDeque<NodeS>();
+
+ // Set up the root node specially.
+ NodeS rootS = new NodeS();
+ rootS.node = root;
+ rootS.id = id++;
+ root.setDominatorsComputationState(rootS);
+
+ // 1. Do a depth first search of the nodes, label them with ids and come
+ // up with initial candidate dominators for them.
+ Deque<Link> dfs = new ArrayDeque<Link>();
+ for (Node child : root.getReferencesForDominators()) {
+ dfs.push(new Link(rootS, child));
+ }
+
+ while (!dfs.isEmpty()) {
+ Link link = dfs.pop();
+ NodeS dstS = (NodeS)link.dst.getDominatorsComputationState();
+ if (dstS == null) {
+ // This is the first time we have seen the node. The candidate
+ // dominator is link src.
+ dstS = new NodeS();
+ dstS.node = link.dst;
+ dstS.id = id++;
+ dstS.domid = link.srcS.id;
+ dstS.domS = link.srcS;
+ dstS.srcS = link.srcS;
+ dstS.undomid = dstS.domid;
+ nodes.add(dstS);
+ link.dst.setDominatorsComputationState(dstS);
+
+ for (Node child : link.dst.getReferencesForDominators()) {
+ dfs.push(new Link(dstS, child));
+ }
+ } else {
+ // We have seen the node already. Update the state based on the new
+ // potential dominator.
+ NodeS srcS = link.srcS;
+ boolean revisiting = dstS.domid < dstS.domS.id;
+
+ while (srcS.id > dstS.domid) {
+ if (srcS.id > dstS.undomid) {
+ srcS.undom.add(dstS);
+ }
+ srcS = srcS.srcS;
+ }
+ dstS.undomid = link.srcS.id;
+
+ if (srcS.id < dstS.domid) {
+ // In this case, dstS.domid must be wrong, because we just found a
+ // path to dstS that does not go through dstS.domid:
+ // All nodes from root to srcS have id < domid, and all nodes from
+ // srcS to dstS had id > domid, so dstS.domid cannot be on this path
+ // from root to dstS.
+ dstS.domid = srcS.id;
+ if (!revisiting) {
+ revisit.add(dstS);
+ }
+ }
+ }
+ }
+
+ // 2. Continue revisiting nodes until they all satisfy the requirement
+ // that domS.id <= domid.
+ while (!revisit.isEmpty()) {
+ NodeS nodeS = revisit.poll();
+ NodeS domS = nodeS.domS;
+ assert nodeS.domid < domS.id;
+ while (domS.id > nodeS.domid) {
+ if (domS.domS.id < nodeS.domid) {
+ // In this case, nodeS.domid must be wrong, because there is a path
+ // from root to nodeS that does not go through nodeS.domid:
+ // * We can go from root to domS without going through nodeS.domid,
+ // because otherwise nodeS.domid would dominate domS, not
+ // domS.domS.
+ // * We can go from domS to nodeS without going through nodeS.domid
+ // because we know nodeS is reachable from domS on a path of nodes
+ // with increasing ids, which cannot include nodeS.domid, which
+ // has a smaller id than domS.
+ nodeS.domid = domS.domS.id;
+ }
+ domS.undom.add(nodeS);
+ domS = domS.srcS;
+ }
+ nodeS.domS = domS;
+ nodeS.domid = domS.id;
+
+ for (NodeS xS : nodeS.undom) {
+ if (domS.id < xS.domid) {
+ // In this case, xS.domid must be wrong, because there is a path
+ // from the root to xS that does not go through xS.domid:
+ // * We can go from root to nodeS without going through xS.domid,
+ // because otherwise xS.domid would dominate nodeS, not domS.
+ // * We can go from nodeS to xS without going through xS.domid
+ // because we know xS is reachable from nodeS on a path of nodes
+ // with increasing ids, which cannot include xS.domid, which has
+ // a smaller id than nodeS.
+ boolean revisiting = xS.domid < xS.domS.id;
+ xS.domid = domS.id;
+ if (!revisiting) {
+ revisit.add(xS);
+ }
+ }
+ }
+ }
+
+ // 3. Update the dominators of the nodes.
+ root.setDominatorsComputationState(null);
+ for (NodeS nodeS : nodes) {
+ nodeS.node.setDominator(nodeS.domS.node);
+ nodeS.node.setDominatorsComputationState(null);
+ }
+ }
+}
diff --git a/tools/ahat/src/heapdump/AhatArrayInstance.java b/tools/ahat/src/heapdump/AhatArrayInstance.java
index d88cf94..6d4485d 100644
--- a/tools/ahat/src/heapdump/AhatArrayInstance.java
+++ b/tools/ahat/src/heapdump/AhatArrayInstance.java
@@ -20,6 +20,7 @@
import com.android.tools.perflib.heap.Instance;
import java.nio.charset.StandardCharsets;
import java.util.AbstractList;
+import java.util.Collections;
import java.util.List;
public class AhatArrayInstance extends AhatInstance {
@@ -37,8 +38,8 @@
super(id);
}
- @Override void initialize(AhatSnapshot snapshot, Instance inst) {
- super.initialize(snapshot, inst);
+ @Override void initialize(AhatSnapshot snapshot, Instance inst, Site site) {
+ super.initialize(snapshot, inst, site);
ArrayInstance array = (ArrayInstance)inst;
switch (array.getArrayType()) {
@@ -49,10 +50,6 @@
if (objects[i] != null) {
Instance ref = (Instance)objects[i];
insts[i] = snapshot.findInstance(ref.getId());
- if (ref.getNextInstanceToGcRoot() == inst) {
- String field = "[" + Integer.toString(i) + "]";
- insts[i].setNextInstanceToGcRoot(this, field);
- }
}
}
mValues = new AbstractList<Value>() {
@@ -132,6 +129,35 @@
return mValues.get(index);
}
+ @Override
+ ReferenceIterator getReferences() {
+ // The list of references will be empty if this is a primitive array.
+ List<Reference> refs = Collections.emptyList();
+ if (!mValues.isEmpty()) {
+ Value first = mValues.get(0);
+ if (first == null || first.isAhatInstance()) {
+ refs = new AbstractList<Reference>() {
+ @Override
+ public int size() {
+ return mValues.size();
+ }
+
+ @Override
+ public Reference get(int index) {
+ Value value = mValues.get(index);
+ if (value != null) {
+ assert value.isAhatInstance();
+ String field = "[" + Integer.toString(index) + "]";
+ return new Reference(AhatArrayInstance.this, field, value.asAhatInstance(), true);
+ }
+ return null;
+ }
+ };
+ }
+ }
+ return new ReferenceIterator(refs);
+ }
+
@Override public boolean isArrayInstance() {
return true;
}
diff --git a/tools/ahat/src/heapdump/AhatClassInstance.java b/tools/ahat/src/heapdump/AhatClassInstance.java
index 158de52..2115923 100644
--- a/tools/ahat/src/heapdump/AhatClassInstance.java
+++ b/tools/ahat/src/heapdump/AhatClassInstance.java
@@ -19,6 +19,7 @@
import com.android.tools.perflib.heap.ClassInstance;
import com.android.tools.perflib.heap.Instance;
import java.awt.image.BufferedImage;
+import java.util.AbstractList;
import java.util.Arrays;
import java.util.List;
@@ -29,8 +30,8 @@
super(id);
}
- @Override void initialize(AhatSnapshot snapshot, Instance inst) {
- super.initialize(snapshot, inst);
+ @Override void initialize(AhatSnapshot snapshot, Instance inst, Site site) {
+ super.initialize(snapshot, inst, site);
ClassInstance classInst = (ClassInstance)inst;
List<ClassInstance.FieldValue> fieldValues = classInst.getValues();
@@ -40,15 +41,7 @@
String name = field.getField().getName();
String type = field.getField().getType().toString();
Value value = snapshot.getValue(field.getValue());
-
mFieldValues[i] = new FieldValue(name, type, value);
-
- if (field.getValue() instanceof Instance) {
- Instance ref = (Instance)field.getValue();
- if (ref.getNextInstanceToGcRoot() == inst) {
- value.asAhatInstance().setNextInstanceToGcRoot(this, "." + name);
- }
- }
}
}
@@ -101,6 +94,30 @@
return Arrays.asList(mFieldValues);
}
+ @Override
+ ReferenceIterator getReferences() {
+ List<Reference> refs = new AbstractList<Reference>() {
+ @Override
+ public int size() {
+ return mFieldValues.length;
+ }
+
+ @Override
+ public Reference get(int index) {
+ FieldValue field = mFieldValues[index];
+ Value value = field.value;
+ if (value != null && value.isAhatInstance()) {
+ boolean strong = !field.name.equals("referent")
+ || !isInstanceOfClass("java.lang.ref.Reference");
+ AhatInstance ref = value.asAhatInstance();
+ return new Reference(AhatClassInstance.this, "." + field.name, ref, strong);
+ }
+ return null;
+ }
+ };
+ return new ReferenceIterator(refs);
+ }
+
/**
* Returns true if this is an instance of a class with the given name.
*/
diff --git a/tools/ahat/src/heapdump/AhatClassObj.java b/tools/ahat/src/heapdump/AhatClassObj.java
index c5ade1d..052d7a8 100644
--- a/tools/ahat/src/heapdump/AhatClassObj.java
+++ b/tools/ahat/src/heapdump/AhatClassObj.java
@@ -19,6 +19,7 @@
import com.android.tools.perflib.heap.ClassObj;
import com.android.tools.perflib.heap.Field;
import com.android.tools.perflib.heap.Instance;
+import java.util.AbstractList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
@@ -34,8 +35,8 @@
super(id);
}
- @Override void initialize(AhatSnapshot snapshot, Instance inst) {
- super.initialize(snapshot, inst);
+ @Override void initialize(AhatSnapshot snapshot, Instance inst, Site site) {
+ super.initialize(snapshot, inst, site);
ClassObj classObj = (ClassObj)inst;
mClassName = classObj.getClassName();
@@ -58,13 +59,6 @@
String type = field.getKey().getType().toString();
Value value = snapshot.getValue(field.getValue());
mStaticFieldValues[index++] = new FieldValue(name, type, value);
-
- if (field.getValue() instanceof Instance) {
- Instance ref = (Instance)field.getValue();
- if (ref.getNextInstanceToGcRoot() == inst) {
- value.asAhatInstance().setNextInstanceToGcRoot(this, "." + name);
- }
- }
}
}
@@ -96,6 +90,27 @@
return Arrays.asList(mStaticFieldValues);
}
+ @Override
+ ReferenceIterator getReferences() {
+ List<Reference> refs = new AbstractList<Reference>() {
+ @Override
+ public int size() {
+ return mStaticFieldValues.length;
+ }
+
+ @Override
+ public Reference get(int index) {
+ FieldValue field = mStaticFieldValues[index];
+ Value value = field.value;
+ if (value != null && value.isAhatInstance()) {
+ return new Reference(AhatClassObj.this, "." + field.name, value.asAhatInstance(), true);
+ }
+ return null;
+ }
+ };
+ return new ReferenceIterator(refs);
+ }
+
@Override public boolean isClassObj() {
return true;
}
@@ -105,11 +120,10 @@
}
@Override public String toString() {
- return mClassName;
+ return "class " + mClassName;
}
@Override AhatInstance newPlaceHolderInstance() {
return new AhatPlaceHolderClassObj(this);
}
}
-
diff --git a/tools/ahat/src/heapdump/AhatInstance.java b/tools/ahat/src/heapdump/AhatInstance.java
index af369d9..8905b76 100644
--- a/tools/ahat/src/heapdump/AhatInstance.java
+++ b/tools/ahat/src/heapdump/AhatInstance.java
@@ -16,39 +16,48 @@
package com.android.ahat.heapdump;
+import com.android.ahat.dominators.DominatorsComputation;
import com.android.tools.perflib.heap.ClassObj;
import com.android.tools.perflib.heap.Instance;
-import com.android.tools.perflib.heap.RootObj;
import java.awt.image.BufferedImage;
import java.util.ArrayDeque;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.List;
+import java.util.Queue;
-public abstract class AhatInstance implements Diffable<AhatInstance> {
- private long mId;
+public abstract class AhatInstance implements Diffable<AhatInstance>,
+ DominatorsComputation.Node {
+ // The id of this instance from the heap dump.
+ private final long mId;
+
+ // Fields initialized in initialize().
private Size mSize;
- private Size[] mRetainedSizes; // Retained size indexed by heap index
- private boolean mIsReachable;
private AhatHeap mHeap;
- private AhatInstance mImmediateDominator;
- private AhatInstance mNextInstanceToGcRoot;
- private String mNextInstanceToGcRootField = "???";
private AhatClassObj mClassObj;
- private AhatInstance[] mHardReverseReferences;
- private AhatInstance[] mSoftReverseReferences;
private Site mSite;
// If this instance is a root, mRootTypes contains a set of the root types.
// If this instance is not a root, mRootTypes is null.
private List<String> mRootTypes;
- // List of instances this instance immediately dominates.
- private List<AhatInstance> mDominated = new ArrayList<AhatInstance>();
+ // Fields initialized in computeReverseReferences().
+ private AhatInstance mNextInstanceToGcRoot;
+ private String mNextInstanceToGcRootField;
+ private ArrayList<AhatInstance> mHardReverseReferences;
+ private ArrayList<AhatInstance> mSoftReverseReferences;
+ // Fields initialized in DominatorsComputation.computeDominators().
+ // mDominated - the list of instances immediately dominated by this instance.
+ // mRetainedSizes - retained size indexed by heap index.
+ private AhatInstance mImmediateDominator;
+ private List<AhatInstance> mDominated = new ArrayList<AhatInstance>();
+ private Size[] mRetainedSizes;
+ private Object mDominatorsComputationState;
+
+ // The baseline instance for purposes of diff.
private AhatInstance mBaseline;
public AhatInstance(long id) {
@@ -62,58 +71,16 @@
* There is no guarantee that the AhatInstances returned by
* snapshot.findInstance have been initialized yet.
*/
- void initialize(AhatSnapshot snapshot, Instance inst) {
- mId = inst.getId();
+ void initialize(AhatSnapshot snapshot, Instance inst, Site site) {
mSize = new Size(inst.getSize(), 0);
- mIsReachable = inst.isReachable();
-
- List<AhatHeap> heaps = snapshot.getHeaps();
-
mHeap = snapshot.getHeap(inst.getHeap().getName());
- Instance dom = inst.getImmediateDominator();
- if (dom == null || dom instanceof RootObj) {
- mImmediateDominator = null;
- } else {
- mImmediateDominator = snapshot.findInstance(dom.getId());
- mImmediateDominator.mDominated.add(this);
- }
-
ClassObj clsObj = inst.getClassObj();
if (clsObj != null) {
mClassObj = snapshot.findClassObj(clsObj.getId());
}
- // A couple notes about reverse references:
- // * perflib sometimes returns unreachable reverse references. If
- // snapshot.findInstance returns null, it means the reverse reference is
- // not reachable, so we filter it out.
- // * We store the references as AhatInstance[] instead of
- // ArrayList<AhatInstance> because it saves a lot of space and helps
- // with performance when there are a lot of AhatInstances.
- ArrayList<AhatInstance> ahatRefs = new ArrayList<AhatInstance>();
- ahatRefs = new ArrayList<AhatInstance>();
- for (Instance ref : inst.getHardReverseReferences()) {
- AhatInstance ahat = snapshot.findInstance(ref.getId());
- if (ahat != null) {
- ahatRefs.add(ahat);
- }
- }
- mHardReverseReferences = new AhatInstance[ahatRefs.size()];
- ahatRefs.toArray(mHardReverseReferences);
-
- List<Instance> refs = inst.getSoftReverseReferences();
- ahatRefs.clear();
- if (refs != null) {
- for (Instance ref : refs) {
- AhatInstance ahat = snapshot.findInstance(ref.getId());
- if (ahat != null) {
- ahatRefs.add(ahat);
- }
- }
- }
- mSoftReverseReferences = new AhatInstance[ahatRefs.size()];
- ahatRefs.toArray(mSoftReverseReferences);
+ mSite = site;
}
/**
@@ -166,7 +133,7 @@
* Returns whether this object is strongly-reachable.
*/
public boolean isReachable() {
- return mIsReachable;
+ return mImmediateDominator != null;
}
/**
@@ -177,6 +144,12 @@
}
/**
+ * Returns an iterator over the references this AhatInstance has to other
+ * AhatInstances.
+ */
+ abstract ReferenceIterator getReferences();
+
+ /**
* Returns true if this instance is marked as a root instance.
*/
public boolean isRoot() {
@@ -227,13 +200,6 @@
}
/**
- * Sets the allocation site of this instance.
- */
- void setSite(Site site) {
- mSite = site;
- }
-
- /**
* Returns true if the given instance is a class object
*/
public boolean isClassObj() {
@@ -311,14 +277,20 @@
* Returns a list of objects with hard references to this object.
*/
public List<AhatInstance> getHardReverseReferences() {
- return Arrays.asList(mHardReverseReferences);
+ if (mHardReverseReferences != null) {
+ return mHardReverseReferences;
+ }
+ return Collections.emptyList();
}
/**
* Returns a list of objects with soft references to this object.
*/
public List<AhatInstance> getSoftReverseReferences() {
- return Arrays.asList(mSoftReverseReferences);
+ if (mSoftReverseReferences != null) {
+ return mSoftReverseReferences;
+ }
+ return Collections.emptyList();
}
/**
@@ -425,8 +397,10 @@
}
void setNextInstanceToGcRoot(AhatInstance inst, String field) {
- mNextInstanceToGcRoot = inst;
- mNextInstanceToGcRootField = field;
+ if (mNextInstanceToGcRoot == null && !isRoot()) {
+ mNextInstanceToGcRoot = inst;
+ mNextInstanceToGcRootField = field;
+ }
}
/** Returns a human-readable identifier for this object.
@@ -466,6 +440,47 @@
}
/**
+ * Initialize the reverse reference fields of this instance and all other
+ * instances reachable from it. Initializes the following fields:
+ * mNextInstanceToGcRoot
+ * mNextInstanceToGcRootField
+ * mHardReverseReferences
+ * mSoftReverseReferences
+ */
+ static void computeReverseReferences(AhatInstance root) {
+ // Do a breadth first search to visit the nodes.
+ Queue<Reference> bfs = new ArrayDeque<Reference>();
+ for (Reference ref : root.getReferences()) {
+ bfs.add(ref);
+ }
+ while (!bfs.isEmpty()) {
+ Reference ref = bfs.poll();
+
+ if (ref.ref.mHardReverseReferences == null) {
+ // This is the first time we are seeing ref.ref.
+ ref.ref.mNextInstanceToGcRoot = ref.src;
+ ref.ref.mNextInstanceToGcRootField = ref.field;
+ ref.ref.mHardReverseReferences = new ArrayList<AhatInstance>();
+ for (Reference childRef : ref.ref.getReferences()) {
+ bfs.add(childRef);
+ }
+ }
+
+ // Note: ref.src is null when the src is the SuperRoot.
+ if (ref.src != null) {
+ if (ref.strong) {
+ ref.ref.mHardReverseReferences.add(ref.src);
+ } else {
+ if (ref.ref.mSoftReverseReferences == null) {
+ ref.ref.mSoftReverseReferences = new ArrayList<AhatInstance>();
+ }
+ ref.ref.mSoftReverseReferences.add(ref.src);
+ }
+ }
+ }
+ }
+
+ /**
* Recursively compute the retained size of the given instance and all
* other instances it dominates.
*/
@@ -486,8 +501,10 @@
for (int i = 0; i < numHeaps; i++) {
inst.mRetainedSizes[i] = Size.ZERO;
}
- inst.mRetainedSizes[inst.mHeap.getIndex()] =
- inst.mRetainedSizes[inst.mHeap.getIndex()].plus(inst.mSize);
+ if (!(inst instanceof SuperRoot)) {
+ inst.mRetainedSizes[inst.mHeap.getIndex()] =
+ inst.mRetainedSizes[inst.mHeap.getIndex()].plus(inst.mSize);
+ }
deque.push(inst);
for (AhatInstance dominated : inst.mDominated) {
deque.push(dominated);
@@ -501,4 +518,25 @@
}
}
}
+
+ @Override
+ public void setDominatorsComputationState(Object state) {
+ mDominatorsComputationState = state;
+ }
+
+ @Override
+ public Object getDominatorsComputationState() {
+ return mDominatorsComputationState;
+ }
+
+ @Override
+ public Iterable<? extends DominatorsComputation.Node> getReferencesForDominators() {
+ return new DominatorReferenceIterator(getReferences());
+ }
+
+ @Override
+ public void setDominator(DominatorsComputation.Node dominator) {
+ mImmediateDominator = (AhatInstance)dominator;
+ mImmediateDominator.mDominated.add(this);
+ }
}
diff --git a/tools/ahat/src/heapdump/AhatPlaceHolderInstance.java b/tools/ahat/src/heapdump/AhatPlaceHolderInstance.java
index 4aac804..d797b11 100644
--- a/tools/ahat/src/heapdump/AhatPlaceHolderInstance.java
+++ b/tools/ahat/src/heapdump/AhatPlaceHolderInstance.java
@@ -16,6 +16,9 @@
package com.android.ahat.heapdump;
+import java.util.Collections;
+import java.util.List;
+
/**
* Generic PlaceHolder instance to take the place of a real AhatInstance for
* the purposes of displaying diffs.
@@ -60,4 +63,10 @@
@Override public boolean isPlaceHolder() {
return true;
}
+
+ @Override
+ ReferenceIterator getReferences() {
+ List<Reference> refs = Collections.emptyList();
+ return new ReferenceIterator(refs);
+ }
}
diff --git a/tools/ahat/src/heapdump/AhatSnapshot.java b/tools/ahat/src/heapdump/AhatSnapshot.java
index 35d6c8a..7df78c5 100644
--- a/tools/ahat/src/heapdump/AhatSnapshot.java
+++ b/tools/ahat/src/heapdump/AhatSnapshot.java
@@ -16,6 +16,7 @@
package com.android.ahat.heapdump;
+import com.android.ahat.dominators.DominatorsComputation;
import com.android.tools.perflib.captures.DataBuffer;
import com.android.tools.perflib.captures.MemoryMappedFileBuffer;
import com.android.tools.perflib.heap.ArrayInstance;
@@ -42,7 +43,7 @@
private final Site mRootSite = new Site("ROOT");
// Collection of objects whose immediate dominator is the SENTINEL_ROOT.
- private final List<AhatInstance> mRooted = new ArrayList<AhatInstance>();
+ private final List<AhatInstance> mRooted;
// List of all ahat instances stored in increasing order by id.
private final List<AhatInstance> mInstances = new ArrayList<AhatInstance>();
@@ -80,7 +81,6 @@
*/
private AhatSnapshot(DataBuffer buffer, ProguardMap map) throws IOException {
Snapshot snapshot = Snapshot.createSnapshot(buffer, map);
- snapshot.computeDominators();
// Properly label the class of class objects in the perflib snapshot.
final ClassObj javaLangClass = snapshot.findClass("java.lang.Class");
@@ -139,46 +139,45 @@
// and instances.
for (AhatInstance ahat : mInstances) {
Instance inst = snapshot.findInstance(ahat.getId());
- ahat.initialize(this, inst);
- Long registeredNativeSize = registeredNative.get(inst);
- if (registeredNativeSize != null) {
- ahat.addRegisteredNativeSize(registeredNativeSize);
- }
-
- if (inst.getImmediateDominator() == Snapshot.SENTINEL_ROOT) {
- mRooted.add(ahat);
- }
-
- if (inst.isReachable()) {
- ahat.getHeap().addToSize(ahat.getSize());
- }
-
- // Update sites.
StackFrame[] frames = null;
StackTrace stack = inst.getStack();
if (stack != null) {
frames = stack.getFrames();
}
Site site = mRootSite.add(frames, frames == null ? 0 : frames.length, ahat);
- ahat.setSite(site);
+ ahat.initialize(this, inst, site);
+
+ Long registeredNativeSize = registeredNative.get(inst);
+ if (registeredNativeSize != null) {
+ ahat.addRegisteredNativeSize(registeredNativeSize);
+ }
}
// Record the roots and their types.
+ SuperRoot superRoot = new SuperRoot();
for (RootObj root : snapshot.getGCRoots()) {
Instance inst = root.getReferredInstance();
if (inst != null) {
- findInstance(inst.getId()).addRootType(root.getRootType().toString());
+ AhatInstance ahat = findInstance(inst.getId());
+ if (!ahat.isRoot()) {
+ superRoot.addRoot(ahat);
+ }
+ ahat.addRootType(root.getRootType().toString());
}
}
snapshot.dispose();
- // Compute the retained sizes of objects. We do this explicitly now rather
- // than relying on the retained sizes computed by perflib so that
- // registered native sizes are included.
- for (AhatInstance inst : mRooted) {
- AhatInstance.computeRetainedSize(inst, mHeaps.size());
+ AhatInstance.computeReverseReferences(superRoot);
+ DominatorsComputation.computeDominators(superRoot);
+ AhatInstance.computeRetainedSize(superRoot, mHeaps.size());
+
+ mRooted = superRoot.getDominated();
+ for (AhatHeap heap : mHeaps) {
+ heap.addToSize(superRoot.getRetainedSize(heap));
}
+
+ mRootSite.computeObjectsInfos(mHeaps.size());
}
/**
diff --git a/tools/ahat/src/heapdump/DominatorReferenceIterator.java b/tools/ahat/src/heapdump/DominatorReferenceIterator.java
new file mode 100644
index 0000000..ce2e6ef
--- /dev/null
+++ b/tools/ahat/src/heapdump/DominatorReferenceIterator.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.ahat.heapdump;
+
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+/**
+ * Reference iterator used for the dominators computation.
+ * This visits only strong references.
+ */
+class DominatorReferenceIterator implements Iterator<AhatInstance>,
+ Iterable<AhatInstance> {
+ private ReferenceIterator mIter;
+ private AhatInstance mNext;
+
+ public DominatorReferenceIterator(ReferenceIterator iter) {
+ mIter = iter;
+ mNext = null;
+ }
+
+ @Override
+ public boolean hasNext() {
+ while (mNext == null && mIter.hasNext()) {
+ Reference ref = mIter.next();
+ if (ref.strong) {
+ mNext = ref.ref;
+ }
+ }
+ return mNext != null;
+ }
+
+ @Override
+ public AhatInstance next() {
+ if (hasNext()) {
+ AhatInstance next = mNext;
+ mNext = null;
+ return next;
+ }
+ throw new NoSuchElementException();
+ }
+
+ @Override
+ public Iterator<AhatInstance> iterator() {
+ return this;
+ }
+}
diff --git a/tools/ahat/src/heapdump/Reference.java b/tools/ahat/src/heapdump/Reference.java
new file mode 100644
index 0000000..980f278
--- /dev/null
+++ b/tools/ahat/src/heapdump/Reference.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.ahat.heapdump;
+
+/**
+ * Reference represents a reference from 'src' to 'ref' through 'field'.
+ * Field is a string description for human consumption. This is typically
+ * either "." followed by the field name or an array subscript such as "[4]".
+ * 'strong' is true if this is a strong reference, false if it is a
+ * weak/soft/other reference.
+ */
+public class Reference {
+ public final AhatInstance src;
+ public final String field;
+ public final AhatInstance ref;
+ public final boolean strong;
+
+ public Reference(AhatInstance src, String field, AhatInstance ref, boolean strong) {
+ this.src = src;
+ this.field = field;
+ this.ref = ref;
+ this.strong = strong;
+ }
+}
diff --git a/tools/ahat/src/heapdump/ReferenceIterator.java b/tools/ahat/src/heapdump/ReferenceIterator.java
new file mode 100644
index 0000000..a707fb2
--- /dev/null
+++ b/tools/ahat/src/heapdump/ReferenceIterator.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.ahat.heapdump;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+class ReferenceIterator implements Iterator<Reference>,
+ Iterable<Reference> {
+ private List<Reference> mRefs;
+ private int mLength;
+ private int mNextIndex;
+ private Reference mNext;
+
+ /**
+   * Construct a ReferenceIterator that iterates over the given list of
+ * references. Elements of the given list of references may be null, in
+ * which case the ReferenceIterator will skip over them.
+ */
+ public ReferenceIterator(List<Reference> refs) {
+ mRefs = refs;
+ mLength = refs.size();
+ mNextIndex = 0;
+ mNext = null;
+ }
+
+ @Override
+ public boolean hasNext() {
+ while (mNext == null && mNextIndex < mLength) {
+ mNext = mRefs.get(mNextIndex);
+ mNextIndex++;
+ }
+ return mNext != null;
+ }
+
+ @Override
+ public Reference next() {
+ if (!hasNext()) {
+ throw new NoSuchElementException();
+ }
+ Reference next = mNext;
+ mNext = null;
+ return next;
+ }
+
+ @Override
+ public Iterator<Reference> iterator() {
+ return this;
+ }
+}
diff --git a/tools/ahat/src/heapdump/Site.java b/tools/ahat/src/heapdump/Site.java
index fdd4eea..f0fc5d2 100644
--- a/tools/ahat/src/heapdump/Site.java
+++ b/tools/ahat/src/heapdump/Site.java
@@ -42,15 +42,15 @@
private int mDepth;
// The total size of objects allocated in this site (including child sites),
- // organized by heap index. Heap indices outside the range of mSizesByHeap
- // implicitly have size 0.
+ // organized by heap index. Computed as part of computeObjectsInfos.
private Size[] mSizesByHeap;
// List of child sites.
private List<Site> mChildren;
- // List of all objects allocated in this site (including child sites).
+ // List of objects allocated at this site (not including child sites).
private List<AhatInstance> mObjects;
+
private List<ObjectsInfo> mObjectsInfos;
private Map<AhatHeap, Map<AhatClassObj, ObjectsInfo>> mObjectsInfoMap;
@@ -111,7 +111,6 @@
mLineNumber = line;
mId = id;
mDepth = depth;
- mSizesByHeap = new Size[0];
mChildren = new ArrayList<Site>();
mObjects = new ArrayList<AhatInstance>();
mObjectsInfos = new ArrayList<ObjectsInfo>();
@@ -130,67 +129,102 @@
}
private static Site add(Site site, StackFrame[] frames, int depth, AhatInstance inst) {
- while (true) {
- site.mObjects.add(inst);
+ while (depth > 0) {
+ StackFrame next = frames[depth - 1];
+ Site child = null;
+ for (int i = 0; i < site.mChildren.size(); i++) {
+ Site curr = site.mChildren.get(i);
+ if (curr.mLineNumber == next.getLineNumber()
+ && curr.mMethodName.equals(next.getMethodName())
+ && curr.mSignature.equals(next.getSignature())
+ && curr.mFilename.equals(next.getFilename())) {
+ child = curr;
+ break;
+ }
+ }
+ if (child == null) {
+ child = new Site(site, next.getMethodName(), next.getSignature(),
+ next.getFilename(), next.getLineNumber(), inst.getId(), depth - 1);
+ site.mChildren.add(child);
+ }
+ depth = depth - 1;
+ site = child;
+ }
+ site.mObjects.add(inst);
+ return site;
+ }
- ObjectsInfo info = site.getObjectsInfo(inst.getHeap(), inst.getClassObj());
+ /**
+ * Recompute the ObjectsInfos for this and all child sites.
+ * This should be done after the sites tree has been formed. It should also
+ * be done after dominators computation has been performed to ensure only
+ * reachable objects are included in the ObjectsInfos.
+ *
+ * @param numHeaps - The number of heaps in the heap dump.
+ */
+ void computeObjectsInfos(int numHeaps) {
+ // Count up the total sizes by heap.
+ mSizesByHeap = new Size[numHeaps];
+ for (int i = 0; i < numHeaps; ++i) {
+ mSizesByHeap[i] = Size.ZERO;
+ }
+
+ // Add all reachable objects allocated at this site.
+ for (AhatInstance inst : mObjects) {
if (inst.isReachable()) {
AhatHeap heap = inst.getHeap();
- if (heap.getIndex() >= site.mSizesByHeap.length) {
- Size[] newSizes = new Size[heap.getIndex() + 1];
- for (int i = 0; i < site.mSizesByHeap.length; i++) {
- newSizes[i] = site.mSizesByHeap[i];
- }
- for (int i = site.mSizesByHeap.length; i < heap.getIndex() + 1; i++) {
- newSizes[i] = Size.ZERO;
- }
- site.mSizesByHeap = newSizes;
- }
- site.mSizesByHeap[heap.getIndex()]
- = site.mSizesByHeap[heap.getIndex()].plus(inst.getSize());
-
+ Size size = inst.getSize();
+ ObjectsInfo info = getObjectsInfo(heap, inst.getClassObj());
info.numInstances++;
- info.numBytes = info.numBytes.plus(inst.getSize());
+ info.numBytes = info.numBytes.plus(size);
+ mSizesByHeap[heap.getIndex()] = mSizesByHeap[heap.getIndex()].plus(size);
}
+ }
- if (depth > 0) {
- StackFrame next = frames[depth - 1];
- Site child = null;
- for (int i = 0; i < site.mChildren.size(); i++) {
- Site curr = site.mChildren.get(i);
- if (curr.mLineNumber == next.getLineNumber()
- && curr.mMethodName.equals(next.getMethodName())
- && curr.mSignature.equals(next.getSignature())
- && curr.mFilename.equals(next.getFilename())) {
- child = curr;
- break;
- }
- }
- if (child == null) {
- child = new Site(site, next.getMethodName(), next.getSignature(),
- next.getFilename(), next.getLineNumber(), inst.getId(), depth - 1);
- site.mChildren.add(child);
- }
- depth = depth - 1;
- site = child;
- } else {
- return site;
+ // Add objects allocated in child sites.
+ for (Site child : mChildren) {
+ child.computeObjectsInfos(numHeaps);
+ for (ObjectsInfo childInfo : child.mObjectsInfos) {
+ ObjectsInfo info = getObjectsInfo(childInfo.heap, childInfo.classObj);
+ info.numInstances += childInfo.numInstances;
+ info.numBytes = info.numBytes.plus(childInfo.numBytes);
+ }
+ for (int i = 0; i < numHeaps; ++i) {
+ mSizesByHeap[i] = mSizesByHeap[i].plus(child.mSizesByHeap[i]);
}
}
}
// Get the size of a site for a specific heap.
public Size getSize(AhatHeap heap) {
- int index = heap.getIndex();
- return index >= 0 && index < mSizesByHeap.length ? mSizesByHeap[index] : Size.ZERO;
+ return mSizesByHeap[heap.getIndex()];
}
/**
- * Get the list of objects allocated under this site. Includes objects
- * allocated in children sites.
+ * Collect the objects allocated under this site, optionally filtered by
+ * heap name or class name. Includes objects allocated in children sites.
+ * @param heapName - The name of the heap the collected objects should
+ * belong to. This may be null to indicate objects of
+ * every heap should be collected.
+ * @param className - The name of the class the collected objects should
+ * belong to. This may be null to indicate objects of
+ * every class should be collected.
+ * @param objects - Out parameter. A collection of objects that all
+ * collected objects should be added to.
*/
- public Collection<AhatInstance> getObjects() {
- return mObjects;
+ public void getObjects(String heapName, String className, Collection<AhatInstance> objects) {
+ for (AhatInstance inst : mObjects) {
+ if ((heapName == null || inst.getHeap().getName().equals(heapName))
+ && (className == null || inst.getClassName().equals(className))) {
+ objects.add(inst);
+ }
+ }
+
+ // Recursively visit children. Recursion should be okay here because the
+ // stack depth is limited by a reasonable amount (128 frames or so).
+ for (Site child : mChildren) {
+ child.getObjects(heapName, className, objects);
+ }
}
/**
@@ -220,8 +254,8 @@
// Get the combined size of the site for all heaps.
public Size getTotalSize() {
Size total = Size.ZERO;
- for (int i = 0; i < mSizesByHeap.length; i++) {
- total = total.plus(mSizesByHeap[i]);
+ for (Size size : mSizesByHeap) {
+ total = total.plus(size);
}
return total;
}
diff --git a/tools/ahat/src/heapdump/SuperRoot.java b/tools/ahat/src/heapdump/SuperRoot.java
new file mode 100644
index 0000000..54410cf
--- /dev/null
+++ b/tools/ahat/src/heapdump/SuperRoot.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.ahat.heapdump;
+
+import com.android.ahat.dominators.DominatorsComputation;
+import java.util.AbstractList;
+import java.util.ArrayList;
+import java.util.List;
+
+public class SuperRoot extends AhatInstance implements DominatorsComputation.Node {
+ private List<AhatInstance> mRoots = new ArrayList<AhatInstance>();
+ private Object mDominatorsComputationState;
+
+ public SuperRoot() {
+ super(0);
+ }
+
+ public void addRoot(AhatInstance root) {
+ mRoots.add(root);
+ }
+
+ @Override
+ public String toString() {
+ return "SUPER_ROOT";
+ }
+
+ @Override
+ ReferenceIterator getReferences() {
+ List<Reference> refs = new AbstractList<Reference>() {
+ @Override
+ public int size() {
+ return mRoots.size();
+ }
+
+ @Override
+ public Reference get(int index) {
+ String field = ".roots[" + Integer.toString(index) + "]";
+ return new Reference(null, field, mRoots.get(index), true);
+ }
+ };
+ return new ReferenceIterator(refs);
+ }
+}
diff --git a/tools/ahat/test-dump/Main.java b/tools/ahat/test-dump/Main.java
index 3d3de78..13fd102 100644
--- a/tools/ahat/test-dump/Main.java
+++ b/tools/ahat/test-dump/Main.java
@@ -60,6 +60,14 @@
public StackSmasher child;
}
+ public static class Reference {
+ public Object referent;
+
+ public Reference(Object referent) {
+ this.referent = referent;
+ }
+ }
+
// We will take a heap dump that includes a single instance of this
// DumpedStuff class. Objects stored as fields in this class can be easily
// found in the hprof dump by searching for the instance of the DumpedStuff
@@ -71,6 +79,7 @@
public char[] charArray = "char thing".toCharArray();
public String nullString = null;
public Object anObject = new Object();
+ public Reference aReference = new Reference(anObject);
public ReferenceQueue<Object> referenceQueue = new ReferenceQueue<Object>();
public PhantomReference aPhantomReference = new PhantomReference(anObject, referenceQueue);
public WeakReference aWeakReference = new WeakReference(anObject, referenceQueue);
diff --git a/tools/ahat/test/DominatorsTest.java b/tools/ahat/test/DominatorsTest.java
new file mode 100644
index 0000000..0424e10
--- /dev/null
+++ b/tools/ahat/test/DominatorsTest.java
@@ -0,0 +1,298 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.ahat;
+
+import com.android.ahat.dominators.DominatorsComputation;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+
+public class DominatorsTest {
+ private static class Node implements DominatorsComputation.Node {
+ public String name;
+ public List<Node> depends = new ArrayList<Node>();
+ public Node dominator;
+ private Object dominatorsComputationState;
+
+ public Node(String name) {
+ this.name = name;
+ }
+
+ public void computeDominators() {
+ DominatorsComputation.computeDominators(this);
+ }
+
+ public String toString() {
+ return name;
+ }
+
+ @Override
+ public void setDominatorsComputationState(Object state) {
+ dominatorsComputationState = state;
+ }
+
+ @Override
+ public Object getDominatorsComputationState() {
+ return dominatorsComputationState;
+ }
+
+ @Override
+ public Collection<Node> getReferencesForDominators() {
+ return depends;
+ }
+
+ @Override
+ public void setDominator(DominatorsComputation.Node dominator) {
+ this.dominator = (Node)dominator;
+ }
+ }
+
+ @Test
+ public void singleNode() {
+ // --> n
+ // Trivial case.
+ Node n = new Node("n");
+ n.computeDominators();
+ }
+
+ @Test
+ public void parentWithChild() {
+ // --> parent --> child
+ // The child node is dominated by the parent.
+ Node parent = new Node("parent");
+ Node child = new Node("child");
+ parent.depends = Arrays.asList(child);
+
+ parent.computeDominators();
+ assertEquals(parent, child.dominator);
+ }
+
+ @Test
+ public void reachableTwoWays() {
+ // /-> right -->\
+ // --> parent child
+ // \-> left --->/
+ // The child node can be reached either by right or by left.
+ Node parent = new Node("parent");
+ Node right = new Node("right");
+ Node left = new Node("left");
+ Node child = new Node("child");
+ parent.depends = Arrays.asList(left, right);
+ right.depends = Arrays.asList(child);
+ left.depends = Arrays.asList(child);
+
+ parent.computeDominators();
+ assertEquals(parent, left.dominator);
+ assertEquals(parent, right.dominator);
+ assertEquals(parent, child.dominator);
+ }
+
+ @Test
+ public void reachableDirectAndIndirect() {
+ // /-> right -->\
+ // --> parent -----------> child
+ // The child node can be reached either by right or parent.
+ Node parent = new Node("parent");
+ Node right = new Node("right");
+ Node child = new Node("child");
+ parent.depends = Arrays.asList(right, child);
+ right.depends = Arrays.asList(child);
+
+ parent.computeDominators();
+ assertEquals(parent, child.dominator);
+ assertEquals(parent, right.dominator);
+ }
+
+ @Test
+ public void subDominator() {
+ // --> parent --> middle --> child
+ // The child is dominated by an internal node.
+ Node parent = new Node("parent");
+ Node middle = new Node("middle");
+ Node child = new Node("child");
+ parent.depends = Arrays.asList(middle);
+ middle.depends = Arrays.asList(child);
+
+ parent.computeDominators();
+ assertEquals(parent, middle.dominator);
+ assertEquals(middle, child.dominator);
+ }
+
+ @Test
+ public void childSelfLoop() {
+ // --> parent --> child -\
+ // \<---/
+ // The child points back to itself.
+ Node parent = new Node("parent");
+ Node child = new Node("child");
+ parent.depends = Arrays.asList(child);
+ child.depends = Arrays.asList(child);
+
+ parent.computeDominators();
+ assertEquals(parent, child.dominator);
+ }
+
+ @Test
+ public void singleEntryLoop() {
+ // --> parent --> a --> b --> c -\
+ // \<------------/
+ // There is a loop in the graph, with only one way into the loop.
+ Node parent = new Node("parent");
+ Node a = new Node("a");
+ Node b = new Node("b");
+ Node c = new Node("c");
+ parent.depends = Arrays.asList(a);
+ a.depends = Arrays.asList(b);
+ b.depends = Arrays.asList(c);
+ c.depends = Arrays.asList(a);
+
+ parent.computeDominators();
+ assertEquals(parent, a.dominator);
+ assertEquals(a, b.dominator);
+ assertEquals(b, c.dominator);
+ }
+
+ @Test
+ public void multiEntryLoop() {
+ // --> parent --> right --> a --> b ----\
+ // \ \<-- c <---/
+ // \--> left --->--------/
+ // There is a loop in the graph, with two different ways to enter the
+ // loop.
+ Node parent = new Node("parent");
+ Node left = new Node("left");
+ Node right = new Node("right");
+ Node a = new Node("a");
+ Node b = new Node("b");
+ Node c = new Node("c");
+ parent.depends = Arrays.asList(left, right);
+ right.depends = Arrays.asList(a);
+ left.depends = Arrays.asList(c);
+ a.depends = Arrays.asList(b);
+ b.depends = Arrays.asList(c);
+ c.depends = Arrays.asList(a);
+
+ parent.computeDominators();
+ assertEquals(parent, right.dominator);
+ assertEquals(parent, left.dominator);
+ assertEquals(parent, a.dominator);
+ assertEquals(parent, c.dominator);
+ assertEquals(a, b.dominator);
+ }
+
+ @Test
+ public void dominatorOverwrite() {
+ // /---------> right <--\
+ // --> parent --> child <--/ /
+ // \---> left ---------/
+ // Test a strange case where we have had trouble in the past with a
+ // dominator getting improperly overwritten. The relevant features of this
+ // case are: 'child' is visited after 'right', 'child' is dominated by
+ // 'parent', and 'parent' revisits 'right' after visiting 'child'.
+ Node parent = new Node("parent");
+ Node right = new Node("right");
+ Node left = new Node("left");
+ Node child = new Node("child");
+ parent.depends = Arrays.asList(left, child, right);
+ left.depends = Arrays.asList(right);
+ right.depends = Arrays.asList(child);
+
+ parent.computeDominators();
+ assertEquals(parent, left.dominator);
+ assertEquals(parent, child.dominator);
+ assertEquals(parent, right.dominator);
+ }
+
+ @Test
+ public void stackOverflow() {
+ // --> a --> b --> ... --> N
+ // Verify we don't smash the stack for deep chains.
+ Node root = new Node("root");
+ Node curr = root;
+ for (int i = 0; i < 10000; ++i) {
+ Node node = new Node("n" + i);
+ curr.depends.add(node);
+ curr = node;
+ }
+
+ root.computeDominators();
+ }
+
+ @Test
+ public void hiddenRevisit() {
+ // /-> left ---->---------\
+ // --> parent \---> a --> b --> c
+ // \-> right -/
+    // Test a case we have had trouble with in the past.
+ // When a's dominator is updated from left to parent, that should trigger
+ // all reachable children's dominators to be updated too. In particular,
+ // c's dominator should be updated, even though b's dominator is
+ // unchanged.
+ Node parent = new Node("parent");
+ Node right = new Node("right");
+ Node left = new Node("left");
+ Node a = new Node("a");
+ Node b = new Node("b");
+ Node c = new Node("c");
+ parent.depends = Arrays.asList(right, left);
+ left.depends = Arrays.asList(a, c);
+ right.depends = Arrays.asList(a);
+ a.depends = Arrays.asList(b);
+ b.depends = Arrays.asList(c);
+
+ parent.computeDominators();
+ assertEquals(parent, left.dominator);
+ assertEquals(parent, right.dominator);
+ assertEquals(parent, a.dominator);
+ assertEquals(parent, c.dominator);
+ assertEquals(a, b.dominator);
+ }
+
+ @Test
+ public void preUndominatedUpdate() {
+ // /--------->--------\
+ // / /---->----\
+ // --> p -> a --> b --> c --> d --> e
+ // \---------->----------/
+    // Test a case we have had trouble with in the past.
+ // The candidate dominator for e is revised from d to a, then d is shown
+ // to be reachable from p. Make sure that causes e's dominator to be
+ // refined again from a to p. The extra nodes are there to ensure the
+ // necessary scheduling to expose the bug we had.
+ Node p = new Node("p");
+ Node a = new Node("a");
+ Node b = new Node("b");
+ Node c = new Node("c");
+ Node d = new Node("d");
+ Node e = new Node("e");
+ p.depends = Arrays.asList(d, a);
+ a.depends = Arrays.asList(e, b);
+ b.depends = Arrays.asList(d, c);
+ c.depends = Arrays.asList(d);
+ d.depends = Arrays.asList(e);
+
+ p.computeDominators();
+ assertEquals(p, a.dominator);
+ assertEquals(a, b.dominator);
+ assertEquals(b, c.dominator);
+ assertEquals(p, d.dominator);
+ assertEquals(p, e.dominator);
+ }
+}
diff --git a/tools/ahat/test/InstanceTest.java b/tools/ahat/test/InstanceTest.java
index 71b081c..f0e7f44 100644
--- a/tools/ahat/test/InstanceTest.java
+++ b/tools/ahat/test/InstanceTest.java
@@ -337,7 +337,7 @@
public void classObjToString() throws IOException {
TestDump dump = TestDump.getTestDump();
AhatInstance obj = dump.getAhatSnapshot().findClass("Main");
- assertEquals("Main", obj.toString());
+ assertEquals("class Main", obj.toString());
}
@Test
@@ -370,6 +370,18 @@
}
@Test
+ public void reverseReferences() throws IOException {
+ TestDump dump = TestDump.getTestDump();
+ AhatInstance obj = dump.getDumpedAhatInstance("anObject");
+ AhatInstance ref = dump.getDumpedAhatInstance("aReference");
+ AhatInstance weak = dump.getDumpedAhatInstance("aWeakReference");
+ assertTrue(obj.getHardReverseReferences().contains(ref));
+ assertFalse(obj.getHardReverseReferences().contains(weak));
+ assertFalse(obj.getSoftReverseReferences().contains(ref));
+ assertTrue(obj.getSoftReverseReferences().contains(weak));
+ }
+
+ @Test
public void asStringEmbedded() throws IOException {
// Set up a heap dump with an instance of java.lang.String of
// "hello" with instance id 0x42 that is backed by a char array that is
diff --git a/tools/ahat/test/Tests.java b/tools/ahat/test/Tests.java
index a95788e..a1e3246 100644
--- a/tools/ahat/test/Tests.java
+++ b/tools/ahat/test/Tests.java
@@ -24,6 +24,7 @@
args = new String[]{
"com.android.ahat.DiffFieldsTest",
"com.android.ahat.DiffTest",
+ "com.android.ahat.DominatorsTest",
"com.android.ahat.InstanceTest",
"com.android.ahat.NativeAllocationTest",
"com.android.ahat.ObjectHandlerTest",
diff --git a/tools/art b/tools/art
index 077dc4a..bc0c85e 100644
--- a/tools/art
+++ b/tools/art
@@ -278,7 +278,7 @@
-Xps-profile-path:$PROFILE_PATH \
-Xusejit:true \
"${ARGS_WITH_QUICKEN[@]}" \
- "&>" "$ANDROID_DATA/profile_gen.log"
+ &> "$ANDROID_DATA/profile_gen.log"
EXIT_STATUS=$?
if [ $EXIT_STATUS != 0 ]; then
diff --git a/tools/cpp-define-generator/constant_dexcache.def b/tools/cpp-define-generator/constant_dexcache.def
index ede16d2..743ebb7 100644
--- a/tools/cpp-define-generator/constant_dexcache.def
+++ b/tools/cpp-define-generator/constant_dexcache.def
@@ -25,4 +25,8 @@
DEFINE_EXPR(STRING_DEX_CACHE_HASH_BITS, int32_t,
art::LeastSignificantBit(art::mirror::DexCache::kDexCacheStringCacheSize))
DEFINE_EXPR(STRING_DEX_CACHE_ELEMENT_SIZE, int32_t,
- sizeof(art::mirror::StringDexCachePair))
\ No newline at end of file
+ sizeof(art::mirror::StringDexCachePair))
+DEFINE_EXPR(METHOD_DEX_CACHE_SIZE_MINUS_ONE, int32_t,
+ art::mirror::DexCache::kDexCacheMethodCacheSize - 1)
+DEFINE_EXPR(METHOD_DEX_CACHE_HASH_BITS, int32_t,
+ art::LeastSignificantBit(art::mirror::DexCache::kDexCacheMethodCacheSize))