Merge "MIPS: Follow-up to hash-based DexCache methods array"
diff --git a/compiler/optimizing/scheduler_arm.cc b/compiler/optimizing/scheduler_arm.cc
index 627ab4e..f025c0a 100644
--- a/compiler/optimizing/scheduler_arm.cc
+++ b/compiler/optimizing/scheduler_arm.cc
@@ -167,22 +167,346 @@
HandleShiftLatencies(instr);
}
-void SchedulingLatencyVisitorARM::VisitCondition(HCondition* instr) {
- switch (instr->GetLeft()->GetType()) {
- case Primitive::kPrimLong:
- last_visited_internal_latency_ = 4 * kArmIntegerOpLatency;
+void SchedulingLatencyVisitorARM::HandleGenerateConditionWithZero(IfCondition condition) {
+ switch (condition) {
+ case kCondEQ:
+ case kCondBE:
+ case kCondNE:
+ case kCondA:
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ last_visited_latency_ = kArmIntegerOpLatency;
break;
- case Primitive::kPrimFloat:
- case Primitive::kPrimDouble:
- last_visited_internal_latency_ = 2 * kArmFloatingPointOpLatency;
+ case kCondGE:
+ // Mvn
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ FALLTHROUGH_INTENDED;
+ case kCondLT:
+ // Lsr
+ last_visited_latency_ = kArmIntegerOpLatency;
+ break;
+ case kCondAE:
+ // Trivially true.
+ // Mov
+ last_visited_latency_ = kArmIntegerOpLatency;
+ break;
+ case kCondB:
+ // Trivially false.
+ // Mov
+ last_visited_latency_ = kArmIntegerOpLatency;
break;
default:
- last_visited_internal_latency_ = 2 * kArmIntegerOpLatency;
- break;
+ LOG(FATAL) << "Unexpected condition " << condition;
+ UNREACHABLE();
}
+}
+
+void SchedulingLatencyVisitorARM::HandleGenerateLongTestConstant(HCondition* condition) {
+ DCHECK_EQ(condition->GetLeft()->GetType(), Primitive::kPrimLong);
+
+ IfCondition cond = condition->GetCondition();
+
+ HInstruction* right = condition->InputAt(1);
+
+ int64_t value = Uint64ConstantFrom(right);
+
+ // Comparisons against 0 are common enough, so codegen has special handling for them.
+ if (value == 0) {
+ switch (cond) {
+ case kCondNE:
+ case kCondA:
+ case kCondEQ:
+ case kCondBE:
+ // Orrs
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ return;
+ case kCondLT:
+ case kCondGE:
+ // Cmp
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ return;
+ case kCondB:
+ case kCondAE:
+ // Cmp
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ return;
+ default:
+ break;
+ }
+ }
+
+ switch (cond) {
+ case kCondEQ:
+ case kCondNE:
+ case kCondB:
+ case kCondBE:
+ case kCondA:
+ case kCondAE: {
+ // Cmp, IT, Cmp
+ last_visited_internal_latency_ += 3 * kArmIntegerOpLatency;
+ break;
+ }
+ case kCondLE:
+ case kCondGT:
+ // Trivially true or false.
+ if (value == std::numeric_limits<int64_t>::max()) {
+ // Cmp
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ break;
+ }
+ FALLTHROUGH_INTENDED;
+ case kCondGE:
+ case kCondLT: {
+ // Cmp, Sbcs
+ last_visited_internal_latency_ += 2 * kArmIntegerOpLatency;
+ break;
+ }
+ default:
+ LOG(FATAL) << "Unreachable";
+ UNREACHABLE();
+ }
+}
+
+void SchedulingLatencyVisitorARM::HandleGenerateLongTest(HCondition* condition) {
+ DCHECK_EQ(condition->GetLeft()->GetType(), Primitive::kPrimLong);
+
+ IfCondition cond = condition->GetCondition();
+
+ switch (cond) {
+ case kCondEQ:
+ case kCondNE:
+ case kCondB:
+ case kCondBE:
+ case kCondA:
+ case kCondAE: {
+ // Cmp, IT, Cmp
+ last_visited_internal_latency_ += 3 * kArmIntegerOpLatency;
+ break;
+ }
+ case kCondLE:
+ case kCondGT:
+ case kCondGE:
+ case kCondLT: {
+ // Cmp, Sbcs
+ last_visited_internal_latency_ += 2 * kArmIntegerOpLatency;
+ break;
+ }
+ default:
+ LOG(FATAL) << "Unreachable";
+ UNREACHABLE();
+ }
+}
+
+// The GenerateTest series of functions are all counted as internal latency.
+void SchedulingLatencyVisitorARM::HandleGenerateTest(HCondition* condition) {
+ const Primitive::Type type = condition->GetLeft()->GetType();
+
+ if (type == Primitive::kPrimLong) {
+ condition->InputAt(1)->IsConstant()
+ ? HandleGenerateLongTestConstant(condition)
+ : HandleGenerateLongTest(condition);
+ } else if (Primitive::IsFloatingPointType(type)) {
+ // GenerateVcmp + Vmrs
+ last_visited_internal_latency_ += 2 * kArmFloatingPointOpLatency;
+ } else {
+ // Cmp
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ }
+}
+
+bool SchedulingLatencyVisitorARM::CanGenerateTest(HCondition* condition) {
+ if (condition->GetLeft()->GetType() == Primitive::kPrimLong) {
+ HInstruction* right = condition->InputAt(1);
+
+ if (right->IsConstant()) {
+ IfCondition c = condition->GetCondition();
+ const uint64_t value = Uint64ConstantFrom(right);
+
+ if (c < kCondLT || c > kCondGE) {
+ if (value != 0) {
+ return false;
+ }
+ } else if (c == kCondLE || c == kCondGT) {
+ if (value < std::numeric_limits<int64_t>::max() &&
+ !codegen_->GetAssembler()->ShifterOperandCanHold(SBC, High32Bits(value + 1), kCcSet)) {
+ return false;
+ }
+ } else if (!codegen_->GetAssembler()->ShifterOperandCanHold(SBC, High32Bits(value), kCcSet)) {
+ return false;
+ }
+ }
+ }
+
+ return true;
+}
+
+void SchedulingLatencyVisitorARM::HandleGenerateConditionGeneric(HCondition* cond) {
+ HandleGenerateTest(cond);
+
+ // Unlike codegen pass, we cannot check 'out' register IsLow() here,
+ // because scheduling is before liveness (location builder) and register allocator,
+ // so we can only choose to follow one path of codegen by assuming out.IsLow() is true.
+ last_visited_internal_latency_ += 2 * kArmIntegerOpLatency;
last_visited_latency_ = kArmIntegerOpLatency;
}
+void SchedulingLatencyVisitorARM::HandleGenerateEqualLong(HCondition* cond) {
+ DCHECK_EQ(cond->GetLeft()->GetType(), Primitive::kPrimLong);
+
+ IfCondition condition = cond->GetCondition();
+
+ last_visited_internal_latency_ += 2 * kArmIntegerOpLatency;
+
+ if (condition == kCondNE) {
+ // Orrs, IT, Mov
+ last_visited_internal_latency_ += 3 * kArmIntegerOpLatency;
+ } else {
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ HandleGenerateConditionWithZero(condition);
+ }
+}
+
+void SchedulingLatencyVisitorARM::HandleGenerateLongComparesAndJumps() {
+ last_visited_internal_latency_ += 4 * kArmIntegerOpLatency;
+ last_visited_internal_latency_ += kArmBranchLatency;
+}
+
+void SchedulingLatencyVisitorARM::HandleGenerateConditionLong(HCondition* cond) {
+ DCHECK_EQ(cond->GetLeft()->GetType(), Primitive::kPrimLong);
+
+ IfCondition condition = cond->GetCondition();
+ HInstruction* right = cond->InputAt(1);
+
+ if (right->IsConstant()) {
+ // Comparisons against 0 are common enough, so codegen has special handling for them.
+ if (Uint64ConstantFrom(right) == 0) {
+ switch (condition) {
+ case kCondNE:
+ case kCondA:
+ case kCondEQ:
+ case kCondBE:
+ // Orr
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ HandleGenerateConditionWithZero(condition);
+ return;
+ case kCondLT:
+ case kCondGE:
+ FALLTHROUGH_INTENDED;
+ case kCondAE:
+ case kCondB:
+ HandleGenerateConditionWithZero(condition);
+ return;
+ case kCondLE:
+ case kCondGT:
+ default:
+ break;
+ }
+ }
+ }
+
+ if ((condition == kCondEQ || condition == kCondNE) &&
+ !CanGenerateTest(cond)) {
+ HandleGenerateEqualLong(cond);
+ return;
+ }
+
+ if (CanGenerateTest(cond)) {
+ HandleGenerateConditionGeneric(cond);
+ return;
+ }
+
+ HandleGenerateLongComparesAndJumps();
+
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ last_visited_latency_ = kArmBranchLatency;;
+}
+
+void SchedulingLatencyVisitorARM::HandleGenerateConditionIntegralOrNonPrimitive(HCondition* cond) {
+ const Primitive::Type type = cond->GetLeft()->GetType();
+
+ DCHECK(Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) << type;
+
+ if (type == Primitive::kPrimLong) {
+ HandleGenerateConditionLong(cond);
+ return;
+ }
+
+ IfCondition condition = cond->GetCondition();
+ HInstruction* right = cond->InputAt(1);
+ int64_t value;
+
+ if (right->IsConstant()) {
+ value = Uint64ConstantFrom(right);
+
+ // Comparisons against 0 are common enough, so codegen has special handling for them.
+ if (value == 0) {
+ switch (condition) {
+ case kCondNE:
+ case kCondA:
+ case kCondEQ:
+ case kCondBE:
+ case kCondLT:
+ case kCondGE:
+ case kCondAE:
+ case kCondB:
+ HandleGenerateConditionWithZero(condition);
+ return;
+ case kCondLE:
+ case kCondGT:
+ default:
+ break;
+ }
+ }
+ }
+
+ if (condition == kCondEQ || condition == kCondNE) {
+ if (condition == kCondNE) {
+ // CMP, IT, MOV.ne
+ last_visited_internal_latency_ += 2 * kArmIntegerOpLatency;
+ last_visited_latency_ = kArmIntegerOpLatency;
+ } else {
+ last_visited_internal_latency_ += kArmIntegerOpLatency;
+ HandleGenerateConditionWithZero(condition);
+ }
+ return;
+ }
+
+ HandleGenerateConditionGeneric(cond);
+}
+
+void SchedulingLatencyVisitorARM::HandleCondition(HCondition* cond) {
+ if (cond->IsEmittedAtUseSite()) {
+ last_visited_latency_ = 0;
+ return;
+ }
+
+ const Primitive::Type type = cond->GetLeft()->GetType();
+
+ if (Primitive::IsFloatingPointType(type)) {
+ HandleGenerateConditionGeneric(cond);
+ return;
+ }
+
+ DCHECK(Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) << type;
+
+ const IfCondition condition = cond->GetCondition();
+
+ if (type == Primitive::kPrimBoolean &&
+ cond->GetRight()->GetType() == Primitive::kPrimBoolean &&
+ (condition == kCondEQ || condition == kCondNE)) {
+ if (condition == kCondEQ) {
+ last_visited_internal_latency_ = kArmIntegerOpLatency;
+ }
+ last_visited_latency_ = kArmIntegerOpLatency;
+ return;
+ }
+
+ HandleGenerateConditionIntegralOrNonPrimitive(cond);
+}
+
+void SchedulingLatencyVisitorARM::VisitCondition(HCondition* instr) {
+ HandleCondition(instr);
+}
+
void SchedulingLatencyVisitorARM::VisitCompare(HCompare* instr) {
Primitive::Type type = instr->InputAt(0)->GetType();
switch (type) {
diff --git a/compiler/optimizing/scheduler_arm.h b/compiler/optimizing/scheduler_arm.h
index a9f2295..fe274d2 100644
--- a/compiler/optimizing/scheduler_arm.h
+++ b/compiler/optimizing/scheduler_arm.h
@@ -109,6 +109,17 @@
#undef DECLARE_VISIT_INSTRUCTION
private:
+ bool CanGenerateTest(HCondition* cond);
+ void HandleGenerateConditionWithZero(IfCondition cond);
+ void HandleGenerateLongTestConstant(HCondition* cond);
+ void HandleGenerateLongTest(HCondition* cond);
+ void HandleGenerateLongComparesAndJumps();
+ void HandleGenerateTest(HCondition* cond);
+ void HandleGenerateConditionGeneric(HCondition* cond);
+ void HandleGenerateEqualLong(HCondition* cond);
+ void HandleGenerateConditionLong(HCondition* cond);
+ void HandleGenerateConditionIntegralOrNonPrimitive(HCondition* cond);
+ void HandleCondition(HCondition* instr);
void HandleBinaryOperationLantencies(HBinaryOperation* instr);
void HandleBitwiseOperationLantencies(HBinaryOperation* instr);
void HandleShiftLatencies(HBinaryOperation* instr);
diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc
index dadea76..3cc41a6 100644
--- a/dex2oat/dex2oat.cc
+++ b/dex2oat/dex2oat.cc
@@ -1497,10 +1497,9 @@
Runtime* runtime = Runtime::Current();
CHECK(runtime != nullptr);
// Filter out class path classes since we don't want to include these in the image.
- std::set<DexCacheResolvedClasses> resolved_classes(
- profile_compilation_info_->GetResolvedClasses(dex_files_));
- image_classes_.reset(new std::unordered_set<std::string>(
- runtime->GetClassLinker()->GetClassDescriptorsForResolvedClasses(resolved_classes)));
+ image_classes_.reset(
+ new std::unordered_set<std::string>(
+ profile_compilation_info_->GetClassDescriptors(dex_files_)));
VLOG(compiler) << "Loaded " << image_classes_->size()
<< " image class descriptors from profile";
if (VLOG_IS_ON(compiler)) {
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 6133dd7..9e3e76c 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -5619,12 +5619,18 @@
return false;
}
// Verify
- if (super->IsFinal() || super->IsInterface()) {
+ if (super->IsFinal()) {
+ ThrowVerifyError(klass.Get(),
+ "Superclass %s of %s is declared final",
+ super->PrettyDescriptor().c_str(),
+ klass->PrettyDescriptor().c_str());
+ return false;
+ }
+ if (super->IsInterface()) {
ThrowIncompatibleClassChangeError(klass.Get(),
- "Superclass %s of %s is %s",
+ "Superclass %s of %s is an interface",
super->PrettyDescriptor().c_str(),
- klass->PrettyDescriptor().c_str(),
- super->IsFinal() ? "declared final" : "an interface");
+ klass->PrettyDescriptor().c_str());
return false;
}
if (!klass->CanAccess(super)) {
@@ -8980,51 +8986,6 @@
return ret;
}
-std::unordered_set<std::string> ClassLinker::GetClassDescriptorsForResolvedClasses(
- const std::set<DexCacheResolvedClasses>& classes) {
- ScopedTrace trace(__PRETTY_FUNCTION__);
- std::unordered_set<std::string> ret;
- Thread* const self = Thread::Current();
- std::unordered_map<std::string, const DexFile*> location_to_dex_file;
- ScopedObjectAccess soa(self);
- ScopedAssertNoThreadSuspension ants(__FUNCTION__);
- ReaderMutexLock mu(self, *Locks::dex_lock_);
- for (const ClassLinker::DexCacheData& data : GetDexCachesData()) {
- if (!self->IsJWeakCleared(data.weak_root)) {
- ObjPtr<mirror::DexCache> dex_cache = soa.Decode<mirror::DexCache>(data.weak_root);
- if (dex_cache != nullptr) {
- const DexFile* dex_file = dex_cache->GetDexFile();
- // There could be duplicates if two dex files with the same location are mapped.
- location_to_dex_file.emplace(dex_file->GetLocation(), dex_file);
- }
- }
- }
- for (const DexCacheResolvedClasses& info : classes) {
- const std::string& location = info.GetDexLocation();
- auto found = location_to_dex_file.find(location);
- if (found != location_to_dex_file.end()) {
- const DexFile* dex_file = found->second;
- VLOG(profiler) << "Found opened dex file for " << dex_file->GetLocation() << " with "
- << info.GetClasses().size() << " classes";
- DCHECK_EQ(dex_file->GetLocationChecksum(), info.GetLocationChecksum());
- for (dex::TypeIndex type_idx : info.GetClasses()) {
- if (!dex_file->IsTypeIndexValid(type_idx)) {
- // Something went bad. The profile is probably corrupted. Abort and return an emtpy set.
- LOG(WARNING) << "Corrupted profile: invalid type index "
- << type_idx.index_ << " in dex " << location;
- return std::unordered_set<std::string>();
- }
- const DexFile::TypeId& type_id = dex_file->GetTypeId(type_idx);
- const char* descriptor = dex_file->GetTypeDescriptor(type_id);
- ret.insert(descriptor);
- }
- } else {
- VLOG(class_linker) << "Failed to find opened dex file for location " << location;
- }
- }
- return ret;
-}
-
class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
public:
FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 4a99c66..cb28187 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -614,11 +614,6 @@
std::set<DexCacheResolvedClasses> GetResolvedClasses(bool ignore_boot_classes)
REQUIRES(!Locks::dex_lock_);
- // Returns the class descriptors for loaded dex files.
- std::unordered_set<std::string> GetClassDescriptorsForResolvedClasses(
- const std::set<DexCacheResolvedClasses>& classes)
- REQUIRES(!Locks::dex_lock_);
-
static bool IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
ObjPtr<mirror::ClassLoader> class_loader)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 27501b9..3bee560 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -747,7 +747,7 @@
#ifdef __aarch64__
static void FlushJitCodeCacheRange(uint8_t* code_ptr,
- uint8_t* writable_ptr ATTRIBUTE_UNUSED,
+ uint8_t* writable_ptr,
size_t code_size) {
// Cache maintenance instructions can cause permission faults when a
// page is not present (e.g. swapped out or not backed). These
@@ -765,35 +765,63 @@
// cache-line sizes for big and little cores.
static const uintptr_t kSafeCacheLineSize = 32;
- // Ensure stores are present in data cache.
- __asm __volatile("dsb sy");
+ // Ensure stores are present in L1 data cache.
+ __asm __volatile("dsb ish" ::: "memory");
- uintptr_t addr = RoundDown(reinterpret_cast<uintptr_t>(code_ptr), kSafeCacheLineSize);
- const uintptr_t limit_addr = RoundUp(reinterpret_cast<uintptr_t>(code_ptr) + code_size,
- kSafeCacheLineSize);
volatile uint8_t mutant;
- while (addr < limit_addr) {
+
+ // Push dirty cache-lines out to the point of unification (PoU). The
+ // point of unification is the first point in the cache/memory
+ // hierarchy where the instruction cache and data cache have the
+ // same view of memory. The PoU is where an instruction fetch will
+ // fetch the new code generated by the JIT.
+ //
+ // See: http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.den0024a/ch11s04.html
+ uintptr_t writable_addr = RoundDown(reinterpret_cast<uintptr_t>(writable_ptr),
+ kSafeCacheLineSize);
+ uintptr_t writable_end = RoundUp(reinterpret_cast<uintptr_t>(writable_ptr) + code_size,
+ kSafeCacheLineSize);
+ while (writable_addr < writable_end) {
// Read from the cache-line to minimize the chance that a cache
// maintenance instruction causes a fault (see kernel bug comment
// above).
- mutant = *reinterpret_cast<const uint8_t*>(addr);
+ mutant = *reinterpret_cast<const uint8_t*>(writable_addr);
+
+ // Flush cache-line
+ __asm volatile("dc cvau, %0" :: "r"(writable_addr) : "memory");
+ writable_addr += kSafeCacheLineSize;
+ }
+
+ __asm __volatile("dsb ish" ::: "memory");
+
+ uintptr_t code_addr = RoundDown(reinterpret_cast<uintptr_t>(code_ptr), kSafeCacheLineSize);
+ const uintptr_t code_end = RoundUp(reinterpret_cast<uintptr_t>(code_ptr) + code_size,
+ kSafeCacheLineSize);
+ while (code_addr < code_end) {
+ // Read from the cache-line to minimize the chance that a cache
+ // maintenance instruction causes a fault (see kernel bug comment
+ // above).
+ mutant = *reinterpret_cast<const uint8_t*>(code_addr);
// Invalidating the data cache line is only strictly necessary
// when the JIT code cache has two mappings (the default). We know
// this cache line is clean so this is just invalidating it (using
- // "dc ivac" would be preferable, but is privileged).
- __asm volatile("dc cvau, %0" :: "r"(addr));
+ // "dc ivac" would be preferable, but counts as a write and this
+ // memory may not be mapped write permission).
+ __asm volatile("dc cvau, %0" :: "r"(code_addr) : "memory");
// Invalidate the instruction cache line to force instructions in
// range to be re-fetched following update.
- __asm volatile("ic ivau, %0" :: "r"(addr));
+ __asm volatile("ic ivau, %0" :: "r"(code_addr) : "memory");
- addr += kSafeCacheLineSize;
+ code_addr += kSafeCacheLineSize;
}
- // Drain data and instruction buffers.
- __asm __volatile("dsb sy");
- __asm __volatile("isb sy");
+ // Wait for code cache invalidations to complete.
+ __asm __volatile("dsb ish" ::: "memory");
+
+ // Reset fetched instruction stream.
+ __asm __volatile("isb");
}
#else // __aarch64
diff --git a/runtime/jit/profile_compilation_info.cc b/runtime/jit/profile_compilation_info.cc
index 45c3792..0b7063d 100644
--- a/runtime/jit/profile_compilation_info.cc
+++ b/runtime/jit/profile_compilation_info.cc
@@ -1652,4 +1652,27 @@
return &(inline_cache->FindOrAdd(dex_pc, DexPcData(&arena_))->second);
}
+std::unordered_set<std::string> ProfileCompilationInfo::GetClassDescriptors(
+ const std::vector<const DexFile*>& dex_files) {
+ std::unordered_set<std::string> ret;
+ for (const DexFile* dex_file : dex_files) {
+ const DexFileData* data = FindDexData(dex_file);
+ if (data != nullptr) {
+ for (dex::TypeIndex type_idx : data->class_set) {
+ if (!dex_file->IsTypeIndexValid(type_idx)) {
+ // Something went bad. The profile is probably corrupted. Abort and return an empty set.
+ LOG(WARNING) << "Corrupted profile: invalid type index "
+ << type_idx.index_ << " in dex " << dex_file->GetLocation();
+ return std::unordered_set<std::string>();
+ }
+ const DexFile::TypeId& type_id = dex_file->GetTypeId(type_idx);
+ ret.insert(dex_file->GetTypeDescriptor(type_id));
+ }
+ } else {
+ VLOG(compiler) << "Failed to find profile data for " << dex_file->GetLocation();
+ }
+ }
+ return ret;
+}
+
} // namespace art
diff --git a/runtime/jit/profile_compilation_info.h b/runtime/jit/profile_compilation_info.h
index 079ce8d..4ab8be8 100644
--- a/runtime/jit/profile_compilation_info.h
+++ b/runtime/jit/profile_compilation_info.h
@@ -380,6 +380,9 @@
ArenaAllocator* GetArena() { return &arena_; }
+ // Return all of the class descriptors in the profile for a set of dex files.
+ std::unordered_set<std::string> GetClassDescriptors(const std::vector<const DexFile*>& dex_files);
+
private:
enum ProfileLoadSatus {
kProfileLoadWouldOverwiteData,
diff --git a/runtime/verifier/reg_type_test.cc b/runtime/verifier/reg_type_test.cc
index fef13a2..1aa0966 100644
--- a/runtime/verifier/reg_type_test.cc
+++ b/runtime/verifier/reg_type_test.cc
@@ -689,6 +689,9 @@
};
TEST_F(RegTypeOOMTest, ClassJoinOOM) {
+ // TODO: Figure out why FillHeap isn't good enough under CMS.
+ TEST_DISABLED_WITHOUT_BAKER_READ_BARRIERS();
+
// Tests that we don't abort with OOMs.
ArenaStack stack(Runtime::Current()->GetArenaPool());
diff --git a/test/066-mismatched-super/expected.txt b/test/066-mismatched-super/expected.txt
index 09c0596..f5b15ca 100644
--- a/test/066-mismatched-super/expected.txt
+++ b/test/066-mismatched-super/expected.txt
@@ -1 +1,2 @@
Got expected ICCE
+Got expected VerifyError
diff --git a/test/066-mismatched-super/info.txt b/test/066-mismatched-super/info.txt
index 7865ffc..2b70e06 100644
--- a/test/066-mismatched-super/info.txt
+++ b/test/066-mismatched-super/info.txt
@@ -1,2 +1,5 @@
-This tests what happens when class A extends abstract class B, but somebody
-turns B into an interface without rebuilding A.
+This tests two cases:
+1. What happens when class A extends abstract class B, but somebody
+ turns B into an interface without rebuilding A.
+2. What happens when class A extends a class B, but somebody
+ turns B into a final class without rebuilding A.
diff --git a/test/066-mismatched-super/src/Indirect.java b/test/066-mismatched-super/src/ExtendsFinal.java
similarity index 65%
copy from test/066-mismatched-super/src/Indirect.java
copy to test/066-mismatched-super/src/ExtendsFinal.java
index 023e409..2f53b3b 100644
--- a/test/066-mismatched-super/src/Indirect.java
+++ b/test/066-mismatched-super/src/ExtendsFinal.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008 The Android Open Source Project
+ * Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,14 +14,5 @@
* limitations under the License.
*/
-/**
- * Error indirection class.
- *
- * Some VMs will load this class and fail on the "new" call, others will
- * refuse to load this class at all.
- */
-public class Indirect {
- public static void main() {
- Base base = new Base();
- }
+public class ExtendsFinal extends Final {
}
diff --git a/test/066-mismatched-super/src/Indirect.java b/test/066-mismatched-super/src/Final.java
similarity index 65%
rename from test/066-mismatched-super/src/Indirect.java
rename to test/066-mismatched-super/src/Final.java
index 023e409..a44d096 100644
--- a/test/066-mismatched-super/src/Indirect.java
+++ b/test/066-mismatched-super/src/Final.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008 The Android Open Source Project
+ * Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,14 +14,5 @@
* limitations under the License.
*/
-/**
- * Error indirection class.
- *
- * Some VMs will load this class and fail on the "new" call, others will
- * refuse to load this class at all.
- */
-public class Indirect {
- public static void main() {
- Base base = new Base();
- }
+public /* final */ class Final {
}
diff --git a/test/066-mismatched-super/src/Main.java b/test/066-mismatched-super/src/Main.java
index 55d0bab..6ae1198 100644
--- a/test/066-mismatched-super/src/Main.java
+++ b/test/066-mismatched-super/src/Main.java
@@ -20,10 +20,16 @@
public class Main {
public static void main(String[] args) {
try {
- Indirect.main();
+ Base base = new Base();
System.out.println("Succeeded unexpectedly");
} catch (IncompatibleClassChangeError icce) {
System.out.println("Got expected ICCE");
}
+ try {
+ ExtendsFinal ef = new ExtendsFinal();
+ System.out.println("Succeeded unexpectedly");
+ } catch (VerifyError ve) {
+ System.out.println("Got expected VerifyError");
+ }
}
}
diff --git a/test/066-mismatched-super/src/Indirect.java b/test/066-mismatched-super/src2/Final.java
similarity index 65%
copy from test/066-mismatched-super/src/Indirect.java
copy to test/066-mismatched-super/src2/Final.java
index 023e409..766da9b 100644
--- a/test/066-mismatched-super/src/Indirect.java
+++ b/test/066-mismatched-super/src2/Final.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008 The Android Open Source Project
+ * Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,14 +14,5 @@
* limitations under the License.
*/
-/**
- * Error indirection class.
- *
- * Some VMs will load this class and fail on the "new" call, others will
- * refuse to load this class at all.
- */
-public class Indirect {
- public static void main() {
- Base base = new Base();
- }
+public final class Final {
}
diff --git a/test/knownfailures.json b/test/knownfailures.json
index 3edb0a8..53611a8 100644
--- a/test/knownfailures.json
+++ b/test/knownfailures.json
@@ -600,15 +600,6 @@
},
{
"tests": [
- "567-checker-compare",
- "988-method-trace"
- ],
- "description": "Checker tests fail because desugar lowers Long.compare to lcmp",
- "env_vars": {"ANDROID_COMPILE_WITH_JACK": "false"},
- "bug": "b/63078894"
- },
- {
- "tests": [
"536-checker-needs-access-check",
"537-checker-inline-and-unverified",
"569-checker-pattern-replacement",
diff --git a/tools/ahat/src/ObjectHandler.java b/tools/ahat/src/ObjectHandler.java
index cc55b7a..8262910 100644
--- a/tools/ahat/src/ObjectHandler.java
+++ b/tools/ahat/src/ObjectHandler.java
@@ -110,7 +110,7 @@
private static void printClassInstanceFields(Doc doc, Query query, AhatClassInstance inst) {
doc.section("Fields");
AhatInstance base = inst.getBaseline();
- printFields(doc, query, INSTANCE_FIELDS_ID, !base.isPlaceHolder(),
+ printFields(doc, query, INSTANCE_FIELDS_ID, inst != base && !base.isPlaceHolder(),
inst.asClassInstance().getInstanceFields(),
base.isPlaceHolder() ? null : base.asClassInstance().getInstanceFields());
}
@@ -211,7 +211,7 @@
doc.section("Static Fields");
AhatInstance base = clsobj.getBaseline();
- printFields(doc, query, STATIC_FIELDS_ID, !base.isPlaceHolder(),
+ printFields(doc, query, STATIC_FIELDS_ID, clsobj != base && !base.isPlaceHolder(),
clsobj.getStaticFieldValues(),
base.isPlaceHolder() ? null : base.asClassObj().getStaticFieldValues());
}