Fix and optimize verify object.
VerifyObject no longer resides in Heap. You can now enable
VerifyObject for non-debug builds. VerifyStack is still slow, so it
is now guarded by its own flag.
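A rough sketch of the compile-time support this implies; the
kVerifyObjectSupport and kVerifyObjectMode* names come from the diff
below, but their exact definitions here are assumptions:

    // Sketch only: an assumed shape for the verification modes that
    // heap.cc compares against below.
    enum VerifyObjectMode {
      kVerifyObjectModeDisabled,  // No object verification.
      kVerifyObjectModeFast,      // Cheap checks only, e.g. class of class.
      kVerifyObjectModeAll,       // Slow checks too, e.g. live bitmap tests.
    };

    // Hypothetical stand-in; ART derives this from the build type.
    static constexpr bool kIsDebugBuild = false;

    // A compile-time constant instead of a Heap member: a non-debug
    // build can now opt in, and the compiler can drop the checks
    // entirely when verification is disabled.
    static constexpr VerifyObjectMode kVerifyObjectSupport =
        kIsDebugBuild ? kVerifyObjectModeAll : kVerifyObjectModeDisabled;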
Fixed the image writer to skip verification in places where it
would fail due to invalid reads.
Fixed RosAlloc to use SizeOf, which doesn't call VerifyObject.
Added a flags parameter to some of the mirror getters / setters so
that VerifyObject can be selectively disabled on certain calls, as
sketched below.
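A minimal sketch of the shape of these accessors; kVerifyNone and the
templated GetFieldObject / GetClass calls match their uses in the diff
below, while the flag bits and bodies here are assumptions:

    #include <cstddef>
    #include <cstdint>

    // Assumed flag bits; kVerifyNone appears in the diff, and the other
    // names follow the reads / this / writes split described below.
    enum VerifyObjectFlags : uint32_t {
      kVerifyNone   = 0x0,
      kVerifyThis   = 0x1,  // Verify the receiver object itself.
      kVerifyReads  = 0x2,  // Verify references read out of the object.
      kVerifyWrites = 0x4,  // Verify references written into the object.
    };

    void VerifyObject(const void* ref);  // Provided by the runtime.

    class Object {
     public:
      // Callers that know verification would crash or be redundant
      // (image writer, GC scanning) pass kVerifyNone explicitly; all
      // other call sites keep the default checks.
      template <uint32_t kVerifyFlags = kVerifyThis | kVerifyReads>
      Object* GetFieldObject(size_t offset) {
        if (kVerifyFlags & kVerifyThis) {
          VerifyObject(this);
        }
        Object* ref = *reinterpret_cast<Object**>(
            reinterpret_cast<uint8_t*>(this) + offset);
        if (kVerifyFlags & kVerifyReads) {
          VerifyObject(ref);
        }
        return ref;
      }
    };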
Optimized the GC to not verify each object multiple times during
object scanning when VerifyObject is enabled (see the usage example
below).
Added three verification options: verify reads, verify this, and
verify writes. This lets you select how much verification the mirror
getters and setters perform.
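For example, a scanning loop that has already verified an object can
turn the redundant checks off on its follow-up reads, in the style of
the GetClass<kVerifyNone>() calls in the diff. A usage example built
on the sketch above; VisitReferences is a hypothetical stand-in for
the GC's reference walk:

    void VisitReferences(Object* obj, Object* klass);  // Hypothetical.

    void ScanObject(Object* obj) {
      VerifyObject(obj);  // Verify exactly once per scanned object...
      // ...then read its fields with verification disabled, instead of
      // re-verifying obj on every accessor call during the walk.
      Object* klass = obj->GetFieldObject<kVerifyNone>(0 /* class offset */);
      VisitReferences(obj, klass);
    }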
Removed some useless DCHECKs which slowed debug builds without
providing any benefit.
TODO: RosAlloc verification doesn't currently work with
VerifyObject.
Bug: 12934910
Bug: 12879358
Change-Id: Ic61033104dfc334543f89b0fc0ad8cd4f4015d69
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 8c89cdc..b970df3 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -135,7 +135,7 @@
* searching.
*/
max_allocation_stack_size_(kGCALotMode ? kGcAlotInterval
- : (kDesiredHeapVerification > kVerifyAllFast) ? KB : MB),
+ : (kVerifyObjectSupport > kVerifyObjectModeFast) ? KB : MB),
current_allocator_(kAllocatorTypeDlMalloc),
current_non_moving_allocator_(kAllocatorTypeNonMoving),
bump_pointer_space_(nullptr),
@@ -150,7 +150,7 @@
target_utilization_(target_utilization),
total_wait_time_(0),
total_allocation_time_(0),
- verify_object_mode_(kHeapVerificationNotPermitted),
+ verify_object_mode_(kVerifyObjectModeDisabled),
disable_moving_gc_count_(0),
running_on_valgrind_(RUNNING_ON_VALGRIND),
use_tlab_(use_tlab) {
@@ -314,9 +314,7 @@
bool Heap::IsCompilingBoot() const {
for (const auto& space : continuous_spaces_) {
- if (space->IsImageSpace()) {
- return false;
- } else if (space->IsZygoteSpace()) {
+ if (space->IsImageSpace() || space->IsZygoteSpace()) {
return false;
}
}
@@ -823,14 +821,16 @@
return false;
}
if (bump_pointer_space_ != nullptr && bump_pointer_space_->HasAddress(obj)) {
- mirror::Class* klass = obj->GetClass();
+ mirror::Class* klass = obj->GetClass<kVerifyNone>();
if (obj == klass) {
// This case happens for java.lang.Class.
return true;
}
return VerifyClassClass(klass) && IsLiveObjectLocked(klass);
} else if (temp_space_ != nullptr && temp_space_->HasAddress(obj)) {
- return false;
+ // If we are in the allocated region of the temp space, then we are probably live (e.g. during
+ // a GC). When a GC isn't running, End() - Begin() is 0, which means no objects are contained.
+ return temp_space_->Contains(obj);
}
space::ContinuousSpace* c_space = FindContinuousSpaceFromObject(obj, true);
space::DiscontinuousSpace* d_space = NULL;
@@ -886,25 +886,6 @@
return false;
}
-void Heap::VerifyObjectImpl(mirror::Object* obj) {
- if (Thread::Current() == NULL ||
- Runtime::Current()->GetThreadList()->GetLockOwner() == Thread::Current()->GetTid()) {
- return;
- }
- VerifyObjectBody(obj);
-}
-
-bool Heap::VerifyClassClass(const mirror::Class* c) const {
- // Note: we don't use the accessors here as they have internal sanity checks that we don't want
- // to run
- const byte* raw_addr =
- reinterpret_cast<const byte*>(c) + mirror::Object::ClassOffset().Int32Value();
- mirror::Class* c_c = reinterpret_cast<mirror::HeapReference<mirror::Class> const *>(raw_addr)->AsMirrorPtr();
- raw_addr = reinterpret_cast<const byte*>(c_c) + mirror::Object::ClassOffset().Int32Value();
- mirror::Class* c_c_c = reinterpret_cast<mirror::HeapReference<mirror::Class> const *>(raw_addr)->AsMirrorPtr();
- return c_c == c_c_c;
-}
-
void Heap::DumpSpaces(std::ostream& stream) {
for (const auto& space : continuous_spaces_) {
accounting::SpaceBitmap* live_bitmap = space->GetLiveBitmap();
@@ -923,36 +904,30 @@
}
void Heap::VerifyObjectBody(mirror::Object* obj) {
- CHECK(IsAligned<kObjectAlignment>(obj)) << "Object isn't aligned: " << obj;
+ if (this == nullptr || verify_object_mode_ == kVerifyObjectModeDisabled) {
+ return;
+ }
// Ignore early dawn of the universe verifications.
if (UNLIKELY(static_cast<size_t>(num_bytes_allocated_.Load()) < 10 * KB)) {
return;
}
- const byte* raw_addr = reinterpret_cast<const byte*>(obj) +
- mirror::Object::ClassOffset().Int32Value();
- mirror::Class* c = reinterpret_cast<mirror::HeapReference<mirror::Class> const *>(raw_addr)->AsMirrorPtr();
- if (UNLIKELY(c == NULL)) {
- LOG(FATAL) << "Null class in object: " << obj;
- } else if (UNLIKELY(!IsAligned<kObjectAlignment>(c))) {
- LOG(FATAL) << "Class isn't aligned: " << c << " in object: " << obj;
- }
+ CHECK(IsAligned<kObjectAlignment>(obj)) << "Object isn't aligned: " << obj;
+ mirror::Class* c = obj->GetFieldObject<mirror::Class, kVerifyNone>(
+ mirror::Object::ClassOffset(), false);
+ CHECK(c != nullptr) << "Null class in object " << obj;
+ CHECK(IsAligned<kObjectAlignment>(c)) << "Class " << c << " not aligned in object " << obj;
CHECK(VerifyClassClass(c));
- if (verify_object_mode_ > kVerifyAllFast) {
- // TODO: the bitmap tests below are racy if VerifyObjectBody is called without the
- // heap_bitmap_lock_.
+ if (verify_object_mode_ > kVerifyObjectModeFast) {
+ // Note: the bitmap tests below are racy since we don't hold the heap bitmap lock.
if (!IsLiveObjectLocked(obj)) {
DumpSpaces();
LOG(FATAL) << "Object is dead: " << obj;
}
- if (!IsLiveObjectLocked(c)) {
- LOG(FATAL) << "Class of object is dead: " << c << " in object: " << obj;
- }
}
}
void Heap::VerificationCallback(mirror::Object* obj, void* arg) {
- DCHECK(obj != NULL);
reinterpret_cast<Heap*>(arg)->VerifyObjectBody(obj);
}
@@ -1790,7 +1765,7 @@
if (bitmap == nullptr) {
LOG(ERROR) << "Object " << obj << " has no bitmap";
- if (!heap_->VerifyClassClass(obj->GetClass())) {
+ if (!VerifyClassClass(obj->GetClass())) {
LOG(ERROR) << "Object " << obj << " failed class verification!";
}
} else {