ObjPtr<>-ify mirror::Class.
And move function definitions that rely on obj_ptr-inl.h
from class.h to class-inl.h.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 31113334
Change-Id: I5ccc765d0a02b1d37cb39ed68c17b8456faf92ea
diff --git a/runtime/art_field-inl.h b/runtime/art_field-inl.h
index 99943f5..4aeb055 100644
--- a/runtime/art_field-inl.h
+++ b/runtime/art_field-inl.h
@@ -29,6 +29,7 @@
#include "jvalue.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
+#include "obj_ptr-inl.h"
#include "thread-current-inl.h"
namespace art {
@@ -400,7 +401,7 @@
return FindFieldWithOffset<kExactOffset>(klass->GetSFields(), field_offset);
}
-inline mirror::ClassLoader* ArtField::GetClassLoader() {
+inline ObjPtr<mirror::ClassLoader> ArtField::GetClassLoader() {
return GetDeclaringClass()->GetClassLoader();
}
diff --git a/runtime/art_field.h b/runtime/art_field.h
index 43adae5..18132ed 100644
--- a/runtime/art_field.h
+++ b/runtime/art_field.h
@@ -43,7 +43,7 @@
template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);
- mirror::ClassLoader* GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<mirror::ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);
void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index a81c7e2..2670f91 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -298,13 +298,13 @@
return dex_file->GetTypeDescriptor(dex_file->GetTypeId(type_idx));
}
-inline mirror::ClassLoader* ArtMethod::GetClassLoader() {
+inline ObjPtr<mirror::ClassLoader> ArtMethod::GetClassLoader() {
DCHECK(!IsProxyMethod());
return GetDeclaringClass()->GetClassLoader();
}
template <ReadBarrierOption kReadBarrierOption>
-inline mirror::DexCache* ArtMethod::GetDexCache() {
+inline ObjPtr<mirror::DexCache> ArtMethod::GetDexCache() {
if (LIKELY(!IsObsolete())) {
ObjPtr<mirror::Class> klass = GetDeclaringClass<kReadBarrierOption>();
return klass->GetDexCache<kDefaultVerifyFlags, kReadBarrierOption>();
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index 0f25ec4..32884fa 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -105,7 +105,7 @@
return executable->GetArtMethod();
}
-mirror::DexCache* ArtMethod::GetObsoleteDexCache() {
+ObjPtr<mirror::DexCache> ArtMethod::GetObsoleteDexCache() {
DCHECK(!Runtime::Current()->IsAotCompiler()) << PrettyMethod();
DCHECK(IsObsolete());
ObjPtr<mirror::ClassExt> ext(GetDeclaringClass()->GetExtData());
@@ -212,7 +212,7 @@
result = GetInterfaceMethodIfProxy(pointer_size);
DCHECK(result != nullptr);
} else {
- mirror::IfTable* iftable = GetDeclaringClass()->GetIfTable();
+ ObjPtr<mirror::IfTable> iftable = GetDeclaringClass()->GetIfTable();
for (size_t i = 0; i < iftable->Count() && result == nullptr; i++) {
ObjPtr<mirror::Class> interface = iftable->GetInterface(i);
for (ArtMethod& interface_method : interface->GetVirtualMethods(pointer_size)) {
@@ -519,8 +519,7 @@
}
bool ArtMethod::EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params) {
- auto* dex_cache = GetDexCache();
- auto* dex_file = dex_cache->GetDexFile();
+ const DexFile* dex_file = GetDexFile();
const auto& method_id = dex_file->GetMethodId(GetDexMethodIndex());
const auto& proto_id = dex_file->GetMethodPrototype(method_id);
const dex::TypeList* proto_params = dex_file->GetProtoParameters(proto_id);
diff --git a/runtime/art_method.h b/runtime/art_method.h
index feff91a..d1647cc 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -618,11 +618,11 @@
// calling ResolveType this caused a large number of bugs at call sites.
ObjPtr<mirror::Class> ResolveReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
- mirror::ClassLoader* GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<mirror::ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);
template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- mirror::DexCache* GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
- mirror::DexCache* GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<mirror::DexCache> GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<mirror::DexCache> GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
ALWAYS_INLINE ArtMethod* GetInterfaceMethodForProxyUnchecked(PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/cha.cc b/runtime/cha.cc
index 5110b7a..68e7477 100644
--- a/runtime/cha.cc
+++ b/runtime/cha.cc
@@ -605,7 +605,7 @@
}
if (klass->IsInstantiable()) {
- auto* iftable = klass->GetIfTable();
+ ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
const size_t ifcount = klass->GetIfTableCount();
for (size_t i = 0; i < ifcount; ++i) {
mirror::Class* interface = iftable->GetInterface(i);
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 97c3b18..58ac995 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1793,11 +1793,11 @@
for (ArtField& field : klass->GetSFields()) {
CHECK_EQ(field.GetDeclaringClass(), klass);
}
- const auto pointer_size = isc.pointer_size_;
- for (auto& m : klass->GetMethods(pointer_size)) {
+ const PointerSize pointer_size = isc.pointer_size_;
+ for (ArtMethod& m : klass->GetMethods(pointer_size)) {
isc.SanityCheckArtMethod(&m, klass);
}
- auto* vtable = klass->GetVTable();
+ ObjPtr<mirror::PointerArray> vtable = klass->GetVTable();
if (vtable != nullptr) {
isc.SanityCheckArtMethodPointerArray(vtable, nullptr);
}
@@ -1812,7 +1812,7 @@
isc.SanityCheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
}
}
- mirror::IfTable* iftable = klass->GetIfTable();
+ ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
if (iftable->GetMethodArrayCount(i) > 0) {
isc.SanityCheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
@@ -6217,7 +6217,7 @@
}
} else {
DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
- auto* super_vtable = super_class->GetVTable();
+ ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTable();
CHECK(super_vtable != nullptr) << super_class->PrettyClass();
// We might need to change vtable if we have new virtual methods or new interfaces (since that
// might give us new default methods). See comment above.
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 5443ace..9c9fdbb 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -476,7 +476,7 @@
return static_cast<JDWP::JdwpTag>(descriptor[0]);
}
-static JDWP::JdwpTag BasicTagFromClass(mirror::Class* klass)
+static JDWP::JdwpTag BasicTagFromClass(ObjPtr<mirror::Class> klass)
REQUIRES_SHARED(Locks::mutator_lock_) {
std::string temp;
const char* descriptor = klass->GetDescriptor(&temp);
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index b10c9801..b4269d3 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -466,7 +466,7 @@
return (ptr != nullptr) ? native_visitor_(ptr) : nullptr;
}
- void VisitPointerArray(mirror::PointerArray* pointer_array)
+ void VisitPointerArray(ObjPtr<mirror::PointerArray> pointer_array)
REQUIRES_SHARED(Locks::mutator_lock_) {
// Fully patch the pointer array, including the `klass_` field.
PatchReferenceField</*kMayBeNull=*/ false>(pointer_array, mirror::Object::ClassOffset());
@@ -565,13 +565,13 @@
}
template <bool kMayBeNull = true>
- ALWAYS_INLINE void PatchReferenceField(mirror::Object* object, MemberOffset offset) const
+ ALWAYS_INLINE void PatchReferenceField(ObjPtr<mirror::Object> object, MemberOffset offset) const
REQUIRES_SHARED(Locks::mutator_lock_) {
- mirror::Object* old_value =
+ ObjPtr<mirror::Object> old_value =
object->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
DCHECK(kMayBeNull || old_value != nullptr);
if (!kMayBeNull || old_value != nullptr) {
- mirror::Object* new_value = heap_visitor_(old_value);
+ ObjPtr<mirror::Object> new_value = heap_visitor_(old_value.Ptr());
object->SetFieldObjectWithoutWriteBarrier</*kTransactionActive=*/ false,
/*kCheckTransaction=*/ true,
kVerifyNone>(offset, new_value);
@@ -1215,14 +1215,15 @@
CHECK(!already_marked) << "App image class already visited";
patch_object_visitor.VisitClass(klass);
// Then patch the non-embedded vtable and iftable.
- mirror::PointerArray* vtable = klass->GetVTable<kVerifyNone, kWithoutReadBarrier>();
+ ObjPtr<mirror::PointerArray> vtable =
+ klass->GetVTable<kVerifyNone, kWithoutReadBarrier>();
if (vtable != nullptr &&
- app_image_objects.InDest(vtable) &&
- !visited_bitmap->Set(vtable)) {
+ app_image_objects.InDest(vtable.Ptr()) &&
+ !visited_bitmap->Set(vtable.Ptr())) {
patch_object_visitor.VisitPointerArray(vtable);
}
- auto* iftable = klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>();
- if (iftable != nullptr && app_image_objects.InDest(iftable)) {
+ ObjPtr<mirror::IfTable> iftable = klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>();
+ if (iftable != nullptr && app_image_objects.InDest(iftable.Ptr())) {
// Avoid processing the fields of iftable since we will process them later anyways
// below.
int32_t ifcount = klass->GetIfTableCount<kVerifyNone>();
@@ -1613,20 +1614,21 @@
}
}
// Then patch the non-embedded vtable and iftable.
- mirror::PointerArray* vtable = klass->GetVTable<kVerifyNone, kWithoutReadBarrier>();
- if (vtable != nullptr && !patched_objects->Set(vtable)) {
+ ObjPtr<mirror::PointerArray> vtable =
+ klass->GetVTable<kVerifyNone, kWithoutReadBarrier>();
+ if (vtable != nullptr && !patched_objects->Set(vtable.Ptr())) {
patch_object_visitor.VisitPointerArray(vtable);
}
- auto* iftable = klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>();
+ ObjPtr<mirror::IfTable> iftable = klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>();
if (iftable != nullptr) {
int32_t ifcount = klass->GetIfTableCount<kVerifyNone>();
for (int32_t i = 0; i != ifcount; ++i) {
- mirror::PointerArray* unpatched_ifarray =
+ ObjPtr<mirror::PointerArray> unpatched_ifarray =
iftable->GetMethodArrayOrNull<kVerifyNone, kWithoutReadBarrier>(i);
if (unpatched_ifarray != nullptr) {
// The iftable has not been patched, so we need to explicitly adjust the pointer.
- mirror::PointerArray* ifarray = relocate_visitor(unpatched_ifarray);
- if (!patched_objects->Set(ifarray)) {
+ ObjPtr<mirror::PointerArray> ifarray = relocate_visitor(unpatched_ifarray.Ptr());
+ if (!patched_objects->Set(ifarray.Ptr())) {
patch_object_visitor.VisitPointerArray(ifarray);
}
}
diff --git a/runtime/hprof/hprof.cc b/runtime/hprof/hprof.cc
index 34f645b..f5ac43b 100644
--- a/runtime/hprof/hprof.cc
+++ b/runtime/hprof/hprof.cc
@@ -1254,7 +1254,7 @@
__ AddClassId(LookupClassId(klass));
__ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(klass));
__ AddClassId(LookupClassId(klass->GetSuperClass().Ptr()));
- __ AddObjectId(klass->GetClassLoader());
+ __ AddObjectId(klass->GetClassLoader().Ptr());
__ AddObjectId(nullptr); // no signer
__ AddObjectId(nullptr); // no prot domain
__ AddObjectId(nullptr); // reserved
diff --git a/runtime/interpreter/mterp/mterp.cc b/runtime/interpreter/mterp/mterp.cc
index e8d98a4..73c2cfe 100644
--- a/runtime/interpreter/mterp/mterp.cc
+++ b/runtime/interpreter/mterp/mterp.cc
@@ -787,7 +787,8 @@
// Avoid read barriers, since we need only the pointer to the native (non-movable)
// DexCache field array which we can get even through from-space objects.
ObjPtr<mirror::Class> klass = referrer->GetDeclaringClass<kWithoutReadBarrier>();
- mirror::DexCache* dex_cache = klass->GetDexCache<kDefaultVerifyFlags, kWithoutReadBarrier>();
+ ObjPtr<mirror::DexCache> dex_cache =
+ klass->GetDexCache<kDefaultVerifyFlags, kWithoutReadBarrier>();
// Try to find the desired field in DexCache.
uint32_t field_idx = kIsStatic ? inst->VRegB_21c() : inst->VRegC_22c();
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index c1b9a1a..73b9b69 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -701,9 +701,9 @@
DCHECK((cls->IsClass<kDefaultVerifyFlags>()));
// Look at the classloader of the class to know if it has been unloaded.
// This does not need a read barrier because this is called by GC.
- mirror::Object* class_loader =
+ ObjPtr<mirror::Object> class_loader =
cls->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>();
- if (class_loader == nullptr || visitor->IsMarked(class_loader) != nullptr) {
+ if (class_loader == nullptr || visitor->IsMarked(class_loader.Ptr()) != nullptr) {
// The class loader is live, update the entry if the class has moved.
mirror::Class* new_cls = down_cast<mirror::Class*>(visitor->IsMarked(cls));
// Note that new_object can be null for CMS and newly allocated objects.
diff --git a/runtime/jni/java_vm_ext.cc b/runtime/jni/java_vm_ext.cc
index f71ab6f..90ac5f6 100644
--- a/runtime/jni/java_vm_ext.cc
+++ b/runtime/jni/java_vm_ext.cc
@@ -272,7 +272,8 @@
REQUIRES_SHARED(Locks::mutator_lock_) {
std::string jni_short_name(m->JniShortName());
std::string jni_long_name(m->JniLongName());
- mirror::ClassLoader* const declaring_class_loader = m->GetDeclaringClass()->GetClassLoader();
+ ObjPtr<mirror::ClassLoader> const declaring_class_loader =
+ m->GetDeclaringClass()->GetClassLoader();
ScopedObjectAccessUnchecked soa(Thread::Current());
void* const declaring_class_loader_allocator =
Runtime::Current()->GetClassLinker()->GetAllocatorForClassLoader(declaring_class_loader);
diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h
index d9d88e1..3a0cbe2 100644
--- a/runtime/mirror/array-inl.h
+++ b/runtime/mirror/array-inl.h
@@ -270,7 +270,7 @@
}
template <VerifyObjectFlags kVerifyFlags, typename Visitor>
-inline void PointerArray::Fixup(mirror::PointerArray* dest,
+inline void PointerArray::Fixup(ObjPtr<mirror::PointerArray> dest,
PointerSize pointer_size,
const Visitor& visitor) {
for (size_t i = 0, count = GetLength(); i < count; ++i) {
diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h
index 2e894d5..593c0a8 100644
--- a/runtime/mirror/array.h
+++ b/runtime/mirror/array.h
@@ -249,7 +249,7 @@
// Fixup the pointers in the dest arrays by passing our pointers through the visitor. Only copies
// to dest if visitor(source_ptr) != source_ptr.
template <VerifyObjectFlags kVerifyFlags = kVerifyNone, typename Visitor>
- void Fixup(mirror::PointerArray* dest, PointerSize pointer_size, const Visitor& visitor)
+ void Fixup(ObjPtr<mirror::PointerArray> dest, PointerSize pointer_size, const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_);
// Works like memcpy(), except we guarantee not to allow tearing of array values (ie using smaller
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index 220d66b..57e8e8d 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -80,20 +80,25 @@
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, super_class_), new_super_class);
}
+inline bool Class::HasSuperClass() {
+ // No read barrier is needed for comparing with null. See ReadBarrierOption.
+ return GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr;
+}
+
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
-inline ClassLoader* Class::GetClassLoader() {
+inline ObjPtr<ClassLoader> Class::GetClassLoader() {
return GetFieldObject<ClassLoader, kVerifyFlags, kReadBarrierOption>(
OFFSET_OF_OBJECT_MEMBER(Class, class_loader_));
}
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
-inline ClassExt* Class::GetExtData() {
+inline ObjPtr<ClassExt> Class::GetExtData() {
return GetFieldObject<ClassExt, kVerifyFlags, kReadBarrierOption>(
OFFSET_OF_OBJECT_MEMBER(Class, ext_data_));
}
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
-inline DexCache* Class::GetDexCache() {
+inline ObjPtr<DexCache> Class::GetDexCache() {
return GetFieldObject<DexCache, kVerifyFlags, kReadBarrierOption>(
OFFSET_OF_OBJECT_MEMBER(Class, dex_cache_));
}
@@ -281,13 +286,13 @@
}
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
-inline PointerArray* Class::GetVTable() {
+inline ObjPtr<PointerArray> Class::GetVTable() {
DCHECK(IsLoaded<kVerifyFlags>() || IsErroneous<kVerifyFlags>());
return GetFieldObject<PointerArray, kVerifyFlags, kReadBarrierOption>(
OFFSET_OF_OBJECT_MEMBER(Class, vtable_));
}
-inline PointerArray* Class::GetVTableDuringLinking() {
+inline ObjPtr<PointerArray> Class::GetVTableDuringLinking() {
DCHECK(IsLoaded() || IsErroneous());
return GetFieldObject<PointerArray>(OFFSET_OF_OBJECT_MEMBER(Class, vtable_));
}
@@ -296,8 +301,18 @@
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, vtable_), new_vtable);
}
+template<VerifyObjectFlags kVerifyFlags>
+inline bool Class::ShouldHaveImt() {
+ return ShouldHaveEmbeddedVTable<kVerifyFlags>();
+}
+
+template<VerifyObjectFlags kVerifyFlags>
+inline bool Class::ShouldHaveEmbeddedVTable() {
+ return IsInstantiable<kVerifyFlags>();
+}
+
inline bool Class::HasVTable() {
- // No read barrier is needed for comparing with null.
+ // No read barrier is needed for comparing with null. See ReadBarrierOption.
return GetVTable<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr ||
ShouldHaveEmbeddedVTable();
}
@@ -355,7 +370,7 @@
}
inline void Class::SetEmbeddedVTableEntry(uint32_t i, ArtMethod* method, PointerSize pointer_size) {
- auto* vtable = GetVTableDuringLinking();
+ ObjPtr<PointerArray> vtable = GetVTableDuringLinking();
CHECK_EQ(method, vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size));
SetEmbeddedVTableEntryUnchecked(i, method, pointer_size);
}
@@ -624,10 +639,10 @@
}
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
-inline IfTable* Class::GetIfTable() {
+inline ObjPtr<IfTable> Class::GetIfTable() {
ObjPtr<IfTable> ret = GetFieldObject<IfTable, kVerifyFlags, kReadBarrierOption>(IfTableOffset());
DCHECK(ret != nullptr) << PrettyClass(this);
- return ret.Ptr();
+ return ret;
}
template<VerifyObjectFlags kVerifyFlags>
@@ -851,24 +866,29 @@
}
}
-inline ObjectArray<Class>* Class::GetProxyInterfaces() {
+inline ObjPtr<ObjectArray<Class>> Class::GetProxyInterfaces() {
CHECK(IsProxyClass());
// First static field.
- auto* field = GetStaticField(0);
+ ArtField* field = GetStaticField(0);
DCHECK_STREQ(field->GetName(), "interfaces");
MemberOffset field_offset = field->GetOffset();
return GetFieldObject<ObjectArray<Class>>(field_offset);
}
-inline ObjectArray<ObjectArray<Class>>* Class::GetProxyThrows() {
+inline ObjPtr<ObjectArray<ObjectArray<Class>>> Class::GetProxyThrows() {
CHECK(IsProxyClass());
// Second static field.
- auto* field = GetStaticField(1);
+ ArtField* field = GetStaticField(1);
DCHECK_STREQ(field->GetName(), "throws");
MemberOffset field_offset = field->GetOffset();
return GetFieldObject<ObjectArray<ObjectArray<Class>>>(field_offset);
}
+inline bool Class::IsBootStrapClassLoaded() {
+ // No read barrier is needed for comparing with null. See ReadBarrierOption.
+ return GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() == nullptr;
+}
+
inline void Class::InitializeClassVisitor::operator()(ObjPtr<Object> obj,
size_t usable_size) const {
DCHECK_LE(class_size_, usable_size);
@@ -909,7 +929,7 @@
} else if (IsArrayClass()) {
return 2;
} else if (IsProxyClass()) {
- ObjectArray<Class>* interfaces = GetProxyInterfaces();
+ ObjPtr<ObjectArray<Class>> interfaces = GetProxyInterfaces();
return interfaces != nullptr ? interfaces->GetLength() : 0;
} else {
const dex::TypeList* interfaces = GetInterfaceTypeList();
@@ -976,10 +996,44 @@
}
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
-inline Class* Class::GetComponentType() {
+inline ObjPtr<Class> Class::GetComponentType() {
return GetFieldObject<Class, kVerifyFlags, kReadBarrierOption>(ComponentTypeOffset());
}
+inline void Class::SetComponentType(ObjPtr<Class> new_component_type) {
+ DCHECK(GetComponentType() == nullptr);
+ DCHECK(new_component_type != nullptr);
+ // Component type is invariant: use non-transactional mode without check.
+ SetFieldObject<false, false>(ComponentTypeOffset(), new_component_type);
+}
+
+template<ReadBarrierOption kReadBarrierOption>
+inline size_t Class::GetComponentSize() {
+ return 1U << GetComponentSizeShift<kReadBarrierOption>();
+}
+
+template<ReadBarrierOption kReadBarrierOption>
+inline size_t Class::GetComponentSizeShift() {
+ return GetComponentType<kDefaultVerifyFlags, kReadBarrierOption>()->GetPrimitiveTypeSizeShift();
+}
+
+inline bool Class::IsObjectClass() {
+ // No read barrier is needed for comparing with null. See ReadBarrierOption.
+ return !IsPrimitive() && GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>() == nullptr;
+}
+
+inline bool Class::IsInstantiableNonArray() {
+ return !IsPrimitive() && !IsInterface() && !IsAbstract() && !IsArrayClass();
+}
+
+template<VerifyObjectFlags kVerifyFlags>
+bool Class::IsInstantiable() {
+ return (!IsPrimitive<kVerifyFlags>() &&
+ !IsInterface<kVerifyFlags>() &&
+ !IsAbstract<kVerifyFlags>()) ||
+ (IsAbstract<kVerifyFlags>() && IsArrayClass<kVerifyFlags>());
+}
+
template<VerifyObjectFlags kVerifyFlags>
inline bool Class::IsArrayClass() {
// We do not need a read barrier for comparing with null.
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 74b22c4..f916d5e 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -94,10 +94,10 @@
}
}
-ClassExt* Class::EnsureExtDataPresent(Thread* self) {
+ObjPtr<ClassExt> Class::EnsureExtDataPresent(Thread* self) {
ObjPtr<ClassExt> existing(GetExtData());
if (!existing.IsNull()) {
- return existing.Ptr();
+ return existing;
}
StackHandleScope<3> hs(self);
// Handlerize 'this' since we are allocating here.
@@ -136,7 +136,7 @@
if (throwable != nullptr) {
self->SetException(throwable.Get());
}
- return ret.Ptr();
+ return ret;
}
}
@@ -1123,7 +1123,7 @@
}
void Class::PopulateEmbeddedVTable(PointerSize pointer_size) {
- PointerArray* table = GetVTableDuringLinking();
+ ObjPtr<PointerArray> table = GetVTableDuringLinking();
CHECK(table != nullptr) << PrettyClass();
const size_t table_length = table->GetLength();
SetEmbeddedVTableLength(table_length);
@@ -1203,7 +1203,8 @@
DISALLOW_COPY_AND_ASSIGN(CopyClassVisitor);
};
-Class* Class::CopyOf(Thread* self, int32_t new_length, ImTable* imt, PointerSize pointer_size) {
+ObjPtr<Class> Class::CopyOf(
+ Thread* self, int32_t new_length, ImTable* imt, PointerSize pointer_size) {
DCHECK_GE(new_length, static_cast<int32_t>(sizeof(Class)));
// We may get copied by a compacting GC.
StackHandleScope<1> hs(self);
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 6ae3c79..687b510 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -442,41 +442,22 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- Class* GetComponentType() REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<Class> GetComponentType() REQUIRES_SHARED(Locks::mutator_lock_);
- void SetComponentType(ObjPtr<Class> new_component_type) REQUIRES_SHARED(Locks::mutator_lock_) {
- DCHECK(GetComponentType() == nullptr);
- DCHECK(new_component_type != nullptr);
- // Component type is invariant: use non-transactional mode without check.
- SetFieldObject<false, false>(ComponentTypeOffset(), new_component_type);
- }
+ void SetComponentType(ObjPtr<Class> new_component_type) REQUIRES_SHARED(Locks::mutator_lock_);
template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- size_t GetComponentSize() REQUIRES_SHARED(Locks::mutator_lock_) {
- return 1U << GetComponentSizeShift<kReadBarrierOption>();
- }
+ size_t GetComponentSize() REQUIRES_SHARED(Locks::mutator_lock_);
template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- size_t GetComponentSizeShift() REQUIRES_SHARED(Locks::mutator_lock_) {
- return GetComponentType<kDefaultVerifyFlags, kReadBarrierOption>()->GetPrimitiveTypeSizeShift();
- }
+ size_t GetComponentSizeShift() REQUIRES_SHARED(Locks::mutator_lock_);
- bool IsObjectClass() REQUIRES_SHARED(Locks::mutator_lock_) {
- // No read barrier is needed for comparing with null.
- return !IsPrimitive() && GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>() == nullptr;
- }
+ bool IsObjectClass() REQUIRES_SHARED(Locks::mutator_lock_);
- bool IsInstantiableNonArray() REQUIRES_SHARED(Locks::mutator_lock_) {
- return !IsPrimitive() && !IsInterface() && !IsAbstract() && !IsArrayClass();
- }
+ bool IsInstantiableNonArray() REQUIRES_SHARED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- bool IsInstantiable() REQUIRES_SHARED(Locks::mutator_lock_) {
- return (!IsPrimitive<kVerifyFlags>() &&
- !IsInterface<kVerifyFlags>() &&
- !IsAbstract<kVerifyFlags>()) ||
- (IsAbstract<kVerifyFlags>() && IsArrayClass<kVerifyFlags>());
- }
+ bool IsInstantiable() REQUIRES_SHARED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
ALWAYS_INLINE bool IsObjectArrayClass() REQUIRES_SHARED(Locks::mutator_lock_);
@@ -618,10 +599,7 @@
void SetSuperClass(ObjPtr<Class> new_super_class) REQUIRES_SHARED(Locks::mutator_lock_);
- bool HasSuperClass() REQUIRES_SHARED(Locks::mutator_lock_) {
- // No read barrier is needed for comparing with null.
- return GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr;
- }
+ bool HasSuperClass() REQUIRES_SHARED(Locks::mutator_lock_);
static constexpr MemberOffset SuperClassOffset() {
return MemberOffset(OFFSETOF_MEMBER(Class, super_class_));
@@ -629,7 +607,7 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- ClassLoader* GetClassLoader() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<ClassLoader> GetClassLoader() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
template <bool kCheckTransaction = true>
void SetClassLoader(ObjPtr<ClassLoader> new_cl) REQUIRES_SHARED(Locks::mutator_lock_);
@@ -652,7 +630,7 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- DexCache* GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<DexCache> GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
// Also updates the dex_cache_strings_ variable from new_dex_cache.
void SetDexCache(ObjPtr<DexCache> new_dex_cache) REQUIRES_SHARED(Locks::mutator_lock_);
@@ -768,9 +746,9 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- ALWAYS_INLINE PointerArray* GetVTable() REQUIRES_SHARED(Locks::mutator_lock_);
+ ALWAYS_INLINE ObjPtr<PointerArray> GetVTable() REQUIRES_SHARED(Locks::mutator_lock_);
- ALWAYS_INLINE PointerArray* GetVTableDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);
+ ALWAYS_INLINE ObjPtr<PointerArray> GetVTableDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);
void SetVTable(ObjPtr<PointerArray> new_vtable) REQUIRES_SHARED(Locks::mutator_lock_);
@@ -789,14 +767,10 @@
}
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- bool ShouldHaveImt() REQUIRES_SHARED(Locks::mutator_lock_) {
- return ShouldHaveEmbeddedVTable<kVerifyFlags>();
- }
+ bool ShouldHaveImt() REQUIRES_SHARED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- bool ShouldHaveEmbeddedVTable() REQUIRES_SHARED(Locks::mutator_lock_) {
- return IsInstantiable<kVerifyFlags>();
- }
+ bool ShouldHaveEmbeddedVTable() REQUIRES_SHARED(Locks::mutator_lock_);
bool HasVTable() REQUIRES_SHARED(Locks::mutator_lock_);
@@ -942,7 +916,7 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- ALWAYS_INLINE IfTable* GetIfTable() REQUIRES_SHARED(Locks::mutator_lock_);
+ ALWAYS_INLINE ObjPtr<IfTable> GetIfTable() REQUIRES_SHARED(Locks::mutator_lock_);
ALWAYS_INLINE void SetIfTable(ObjPtr<IfTable> new_iftable)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -1088,12 +1062,12 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- ClassExt* GetExtData() REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<ClassExt> GetExtData() REQUIRES_SHARED(Locks::mutator_lock_);
// Returns the ExtData for this class, allocating one if necessary. This should be the only way
// to force ext_data_ to be set. No functions are available for changing an already set ext_data_
// since doing so is not allowed.
- ClassExt* EnsureExtDataPresent(Thread* self)
+ ObjPtr<ClassExt> EnsureExtDataPresent(Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
uint16_t GetDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -1166,14 +1140,14 @@
void AssertInitializedOrInitializingInThread(Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_);
- Class* CopyOf(Thread* self, int32_t new_length, ImTable* imt, PointerSize pointer_size)
+ ObjPtr<Class> CopyOf(Thread* self, int32_t new_length, ImTable* imt, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
// For proxy class only.
- ObjectArray<Class>* GetProxyInterfaces() REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<ObjectArray<Class>> GetProxyInterfaces() REQUIRES_SHARED(Locks::mutator_lock_);
// For proxy class only.
- ObjectArray<ObjectArray<Class>>* GetProxyThrows() REQUIRES_SHARED(Locks::mutator_lock_);
+ ObjPtr<ObjectArray<ObjectArray<Class>>> GetProxyThrows() REQUIRES_SHARED(Locks::mutator_lock_);
// May cause thread suspension due to EqualParameters.
ArtMethod* GetDeclaredConstructor(Thread* self,
@@ -1201,10 +1175,7 @@
};
// Returns true if the class loader is null, ie the class loader is the boot strap class loader.
- bool IsBootStrapClassLoaded() REQUIRES_SHARED(Locks::mutator_lock_) {
- // No read barrier is needed for comparing with null.
- return GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() == nullptr;
- }
+ bool IsBootStrapClassLoaded() REQUIRES_SHARED(Locks::mutator_lock_);
static size_t ImTableEntrySize(PointerSize pointer_size) {
return static_cast<size_t>(pointer_size);
diff --git a/runtime/mirror/dex_cache_test.cc b/runtime/mirror/dex_cache_test.cc
index f7c1c02..0728bab 100644
--- a/runtime/mirror/dex_cache_test.cc
+++ b/runtime/mirror/dex_cache_test.cc
@@ -103,7 +103,7 @@
Handle<mirror::Class> klass2 =
hs.NewHandle(class_linker_->FindClass(soa.Self(), "Lpackage2/Package2;", class_loader));
ASSERT_TRUE(klass2 != nullptr);
- EXPECT_EQ(klass1->GetDexCache(), klass2->GetDexCache());
+ EXPECT_OBJ_PTR_EQ(klass1->GetDexCache(), klass2->GetDexCache());
EXPECT_NE(klass1->NumStaticFields(), 0u);
for (ArtField& field : klass2->GetSFields()) {
diff --git a/runtime/mirror/field.cc b/runtime/mirror/field.cc
index a2b51d8..1af0778 100644
--- a/runtime/mirror/field.cc
+++ b/runtime/mirror/field.cc
@@ -37,7 +37,7 @@
return &declaring_class->GetSFieldsPtr()->At(1);
}
}
- mirror::DexCache* const dex_cache = declaring_class->GetDexCache();
+ ObjPtr<mirror::DexCache> const dex_cache = declaring_class->GetDexCache();
ArtField* art_field = dex_cache->GetResolvedField(GetDexFieldIndex(), kRuntimePointerSize);
if (UNLIKELY(art_field == nullptr)) {
if (IsStatic()) {
diff --git a/runtime/mirror/object_array-inl.h b/runtime/mirror/object_array-inl.h
index b984474..054a2bb 100644
--- a/runtime/mirror/object_array-inl.h
+++ b/runtime/mirror/object_array-inl.h
@@ -46,7 +46,7 @@
template<class T> template<VerifyObjectFlags kVerifyFlags>
inline bool ObjectArray<T>::CheckAssignable(ObjPtr<T> object) {
if (object != nullptr) {
- Class* element_class = GetClass<kVerifyFlags>()->GetComponentType();
+ ObjPtr<Class> element_class = GetClass<kVerifyFlags>()->GetComponentType();
if (UNLIKELY(!object->InstanceOf(element_class))) {
ThrowArrayStoreException(object);
return false;
@@ -236,10 +236,10 @@
<< "This case should be handled with memmove that handles overlaps correctly";
// We want to avoid redundant IsAssignableFrom checks where possible, so we cache a class that
// we know is assignable to the destination array's component type.
- Class* dst_class = GetClass()->GetComponentType();
- Class* lastAssignableElementClass = dst_class;
+ ObjPtr<Class> dst_class = GetClass()->GetComponentType();
+ ObjPtr<Class> lastAssignableElementClass = dst_class;
- T* o = nullptr;
+ ObjPtr<T> o = nullptr;
int i = 0;
bool baker_non_gray_case = false;
if (kUseReadBarrier && kUseBakerReadBarrier) {
@@ -259,7 +259,7 @@
SetWithoutChecks<kTransactionActive>(dst_pos + i, nullptr);
} else {
// TODO: use the underlying class reference to avoid uncompression when not necessary.
- Class* o_class = o->GetClass();
+ ObjPtr<Class> o_class = o->GetClass();
if (LIKELY(lastAssignableElementClass == o_class)) {
SetWithoutChecks<kTransactionActive>(dst_pos + i, o);
} else if (LIKELY(dst_class->IsAssignableFrom(o_class))) {
diff --git a/runtime/trace.cc b/runtime/trace.cc
index 074e846..f2bfb0b 100644
--- a/runtime/trace.cc
+++ b/runtime/trace.cc
@@ -805,8 +805,7 @@
}
bool Trace::RegisterMethod(ArtMethod* method) {
- mirror::DexCache* dex_cache = method->GetDexCache();
- const DexFile* dex_file = dex_cache->GetDexFile();
+ const DexFile* dex_file = method->GetDexFile();
if (seen_methods_.find(dex_file) == seen_methods_.end()) {
seen_methods_.insert(std::make_pair(dex_file, new DexIndexBitSet()));
}