Replace StackHandleScopeCollection with VariableSizedHandleScope

VariableSizedHandleScope's internal handle scopes are not pushed
directly on the thread. This means that it can safely be intermixed
with other types of handle scopes.
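
For illustration, a minimal sketch of the intermixing this enables,
mirroring the new test (assumes an attached thread with the runtime
started, as CommonRuntimeTest provides):

  ScopedObjectAccess soa(Thread::Current());
  VariableSizedHandleScope vhs(soa.Self());
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::Class> c = vhs.NewHandle(
      class_linker->FindSystemClass(soa.Self(), "Ljava/lang/Object;"));
  // Pushing a fixed size scope on the thread afterwards does not get in
  // the way, because vhs chains its internal scopes itself.
  StackHandleScope<1> shs(soa.Self());
  shs.NewHandle(c->AllocObject(soa.Self()));
  // vhs can still grow while shs is live.
  vhs.NewHandle(c->AllocObject(soa.Self()));
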
Added test.

Test: clean-oat-host && test-art-host
Change-Id: Id2fd1155788428f394d49615d337d9134824c8f0
diff --git a/runtime/handle_scope_test.cc b/runtime/handle_scope_test.cc
index c269a37..92063c4 100644
--- a/runtime/handle_scope_test.cc
+++ b/runtime/handle_scope_test.cc
@@ -15,6 +15,7 @@
 */

#include "base/enums.h"
+#include "common_runtime_test.h"
#include "gtest/gtest.h"
#include "handle_scope-inl.h"
#include "scoped_thread_state_change-inl.h"
@@ -22,51 +23,85 @@
namespace art {

-// Handle scope with a fixed size which is allocated on the stack.
-template<size_t kNumReferences>
-class NoThreadStackHandleScope : public HandleScope {
- public:
- explicit NoThreadStackHandleScope(HandleScope* link) : HandleScope(link, kNumReferences) {
- }
- ~NoThreadStackHandleScope() {
- }
-
- private:
- // references_storage_ needs to be first so that it matches the address of references_
- StackReference<mirror::Object> references_storage_[kNumReferences];
-};
+class HandleScopeTest : public CommonRuntimeTest {};

// Test the offsets computed for members of HandleScope. Because of cross-compiling
// it is impossible to use OFFSETOF_MEMBER, so we do some reasonable computations ourselves. This
// test checks whether we do the right thing.
-TEST(HandleScopeTest, Offsets) NO_THREAD_SAFETY_ANALYSIS {
+TEST_F(HandleScopeTest, Offsets) {
+ ScopedObjectAccess soa(Thread::Current());
+ ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  // As the members of HandleScope are private, we cannot use OFFSETOF_MEMBER
  // here. So do the inverse: set some data, and access it through pointers created from the offsets.
- NoThreadStackHandleScope<0x9ABC> test_table(reinterpret_cast<HandleScope*>(0x5678));
- test_table.SetReference(0, reinterpret_cast<mirror::Object*>(0x1234));
+ StackHandleScope<0x1> hs0(soa.Self());
+ static const size_t kNumReferences = 0x9ABC;
+ StackHandleScope<kNumReferences> test_table(soa.Self());
+ ObjPtr<mirror::Class> c = class_linker->FindSystemClass(soa.Self(), "Ljava/lang/Object;");
+ test_table.SetReference(0, c.Ptr());
  uint8_t* table_base_ptr = reinterpret_cast<uint8_t*>(&test_table);

  {
- uintptr_t* link_ptr = reinterpret_cast<uintptr_t*>(table_base_ptr +
+ BaseHandleScope** link_ptr = reinterpret_cast<BaseHandleScope**>(table_base_ptr +
        HandleScope::LinkOffset(kRuntimePointerSize));
- EXPECT_EQ(*link_ptr, static_cast<size_t>(0x5678));
+ EXPECT_EQ(*link_ptr, &hs0);
  }

  {
    uint32_t* num_ptr = reinterpret_cast<uint32_t*>(table_base_ptr +
        HandleScope::NumberOfReferencesOffset(kRuntimePointerSize));
- EXPECT_EQ(*num_ptr, static_cast<size_t>(0x9ABC));
+ EXPECT_EQ(*num_ptr, static_cast<size_t>(kNumReferences));
  }

  {
- // Assume sizeof(StackReference<mirror::Object>) == sizeof(uint32_t)
- // TODO: How can we make this assumption-less but still access directly and fully?
- EXPECT_EQ(sizeof(StackReference<mirror::Object>), sizeof(uint32_t));
-
- uint32_t* ref_ptr = reinterpret_cast<uint32_t*>(table_base_ptr +
+ auto* ref_ptr = reinterpret_cast<StackReference<mirror::Object>*>(table_base_ptr +
        HandleScope::ReferencesOffset(kRuntimePointerSize));
- EXPECT_EQ(*ref_ptr, static_cast<uint32_t>(0x1234));
+ EXPECT_OBJ_PTR_EQ(ref_ptr->AsMirrorPtr(), c);
+ }
+}
+
+class CollectVisitor {
+ public:
+ void VisitRootIfNonNull(StackReference<mirror::Object>* ref) {
+ if (!ref->IsNull()) {
+ visited.insert(ref);
+ }
+ ++total_visited;
+ }
+
+ std::set<StackReference<mirror::Object>*> visited;
+ size_t total_visited = 0; // including null.
+};
+
+// Test functionality of variable sized handle scopes.
+TEST_F(HandleScopeTest, VariableSized) {
+ ScopedObjectAccess soa(Thread::Current());
+ VariableSizedHandleScope hs(soa.Self());
+ ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
+ Handle<mirror::Class> c =
+ hs.NewHandle(class_linker->FindSystemClass(soa.Self(), "Ljava/lang/Object;"));
+ // Test nested scopes.
+ StackHandleScope<1> inner(soa.Self());
+ inner.NewHandle(c->AllocObject(soa.Self()));
+ // Add a bunch of handles and make sure callbacks work.
+ static const size_t kNumHandles = 100;
+ std::vector<Handle<mirror::Object>> handles;
+ for (size_t i = 0; i < kNumHandles; ++i) {
+ BaseHandleScope* base = &hs;
+ ObjPtr<mirror::Object> o = c->AllocObject(soa.Self());
+ handles.push_back(hs.NewHandle(o));
+ EXPECT_OBJ_PTR_EQ(o, handles.back().Get());
+ EXPECT_TRUE(hs.Contains(handles.back().GetReference()));
+ EXPECT_TRUE(base->Contains(handles.back().GetReference()));
+ EXPECT_EQ(hs.NumberOfReferences(), base->NumberOfReferences());
+ }
+ CollectVisitor visitor;
+ BaseHandleScope* base = &hs;
+ base->VisitRoots(visitor);
+ EXPECT_LE(visitor.visited.size(), base->NumberOfReferences());
+ EXPECT_EQ(visitor.total_visited, base->NumberOfReferences());
+ for (StackReference<mirror::Object>* ref : visitor.visited) {
+ EXPECT_TRUE(base->Contains(ref));
  }
}
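
For context (not shown in the diff above, which only covers the test), a
minimal sketch of how a call site migrates under this change; the shape
of the removed StackHandleScopeCollection API in the comments below is an
assumption:

  ScopedObjectAccess soa(Thread::Current());
  ObjPtr<mirror::Class> c = Runtime::Current()->GetClassLinker()->FindSystemClass(
      soa.Self(), "Ljava/lang/Object;");
  // Before (assumed interface of the removed type):
  //   StackHandleScopeCollection handles(soa.Self());
  //   Handle<mirror::Class> klass = handles.NewHandle(c);
  // After: same call pattern, but the scope's internal storage is chained
  // inside the object instead of being pushed on the thread.
  VariableSizedHandleScope handles(soa.Self());
  Handle<mirror::Class> klass = handles.NewHandle(c);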