/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "subtype_check.h"

#include <cstring>
#include <limits>
#include <memory>
#include <ostream>
#include <sstream>
#include <string>
#include <vector>

#include "android-base/logging.h"
#include "gtest/gtest.h"

namespace art {

constexpr size_t BitString::kBitSizeAtPosition[BitString::kCapacity];
constexpr size_t BitString::kCapacity;
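
// Note: out-of-line definitions like these are required for odr-used static
// constexpr data members before C++17; they provide the storage for the
// in-class declarations of BitString.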

struct MockClass {
  explicit MockClass(MockClass* parent, size_t x = 0, size_t y = 0) {
    parent_ = parent;
    memset(&subtype_check_info_and_status_, 0u, sizeof(subtype_check_info_and_status_));

    // Start the numbering at '1' to match the bitstring numbering.
    // A bitstring numbering never starts at '0', which just means 'no value'.
    x_ = 1;
    if (parent_ != nullptr) {
      if (parent_->GetMaxChild() != nullptr) {
        x_ = parent_->GetMaxChild()->x_ + 1u;
      }

      parent_->children_.push_back(this);
      if (parent_->path_to_root_ != "") {
        path_to_root_ = parent_->path_to_root_ + ",";
      }
      path_to_root_ += std::to_string(x_);
    } else {
      path_to_root_ = "";  // The root has no path.
    }
    y_ = y;
    UNUSED(x);
  }
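
  // Illustration of the numbering above (hypothetical tree): for a root R
  // with children A and B, and a grandchild C under A:
  //   R: x_ = 1, path_to_root_ = ""     (the root has no path)
  //   A: x_ = 1, path_to_root_ = "1"
  //   B: x_ = 2, path_to_root_ = "2"
  //   C: x_ = 1, path_to_root_ = "1,1"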

  MockClass() : MockClass(nullptr) {
  }

  ///////////////////////////////////////////////////////////////
  // Implementation of the SubtypeCheck::KlassT static interface.
  ///////////////////////////////////////////////////////////////

  MockClass* GetSuperClass() const {
    return parent_;
  }

  bool HasSuperClass() const {
    return GetSuperClass() != nullptr;
  }

  size_t Depth() const {
    if (parent_ == nullptr) {
      return 0u;
    } else {
      return parent_->Depth() + 1u;
    }
  }

  std::string PrettyClass() const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return path_to_root_;
  }

  int32_t GetField32Volatile(art::MemberOffset offset = art::MemberOffset(0u)) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    UNUSED(offset);
    int32_t field_32 = 0;
    memcpy(&field_32, &subtype_check_info_and_status_, sizeof(int32_t));
    return field_32;
  }

  template <bool kTransactionActive>
  bool CasFieldWeakSequentiallyConsistent32(art::MemberOffset offset,
                                            int32_t old_value,
                                            int32_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (old_value == GetField32Volatile(offset)) {
      memcpy(&subtype_check_info_and_status_, &new_value, sizeof(int32_t));
      return true;
    }
    return false;
  }

  MemberOffset StatusOffset() const {
    return MemberOffset(0);  // Doesn't matter. We ignore the offset.
  }
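
  // Sketch of how a caller would typically drive the mock CAS above
  // ('Transform' is a hypothetical update function, not part of this file):
  //   int32_t old_value;
  //   int32_t new_value;
  //   do {
  //     old_value = klass->GetField32Volatile(offset);
  //     new_value = Transform(old_value);
  //   } while (!klass->CasFieldWeakSequentiallyConsistent32</*kTransactionActive*/ false>(
  //       offset, old_value, new_value));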

  ///////////////////////////////////////////////////////////////
  // Convenience functions to make the testing easier.
  ///////////////////////////////////////////////////////////////

  size_t GetNumberOfChildren() const {
    return children_.size();
  }

  MockClass* GetParent() const {
    return parent_;
  }

  MockClass* GetMaxChild() const {
    if (GetNumberOfChildren() > 0u) {
      return GetChild(GetNumberOfChildren() - 1);
    }
    return nullptr;
  }

  MockClass* GetChild(size_t idx) const {
    if (idx >= GetNumberOfChildren()) {
      return nullptr;
    }
    return children_[idx];
  }

  // Traverse the child at index "x" at each level.
  // Once we get to level == depth, return that class.
  MockClass* FindChildAt(size_t x, size_t depth) {
    if (Depth() == depth) {
      return this;
    } else if (GetChild(x) != nullptr) {  // Null check avoids dereferencing a missing child.
      return GetChild(x)->FindChildAt(x, depth);
    }
    return nullptr;
  }

  template <typename T>
  MockClass* Visit(T visitor, bool recursive = true) {
    if (!visitor(this)) {
      return this;
    }

    if (!recursive) {
      return this;
    }

    for (MockClass* child : children_) {
      MockClass* visit_res = child->Visit(visitor);
      if (visit_res != nullptr) {
        return visit_res;
      }
    }

    return nullptr;
  }

  size_t GetX() const {
    return x_;
  }

  bool SlowIsSubtypeOf(const MockClass* target) const {
    DCHECK(target != nullptr);
    const MockClass* kls = this;
    while (kls != nullptr) {
      if (kls == target) {
        return true;
      }
      kls = kls->GetSuperClass();
    }

    return false;
  }

  std::string ToDotGraph() const {
    std::stringstream ss;
    ss << std::endl;
    ss << "digraph MockClass {" << std::endl;
    ss << "  node [fontname=\"Arial\"];" << std::endl;
    ToDotGraphImpl(ss);
    ss << "}" << std::endl;
    return ss.str();
  }

  void ToDotGraphImpl(std::ostream& os) const {
    for (MockClass* child : children_) {
      // Use double quotes so the emitted edges are valid Graphviz DOT syntax.
      os << "  \"" << path_to_root_ << "\" -> \"" << child->path_to_root_ << "\";" << std::endl;
      child->ToDotGraphImpl(os);
    }
  }

  std::vector<MockClass*> children_;
  MockClass* parent_;
  SubtypeCheckBitsAndStatus subtype_check_info_and_status_;
  size_t x_;
  size_t y_;
  std::string path_to_root_;
};

std::ostream& operator<<(std::ostream& os, const MockClass& kls) {
  SubtypeCheckBits iod = kls.subtype_check_info_and_status_.subtype_check_info_;
  os << "MClass{D:" << kls.Depth() << ",W:" << kls.x_
     << ", OF:"
     << (iod.overflow_ ? "true" : "false")
     << ", bitstring: " << iod.bitstring_
     << ", mock_path: " << kls.path_to_root_
     << "}";
  return os;
}

struct MockSubtypeCheck {
  using SC = SubtypeCheck<MockClass*>;

  static MockSubtypeCheck Lookup(MockClass* klass) {
    MockSubtypeCheck mock;
    mock.klass_ = klass;
    return mock;
  }

  // Convenience functions to avoid using statics everywhere:
  //   static(class, args...) -> instance.method(args...)
  SubtypeCheckInfo::State EnsureInitialized()
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::EnsureInitialized(klass_);
  }

  SubtypeCheckInfo::State EnsureAssigned()
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::EnsureAssigned(klass_);
  }

  SubtypeCheckInfo::State ForceUninitialize()
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::ForceUninitialize(klass_);
  }

  BitString::StorageType GetEncodedPathToRootForSource() const
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::GetEncodedPathToRootForSource(klass_);
  }

  BitString::StorageType GetEncodedPathToRootForTarget() const
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::GetEncodedPathToRootForTarget(klass_);
  }

  BitString::StorageType GetEncodedPathToRootMask() const
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::GetEncodedPathToRootMask(klass_);
  }

  SubtypeCheckInfo::Result IsSubtypeOf(const MockSubtypeCheck& target)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::IsSubtypeOf(klass_, target.klass_);
  }

  friend std::ostream& operator<<(std::ostream& os, const MockSubtypeCheck& tree)
      NO_THREAD_SAFETY_ANALYSIS {
    os << "(MockSubtypeCheck io:";
    SC::Dump(tree.klass_, os);
    os << ", class: " << tree.klass_->PrettyClass() << ")";
    return os;
  }

  // Additional convenience functions.
  SubtypeCheckInfo::State GetState() const
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::GetSubtypeCheckInfo(klass_).GetState();
  }

  MockClass& GetClass() const {
    return *klass_;
  }

 private:
  MockClass* klass_;
};

struct MockScopedLockSubtypeCheck {
  MockScopedLockSubtypeCheck() ACQUIRE(*Locks::subtype_check_lock_) {}
  ~MockScopedLockSubtypeCheck() RELEASE(*Locks::subtype_check_lock_) {}
};

struct MockScopedLockMutator {
  MockScopedLockMutator() ACQUIRE_SHARED(*Locks::mutator_lock_) {}
  ~MockScopedLockMutator() RELEASE_SHARED(*Locks::mutator_lock_) {}
};
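
// Typical test pattern below (a sketch; 'child_klass' and 'parent_klass' are
// placeholder names): acquire the mock locks to satisfy thread-safety
// analysis, look up the wrappers, then drive and query the state machine.
//   MockScopedLockSubtypeCheck lock_a;
//   MockScopedLockMutator lock_b;
//   MockSubtypeCheck src = MockSubtypeCheck::Lookup(child_klass);
//   MockSubtypeCheck target = MockSubtypeCheck::Lookup(parent_klass);
//   src.EnsureAssigned();
//   target.EnsureAssigned();
//   SubtypeCheckInfo::Result result = src.IsSubtypeOf(target);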

struct SubtypeCheckTest : public ::testing::Test {
 protected:
  void SetUp() override {
    android::base::InitLogging(/*argv*/ nullptr);

    CreateRootedTree(BitString::kCapacity + 2u, BitString::kCapacity + 2u);
  }

  void TearDown() override {
  }

  void CreateRootedTree(size_t width, size_t height) {
    all_classes_.clear();
    root_ = CreateClassFor(/*parent*/ nullptr, /*x*/ 0, /*y*/ 0);
    CreateTreeFor(root_, /*width*/ width, /*levels*/ height);
  }

  MockClass* CreateClassFor(MockClass* parent, size_t x, size_t y) {
    MockClass* kls = new MockClass(parent, x, y);
    all_classes_.push_back(std::unique_ptr<MockClass>(kls));
    return kls;
  }

  void CreateTreeFor(MockClass* parent, size_t width, size_t levels) {
    DCHECK(parent != nullptr);
    if (levels == 0) {
      return;
    }

    for (size_t i = 0; i < width; ++i) {
      MockClass* child = CreateClassFor(parent, i, parent->y_ + 1);
      CreateTreeFor(child, width, levels - 1);
    }
  }

  MockClass* root_ = nullptr;
  std::vector<std::unique_ptr<MockClass>> all_classes_;
};
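
// Note on the default tree shape: SetUp builds a complete tree whose width and
// height are both BitString::kCapacity + 2, i.e. deliberately deeper and wider
// than a bitstring can encode, so the tests can exercise the overflow paths as
// well as the fully-encodable prefix of the hierarchy.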

TEST_F(SubtypeCheckTest, LookupAllChildren) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  root_->Visit([&](MockClass* kls) {
    MockScopedLockSubtypeCheck lock_a;
    MockScopedLockMutator lock_b;

    EXPECT_EQ(SubtypeCheckInfo::kUninitialized, SCTree::Lookup(kls).GetState());
    return true;  // Keep visiting.
  });
}

TEST_F(SubtypeCheckTest, LookupRoot) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree root = SCTree::Lookup(root_);
  EXPECT_EQ(SubtypeCheckInfo::kAssigned, root.EnsureInitialized());
  EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, root.IsSubtypeOf(root)) << root;
}

TEST_F(SubtypeCheckTest, EnsureInitializedFirstLevel) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree root = SCTree::Lookup(root_);
  EXPECT_EQ(SubtypeCheckInfo::kAssigned, root.EnsureInitialized());

  ASSERT_LT(0u, root_->GetNumberOfChildren());

  // Initialize root's children only.
  for (size_t i = 0; i < root_->GetNumberOfChildren(); ++i) {
    MockClass* child = root_->GetChild(i);
    SCTree child_tree = SCTree::Lookup(child);
    // Before: all unknown.
    EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child_tree)) << child_tree;
    EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(root)) << child_tree;
    // Transition.
    EXPECT_EQ(SubtypeCheckInfo::kInitialized, child_tree.EnsureInitialized());
    // After: "src instanceof target" known, but "target instanceof src" unknown.
    EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child_tree.IsSubtypeOf(root)) << child_tree;
    EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child_tree)) << child_tree;
  }
}

TEST_F(SubtypeCheckTest, EnsureAssignedFirstLevel) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree root = SCTree::Lookup(root_);
  EXPECT_EQ(SubtypeCheckInfo::kAssigned, root.EnsureInitialized());

  ASSERT_LT(0u, root_->GetNumberOfChildren());

  // Assign root's children only.
  for (size_t i = 0; i < root_->GetNumberOfChildren(); ++i) {
    MockClass* child = root_->GetChild(i);
    SCTree child_tree = SCTree::Lookup(child);
    // Before: all unknown.
    EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child_tree)) << child_tree;
    EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(root)) << child_tree;
    // Transition.
    EXPECT_EQ(SubtypeCheckInfo::kAssigned, child_tree.EnsureAssigned());
    // After: both "src instanceof target" and "target instanceof src" are known.
    EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child_tree.IsSubtypeOf(root)) << child_tree;
    EXPECT_EQ(SubtypeCheckInfo::kNotSubtypeOf, root.IsSubtypeOf(child_tree)) << child_tree;
  }
}

TEST_F(SubtypeCheckTest, EnsureInitializedSecondLevelWithPreassign) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree root = SCTree::Lookup(root_);
  EXPECT_EQ(SubtypeCheckInfo::kAssigned, root.EnsureInitialized());

  ASSERT_LT(0u, root_->GetNumberOfChildren());

  // Initialize root's children.
  for (size_t i = 0; i < root_->GetNumberOfChildren(); ++i) {
    MockClass* child = root_->GetChild(i);
    SCTree child_tree = SCTree::Lookup(child);

    ASSERT_EQ(1u, child->Depth());

    EXPECT_EQ(SubtypeCheckInfo::kInitialized, child_tree.EnsureInitialized()) << *child;
    EXPECT_EQ(SubtypeCheckInfo::kAssigned, child_tree.EnsureAssigned())
        << *child << ", root:" << *root_;
    for (size_t j = 0; j < child->GetNumberOfChildren(); ++j) {
      MockClass* child2 = child->GetChild(j);
      ASSERT_EQ(2u, child2->Depth());
      SCTree child2_tree = SCTree::Lookup(child2);

      // Before: all unknown.
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child2_tree)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(child2_tree))
          << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child2_tree.IsSubtypeOf(root))
          << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child2_tree.IsSubtypeOf(child_tree))
          << child2_tree;

      EXPECT_EQ(SubtypeCheckInfo::kUninitialized, child2_tree.GetState()) << *child2;
      EXPECT_EQ(SubtypeCheckInfo::kInitialized, child2_tree.EnsureInitialized()) << *child2;

      // After: src=child2_tree is known, otherwise unknown.
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child2_tree)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(child2_tree))
          << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child2_tree.IsSubtypeOf(root)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child2_tree.IsSubtypeOf(child_tree)) << child2_tree;
    }

    // The child was explicitly assigned above and stays "assigned" after
    // initializing its sub-children.
    EXPECT_EQ(SubtypeCheckInfo::kAssigned, child_tree.GetState());
  }
}

TEST_F(SubtypeCheckTest, EnsureInitializedSecondLevelDontPreassign) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree root = SCTree::Lookup(root_);
  EXPECT_EQ(SubtypeCheckInfo::kAssigned, root.EnsureInitialized());

  ASSERT_LT(0u, root_->GetNumberOfChildren());

  // Initialize the second level only; the first level gets assigned as a side-effect.
  for (size_t i = 0; i < root_->GetNumberOfChildren(); ++i) {
    MockClass* child = root_->GetChild(i);
    SCTree child_tree = SCTree::Lookup(child);

    ASSERT_EQ(1u, child->Depth());

    for (size_t j = 0; j < child->GetNumberOfChildren(); ++j) {
      MockClass* child2 = child->GetChild(j);
      ASSERT_EQ(2u, child2->Depth());
      SCTree child2_tree = SCTree::Lookup(child2);
      // Before: all unknown.
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child2_tree)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(child2_tree))
          << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child2_tree.IsSubtypeOf(root)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child2_tree.IsSubtypeOf(child_tree))
          << child2_tree;
      // Transition.
      EXPECT_EQ(SubtypeCheckInfo::kUninitialized, child2_tree.GetState()) << *child2;
      EXPECT_EQ(SubtypeCheckInfo::kInitialized, child2_tree.EnsureInitialized()) << *child2;
      // After: src=child2_tree is known, otherwise unknown.
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child2_tree)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(child2_tree))
          << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child2_tree.IsSubtypeOf(root)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child2_tree.IsSubtypeOf(child_tree)) << child2_tree;
    }

    // The child is "assigned" as a side-effect of initializing sub-children.
    EXPECT_EQ(SubtypeCheckInfo::kAssigned, child_tree.GetState());
  }
}

// Apply the requested |transition| to a freshly-created (uninitialized)
// |sc_tree| and check that the transition function returns |expected|.
void ApplyTransition(MockSubtypeCheck sc_tree,
                     SubtypeCheckInfo::State transition,
                     SubtypeCheckInfo::State expected) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;

  EXPECT_EQ(SubtypeCheckInfo::kUninitialized, sc_tree.GetState()) << sc_tree.GetClass();

  if (transition == SubtypeCheckInfo::kUninitialized) {
    EXPECT_EQ(expected, sc_tree.ForceUninitialize()) << sc_tree.GetClass();
  } else if (transition == SubtypeCheckInfo::kInitialized) {
    EXPECT_EQ(expected, sc_tree.EnsureInitialized()) << sc_tree.GetClass();
  } else if (transition == SubtypeCheckInfo::kAssigned) {
    EXPECT_EQ(expected, sc_tree.EnsureAssigned()) << sc_tree.GetClass();
  }
}

enum MockSubtypeOfTransition {
  kNone,
  kUninitialized,
  kInitialized,
  kAssigned,
};

std::ostream& operator<<(std::ostream& os, const MockSubtypeOfTransition& transition) {
  if (transition == MockSubtypeOfTransition::kUninitialized) {
    os << "kUninitialized";
  } else if (transition == MockSubtypeOfTransition::kInitialized) {
    os << "kInitialized";
  } else if (transition == MockSubtypeOfTransition::kAssigned) {
    os << "kAssigned";
  } else {
    os << "kNone";
  }
  return os;
}

// Apply the requested |transition| (if any) and return the resulting state.
SubtypeCheckInfo::State ApplyTransition(MockSubtypeCheck sc_tree,
                                        MockSubtypeOfTransition transition) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;

  if (transition == MockSubtypeOfTransition::kUninitialized) {
    return sc_tree.ForceUninitialize();
  } else if (transition == MockSubtypeOfTransition::kInitialized) {
    return sc_tree.EnsureInitialized();
  } else if (transition == MockSubtypeOfTransition::kAssigned) {
    return sc_tree.EnsureAssigned();
  }

  return sc_tree.GetState();
}

enum {
  kBeforeTransition = 0,
  kAfterTransition = 1,
  kAfterChildren = 2,
};

const char* StringifyTransition(int x) {
  if (x == kBeforeTransition) {
    return "kBeforeTransition";
  } else if (x == kAfterTransition) {
    return "kAfterTransition";
  } else if (x == kAfterChildren) {
    return "kAfterChildren";
  }

  return "<<Unknown>>";
}

struct TransitionHistory {
  void Record(int transition_label, MockClass* kls) {
    ss_ << "<<<" << StringifyTransition(transition_label) << ">>>";
    ss_ << "{Self}: " << *kls;

    if (kls->HasSuperClass()) {
      ss_ << "{Parent}: " << *(kls->GetSuperClass());
    }
    ss_ << "================== ";
  }

  friend std::ostream& operator<<(std::ostream& os, const TransitionHistory& t) {
    os << t.ss_.str();
    return os;
  }

  std::stringstream ss_;
};

template <typename T, typename T2>
void EnsureStateChangedTestRecursiveGeneric(MockClass* klass,
                                            size_t cur_depth,
                                            size_t total_depth,
                                            T2 transition_func,
                                            T expect_checks) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree sc_tree = SCTree::Lookup(klass);
  MockSubtypeOfTransition requested_transition = transition_func(klass);

  // FIXME: need to print before(self, parent) and after(self, parent)
  // to make any sense of what's going on.

  auto do_expect_checks = [&](int transition_label, TransitionHistory& transition_details) {
    MockScopedLockSubtypeCheck lock_a;
    MockScopedLockMutator lock_b;

    transition_details.Record(transition_label, klass);

    SCOPED_TRACE(transition_details);
    ASSERT_EQ(cur_depth, klass->Depth());

    ASSERT_NO_FATAL_FAILURE(expect_checks(klass,
                                          transition_label,
                                          sc_tree.GetState(),
                                          requested_transition));
  };

  TransitionHistory transition_history;
  do_expect_checks(kBeforeTransition, transition_history);
  SubtypeCheckInfo::State state = ApplyTransition(sc_tree, requested_transition);
  UNUSED(state);
  do_expect_checks(kAfterTransition, transition_history);

  if (total_depth == cur_depth) {
    return;
  }

  // Recurse into the children.
  for (size_t i = 0; i < klass->GetNumberOfChildren(); ++i) {
    MockClass* child = klass->GetChild(i);
    EnsureStateChangedTestRecursiveGeneric(child,
                                           cur_depth + 1u,
                                           total_depth,
                                           transition_func,
                                           expect_checks);
  }

  do_expect_checks(kAfterChildren, transition_history);
}

void EnsureStateChangedTestRecursive(
    MockClass* klass,
    size_t cur_depth,
    size_t total_depth,
    std::vector<std::pair<SubtypeCheckInfo::State, SubtypeCheckInfo::State>> transitions) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  ASSERT_EQ(cur_depth, klass->Depth());
  ApplyTransition(SCTree::Lookup(klass),
                  transitions[cur_depth].first,
                  transitions[cur_depth].second);

  if (total_depth == cur_depth + 1) {
    return;
  }

  // Recurse into the children.
  for (size_t i = 0; i < klass->GetNumberOfChildren(); ++i) {
    MockClass* child = klass->GetChild(i);
    EnsureStateChangedTestRecursive(child, cur_depth + 1u, total_depth, transitions);
  }
}

void EnsureStateChangedTest(
    MockClass* root,
    size_t depth,
    std::vector<std::pair<SubtypeCheckInfo::State, SubtypeCheckInfo::State>> transitions) {
  ASSERT_EQ(depth, transitions.size());

  EnsureStateChangedTestRecursive(root, /*cur_depth*/ 0u, depth, transitions);
}
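
// Example use of the helper above (a sketch; one (transition, expected-state)
// pair per depth level, with the expectations matching the earlier first-level tests):
//   EnsureStateChangedTest(
//       root_,
//       /*depth*/ 2u,
//       {{SubtypeCheckInfo::kInitialized, SubtypeCheckInfo::kAssigned},       // Level 0 (root).
//        {SubtypeCheckInfo::kInitialized, SubtypeCheckInfo::kInitialized}});  // Level 1.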

TEST_F(SubtypeCheckTest, EnsureInitialized_NoOverflow) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kInitialized;
  };

  constexpr size_t kMaxDepthForThisTest = BitString::kCapacity;
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    if (expect_when == kBeforeTransition) {
      EXPECT_EQ(SubtypeCheckInfo::kUninitialized, actual_state);
      return;
    }

    if (expect_when == kAfterTransition) {
      // After the explicit transition has been completed.
      switch (kls->Depth()) {
        case 0:
          if (transition >= MockSubtypeOfTransition::kInitialized) {
            EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
          }
          break;
        default:
          if (transition >= MockSubtypeOfTransition::kInitialized) {
            if (transition == MockSubtypeOfTransition::kInitialized) {
              EXPECT_EQ(SubtypeCheckInfo::kInitialized, actual_state);
            } else if (transition == MockSubtypeOfTransition::kAssigned) {
              EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
            }
          }
          break;
      }
    }

    if (expect_when == kAfterChildren) {
      if (transition >= MockSubtypeOfTransition::kInitialized) {
        ASSERT_NE(kls->Depth(), kMaxDepthForThisTest);
        EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
      }
    }
  };

  // Initialize every level 0-3.
  // Intermediate levels become "assigned"; the max level becomes initialized.
  EnsureStateChangedTestRecursiveGeneric(root_, 0u, kMaxDepthForThisTest, transitions, expected);

  auto transitions_uninitialize = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kUninitialized;
  };

  auto expected_uninitialize = [](MockClass* kls,
                                  int expect_when,
                                  SubtypeCheckInfo::State actual_state,
                                  MockSubtypeOfTransition transition) {
    UNUSED(kls);
    UNUSED(transition);
    if (expect_when >= kAfterTransition) {
      EXPECT_EQ(SubtypeCheckInfo::kUninitialized, actual_state);
    }
  };

  // Uninitialize the entire tree after it was assigned.
  EnsureStateChangedTestRecursiveGeneric(root_,
                                         0u,
                                         kMaxDepthForThisTest,
                                         transitions_uninitialize,
                                         expected_uninitialize);
}

TEST_F(SubtypeCheckTest, EnsureAssigned_TooDeep) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kAssigned;
  };

  constexpr size_t kMaxDepthForThisTest = BitString::kCapacity + 1u;
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    UNUSED(transition);
    if (expect_when == kAfterTransition) {
      if (kls->Depth() > BitString::kCapacity) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      }
    }
  };

  // Assign every level 0-4.
  // The 4th level cannot be assigned, so it overflows instead.
  EnsureStateChangedTestRecursiveGeneric(root_, 0u, kMaxDepthForThisTest, transitions, expected);
}

TEST_F(SubtypeCheckTest, EnsureAssigned_TooDeep_OfTooDeep) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kAssigned;
  };

  constexpr size_t kMaxDepthForThisTest = BitString::kCapacity + 2u;
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    UNUSED(transition);
    if (expect_when == kAfterTransition) {
      if (kls->Depth() > BitString::kCapacity) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      }
    }
  };

  // Assign every level 0-5.
  // The 4th level cannot be assigned, so it overflows instead.
  // In addition, the 5th level cannot be assigned (its parent is overflowed), so it also fails.
  EnsureStateChangedTestRecursiveGeneric(root_, 0u, kMaxDepthForThisTest, transitions, expected);
}

constexpr size_t MaxWidthCutOff(size_t depth) {
  if (depth == 0) {
    return 1;
  }
  if (depth > BitString::kCapacity) {
    return std::numeric_limits<size_t>::max();
  }
  return MaxInt<size_t>(BitString::kBitSizeAtPosition[depth - 1]);
}
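
// Worked example with hypothetical bit sizes (purely for illustration): if
// kBitSizeAtPosition were {12, 4, 11}, then MaxWidthCutOff(2) would be
// MaxInt<size_t>(4) == 15, i.e. at most 15 siblings fit at depth 2 before a
// child's number no longer fits into its 4-bit slot.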

// Either the class itself is too wide, or any of its parents were too wide.
bool IsTooWide(MockClass* kls) {
  if (kls == nullptr || kls->Depth() == 0u) {
    // Root is never too wide.
    return false;
  }
  if (kls->GetX() >= MaxWidthCutOff(kls->Depth())) {
    return true;
  }
  return IsTooWide(kls->GetParent());
}

// Too deep if the class is past the bitstring capacity. (There is no need to
// recurse into the parents: depth only grows down the tree, so if any parent
// were too deep, this class would be too deep as well.)
bool IsTooDeep(MockClass* kls) {
  if (kls == nullptr || kls->Depth() == 0u) {
    // Root is never too deep.
    return false;
  }
  return kls->Depth() > BitString::kCapacity;
}

TEST_F(SubtypeCheckTest, EnsureInitialized_TooWide) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kAssigned;
  };

  // Pick the 2nd level because it has the narrowest bit size.
  constexpr size_t kTargetDepth = 2;
  constexpr size_t kMaxWidthCutOff = MaxWidthCutOff(kTargetDepth);

  constexpr size_t kMaxDepthForThisTest = std::numeric_limits<size_t>::max();
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    UNUSED(transition);
    // Note: purposefully ignore the too-deep children in the premade tree.
    if (expect_when == kAfterTransition && kls->Depth() <= BitString::kCapacity) {
      if (IsTooWide(kls)) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      } else {
        EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
      }
    }
  };

  {
    // Create too-wide siblings at the kTargetDepth level.
    MockClass* child = root_->FindChildAt(/*x*/ 0, kTargetDepth - 1u);
    CreateTreeFor(child, kMaxWidthCutOff * 2, /*levels*/ 1);
    ASSERT_LE(kMaxWidthCutOff * 2, child->GetNumberOfChildren());
    ASSERT_TRUE(IsTooWide(child->GetMaxChild())) << *(child->GetMaxChild());
    // Leave the rest of the tree as the default.
  }

  // Try to assign every level.
  // It will fail once it gets to the "too wide" siblings and cause overflows.
  EnsureStateChangedTestRecursiveGeneric(root_,
                                         0u,
                                         kMaxDepthForThisTest,
                                         transitions,
                                         expected);
}

TEST_F(SubtypeCheckTest, EnsureInitialized_TooWide_TooWide) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kAssigned;
  };

  // Pick the 2nd level because it has the narrowest bit size.
  constexpr size_t kTargetDepth = 2;
  constexpr size_t kMaxWidthCutOff = MaxWidthCutOff(kTargetDepth);
  constexpr size_t kMaxWidthCutOffSub = MaxWidthCutOff(kTargetDepth + 1u);

  constexpr size_t kMaxDepthForThisTest = std::numeric_limits<size_t>::max();
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    UNUSED(transition);
    // Note: purposefully ignore the too-deep children in the premade tree.
    if (expect_when == kAfterTransition && kls->Depth() <= BitString::kCapacity) {
      if (IsTooWide(kls)) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      } else {
        EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
      }
    }
  };

  {
    // Create too-wide siblings at the kTargetDepth level.
    MockClass* child = root_->FindChildAt(/*x*/ 0, kTargetDepth - 1);
    CreateTreeFor(child, kMaxWidthCutOff * 2, /*levels*/ 1);
    ASSERT_LE(kMaxWidthCutOff * 2, child->GetNumberOfChildren()) << *child;
    ASSERT_TRUE(IsTooWide(child->GetMaxChild())) << *(child->GetMaxChild());
    // Leave the rest of the tree as the default.

    // Create too-wide children for a too-wide parent.
    MockClass* child_subchild = child->FindChildAt(/*x*/ 0, kTargetDepth);
    CreateTreeFor(child_subchild, kMaxWidthCutOffSub * 2, /*levels*/ 1);
    ASSERT_LE(kMaxWidthCutOffSub * 2, child_subchild->GetNumberOfChildren()) << *child_subchild;
    ASSERT_TRUE(IsTooWide(child_subchild->GetMaxChild())) << *(child_subchild->GetMaxChild());
  }

  // Try to assign every level.
  // It will fail once it gets to the "too wide" siblings and cause overflows.
  // Furthermore, assigning any subtree whose ancestor is too wide will also fail.
  EnsureStateChangedTestRecursiveGeneric(root_, 0u, kMaxDepthForThisTest, transitions, expected);
}

void EnsureSubtypeOfCorrect(MockClass* a, MockClass* b) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  auto IsAssigned = [](SCTree& tree) {
    MockScopedLockSubtypeCheck lock_a;
    MockScopedLockMutator lock_b;
    // This assumes that EnsureAssigned was already called for every MockClass,
    // so each class is either Assigned or Overflowed.
    EXPECT_NE(SubtypeCheckInfo::kInitialized, tree.GetState());
    EXPECT_NE(SubtypeCheckInfo::kUninitialized, tree.GetState());
    // Use our own test checks, so we are actually testing different logic than the impl.
    return !(IsTooDeep(&tree.GetClass()) || IsTooWide(&tree.GetClass()));
  };

  SCTree src_tree = SCTree::Lookup(a);
  SCTree target_tree = SCTree::Lookup(b);

  SCOPED_TRACE("class A");
  SCOPED_TRACE(*a);
  SCOPED_TRACE("class B");
  SCOPED_TRACE(*b);

  SubtypeCheckInfo::Result slow_result =
      a->SlowIsSubtypeOf(b) ? SubtypeCheckInfo::kSubtypeOf : SubtypeCheckInfo::kNotSubtypeOf;
  SubtypeCheckInfo::Result fast_result = src_tree.IsSubtypeOf(target_tree);

  // The target must be Assigned for this check to succeed.
  // The source is either Overflowed or Assigned (in this case).
  // Use asserts, not expects, to fail immediately;
  // otherwise failures could cascade across the entire (very large) tree.

  if (IsAssigned(src_tree) && IsAssigned(target_tree)) {
    ASSERT_EQ(slow_result, fast_result);
  } else if (IsAssigned(src_tree)) {
    // A is assigned. B is >= initialized.
    ASSERT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, fast_result);
  } else if (IsAssigned(target_tree)) {
    // B is assigned. A is >= initialized.
    ASSERT_EQ(slow_result, fast_result);
  } else {
    // Neither A nor B is assigned.
    ASSERT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, fast_result);
  }
}

void EnsureSubtypeOfRecursive(MockClass* kls_root) {
  MockScopedLockMutator mutator_lock_fake_;

  auto visit_func = [&](MockClass* kls) {
    kls->Visit([&](MockClass* inner_class) {
      EnsureSubtypeOfCorrect(kls, inner_class);
      EnsureSubtypeOfCorrect(inner_class, kls);

      if (::testing::Test::HasFatalFailure()) {
        return false;
      }

      return true;  // Keep visiting.
    });

    if (::testing::Test::HasFatalFailure()) {
      return false;
    }

    return true;  // Keep visiting.
  };

  ASSERT_NO_FATAL_FAILURE(kls_root->Visit(visit_func));
}
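
// Note: the recursive check above compares every (outer, inner) pair in both
// directions, so it is quadratic in the number of classes. That is acceptable
// for the small trees built by these tests.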

TEST_F(SubtypeCheckTest, EnsureInitialized_TooWide_TooDeep) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kAssigned;
  };

  // Pick the 2nd level because it has the narrowest bit size.
  constexpr size_t kTargetDepth = 2;
  constexpr size_t kTooDeepTargetDepth = BitString::kCapacity + 1;
  constexpr size_t kMaxWidthCutOff = MaxWidthCutOff(kTargetDepth);

  constexpr size_t kMaxDepthForThisTest = std::numeric_limits<size_t>::max();
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    UNUSED(transition);
    if (expect_when == kAfterTransition) {
      if (IsTooDeep(kls) || IsTooWide(kls)) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      } else {
        EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
      }
    }
  };

  {
    // Create too-wide siblings at the kTargetDepth level.
    MockClass* child = root_->FindChildAt(/*x*/ 0, kTargetDepth - 1u);
    CreateTreeFor(child, kMaxWidthCutOff * 2, /*levels*/ 1);
    ASSERT_LE(kMaxWidthCutOff * 2, child->GetNumberOfChildren());
    ASSERT_TRUE(IsTooWide(child->GetMaxChild())) << *(child->GetMaxChild());
    // Leave the rest of the tree as the default.

    // Create too-deep children for a too-wide parent.
    MockClass* child_subchild = child->GetMaxChild();
    ASSERT_TRUE(child_subchild != nullptr);
    ASSERT_EQ(0u, child_subchild->GetNumberOfChildren()) << *child_subchild;
    CreateTreeFor(child_subchild, /*width*/ 1, /*levels*/ kTooDeepTargetDepth);
    MockClass* too_deep_child = child_subchild->FindChildAt(0, kTooDeepTargetDepth + 2);
    ASSERT_TRUE(too_deep_child != nullptr) << child_subchild->ToDotGraph();
    ASSERT_TRUE(IsTooWide(too_deep_child)) << *(too_deep_child);
    ASSERT_TRUE(IsTooDeep(too_deep_child)) << *(too_deep_child);
  }

  // Try to assign every level.
  // It will fail once it gets to the "too wide" siblings and cause overflows.
  EnsureStateChangedTestRecursiveGeneric(root_, 0u, kMaxDepthForThisTest, transitions, expected);

  // Check every class against every class for "x instanceof y".
  EnsureSubtypeOfRecursive(root_);
}

// TODO: Add a dcheck for the child-parent invariants (e.g. child < parent.next) and death tests.

}  // namespace art