/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "subtype_check.h"

#include "gtest/gtest.h"
#include "android-base/logging.h"

namespace art {

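// Out-of-line definitions: pre-C++17, odr-used static constexpr data members
// still require a namespace-scope definition.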
constexpr size_t BitString::kBitSizeAtPosition[BitString::kCapacity];
constexpr size_t BitString::kCapacity;

struct MockClass {
  explicit MockClass(MockClass* parent, size_t x = 0, size_t y = 0) {
    parent_ = parent;
    memset(&subtype_check_info_and_status_, 0u, sizeof(subtype_check_info_and_status_));

    // Start the numbering at '1' to match the bitstring numbering.
    // A bitstring numbering never starts at '0', which just means 'no value'.
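    // For example: the root's first child gets x_ = 1 (path "1"); that
    // child's first child gets x_ = 1 as well (path "1,1"), and a second
    // sibling at that level would get x_ = 2 (path "1,2").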
    x_ = 1;
    if (parent_ != nullptr) {
      if (parent_->GetMaxChild() != nullptr) {
        x_ = parent_->GetMaxChild()->x_ + 1u;
      }

      parent_->children_.push_back(this);
      if (parent_->path_to_root_ != "") {
        path_to_root_ = parent->path_to_root_ + ",";
      }
      path_to_root_ += std::to_string(x_);
    } else {
      path_to_root_ = "";  // root has no path.
    }
    y_ = y;
    UNUSED(x);
  }

  MockClass() : MockClass(nullptr) {
  }

  ///////////////////////////////////////////////////////////////
  // Implementation of the SubtypeCheck::KlassT static interface.
  ///////////////////////////////////////////////////////////////

  MockClass* GetSuperClass() const {
    return parent_;
  }

  bool HasSuperClass() const {
    return GetSuperClass() != nullptr;
  }

  size_t Depth() const {
    if (parent_ == nullptr) {
      return 0u;
    } else {
      return parent_->Depth() + 1u;
    }
  }

  std::string PrettyClass() const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return path_to_root_;
  }

  int32_t GetField32Volatile(art::MemberOffset offset = art::MemberOffset(0u)) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    UNUSED(offset);
    int32_t field_32 = 0;
    memcpy(&field_32, &subtype_check_info_and_status_, sizeof(int32_t));
    return field_32;
  }

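  // Note: unlike a real compare-and-swap, this mock is not atomic (the CASMode
  // and memory_order arguments are ignored); the tests only drive it from a
  // single thread.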
  template <bool kTransactionActive>
  bool CasField32(art::MemberOffset offset,
                  int32_t old_value,
                  int32_t new_value,
                  CASMode mode ATTRIBUTE_UNUSED,
                  std::memory_order memory_order ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    UNUSED(offset);
    if (old_value == GetField32Volatile(offset)) {
      memcpy(&subtype_check_info_and_status_, &new_value, sizeof(int32_t));
      return true;
    }
    return false;
  }

  MemberOffset StatusOffset() const {
    return MemberOffset(0);  // Doesn't matter. We ignore the offset.
  }

  ///////////////////////////////////////////////////////////////
  // Convenience functions to make the testing easier.
  ///////////////////////////////////////////////////////////////

  size_t GetNumberOfChildren() const {
    return children_.size();
  }

  MockClass* GetParent() const {
    return parent_;
  }

  MockClass* GetMaxChild() const {
    if (GetNumberOfChildren() > 0u) {
      return GetChild(GetNumberOfChildren() - 1);
    }
    return nullptr;
  }

  MockClass* GetChild(size_t idx) const {
    if (idx >= GetNumberOfChildren()) {
      return nullptr;
    }
    return children_[idx];
  }

  // Traverse to the child at index "x" at each level; once we reach
  // level == depth, return that node.
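  // E.g. FindChildAt(/*x=*/0, /*depth=*/2) follows child 0 at every level
  // and returns the depth-2 descendant (or nullptr if the tree is too shallow).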
  MockClass* FindChildAt(size_t x, size_t depth) {
    if (Depth() == depth) {
      return this;
    } else if (GetNumberOfChildren() > 0) {
      return GetChild(x)->FindChildAt(x, depth);
    }
    return nullptr;
  }

  template <typename T>
  MockClass* Visit(T visitor, bool recursive = true) {
    if (!visitor(this)) {
      return this;
    }

    if (!recursive) {
      return this;
    }

    for (MockClass* child : children_) {
      MockClass* visit_res = child->Visit(visitor);
      if (visit_res != nullptr) {
        return visit_res;
      }
    }

    return nullptr;
  }

  size_t GetX() const {
    return x_;
  }

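  // Reference oracle: an O(depth) walk up the superclass chain. The tests
  // compare the bitstring-based fast path against this slow result.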
  bool SlowIsSubtypeOf(const MockClass* target) const {
    DCHECK(target != nullptr);
    const MockClass* kls = this;
    while (kls != nullptr) {
      if (kls == target) {
        return true;
      }
      kls = kls->GetSuperClass();
    }

    return false;
  }

  std::string ToDotGraph() const {
    std::stringstream ss;
    ss << std::endl;
    ss << "digraph MockClass {" << std::endl;
    ss << "  node [fontname=\"Arial\"];" << std::endl;
    ToDotGraphImpl(ss);
    ss << "}" << std::endl;
    return ss.str();
  }

  void ToDotGraphImpl(std::ostream& os) const {
    for (MockClass* child : children_) {
      os << "  \"" << path_to_root_ << "\" -> \"" << child->path_to_root_ << "\";" << std::endl;
      child->ToDotGraphImpl(os);
    }
  }

  std::vector<MockClass*> children_;
  MockClass* parent_;
  SubtypeCheckBitsAndStatus subtype_check_info_and_status_;
  size_t x_;
  size_t y_;
  std::string path_to_root_;
};

std::ostream& operator<<(std::ostream& os, const MockClass& kls) {
  SubtypeCheckBits iod = kls.subtype_check_info_and_status_.subtype_check_info_;
  os << "MClass{D:" << kls.Depth() << ",W:" << kls.x_
     << ", OF:"
     << (iod.overflow_ ? "true" : "false")
     << ", bitstring: " << iod.bitstring_
     << ", mock_path: " << kls.path_to_root_
     << "}";
  return os;
}

struct MockSubtypeCheck {
  using SC = SubtypeCheck<MockClass*>;

  static MockSubtypeCheck Lookup(MockClass* klass) {
    MockSubtypeCheck mock;
    mock.klass_ = klass;
    return mock;
  }

  // Convenience functions to avoid using statics everywhere.
  //   static(class, args...) -> instance.method(args...)
  SubtypeCheckInfo::State EnsureInitialized()
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::EnsureInitialized(klass_);
  }

  SubtypeCheckInfo::State EnsureAssigned()
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::EnsureAssigned(klass_);
  }

  SubtypeCheckInfo::State ForceUninitialize()
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::ForceUninitialize(klass_);
  }

  BitString::StorageType GetEncodedPathToRootForSource() const
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::GetEncodedPathToRootForSource(klass_);
  }

  BitString::StorageType GetEncodedPathToRootForTarget() const
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::GetEncodedPathToRootForTarget(klass_);
  }

  BitString::StorageType GetEncodedPathToRootMask() const
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::GetEncodedPathToRootMask(klass_);
  }

  SubtypeCheckInfo::Result IsSubtypeOf(const MockSubtypeCheck& target)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::IsSubtypeOf(klass_, target.klass_);
  }

  friend std::ostream& operator<<(std::ostream& os, const MockSubtypeCheck& tree)
      NO_THREAD_SAFETY_ANALYSIS {
    os << "(MockSubtypeCheck io:";
    SC::Dump(tree.klass_, os);
    os << ", class: " << tree.klass_->PrettyClass() << ")";
    return os;
  }

  // Additional convenience functions.
  SubtypeCheckInfo::State GetState() const
      REQUIRES(Locks::subtype_check_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return SC::GetSubtypeCheckInfo(klass_).GetState();
  }

  MockClass& GetClass() const {
    return *klass_;
  }

 private:
  MockClass* klass_;
};

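// These scoped "locks" acquire nothing: their empty bodies exist solely to
// satisfy the ACQUIRE/RELEASE annotations checked by Clang's thread-safety
// analysis on the functions under test.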
struct MockScopedLockSubtypeCheck {
  MockScopedLockSubtypeCheck() ACQUIRE(*Locks::subtype_check_lock_) {}
  ~MockScopedLockSubtypeCheck() RELEASE(*Locks::subtype_check_lock_) {}
};

struct MockScopedLockMutator {
  MockScopedLockMutator() ACQUIRE_SHARED(*Locks::mutator_lock_) {}
  ~MockScopedLockMutator() RELEASE_SHARED(*Locks::mutator_lock_) {}
};

struct SubtypeCheckTest : public ::testing::Test {
 protected:
  void SetUp() override {
    android::base::InitLogging(/*argv=*/nullptr);

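    // Build a tree deeper than the bitstring capacity so that too-deep
    // (overflow) paths exist in the default fixture.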
    CreateRootedTree(BitString::kCapacity + 2u, BitString::kCapacity + 2u);
  }

  void TearDown() override {
  }

  void CreateRootedTree(size_t width, size_t height) {
    all_classes_.clear();
    root_ = CreateClassFor(/*parent=*/nullptr, /*x=*/0, /*y=*/0);
    CreateTreeFor(root_, /*width=*/width, /*levels=*/height);
  }

  MockClass* CreateClassFor(MockClass* parent, size_t x, size_t y) {
    MockClass* kls = new MockClass(parent, x, y);
    all_classes_.push_back(std::unique_ptr<MockClass>(kls));
    return kls;
  }

  void CreateTreeFor(MockClass* parent, size_t width, size_t levels) {
    DCHECK(parent != nullptr);
    if (levels == 0) {
      return;
    }

    for (size_t i = 0; i < width; ++i) {
      MockClass* child = CreateClassFor(parent, i, parent->y_ + 1);
      CreateTreeFor(child, width, levels - 1);
    }
  }

  MockClass* root_ = nullptr;
  std::vector<std::unique_ptr<MockClass>> all_classes_;
};

TEST_F(SubtypeCheckTest, LookupAllChildren) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  root_->Visit([&](MockClass* kls) {
    MockScopedLockSubtypeCheck lock_a;
    MockScopedLockMutator lock_b;

    EXPECT_EQ(SubtypeCheckInfo::kUninitialized, SCTree::Lookup(kls).GetState());
    return true;  // Keep visiting.
  });
}

TEST_F(SubtypeCheckTest, LookupRoot) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree root = SCTree::Lookup(root_);
  EXPECT_EQ(SubtypeCheckInfo::kAssigned, root.EnsureInitialized());
  EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, root.IsSubtypeOf(root)) << root;
}

TEST_F(SubtypeCheckTest, EnsureInitializedFirstLevel) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree root = SCTree::Lookup(root_);
  EXPECT_EQ(SubtypeCheckInfo::kAssigned, root.EnsureInitialized());

  ASSERT_LT(0u, root_->GetNumberOfChildren());

  // Initialize root's children only.
  for (size_t i = 0; i < root_->GetNumberOfChildren(); ++i) {
    MockClass* child = root_->GetChild(i);
    SCTree child_tree = SCTree::Lookup(child);
    // Before: all unknown.
    EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child_tree)) << child_tree;
    EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(root)) << child_tree;
    // Transition.
    EXPECT_EQ(SubtypeCheckInfo::kInitialized, child_tree.EnsureInitialized());
    // After: "src instanceof target" known, but "target instanceof src" unknown.
    EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child_tree.IsSubtypeOf(root)) << child_tree;
    EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child_tree)) << child_tree;
  }
}

TEST_F(SubtypeCheckTest, EnsureAssignedFirstLevel) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree root = SCTree::Lookup(root_);
  EXPECT_EQ(SubtypeCheckInfo::kAssigned, root.EnsureInitialized());

  ASSERT_LT(0u, root_->GetNumberOfChildren());

  // Initialize root's children only.
  for (size_t i = 0; i < root_->GetNumberOfChildren(); ++i) {
    MockClass* child = root_->GetChild(i);
    SCTree child_tree = SCTree::Lookup(child);
    // Before: all unknown.
    EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child_tree)) << child_tree;
    EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(root)) << child_tree;
    // Transition.
    EXPECT_EQ(SubtypeCheckInfo::kAssigned, child_tree.EnsureAssigned());
    // After: "src instanceof target" known, and "target instanceof src" known.
    EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child_tree.IsSubtypeOf(root)) << child_tree;
    EXPECT_EQ(SubtypeCheckInfo::kNotSubtypeOf, root.IsSubtypeOf(child_tree)) << child_tree;
  }
}

TEST_F(SubtypeCheckTest, EnsureInitializedSecondLevelWithPreassign) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree root = SCTree::Lookup(root_);
  EXPECT_EQ(SubtypeCheckInfo::kAssigned, root.EnsureInitialized());

  ASSERT_LT(0u, root_->GetNumberOfChildren());

  // Initialize root's children.
  for (size_t i = 0; i < root_->GetNumberOfChildren(); ++i) {
    MockClass* child = root_->GetChild(i);
    SCTree child_tree = SCTree::Lookup(child);

    ASSERT_EQ(1u, child->Depth());

    EXPECT_EQ(SubtypeCheckInfo::kInitialized, child_tree.EnsureInitialized()) << *child;
    EXPECT_EQ(SubtypeCheckInfo::kAssigned, child_tree.EnsureAssigned())
        << *child << ", root:" << *root_;
    for (size_t j = 0; j < child->GetNumberOfChildren(); ++j) {
      MockClass* child2 = child->GetChild(j);
      ASSERT_EQ(2u, child2->Depth());
      SCTree child2_tree = SCTree::Lookup(child2);

      // Before: all unknown.
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child2_tree)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(child2_tree))
          << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child2_tree.IsSubtypeOf(root))
          << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child2_tree.IsSubtypeOf(child_tree))
          << child2_tree;

      EXPECT_EQ(SubtypeCheckInfo::kUninitialized, child2_tree.GetState()) << *child2;
      EXPECT_EQ(SubtypeCheckInfo::kInitialized, child2_tree.EnsureInitialized()) << *child2;

      // After: src=child2_tree is known, otherwise unknown.
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child2_tree)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(child2_tree))
          << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child2_tree.IsSubtypeOf(root)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child2_tree.IsSubtypeOf(child_tree)) << child2_tree;
    }

    // The child is "assigned" as a side-effect of initializing sub-children.
    EXPECT_EQ(SubtypeCheckInfo::kAssigned, child_tree.GetState());
  }
}

TEST_F(SubtypeCheckTest, EnsureInitializedSecondLevelDontPreassign) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree root = SCTree::Lookup(root_);
  EXPECT_EQ(SubtypeCheckInfo::kAssigned, root.EnsureInitialized());

  ASSERT_LT(0u, root_->GetNumberOfChildren());

  // Initialize root's children only.
  for (size_t i = 0; i < root_->GetNumberOfChildren(); ++i) {
    MockClass* child = root_->GetChild(i);
    SCTree child_tree = SCTree::Lookup(child);

    ASSERT_EQ(1u, child->Depth());

    for (size_t j = 0; j < child->GetNumberOfChildren(); ++j) {
      MockClass* child2 = child->GetChild(j);
      ASSERT_EQ(2u, child2->Depth());
      SCTree child2_tree = SCTree::Lookup(child2);
      // Before: all unknown.
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child2_tree)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(child2_tree))
          << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child2_tree.IsSubtypeOf(root)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child2_tree.IsSubtypeOf(child_tree))
          << child2_tree;
      // Transition.
      EXPECT_EQ(SubtypeCheckInfo::kUninitialized, child2_tree.GetState()) << *child2;
      EXPECT_EQ(SubtypeCheckInfo::kInitialized, child2_tree.EnsureInitialized()) << *child2;
      // After: src=child2_tree is known, otherwise unknown.
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, root.IsSubtypeOf(child2_tree)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, child_tree.IsSubtypeOf(child2_tree))
          << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child2_tree.IsSubtypeOf(root)) << child2_tree;
      EXPECT_EQ(SubtypeCheckInfo::kSubtypeOf, child2_tree.IsSubtypeOf(child_tree)) << child2_tree;
    }

    // The child is "assigned" as a side-effect of initializing sub-children.
    EXPECT_EQ(SubtypeCheckInfo::kAssigned, child_tree.GetState());
  }
}

void ApplyTransition(MockSubtypeCheck sc_tree,
                     SubtypeCheckInfo::State transition,
                     SubtypeCheckInfo::State expected) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;

  EXPECT_EQ(SubtypeCheckInfo::kUninitialized, sc_tree.GetState()) << sc_tree.GetClass();

  if (transition == SubtypeCheckInfo::kUninitialized) {
    EXPECT_EQ(expected, sc_tree.ForceUninitialize()) << sc_tree.GetClass();
  } else if (transition == SubtypeCheckInfo::kInitialized) {
    EXPECT_EQ(expected, sc_tree.EnsureInitialized()) << sc_tree.GetClass();
  } else if (transition == SubtypeCheckInfo::kAssigned) {
    EXPECT_EQ(expected, sc_tree.EnsureAssigned()) << sc_tree.GetClass();
  }
}

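// A transition to request for each class. kNone requests no transition; the
// state-returning ApplyTransition() below then just reports the current state.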
enum MockSubtypeOfTransition {
  kNone,
  kUninitialized,
  kInitialized,
  kAssigned,
};

std::ostream& operator<<(std::ostream& os, const MockSubtypeOfTransition& transition) {
  if (transition == MockSubtypeOfTransition::kUninitialized) {
    os << "kUninitialized";
  } else if (transition == MockSubtypeOfTransition::kInitialized) {
    os << "kInitialized";
  } else if (transition == MockSubtypeOfTransition::kAssigned) {
    os << "kAssigned";
  } else {
    os << "kNone";
  }
  return os;
}

SubtypeCheckInfo::State ApplyTransition(MockSubtypeCheck sc_tree,
                                        MockSubtypeOfTransition transition) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;

  if (transition == MockSubtypeOfTransition::kUninitialized) {
    return sc_tree.ForceUninitialize();
  } else if (transition == MockSubtypeOfTransition::kInitialized) {
    return sc_tree.EnsureInitialized();
  } else if (transition == MockSubtypeOfTransition::kAssigned) {
    return sc_tree.EnsureAssigned();
  }

  return sc_tree.GetState();
}

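// Labels for when the expect_checks callback runs relative to a node's
// transition: before the transition, right after it, and after all of the
// node's children have been processed.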
enum {
  kBeforeTransition = 0,
  kAfterTransition = 1,
  kAfterChildren = 2,
};

const char* StringifyTransition(int x) {
  if (x == kBeforeTransition) {
    return "kBeforeTransition";
  } else if (x == kAfterTransition) {
    return "kAfterTransition";
  } else if (x == kAfterChildren) {
    return "kAfterChildren";
  }

  return "<<Unknown>>";
}

struct TransitionHistory {
  void Record(int transition_label, MockClass* kls) {
    ss_ << "<<<" << StringifyTransition(transition_label) << ">>>";
    ss_ << "{Self}: " << *kls;

    if (kls->HasSuperClass()) {
      ss_ << "{Parent}: " << *(kls->GetSuperClass());
    }
    ss_ << "================== ";
  }

  friend std::ostream& operator<<(std::ostream& os, const TransitionHistory& t) {
    os << t.ss_.str();
    return os;
  }

  std::stringstream ss_;
};

template <typename T, typename T2>
void EnsureStateChangedTestRecursiveGeneric(MockClass* klass,
                                            size_t cur_depth,
                                            size_t total_depth,
                                            T2 transition_func,
                                            T expect_checks) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  SCTree sc_tree = SCTree::Lookup(klass);
  MockSubtypeOfTransition requested_transition = transition_func(klass);

  // FIXME: need to print before(self, parent) and after(self, parent)
  // to make any sense of what's going on.

  auto do_expect_checks = [&](int transition_label, TransitionHistory& transition_details) {
    MockScopedLockSubtypeCheck lock_a;
    MockScopedLockMutator lock_b;

    transition_details.Record(transition_label, klass);

    SCOPED_TRACE(transition_details);
    ASSERT_EQ(cur_depth, klass->Depth());

    ASSERT_NO_FATAL_FAILURE(expect_checks(klass,
                                          transition_label,
                                          sc_tree.GetState(),
                                          requested_transition));
  };

  TransitionHistory transition_history;
  do_expect_checks(kBeforeTransition, transition_history);
  SubtypeCheckInfo::State state = ApplyTransition(sc_tree, requested_transition);
  UNUSED(state);
  do_expect_checks(kAfterTransition, transition_history);

  if (total_depth == cur_depth) {
    return;
  }

  // Recurse into the children.
  for (size_t i = 0; i < klass->GetNumberOfChildren(); ++i) {
    MockClass* child = klass->GetChild(i);
    EnsureStateChangedTestRecursiveGeneric(child,
                                           cur_depth + 1u,
                                           total_depth,
                                           transition_func,
                                           expect_checks);
  }

  do_expect_checks(kAfterChildren, transition_history);
}

void EnsureStateChangedTestRecursive(
    MockClass* klass,
    size_t cur_depth,
    size_t total_depth,
    const std::vector<std::pair<SubtypeCheckInfo::State, SubtypeCheckInfo::State>>& transitions) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  ASSERT_EQ(cur_depth, klass->Depth());
  ApplyTransition(SCTree::Lookup(klass),
                  transitions[cur_depth].first,
                  transitions[cur_depth].second);

  if (total_depth == cur_depth + 1) {
    return;
  }

  // Recurse into the children.
  for (size_t i = 0; i < klass->GetNumberOfChildren(); ++i) {
    MockClass* child = klass->GetChild(i);
    EnsureStateChangedTestRecursive(child, cur_depth + 1u, total_depth, transitions);
  }
}

void EnsureStateChangedTest(
    MockClass* root,
    size_t depth,
    const std::vector<std::pair<SubtypeCheckInfo::State, SubtypeCheckInfo::State>>& transitions) {
  ASSERT_EQ(depth, transitions.size());

  EnsureStateChangedTestRecursive(root, /*cur_depth=*/0u, depth, transitions);
}

TEST_F(SubtypeCheckTest, EnsureInitialized_NoOverflow) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kInitialized;
  };

  constexpr size_t kMaxDepthForThisTest = BitString::kCapacity;
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    if (expect_when == kBeforeTransition) {
      EXPECT_EQ(SubtypeCheckInfo::kUninitialized, actual_state);
      return;
    }

    if (expect_when == kAfterTransition) {
      // After the explicit transition has been completed.
      switch (kls->Depth()) {
        case 0:
          if (transition >= MockSubtypeOfTransition::kInitialized) {
            EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
          }
          break;
        default:
          if (transition >= MockSubtypeOfTransition::kInitialized) {
            if (transition == MockSubtypeOfTransition::kInitialized) {
              EXPECT_EQ(SubtypeCheckInfo::kInitialized, actual_state);
            } else if (transition == MockSubtypeOfTransition::kAssigned) {
              EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
            }
          }
          break;
      }
    }

    if (expect_when == kAfterChildren) {
      if (transition >= MockSubtypeOfTransition::kInitialized) {
        ASSERT_NE(kls->Depth(), kMaxDepthForThisTest);
        EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
      }
    }
  };

  // Initialize every level, 0 through kMaxDepthForThisTest.
  // Intermediate levels become "assigned"; the deepest level remains initialized.
  EnsureStateChangedTestRecursiveGeneric(root_, 0u, kMaxDepthForThisTest, transitions, expected);

  auto transitions_uninitialize = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kUninitialized;
  };

  auto expected_uninitialize = [](MockClass* kls,
                                  int expect_when,
                                  SubtypeCheckInfo::State actual_state,
                                  MockSubtypeOfTransition transition) {
    UNUSED(kls);
    UNUSED(transition);
    if (expect_when >= kAfterTransition) {
      EXPECT_EQ(SubtypeCheckInfo::kUninitialized, actual_state);
    }
  };

  // Uninitialize the entire tree after it was assigned.
  EnsureStateChangedTestRecursiveGeneric(root_,
                                         0u,
                                         kMaxDepthForThisTest,
                                         transitions_uninitialize,
                                         expected_uninitialize);
}

TEST_F(SubtypeCheckTest, EnsureAssigned_TooDeep) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kAssigned;
  };

  constexpr size_t kMaxDepthForThisTest = BitString::kCapacity + 1u;
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    UNUSED(transition);
    if (expect_when == kAfterTransition) {
      if (kls->Depth() > BitString::kCapacity) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      }
    }
  };

  // Assign every level, 0 through BitString::kCapacity + 1.
  // The level past BitString::kCapacity cannot be assigned, so it overflows instead.
  EnsureStateChangedTestRecursiveGeneric(root_, 0u, kMaxDepthForThisTest, transitions, expected);
}

TEST_F(SubtypeCheckTest, EnsureAssigned_TooDeep_OfTooDeep) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kAssigned;
  };

  constexpr size_t kMaxDepthForThisTest = BitString::kCapacity + 2u;
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    UNUSED(transition);
    if (expect_when == kAfterTransition) {
      if (kls->Depth() > BitString::kCapacity) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      }
    }
  };

  // Assign every level, 0 through BitString::kCapacity + 2.
  // The level past BitString::kCapacity cannot be assigned, so it overflows instead.
  // In addition, the level below that cannot be assigned either (its parent is
  // overflowed), so it also fails.
  EnsureStateChangedTestRecursiveGeneric(root_, 0u, kMaxDepthForThisTest, transitions, expected);
}

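// Width cutoff for assigning siblings at `depth`: the bitstring character at
// position depth - 1 is kBitSizeAtPosition[depth - 1] bits wide, and
// MaxInt(n) = 2^n - 1 is the largest value n bits can hold. For example, a
// hypothetical 4-bit character could hold sibling numbers 1..15 ('0' means
// no value).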
constexpr size_t MaxWidthCutOff(size_t depth) {
  if (depth == 0) {
    return 1;
  }
  if (depth > BitString::kCapacity) {
    return std::numeric_limits<size_t>::max();
  }
  return MaxInt<size_t>(BitString::kBitSizeAtPosition[depth - 1]);
}

// Either the class itself is too wide, or any of its parents were too wide.
bool IsTooWide(MockClass* kls) {
  if (kls == nullptr || kls->Depth() == 0u) {
    // Root is never too wide.
    return false;
  } else {
    if (kls->GetX() >= MaxWidthCutOff(kls->Depth())) {
      return true;
    }
  }
  return IsTooWide(kls->GetParent());
}

// Either the class itself is too deep, or any of its parents were too deep.
bool IsTooDeep(MockClass* kls) {
  if (kls == nullptr || kls->Depth() == 0u) {
    // Root is never too deep.
    return false;
  } else {
    if (kls->Depth() > BitString::kCapacity) {
      return true;
    }
  }
  return false;
}

TEST_F(SubtypeCheckTest, EnsureInitialized_TooWide) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kAssigned;
  };

  // Pick the 2nd level because it has the narrowest # of bits.
  constexpr size_t kTargetDepth = 2;
  constexpr size_t kMaxWidthCutOff = MaxWidthCutOff(kTargetDepth);

  constexpr size_t kMaxDepthForThisTest = std::numeric_limits<size_t>::max();
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    UNUSED(transition);
    // Note: purposefully ignore the too-deep children in the premade tree.
    if (expect_when == kAfterTransition && kls->Depth() <= BitString::kCapacity) {
      if (IsTooWide(kls)) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      } else {
        EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
      }
    }
  };

  {
    // Create too-wide siblings at the kTargetDepth level.
    MockClass* child = root_->FindChildAt(/*x=*/0, kTargetDepth - 1u);
    CreateTreeFor(child, kMaxWidthCutOff * 2, /*levels=*/1);
    ASSERT_LE(kMaxWidthCutOff * 2, child->GetNumberOfChildren());
    ASSERT_TRUE(IsTooWide(child->GetMaxChild())) << *(child->GetMaxChild());
    // Leave the rest of the tree as the default.
  }

  // Try to assign every level.
  // It will fail once it gets to the "too wide" siblings and cause overflows.
  EnsureStateChangedTestRecursiveGeneric(root_,
                                         0u,
                                         kMaxDepthForThisTest,
                                         transitions,
                                         expected);
}

TEST_F(SubtypeCheckTest, EnsureInitialized_TooWide_TooWide) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kAssigned;
  };

  // Pick the 2nd level because it has the narrowest # of bits.
  constexpr size_t kTargetDepth = 2;
  constexpr size_t kMaxWidthCutOff = MaxWidthCutOff(kTargetDepth);
  constexpr size_t kMaxWidthCutOffSub = MaxWidthCutOff(kTargetDepth + 1u);

  constexpr size_t kMaxDepthForThisTest = std::numeric_limits<size_t>::max();
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    UNUSED(transition);
    // Note: purposefully ignore the too-deep children in the premade tree.
    if (expect_when == kAfterTransition && kls->Depth() <= BitString::kCapacity) {
      if (IsTooWide(kls)) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      } else {
        EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
      }
    }
  };

  {
    // Create too-wide siblings at the kTargetDepth level.
    MockClass* child = root_->FindChildAt(/*x=*/0, kTargetDepth - 1);
    CreateTreeFor(child, kMaxWidthCutOff * 2, /*levels=*/1);
    ASSERT_LE(kMaxWidthCutOff * 2, child->GetNumberOfChildren()) << *child;
    ASSERT_TRUE(IsTooWide(child->GetMaxChild())) << *(child->GetMaxChild());
    // Leave the rest of the tree as the default.

    // Create too-wide children for a too-wide parent.
    MockClass* child_subchild = child->FindChildAt(/*x=*/0, kTargetDepth);
    CreateTreeFor(child_subchild, kMaxWidthCutOffSub * 2, /*levels=*/1);
    ASSERT_LE(kMaxWidthCutOffSub * 2, child_subchild->GetNumberOfChildren()) << *child_subchild;
    ASSERT_TRUE(IsTooWide(child_subchild->GetMaxChild())) << *(child_subchild->GetMaxChild());
  }

  // Try to assign every level.
  // It will fail once it gets to the "too wide" siblings and cause overflows.
  // Furthermore, assigning any subtree whose ancestor is too wide will also fail.
  EnsureStateChangedTestRecursiveGeneric(root_, 0u, kMaxDepthForThisTest, transitions, expected);
}

void EnsureSubtypeOfCorrect(MockClass* a, MockClass* b) {
  MockScopedLockSubtypeCheck lock_a;
  MockScopedLockMutator lock_b;
  using SCTree = MockSubtypeCheck;

  auto IsAssigned = [](SCTree& tree) {
    MockScopedLockSubtypeCheck lock_a;
    MockScopedLockMutator lock_b;
    // This assumes that EnsureAssigned was attempted for every MockClass.
    EXPECT_NE(SubtypeCheckInfo::kInitialized, tree.GetState());
    EXPECT_NE(SubtypeCheckInfo::kUninitialized, tree.GetState());
    // Use our own test checks, so we are actually testing different logic than the impl.
    return !(IsTooDeep(&tree.GetClass()) || IsTooWide(&tree.GetClass()));
  };

  SCTree src_tree = SCTree::Lookup(a);
  SCTree target_tree = SCTree::Lookup(b);

  SCOPED_TRACE("class A");
  SCOPED_TRACE(*a);
  SCOPED_TRACE("class B");
  SCOPED_TRACE(*b);

  SubtypeCheckInfo::Result slow_result =
      a->SlowIsSubtypeOf(b) ? SubtypeCheckInfo::kSubtypeOf : SubtypeCheckInfo::kNotSubtypeOf;
  SubtypeCheckInfo::Result fast_result = src_tree.IsSubtypeOf(target_tree);

  // The target must be Assigned for this check to succeed.
  // The source is either Overflowed or Assigned (in this case).

  // Use ASSERTs, not EXPECTs, to fail immediately; otherwise failures would
  // be reported across the entire (very large) tree.
  if (IsAssigned(src_tree) && IsAssigned(target_tree)) {
    ASSERT_EQ(slow_result, fast_result);
  } else if (IsAssigned(src_tree)) {
    // A is assigned. B is >= initialized.
    ASSERT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, fast_result);
  } else if (IsAssigned(target_tree)) {
    // B is assigned. A is >= initialized.
    ASSERT_EQ(slow_result, fast_result);
  } else {
    // Neither A nor B is assigned.
    ASSERT_EQ(SubtypeCheckInfo::kUnknownSubtypeOf, fast_result);
  }
}

void EnsureSubtypeOfRecursive(MockClass* kls_root) {
  MockScopedLockMutator mutator_lock_fake_;

  auto visit_func = [&](MockClass* kls) {
    kls->Visit([&](MockClass* inner_class) {
      EnsureSubtypeOfCorrect(kls, inner_class);
      EnsureSubtypeOfCorrect(inner_class, kls);

      if (::testing::Test::HasFatalFailure()) {
        return false;
      }

      return true;  // Keep visiting.
    });

    if (::testing::Test::HasFatalFailure()) {
      return false;
    }

    return true;  // Keep visiting.
  };

  ASSERT_NO_FATAL_FAILURE(kls_root->Visit(visit_func));
}

TEST_F(SubtypeCheckTest, EnsureInitialized_TooWide_TooDeep) {
  auto transitions = [](MockClass* kls) {
    UNUSED(kls);
    return MockSubtypeOfTransition::kAssigned;
  };

  // Pick the 2nd level because it has the narrowest # of bits.
  constexpr size_t kTargetDepth = 2;
  constexpr size_t kTooDeepTargetDepth = BitString::kCapacity + 1;
  constexpr size_t kMaxWidthCutOff = MaxWidthCutOff(kTargetDepth);

  constexpr size_t kMaxDepthForThisTest = std::numeric_limits<size_t>::max();
  auto expected = [=](MockClass* kls,
                      int expect_when,
                      SubtypeCheckInfo::State actual_state,
                      MockSubtypeOfTransition transition) {
    UNUSED(transition);
    if (expect_when == kAfterTransition) {
      if (IsTooDeep(kls)) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      } else if (IsTooWide(kls)) {
        EXPECT_EQ(SubtypeCheckInfo::kOverflowed, actual_state);
      } else {
        EXPECT_EQ(SubtypeCheckInfo::kAssigned, actual_state);
      }
    }
  };

  {
    // Create too-wide siblings at the kTargetDepth level.
    MockClass* child = root_->FindChildAt(/*x=*/0, kTargetDepth - 1u);
    CreateTreeFor(child, kMaxWidthCutOff * 2, /*levels=*/1);
    ASSERT_LE(kMaxWidthCutOff * 2, child->GetNumberOfChildren());
    ASSERT_TRUE(IsTooWide(child->GetMaxChild())) << *(child->GetMaxChild());
    // Leave the rest of the tree as the default.

    // Create too-deep children for a too-wide parent.
    MockClass* child_subchild = child->GetMaxChild();
    ASSERT_TRUE(child_subchild != nullptr);
    ASSERT_EQ(0u, child_subchild->GetNumberOfChildren()) << *child_subchild;
    CreateTreeFor(child_subchild, /*width=*/1, /*levels=*/kTooDeepTargetDepth);
    MockClass* too_deep_child = child_subchild->FindChildAt(0, kTooDeepTargetDepth + 2);
    ASSERT_TRUE(too_deep_child != nullptr) << child_subchild->ToDotGraph();
    ASSERT_TRUE(IsTooWide(too_deep_child)) << *(too_deep_child);
    ASSERT_TRUE(IsTooDeep(too_deep_child)) << *(too_deep_child);
  }

  // Try to assign every level.
  // It will fail once it gets to the "too wide" siblings and cause overflows.
  EnsureStateChangedTestRecursiveGeneric(root_, 0u, kMaxDepthForThisTest, transitions, expected);

  // Check every class against every class for "x instanceof y".
  EnsureSubtypeOfRecursive(root_);
}

// TODO: Add a DCHECK for child-parent invariants (e.g. child < parent.next) and death tests.

}  // namespace art