/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/GrTBlockList.h"
#include "tests/Test.h"

namespace {
struct C {
    C() : fID(-1) { ++gInstCnt; }
    C(int id) : fID(id) { ++gInstCnt; }
    C(C&& c) : C(c.fID) {}
    C(const C& c) : C(c.fID) {}

    C& operator=(C&&) = default;
    C& operator=(const C&) = default;

    ~C() { --gInstCnt; }

    int fID;

    // Under the hood, GrTBlockList and GrBlockAllocator round up to max_align_t. If 'C' were
    // just 4 bytes, the internal blocks could often squeeze a few extra instances in. That is
    // fine, but it makes predicting block capacities a little trickier, so make sure C is a bit
    // bigger (see the static_assert below the struct).
    int fPadding[4];

    static int gInstCnt;
};
int C::gInstCnt = 0;
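// A quick compile-time illustration of the padding note above: fID plus fPadding make C span at
// least five ints, so the max_align_t rounding mentioned there adds comparatively little slack
// per item and per-block capacities stay predictable.
static_assert(sizeof(C) >= 5 * sizeof(int), "C should be noticeably larger than a single int");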

struct D {
    int fID;
};

}  // namespace

// Checks that the allocator has the correct count, etc., and that the element IDs are correct.
// Then pops popCnt items and checks again.
template<int N>
static void check_allocator_helper(GrTBlockList<C, N>* allocator, int cnt, int popCnt,
                                   skiatest::Reporter* reporter) {
    REPORTER_ASSERT(reporter, (0 == cnt) == allocator->empty());
    REPORTER_ASSERT(reporter, cnt == allocator->count());
    REPORTER_ASSERT(reporter, cnt == C::gInstCnt);

    int i = 0;
    for (const C& c : allocator->items()) {
        REPORTER_ASSERT(reporter, i == c.fID);
        REPORTER_ASSERT(reporter, allocator->item(i).fID == i);
        ++i;
    }
    REPORTER_ASSERT(reporter, i == cnt);

    if (cnt > 0) {
        REPORTER_ASSERT(reporter, cnt - 1 == allocator->back().fID);
    }

    if (popCnt > 0) {
        for (int i = 0; i < popCnt; ++i) {
            allocator->pop_back();
        }
        check_allocator_helper(allocator, cnt - popCnt, 0, reporter);
    }
}

template<int N>
static void check_iterator_helper(GrTBlockList<C, N>* allocator,
                                  const std::vector<C*>& expected,
                                  skiatest::Reporter* reporter) {
    const GrTBlockList<C, N>* cAlloc = allocator;
    REPORTER_ASSERT(reporter, (size_t) allocator->count() == expected.size());
    // Forward+const
    int i = 0;
    for (const C& c : cAlloc->items()) {
        REPORTER_ASSERT(reporter, (uintptr_t) &c == (uintptr_t) expected[i]);
        ++i;
    }
    REPORTER_ASSERT(reporter, (size_t) i == expected.size());

    // Forward+non-const
    i = 0;
    for (C& c : allocator->items()) {
        REPORTER_ASSERT(reporter, (uintptr_t) &c == (uintptr_t) expected[i]);
        ++i;
    }
    REPORTER_ASSERT(reporter, (size_t) i == expected.size());

    // Reverse+const
    i = (int) expected.size() - 1;
    for (const C& c : cAlloc->ritems()) {
        REPORTER_ASSERT(reporter, (uintptr_t) &c == (uintptr_t) expected[i]);
        --i;
    }
    REPORTER_ASSERT(reporter, i == -1);

    // Reverse+non-const
    i = (int) expected.size() - 1;
    for (C& c : allocator->ritems()) {
        REPORTER_ASSERT(reporter, (uintptr_t) &c == (uintptr_t) expected[i]);
        --i;
    }
    REPORTER_ASSERT(reporter, i == -1);

    // Also test random access
    for (int i = 0; i < allocator->count(); ++i) {
        REPORTER_ASSERT(reporter, (uintptr_t) &allocator->item(i) == (uintptr_t) expected[i]);
        REPORTER_ASSERT(reporter, (uintptr_t) &cAlloc->item(i) == (uintptr_t) expected[i]);
    }
}

// Adds cnt items to the allocator, checks the counts and iterators, pops popCnt items and checks
// again. Finally, it resets the allocator and checks once more.
template<int N>
static void check_allocator(GrTBlockList<C, N>* allocator, int cnt, int popCnt,
                            skiatest::Reporter* reporter) {
    enum ItemInitializer : int {
        kCopyCtor,
        kMoveCtor,
        kCopyAssign,
        kMoveAssign,
        kEmplace,
    };
    static constexpr int kInitCount = (int) kEmplace + 1;

    SkASSERT(allocator);
    SkASSERT(allocator->empty());
    std::vector<C*> items;
    for (int i = 0; i < cnt; ++i) {
        switch ((ItemInitializer) (i % kInitCount)) {
            case kCopyCtor:
                allocator->push_back(C(i));
                break;
            case kMoveCtor:
                allocator->push_back(std::move(C(i)));
                break;
            case kCopyAssign:
                allocator->push_back() = C(i);
                break;
            case kMoveAssign:
                allocator->push_back() = std::move(C(i));
                break;
            case kEmplace:
                allocator->emplace_back(i);
                break;
        }
        items.push_back(&allocator->back());
    }
    check_iterator_helper(allocator, items, reporter);
    check_allocator_helper(allocator, cnt, popCnt, reporter);
    allocator->reset();
    check_iterator_helper(allocator, {}, reporter);
    check_allocator_helper(allocator, 0, 0, reporter);
}

template<int N>
static void run_allocator_test(GrTBlockList<C, N>* allocator, skiatest::Reporter* reporter) {
    check_allocator(allocator, 0, 0, reporter);
    check_allocator(allocator, 1, 1, reporter);
    check_allocator(allocator, 2, 2, reporter);
    check_allocator(allocator, 10, 1, reporter);
    check_allocator(allocator, 10, 5, reporter);
    check_allocator(allocator, 10, 10, reporter);
    check_allocator(allocator, 100, 10, reporter);
}

template<int N1, int N2>
static void run_concat_test(skiatest::Reporter* reporter, int aCount, int bCount) {
    GrTBlockList<C, N1> listA;
    GrTBlockList<C, N2> listB;

    for (int i = 0; i < aCount; ++i) {
        listA.emplace_back(i);
    }
    for (int i = 0; i < bCount; ++i) {
        listB.emplace_back(aCount + i);
    }

    REPORTER_ASSERT(reporter, listA.count() == aCount && listB.count() == bCount);
    REPORTER_ASSERT(reporter, C::gInstCnt == aCount + bCount);

    // Concatenate B into A and verify.
    listA.concat(std::move(listB));
    REPORTER_ASSERT(reporter, listA.count() == aCount + bCount);
    // GrTBlockList guarantees the moved-from list is empty, but clang-tidy doesn't know that;
    // in practice we won't really be using moved-from lists, so this won't pollute the main
    // code base with lots of warning disables.
    REPORTER_ASSERT(reporter, listB.count() == 0); // NOLINT(bugprone-use-after-move)
    REPORTER_ASSERT(reporter, C::gInstCnt == aCount + bCount);

    int i = 0;
    for (const C& item : listA.items()) {
        // By construction of A and B, the concatenated ID sequence is contiguous
        REPORTER_ASSERT(reporter, i == item.fID);
        i++;
    }
    REPORTER_ASSERT(reporter, i == (aCount + bCount));
}

template<int N1, int N2>
static void run_concat_trivial_test(skiatest::Reporter* reporter, int aCount, int bCount) {
    static_assert(std::is_trivially_copyable<D>::value);

    // This is similar to run_concat_test(), except that since D is trivial we can't verify the
    // instance counts that are tracked via ctor/dtor.
    GrTBlockList<D, N1> listA;
    GrTBlockList<D, N2> listB;

    for (int i = 0; i < aCount; ++i) {
        listA.push_back({i});
    }
    for (int i = 0; i < bCount; ++i) {
        listB.push_back({aCount + i});
    }

    REPORTER_ASSERT(reporter, listA.count() == aCount && listB.count() == bCount);
    // Concatenate B into A and verify.
    listA.concat(std::move(listB));
    REPORTER_ASSERT(reporter, listA.count() == aCount + bCount);
    REPORTER_ASSERT(reporter, listB.count() == 0); // NOLINT(bugprone-use-after-move): see above

    int i = 0;
    for (const D& item : listA.items()) {
        // By construction of A and B, the concatenated ID sequence is contiguous
        REPORTER_ASSERT(reporter, i == item.fID);
        i++;
    }
    REPORTER_ASSERT(reporter, i == (aCount + bCount));
}

template<int N>
static void run_reserve_test(skiatest::Reporter* reporter) {
    constexpr int kItemsPerBlock = N + 4; // Keep items-per-block > 1, even when N == 1

    GrTBlockList<C, N> list(kItemsPerBlock);
    size_t initialSize = list.allocator()->totalSize();
    // Should be able to add N instances of C without changing the size from initialSize
    for (int i = 0; i < N; ++i) {
        list.push_back(C(i));
    }
    REPORTER_ASSERT(reporter, initialSize == list.allocator()->totalSize());

    // Reserve room for 2*kItemsPerBlock items
    list.reserve(2 * kItemsPerBlock);
    REPORTER_ASSERT(reporter, list.count() == N); // count shouldn't change though

    size_t reservedSize = list.allocator()->totalSize();
    REPORTER_ASSERT(reporter, reservedSize >= initialSize + 2 * kItemsPerBlock * sizeof(C));
    for (int i = 0; i < 2 * kItemsPerBlock; ++i) {
        list.push_back(C(i));
    }
    REPORTER_ASSERT(reporter, reservedSize == list.allocator()->totalSize());

    // Make the next block partially full (N > 0 but < kItemsPerBlock)
    for (int i = 0; i < N; ++i) {
        list.push_back(C(i));
    }

    // Reserve room again for 2*kItemsPerBlock, but reserve should automatically take into
    // account the (kItemsPerBlock - N) slots still available in the active block
    list.reserve(2 * kItemsPerBlock);
    int extraReservedCount = kItemsPerBlock + N;
    // Because GrTBlockList normally allocates blocks of a fixed size, and extraReservedCount >
    // items-per-block, the reservation is sized for extraReservedCount items instead of using
    // the growth policy's block size.
    REPORTER_ASSERT(reporter, (size_t) list.allocator()->testingOnly_scratchBlockSize() >=
                              extraReservedCount * sizeof(C));

    reservedSize = list.allocator()->totalSize();
    for (int i = 0; i < 2 * kItemsPerBlock; ++i) {
        list.push_back(C(i));
    }
    REPORTER_ASSERT(reporter, reservedSize == list.allocator()->totalSize());

    // If we reserve a count < items-per-block, it will use the fixed size from the growth policy.
    list.reserve(2);
    REPORTER_ASSERT(reporter, (size_t) list.allocator()->testingOnly_scratchBlockSize() >=
                              kItemsPerBlock * sizeof(C));

    // Ensure the reservations didn't initialize any more C's than anticipated (a worked example
    // of this arithmetic follows the function)
    int expectedInstanceCount = 2 * (N + 2 * kItemsPerBlock);
    REPORTER_ASSERT(reporter, expectedInstanceCount == C::gInstCnt);

    list.reset();
    REPORTER_ASSERT(reporter, 0 == C::gInstCnt);
}
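// Worked example of the expectedInstanceCount arithmetic above, sketched for N == 4 (so
// kItemsPerBlock == N + 4 == 8): the four push loops add N + 2*kItemsPerBlock + N +
// 2*kItemsPerBlock = 4 + 16 + 4 + 16 = 40 items, matching 2 * (N + 2*kItemsPerBlock)
// = 2 * (4 + 16) = 40.
static_assert(4 + 2 * 8 + 4 + 2 * 8 == 2 * (4 + 2 * 8), "instance count sanity check");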

DEF_TEST(GrTBlockList, reporter) {
    // Test combinations of allocators with and without stack storage and with different block
    // sizes
    GrTBlockList<C> a1(1);
    run_allocator_test(&a1, reporter);

    GrTBlockList<C> a2(2);
    run_allocator_test(&a2, reporter);

    GrTBlockList<C> a5(5);
    run_allocator_test(&a5, reporter);

    GrTBlockList<C, 1> sa1;
    run_allocator_test(&sa1, reporter);

    GrTBlockList<C, 3> sa3;
    run_allocator_test(&sa3, reporter);

    GrTBlockList<C, 4> sa4;
    run_allocator_test(&sa4, reporter);

    run_reserve_test<1>(reporter);
    run_reserve_test<2>(reporter);
    run_reserve_test<3>(reporter);
    run_reserve_test<4>(reporter);
    run_reserve_test<5>(reporter);

    run_concat_test<1, 1>(reporter, 10, 10);
    run_concat_test<5, 1>(reporter, 50, 10);
    run_concat_test<1, 5>(reporter, 10, 50);
    run_concat_test<5, 5>(reporter, 100, 100);

    run_concat_trivial_test<1, 1>(reporter, 10, 10);
    run_concat_trivial_test<5, 1>(reporter, 50, 10);
    run_concat_trivial_test<1, 5>(reporter, 10, 50);
    run_concat_trivial_test<5, 5>(reporter, 100, 100);
}