// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
| 4 | |
| 5 | #include <vector> |
| 6 | |
| 7 | #include "src/globals.h" |
| 8 | #include "src/heap/heap.h" |
| 9 | #include "src/heap/spaces.h" |
| 10 | #include "src/heap/spaces-inl.h" |
| 11 | #include "test/cctest/cctest.h" |
| 12 | |
| 13 | namespace v8 { |
| 14 | namespace internal { |
| 15 | |
| 16 | static Address AllocateLabBackingStore(Heap* heap, intptr_t size_in_bytes) { |
| 17 | AllocationResult result = heap->old_space()->AllocateRaw( |
| 18 | static_cast<int>(size_in_bytes), kDoubleAligned); |
| 19 | Object* obj = result.ToObjectChecked(); |
| 20 | Address adr = HeapObject::cast(obj)->address(); |
| 21 | return adr; |
| 22 | } |
| 23 | |
| 24 | |
| 25 | static void VerifyIterable(v8::internal::Address base, |
| 26 | v8::internal::Address limit, |
| 27 | std::vector<intptr_t> expected_size) { |
| 28 | CHECK_LE(reinterpret_cast<intptr_t>(base), reinterpret_cast<intptr_t>(limit)); |
| 29 | HeapObject* object = nullptr; |
| 30 | size_t counter = 0; |
| 31 | while (base < limit) { |
| 32 | object = HeapObject::FromAddress(base); |
| 33 | CHECK(object->IsFiller()); |
| 34 | CHECK_LT(counter, expected_size.size()); |
| 35 | CHECK_EQ(expected_size[counter], object->Size()); |
| 36 | base += object->Size(); |
| 37 | counter++; |
| 38 | } |
| 39 | } |
| 40 | |
| 41 | |
| 42 | static bool AllocateFromLab(Heap* heap, LocalAllocationBuffer* lab, |
| 43 | intptr_t size_in_bytes, |
| 44 | AllocationAlignment alignment = kWordAligned) { |
| 45 | HeapObject* obj; |
| 46 | AllocationResult result = |
| 47 | lab->AllocateRawAligned(static_cast<int>(size_in_bytes), alignment); |
| 48 | if (result.To(&obj)) { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame] | 49 | heap->CreateFillerObjectAt(obj->address(), static_cast<int>(size_in_bytes), |
| 50 | ClearRecordedSlots::kNo); |
Ben Murdoch | 4a90d5f | 2016-03-22 12:00:34 +0000 | [diff] [blame] | 51 | return true; |
| 52 | } |
| 53 | return false; |
| 54 | } |
| 55 | |
| 56 | |
| 57 | TEST(InvalidLab) { |
| 58 | LocalAllocationBuffer lab = LocalAllocationBuffer::InvalidBuffer(); |
| 59 | CHECK(!lab.IsValid()); |
| 60 | } |
| 61 | |
| 62 | |
| 63 | TEST(UnusedLabImplicitClose) { |
| 64 | CcTest::InitializeVM(); |
| 65 | Heap* heap = CcTest::heap(); |
| 66 | heap->root(Heap::kOnePointerFillerMapRootIndex); |
| 67 | const int kLabSize = 4 * KB; |
| 68 | Address base = AllocateLabBackingStore(heap, kLabSize); |
| 69 | Address limit = base + kLabSize; |
| 70 | intptr_t expected_sizes_raw[1] = {kLabSize}; |
| 71 | std::vector<intptr_t> expected_sizes(expected_sizes_raw, |
| 72 | expected_sizes_raw + 1); |
| 73 | { |
| 74 | AllocationResult lab_backing_store(HeapObject::FromAddress(base)); |
| 75 | LocalAllocationBuffer lab = |
| 76 | LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize); |
| 77 | CHECK(lab.IsValid()); |
| 78 | } |
| 79 | VerifyIterable(base, limit, expected_sizes); |
| 80 | } |
| 81 | |
| 82 | |
| 83 | TEST(SimpleAllocate) { |
| 84 | CcTest::InitializeVM(); |
| 85 | Heap* heap = CcTest::heap(); |
| 86 | const int kLabSize = 4 * KB; |
| 87 | Address base = AllocateLabBackingStore(heap, kLabSize); |
| 88 | Address limit = base + kLabSize; |
| 89 | intptr_t sizes_raw[1] = {128}; |
| 90 | intptr_t expected_sizes_raw[2] = {128, kLabSize - 128}; |
| 91 | std::vector<intptr_t> sizes(sizes_raw, sizes_raw + 1); |
| 92 | std::vector<intptr_t> expected_sizes(expected_sizes_raw, |
| 93 | expected_sizes_raw + 2); |
| 94 | { |
| 95 | AllocationResult lab_backing_store(HeapObject::FromAddress(base)); |
| 96 | LocalAllocationBuffer lab = |
| 97 | LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize); |
| 98 | CHECK(lab.IsValid()); |
| 99 | intptr_t sum = 0; |
| 100 | for (auto size : sizes) { |
| 101 | if (AllocateFromLab(heap, &lab, size)) { |
| 102 | sum += size; |
| 103 | } |
| 104 | } |
| 105 | } |
| 106 | VerifyIterable(base, limit, expected_sizes); |
| 107 | } |
| 108 | |
| 109 | |
| 110 | TEST(AllocateUntilLabOOM) { |
| 111 | CcTest::InitializeVM(); |
| 112 | Heap* heap = CcTest::heap(); |
| 113 | const int kLabSize = 2 * KB; |
| 114 | Address base = AllocateLabBackingStore(heap, kLabSize); |
| 115 | Address limit = base + kLabSize; |
| 116 | // The following objects won't fit in {kLabSize}. |
| 117 | intptr_t sizes_raw[5] = {512, 512, 128, 512, 512}; |
| 118 | intptr_t expected_sizes_raw[5] = {512, 512, 128, 512, 384 /* left over */}; |
| 119 | std::vector<intptr_t> sizes(sizes_raw, sizes_raw + 5); |
| 120 | std::vector<intptr_t> expected_sizes(expected_sizes_raw, |
| 121 | expected_sizes_raw + 5); |
| 122 | intptr_t sum = 0; |
| 123 | { |
| 124 | AllocationResult lab_backing_store(HeapObject::FromAddress(base)); |
| 125 | LocalAllocationBuffer lab = |
| 126 | LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize); |
| 127 | CHECK(lab.IsValid()); |
| 128 | for (auto size : sizes) { |
| 129 | if (AllocateFromLab(heap, &lab, size)) { |
| 130 | sum += size; |
| 131 | } |
| 132 | } |
| 133 | CHECK_EQ(kLabSize - sum, 384); |
| 134 | } |
| 135 | VerifyIterable(base, limit, expected_sizes); |
| 136 | } |
| 137 | |
| 138 | |
| 139 | TEST(AllocateExactlyUntilLimit) { |
| 140 | CcTest::InitializeVM(); |
| 141 | Heap* heap = CcTest::heap(); |
| 142 | const int kLabSize = 2 * KB; |
| 143 | Address base = AllocateLabBackingStore(heap, kLabSize); |
| 144 | Address limit = base + kLabSize; |
| 145 | intptr_t sizes_raw[4] = {512, 512, 512, 512}; |
| 146 | intptr_t expected_sizes_raw[5] = {512, 512, 512, 512, 0}; |
| 147 | std::vector<intptr_t> sizes(sizes_raw, sizes_raw + 4); |
| 148 | std::vector<intptr_t> expected_sizes(expected_sizes_raw, |
| 149 | expected_sizes_raw + 5); |
| 150 | { |
| 151 | AllocationResult lab_backing_store(HeapObject::FromAddress(base)); |
| 152 | LocalAllocationBuffer lab = |
| 153 | LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize); |
| 154 | CHECK(lab.IsValid()); |
| 155 | intptr_t sum = 0; |
| 156 | for (auto size : sizes) { |
| 157 | if (AllocateFromLab(heap, &lab, size)) { |
| 158 | sum += size; |
| 159 | } else { |
| 160 | break; |
| 161 | } |
| 162 | } |
| 163 | CHECK_EQ(kLabSize - sum, 0); |
| 164 | } |
| 165 | VerifyIterable(base, limit, expected_sizes); |
| 166 | } |
| 167 | |
| 168 | |
| 169 | TEST(MergeSuccessful) { |
| 170 | CcTest::InitializeVM(); |
| 171 | Heap* heap = CcTest::heap(); |
| 172 | const int kLabSize = 2 * KB; |
| 173 | Address base1 = AllocateLabBackingStore(heap, kLabSize); |
| 174 | Address limit1 = base1 + kLabSize; |
| 175 | Address base2 = limit1; |
| 176 | Address limit2 = base2 + kLabSize; |
| 177 | |
| 178 | intptr_t sizes1_raw[4] = {512, 512, 512, 256}; |
| 179 | intptr_t expected_sizes1_raw[5] = {512, 512, 512, 256, 256}; |
| 180 | std::vector<intptr_t> sizes1(sizes1_raw, sizes1_raw + 4); |
| 181 | std::vector<intptr_t> expected_sizes1(expected_sizes1_raw, |
| 182 | expected_sizes1_raw + 5); |
| 183 | |
| 184 | intptr_t sizes2_raw[5] = {256, 512, 512, 512, 512}; |
| 185 | intptr_t expected_sizes2_raw[10] = {512, 512, 512, 256, 256, |
| 186 | 512, 512, 512, 512, 0}; |
| 187 | std::vector<intptr_t> sizes2(sizes2_raw, sizes2_raw + 5); |
| 188 | std::vector<intptr_t> expected_sizes2(expected_sizes2_raw, |
| 189 | expected_sizes2_raw + 10); |
| 190 | |
| 191 | { |
| 192 | AllocationResult lab_backing_store1(HeapObject::FromAddress(base1)); |
| 193 | LocalAllocationBuffer lab1 = |
| 194 | LocalAllocationBuffer::FromResult(heap, lab_backing_store1, kLabSize); |
| 195 | CHECK(lab1.IsValid()); |
| 196 | intptr_t sum = 0; |
| 197 | for (auto size : sizes1) { |
| 198 | if (AllocateFromLab(heap, &lab1, size)) { |
| 199 | sum += size; |
| 200 | } else { |
| 201 | break; |
| 202 | } |
| 203 | } |
| 204 | |
| 205 | AllocationResult lab_backing_store2(HeapObject::FromAddress(base2)); |
| 206 | LocalAllocationBuffer lab2 = |
| 207 | LocalAllocationBuffer::FromResult(heap, lab_backing_store2, kLabSize); |
| 208 | CHECK(lab2.IsValid()); |
| 209 | CHECK(lab2.TryMerge(&lab1)); |
| 210 | CHECK(!lab1.IsValid()); |
| 211 | for (auto size : sizes2) { |
| 212 | if (AllocateFromLab(heap, &lab2, size)) { |
| 213 | sum += size; |
| 214 | } else { |
| 215 | break; |
| 216 | } |
| 217 | } |
| 218 | CHECK_EQ(2 * kLabSize - sum, 0); |
| 219 | } |
| 220 | VerifyIterable(base1, limit1, expected_sizes1); |
| 221 | VerifyIterable(base1, limit2, expected_sizes2); |
| 222 | } |
| 223 | |
| 224 | |
| 225 | TEST(MergeFailed) { |
| 226 | CcTest::InitializeVM(); |
| 227 | Heap* heap = CcTest::heap(); |
| 228 | const int kLabSize = 2 * KB; |
| 229 | Address base1 = AllocateLabBackingStore(heap, kLabSize); |
| 230 | Address base2 = base1 + kLabSize; |
| 231 | Address base3 = base2 + kLabSize; |
| 232 | |
| 233 | { |
| 234 | AllocationResult lab_backing_store1(HeapObject::FromAddress(base1)); |
| 235 | LocalAllocationBuffer lab1 = |
| 236 | LocalAllocationBuffer::FromResult(heap, lab_backing_store1, kLabSize); |
| 237 | CHECK(lab1.IsValid()); |
| 238 | |
| 239 | AllocationResult lab_backing_store2(HeapObject::FromAddress(base2)); |
| 240 | LocalAllocationBuffer lab2 = |
| 241 | LocalAllocationBuffer::FromResult(heap, lab_backing_store2, kLabSize); |
| 242 | CHECK(lab2.IsValid()); |
| 243 | |
| 244 | AllocationResult lab_backing_store3(HeapObject::FromAddress(base3)); |
| 245 | LocalAllocationBuffer lab3 = |
| 246 | LocalAllocationBuffer::FromResult(heap, lab_backing_store3, kLabSize); |
| 247 | CHECK(lab3.IsValid()); |
| 248 | |
| 249 | CHECK(!lab3.TryMerge(&lab1)); |
| 250 | } |
| 251 | } |
| 252 | |
| 253 | |
| 254 | #ifdef V8_HOST_ARCH_32_BIT |
| 255 | TEST(AllocateAligned) { |
| 256 | CcTest::InitializeVM(); |
| 257 | Heap* heap = CcTest::heap(); |
| 258 | const int kLabSize = 2 * KB; |
| 259 | Address base = AllocateLabBackingStore(heap, kLabSize); |
| 260 | Address limit = base + kLabSize; |
| 261 | std::pair<intptr_t, AllocationAlignment> sizes_raw[2] = { |
| 262 | std::make_pair(116, kWordAligned), std::make_pair(64, kDoubleAligned)}; |
| 263 | std::vector<std::pair<intptr_t, AllocationAlignment>> sizes(sizes_raw, |
| 264 | sizes_raw + 2); |
| 265 | intptr_t expected_sizes_raw[4] = {116, 4, 64, 1864}; |
| 266 | std::vector<intptr_t> expected_sizes(expected_sizes_raw, |
| 267 | expected_sizes_raw + 4); |
| 268 | |
| 269 | { |
| 270 | AllocationResult lab_backing_store(HeapObject::FromAddress(base)); |
| 271 | LocalAllocationBuffer lab = |
| 272 | LocalAllocationBuffer::FromResult(heap, lab_backing_store, kLabSize); |
| 273 | CHECK(lab.IsValid()); |
| 274 | for (auto pair : sizes) { |
| 275 | if (!AllocateFromLab(heap, &lab, pair.first, pair.second)) { |
| 276 | break; |
| 277 | } |
| 278 | } |
| 279 | } |
| 280 | VerifyIterable(base, limit, expected_sizes); |
| 281 | } |
| 282 | #endif // V8_HOST_ARCH_32_BIT |
| 283 | |
| 284 | } // namespace internal |
| 285 | } // namespace v8 |