// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "test/cctest/heap/heap-utils.h"

#include "src/factory.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"

namespace v8 {
namespace internal {
namespace heap {

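// Seals the current set of live objects: performs two full GCs, waits for
// sweeping to finish, closes the old-space linear allocation area, and marks
// every old-space page as never-allocate so later allocations cannot land
// next to the sealed objects.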
void SealCurrentObjects(Heap* heap) {
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  heap->old_space()->EmptyAllocationInfo();
  for (Page* page : *heap->old_space()) {
    page->MarkNeverAllocateForTesting();
  }
}

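// Returns the largest FixedArray length whose object, header included, fits
// in |size| bytes.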
int FixedArrayLenFromSize(int size) {
  return (size - FixedArray::kHeaderSize) / kPointerSize;
}

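// Consumes |padding_size| bytes of the target space with FixedArrays of at
// most |object_size| bytes each. A remainder too small to hold a FixedArray
// is covered with a filler object instead. The returned handles keep the
// padding alive.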
std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
                                              PretenureFlag tenure,
                                              int object_size) {
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  int allocate_memory;
  int length;
  int free_memory = padding_size;
  if (tenure == i::TENURED) {
    heap->old_space()->EmptyAllocationInfo();
    int overall_free_memory = static_cast<int>(heap->old_space()->Available());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  } else {
    heap->new_space()->DisableInlineAllocationSteps();
    int overall_free_memory =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  }
  while (free_memory > 0) {
    if (free_memory > object_size) {
      allocate_memory = object_size;
      length = FixedArrayLenFromSize(allocate_memory);
    } else {
      allocate_memory = free_memory;
      length = FixedArrayLenFromSize(allocate_memory);
      if (length <= 0) {
        // Not enough room to create another fixed array. Let's create a filler.
        if (free_memory > (2 * kPointerSize)) {
          heap->CreateFillerObjectAt(
              *heap->old_space()->allocation_top_address(), free_memory,
              ClearRecordedSlots::kNo);
        }
        break;
      }
    }
    handles.push_back(isolate->factory()->NewFixedArray(length, tenure));
    CHECK((tenure == NOT_TENURED && heap->InNewSpace(*handles.back())) ||
          (tenure == TENURED && heap->InOldSpace(*handles.back())));
    free_memory -= allocate_memory;
  }
  return handles;
}

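// Allocates padding in |space| so that exactly |extra_bytes| remain free in
// the current linear allocation area.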
void AllocateAllButNBytes(v8::internal::NewSpace* space, int extra_bytes,
                          std::vector<Handle<FixedArray>>* out_handles) {
  space->DisableInlineAllocationSteps();
  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
                                         *space->allocation_top_address());
  CHECK(space_remaining >= extra_bytes);
  int new_linear_size = space_remaining - extra_bytes;
  if (new_linear_size == 0) return;
  std::vector<Handle<FixedArray>> handles =
      heap::CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED);
  if (out_handles != nullptr)
    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
}

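// Fills up the remainder of the current new-space page with padding.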
void FillCurrentPage(v8::internal::NewSpace* space,
                     std::vector<Handle<FixedArray>>* out_handles) {
  heap::AllocateAllButNBytes(space, 0, out_handles);
}

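// Fills the rest of the current page; returns false if the page had no free
// space left to begin with.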
bool FillUpOnePage(v8::internal::NewSpace* space,
                   std::vector<Handle<FixedArray>>* out_handles) {
  space->DisableInlineAllocationSteps();
  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
                                         *space->allocation_top_address());
  if (space_remaining == 0) return false;
  std::vector<Handle<FixedArray>> handles =
      heap::CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);
  if (out_handles != nullptr)
    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
  return true;
}

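// Fills the new space completely: pads the current page, then keeps filling
// and adding fresh pages until none can be added.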
void SimulateFullSpace(v8::internal::NewSpace* space,
                       std::vector<Handle<FixedArray>>* out_handles) {
  heap::FillCurrentPage(space, out_handles);
  while (heap::FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
  }
}

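// Ensures sweeping has finished and incremental marking is running; if
// |force_completion| is set, steps the marker in 1 MB increments (finalizing
// when the weak closure can be over-approximated) until marking is complete.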
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking();
  }
  CHECK(marking->IsMarking());
  if (!force_completion) return;

  while (!marking->IsComplete()) {
    marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
}

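// Makes a paged space appear full by closing its linear allocation area and
// resetting its free list and allocation statistics, without allocating any
// objects.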
void SimulateFullSpace(v8::internal::PagedSpace* space) {
  space->EmptyAllocationInfo();
  space->ResetFreeList();
  space->ClearStats();
}

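// Prevents currently free memory from being reused: closes the linear
// allocation area and marks every page of the space as never-allocate.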
void AbandonCurrentlyFreeMemory(PagedSpace* space) {
  space->EmptyAllocationInfo();
  for (Page* page : *space) {
    page->MarkNeverAllocateForTesting();
  }
}

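// Collects garbage in |space| and blocks until any concurrent sweeping has
// completed.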
void GcAndSweep(Heap* heap, AllocationSpace space) {
  heap->CollectGarbage(space);
  if (heap->mark_compact_collector()->sweeping_in_progress()) {
    heap->mark_compact_collector()->EnsureSweepingCompleted();
  }
}

}  // namespace heap
}  // namespace internal
}  // namespace v8