// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef HEAP_UTILS_H_
#define HEAP_UTILS_H_

#include "src/factory.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"


namespace v8 {
namespace internal {

18static int LenFromSize(int size) {
19 return (size - FixedArray::kHeaderSize) / kPointerSize;
20}
21
22
23static inline std::vector<Handle<FixedArray>> CreatePadding(
24 Heap* heap, int padding_size, PretenureFlag tenure,
25 int object_size = Page::kMaxRegularHeapObjectSize) {
26 std::vector<Handle<FixedArray>> handles;
27 Isolate* isolate = heap->isolate();
28 int allocate_memory;
29 int length;
30 int free_memory = padding_size;
31 if (tenure == i::TENURED) {
32 heap->old_space()->EmptyAllocationInfo();
33 int overall_free_memory = static_cast<int>(heap->old_space()->Available());
34 CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
35 } else {
36 heap->new_space()->DisableInlineAllocationSteps();
37 int overall_free_memory =
38 static_cast<int>(*heap->new_space()->allocation_limit_address() -
39 *heap->new_space()->allocation_top_address());
40 CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
41 }
42 while (free_memory > 0) {
43 if (free_memory > object_size) {
44 allocate_memory = object_size;
45 length = LenFromSize(allocate_memory);
46 } else {
47 allocate_memory = free_memory;
48 length = LenFromSize(allocate_memory);
49 if (length <= 0) {
50 // Not enough room to create another fixed array. Let's create a filler.
51 heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(),
Ben Murdochda12d292016-06-02 14:46:10 +010052 free_memory, ClearRecordedSlots::kNo);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000053 break;
54 }
55 }
56 handles.push_back(isolate->factory()->NewFixedArray(length, tenure));
57 CHECK((tenure == NOT_TENURED && heap->InNewSpace(*handles.back())) ||
58 (tenure == TENURED && heap->InOldSpace(*handles.back())));
59 free_memory -= allocate_memory;
60 }
61 return handles;
62}
63
64
65// Helper function that simulates a full new-space in the heap.
66static inline bool FillUpOnePage(v8::internal::NewSpace* space) {
67 space->DisableInlineAllocationSteps();
68 int space_remaining = static_cast<int>(*space->allocation_limit_address() -
69 *space->allocation_top_address());
70 if (space_remaining == 0) return false;
71 CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);
72 return true;
73}
74
75
76// Helper function that simulates a fill new-space in the heap.
77static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
78 int extra_bytes) {
79 space->DisableInlineAllocationSteps();
80 int space_remaining = static_cast<int>(*space->allocation_limit_address() -
81 *space->allocation_top_address());
82 CHECK(space_remaining >= extra_bytes);
83 int new_linear_size = space_remaining - extra_bytes;
84 if (new_linear_size == 0) return;
85 CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED);
86}
87
88
89static inline void FillCurrentPage(v8::internal::NewSpace* space) {
90 AllocateAllButNBytes(space, 0);
91}
92
93
94static inline void SimulateFullSpace(v8::internal::NewSpace* space) {
95 FillCurrentPage(space);
96 while (FillUpOnePage(space)) {
97 }
98}
99
100
101// Helper function that simulates a full old-space in the heap.
102static inline void SimulateFullSpace(v8::internal::PagedSpace* space) {
103 space->EmptyAllocationInfo();
104 space->ResetFreeList();
105 space->ClearStats();
106}
107
108
109// Helper function that simulates many incremental marking steps until
110// marking is completed.
111static inline void SimulateIncrementalMarking(i::Heap* heap,
112 bool force_completion = true) {
113 i::MarkCompactCollector* collector = heap->mark_compact_collector();
114 i::IncrementalMarking* marking = heap->incremental_marking();
115 if (collector->sweeping_in_progress()) {
116 collector->EnsureSweepingCompleted();
117 }
118 CHECK(marking->IsMarking() || marking->IsStopped());
119 if (marking->IsStopped()) {
120 heap->StartIncrementalMarking();
121 }
122 CHECK(marking->IsMarking());
123 if (!force_completion) return;
124
125 while (!marking->IsComplete()) {
126 marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD);
127 if (marking->IsReadyToOverApproximateWeakClosure()) {
128 marking->FinalizeIncrementally();
129 }
130 }
131 CHECK(marking->IsComplete());
132}

}  // namespace internal
}  // namespace v8

#endif  // HEAP_UTILS_H_