// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

5#include "src/profiler/sampling-heap-profiler.h"
6
7#include <stdint.h>
8#include <memory>
9#include "src/api.h"
10#include "src/base/utils/random-number-generator.h"
11#include "src/frames-inl.h"
12#include "src/heap/heap.h"
13#include "src/isolate.h"
14#include "src/profiler/strings-storage.h"
15
16namespace v8 {
17namespace internal {
18
19// We sample with a Poisson process, with constant average sampling interval.
20// This follows the exponential probability distribution with parameter
21// λ = 1/rate where rate is the average number of bytes between samples.
22//
23// Let u be a uniformly distributed random number between 0 and 1, then
24// next_sample = (- ln u) / λ
25intptr_t SamplingAllocationObserver::GetNextSampleInterval(uint64_t rate) {
26 if (FLAG_sampling_heap_profiler_suppress_randomness) {
27 return static_cast<intptr_t>(rate);
28 }
29 double u = random_->NextDouble();
30 double next = (-std::log(u)) * rate;
31 return next < kPointerSize
32 ? kPointerSize
33 : (next > INT_MAX ? INT_MAX : static_cast<intptr_t>(next));
34}
35
36// Samples were collected according to a poisson process. Since we have not
37// recorded all allocations, we must approximate the shape of the underlying
38// space of allocations based on the samples we have collected. Given that
39// we sample at rate R, the probability that an allocation of size S will be
40// sampled is 1-exp(-S/R). This function uses the above probability to
41// approximate the true number of allocations with size *size* given that
42// *count* samples were observed.
43v8::AllocationProfile::Allocation SamplingHeapProfiler::ScaleSample(
44 size_t size, unsigned int count) {
45 double scale = 1.0 / (1.0 - std::exp(-static_cast<double>(size) / rate_));
46 // Round count instead of truncating.
47 return {size, static_cast<unsigned int>(count * scale + 0.5)};
48}
49
// Installs allocation observers on every heap space: one dedicated to new
// space and one shared by all other spaces. |rate| is the average number of
// bytes between samples; |stack_depth| caps how many JS frames AddStack()
// records per sample.
SamplingHeapProfiler::SamplingHeapProfiler(Heap* heap, StringsStorage* names,
                                           uint64_t rate, int stack_depth)
    : isolate_(heap->isolate()),
      heap_(heap),
      // Both observers use the same rate and the isolate's shared RNG; new
      // space simply gets its own observer instance.
      new_space_observer_(new SamplingAllocationObserver(
          heap_, static_cast<intptr_t>(rate), rate, this,
          heap->isolate()->random_number_generator())),
      other_spaces_observer_(new SamplingAllocationObserver(
          heap_, static_cast<intptr_t>(rate), rate, this,
          heap->isolate()->random_number_generator())),
      names_(names),
      // Synthetic root of the allocation call tree; has no script.
      profile_root_("(root)", v8::UnboundScript::kNoScriptId, 0),
      samples_(),
      stack_depth_(stack_depth),
      rate_(rate) {
  // rate == 0 would divide by zero in the sampling-interval and scaling math.
  CHECK_GT(rate_, 0);
  heap->new_space()->AddAllocationObserver(new_space_observer_.get());
  AllSpaces spaces(heap);
  for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
    if (space != heap->new_space()) {
      space->AddAllocationObserver(other_spaces_observer_.get());
    }
  }
}
74
75
SamplingHeapProfiler::~SamplingHeapProfiler() {
  // Unregister the observers installed by the constructor, mirroring its
  // new-space / other-spaces split.
  heap_->new_space()->RemoveAllocationObserver(new_space_observer_.get());
  AllSpaces spaces(heap_);
  for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
    if (space != heap_->new_space()) {
      space->RemoveAllocationObserver(other_spaces_observer_.get());
    }
  }

  // Any samples left in the set were never reclaimed by OnWeakCallback;
  // free them now. Swapping with an empty set also releases the set's
  // node storage (clear() alone would keep it).
  for (auto sample : samples_) {
    delete sample;
  }
  std::set<Sample*> empty;
  samples_.swap(empty);
}
91
92
// Records one sampled allocation of |size| bytes about to be placed at
// |soon_object|: attributes it to the current JS stack in the profile tree
// and tracks the object weakly so the count can be undone when it dies.
void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
  DisallowHeapAllocation no_allocation;

  HandleScope scope(isolate_);
  HeapObject* heap_object = HeapObject::FromAddress(soon_object);
  Handle<Object> obj(heap_object, isolate_);

  // Mark the new block as FreeSpace to make sure the heap is iterable while we
  // are taking the sample.
  heap()->CreateFillerObjectAt(soon_object, static_cast<int>(size));

  Local<v8::Value> loc = v8::Utils::ToLocal(obj);

  AllocationNode* node = AddStack();
  node->allocations_[size]++;
  // The weak callback decrements the node's count and deletes the sample
  // when the sampled object is garbage collected.
  Sample* sample = new Sample(size, node, loc, this);
  samples_.insert(sample);
  sample->global.SetWeak(sample, OnWeakCallback, WeakCallbackType::kParameter);
}
112
113void SamplingHeapProfiler::OnWeakCallback(
114 const WeakCallbackInfo<Sample>& data) {
115 Sample* sample = data.GetParameter();
116 AllocationNode* node = sample->owner;
117 DCHECK(node->allocations_[sample->size] > 0);
118 node->allocations_[sample->size]--;
119 sample->profiler->samples_.erase(sample);
120 delete sample;
121}
122
123SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::FindOrAddChildNode(
124 AllocationNode* parent, const char* name, int script_id,
125 int start_position) {
126 for (AllocationNode* child : parent->children_) {
127 if (child->script_id_ == script_id &&
128 child->script_position_ == start_position &&
129 strcmp(child->name_, name) == 0) {
130 return child;
131 }
132 }
133 AllocationNode* child = new AllocationNode(name, script_id, start_position);
134 parent->children_.push_back(child);
135 return child;
136}
137
// Walks the current JS stack, capped at stack_depth_ frames, and returns the
// profile-tree node for that call path, creating nodes as needed via
// FindOrAddChildNode. With no JS frames on the stack, the allocation is
// attributed to a synthetic node named after the current VM state.
SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::AddStack() {
  AllocationNode* node = &profile_root_;

  std::vector<SharedFunctionInfo*> stack;
  StackTraceFrameIterator it(isolate_);
  int frames_captured = 0;
  while (!it.done() && frames_captured < stack_depth_) {
    JavaScriptFrame* frame = it.frame();
    SharedFunctionInfo* shared = frame->function()->shared();
    stack.push_back(shared);

    frames_captured++;
    it.Advance();
  }

  if (frames_captured == 0) {
    const char* name = nullptr;
    switch (isolate_->current_vm_state()) {
      case GC:
        name = "(GC)";
        break;
      case COMPILER:
        name = "(COMPILER)";
        break;
      case OTHER:
        name = "(V8 API)";
        break;
      case EXTERNAL:
        name = "(EXTERNAL)";
        break;
      case IDLE:
        name = "(IDLE)";
        break;
      case JS:
        name = "(JS)";
        break;
    }
    // Synthetic nodes hang directly off the root and carry no script info.
    return FindOrAddChildNode(node, name, v8::UnboundScript::kNoScriptId, 0);
  }

  // We need to process the stack in reverse order as the top of the stack is
  // the first element in the list.
  for (auto it = stack.rbegin(); it != stack.rend(); ++it) {
    SharedFunctionInfo* shared = *it;
    const char* name = this->names()->GetFunctionName(shared->DebugName());
    int script_id = v8::UnboundScript::kNoScriptId;
    // shared->script() may not be a Script (e.g. undefined); only then is
    // there an id to record.
    if (shared->script()->IsScript()) {
      Script* script = Script::cast(shared->script());
      script_id = script->id();
    }
    node = FindOrAddChildNode(node, name, script_id, shared->start_position());
  }
  return node;
}
192
193v8::AllocationProfile::Node* SamplingHeapProfiler::TranslateAllocationNode(
194 AllocationProfile* profile, SamplingHeapProfiler::AllocationNode* node,
195 const std::map<int, Script*>& scripts) {
196 Local<v8::String> script_name =
197 ToApiHandle<v8::String>(isolate_->factory()->InternalizeUtf8String(""));
198 int line = v8::AllocationProfile::kNoLineNumberInfo;
199 int column = v8::AllocationProfile::kNoColumnNumberInfo;
200 std::vector<v8::AllocationProfile::Allocation> allocations;
201 allocations.reserve(node->allocations_.size());
202 if (node->script_id_ != v8::UnboundScript::kNoScriptId) {
203 // Cannot use std::map<T>::at because it is not available on android.
204 auto non_const_scripts = const_cast<std::map<int, Script*>&>(scripts);
205 Script* script = non_const_scripts[node->script_id_];
206 if (script->name()->IsName()) {
207 Name* name = Name::cast(script->name());
208 script_name = ToApiHandle<v8::String>(
209 isolate_->factory()->InternalizeUtf8String(names_->GetName(name)));
210 }
211 Handle<Script> script_handle(script);
212
213 line = 1 + Script::GetLineNumber(script_handle, node->script_position_);
214 column = 1 + Script::GetColumnNumber(script_handle, node->script_position_);
215 for (auto alloc : node->allocations_) {
216 allocations.push_back(ScaleSample(alloc.first, alloc.second));
217 }
218 }
219
220 profile->nodes().push_back(v8::AllocationProfile::Node(
221 {ToApiHandle<v8::String>(
222 isolate_->factory()->InternalizeUtf8String(node->name_)),
223 script_name, node->script_id_, node->script_position_, line, column,
224 std::vector<v8::AllocationProfile::Node*>(), allocations}));
225 v8::AllocationProfile::Node* current = &profile->nodes().back();
226 size_t child_len = node->children_.size();
227 // The children vector may have nodes appended to it during translation
228 // because the translation may allocate strings on the JS heap that have
229 // the potential to be sampled. We cache the length of the vector before
230 // iteration so that nodes appended to the vector during iteration are
231 // not processed.
232 for (size_t i = 0; i < child_len; i++) {
233 current->children.push_back(
234 TranslateAllocationNode(profile, node->children_[i], scripts));
235 }
236 return current;
237}
238
239v8::AllocationProfile* SamplingHeapProfiler::GetAllocationProfile() {
240 // To resolve positions to line/column numbers, we will need to look up
241 // scripts. Build a map to allow fast mapping from script id to script.
242 std::map<int, Script*> scripts;
243 {
244 Script::Iterator iterator(isolate_);
245 Script* script;
246 while ((script = iterator.Next())) {
247 scripts[script->id()] = script;
248 }
249 }
250
251 auto profile = new v8::internal::AllocationProfile();
252
253 TranslateAllocationNode(profile, &profile_root_, scripts);
254
255 return profile;
256}
257
258
259} // namespace internal
260} // namespace v8