// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/profiler/sampling-heap-profiler.h"

#include <limits.h>
#include <stdint.h>

#include <cmath>
#include <memory>

#include "src/api.h"
#include "src/base/utils/random-number-generator.h"
#include "src/frames-inl.h"
#include "src/heap/heap.h"
#include "src/isolate.h"
#include "src/profiler/strings-storage.h"
15
16namespace v8 {
17namespace internal {
18
19// We sample with a Poisson process, with constant average sampling interval.
20// This follows the exponential probability distribution with parameter
21// λ = 1/rate where rate is the average number of bytes between samples.
22//
23// Let u be a uniformly distributed random number between 0 and 1, then
24// next_sample = (- ln u) / λ
25intptr_t SamplingAllocationObserver::GetNextSampleInterval(uint64_t rate) {
26 if (FLAG_sampling_heap_profiler_suppress_randomness) {
27 return static_cast<intptr_t>(rate);
28 }
29 double u = random_->NextDouble();
30 double next = (-std::log(u)) * rate;
31 return next < kPointerSize
32 ? kPointerSize
33 : (next > INT_MAX ? INT_MAX : static_cast<intptr_t>(next));
34}
35
36// Samples were collected according to a poisson process. Since we have not
37// recorded all allocations, we must approximate the shape of the underlying
38// space of allocations based on the samples we have collected. Given that
39// we sample at rate R, the probability that an allocation of size S will be
40// sampled is 1-exp(-S/R). This function uses the above probability to
41// approximate the true number of allocations with size *size* given that
42// *count* samples were observed.
43v8::AllocationProfile::Allocation SamplingHeapProfiler::ScaleSample(
44 size_t size, unsigned int count) {
45 double scale = 1.0 / (1.0 - std::exp(-static_cast<double>(size) / rate_));
46 // Round count instead of truncating.
47 return {size, static_cast<unsigned int>(count * scale + 0.5)};
48}
49
50SamplingHeapProfiler::SamplingHeapProfiler(Heap* heap, StringsStorage* names,
51 uint64_t rate, int stack_depth)
52 : isolate_(heap->isolate()),
53 heap_(heap),
54 new_space_observer_(new SamplingAllocationObserver(
55 heap_, static_cast<intptr_t>(rate), rate, this,
56 heap->isolate()->random_number_generator())),
57 other_spaces_observer_(new SamplingAllocationObserver(
58 heap_, static_cast<intptr_t>(rate), rate, this,
59 heap->isolate()->random_number_generator())),
60 names_(names),
61 profile_root_("(root)", v8::UnboundScript::kNoScriptId, 0),
62 samples_(),
63 stack_depth_(stack_depth),
64 rate_(rate) {
65 CHECK_GT(rate_, 0);
66 heap->new_space()->AddAllocationObserver(new_space_observer_.get());
67 AllSpaces spaces(heap);
68 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
69 if (space != heap->new_space()) {
70 space->AddAllocationObserver(other_spaces_observer_.get());
71 }
72 }
73}
74
75
76SamplingHeapProfiler::~SamplingHeapProfiler() {
77 heap_->new_space()->RemoveAllocationObserver(new_space_observer_.get());
78 AllSpaces spaces(heap_);
79 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
80 if (space != heap_->new_space()) {
81 space->RemoveAllocationObserver(other_spaces_observer_.get());
82 }
83 }
84
85 for (auto sample : samples_) {
86 delete sample;
87 }
88 std::set<Sample*> empty;
89 samples_.swap(empty);
90}
91
92
93void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
94 DisallowHeapAllocation no_allocation;
95
96 HandleScope scope(isolate_);
97 HeapObject* heap_object = HeapObject::FromAddress(soon_object);
98 Handle<Object> obj(heap_object, isolate_);
99
100 // Mark the new block as FreeSpace to make sure the heap is iterable while we
101 // are taking the sample.
Ben Murdochda12d292016-06-02 14:46:10 +0100102 heap()->CreateFillerObjectAt(soon_object, static_cast<int>(size),
103 ClearRecordedSlots::kNo);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100104
105 Local<v8::Value> loc = v8::Utils::ToLocal(obj);
106
107 AllocationNode* node = AddStack();
108 node->allocations_[size]++;
109 Sample* sample = new Sample(size, node, loc, this);
110 samples_.insert(sample);
111 sample->global.SetWeak(sample, OnWeakCallback, WeakCallbackType::kParameter);
112}
113
114void SamplingHeapProfiler::OnWeakCallback(
115 const WeakCallbackInfo<Sample>& data) {
116 Sample* sample = data.GetParameter();
117 AllocationNode* node = sample->owner;
118 DCHECK(node->allocations_[sample->size] > 0);
119 node->allocations_[sample->size]--;
120 sample->profiler->samples_.erase(sample);
121 delete sample;
122}
123
124SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::FindOrAddChildNode(
125 AllocationNode* parent, const char* name, int script_id,
126 int start_position) {
127 for (AllocationNode* child : parent->children_) {
128 if (child->script_id_ == script_id &&
129 child->script_position_ == start_position &&
130 strcmp(child->name_, name) == 0) {
131 return child;
132 }
133 }
134 AllocationNode* child = new AllocationNode(name, script_id, start_position);
135 parent->children_.push_back(child);
136 return child;
137}
138
139SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::AddStack() {
140 AllocationNode* node = &profile_root_;
141
142 std::vector<SharedFunctionInfo*> stack;
143 StackTraceFrameIterator it(isolate_);
144 int frames_captured = 0;
145 while (!it.done() && frames_captured < stack_depth_) {
146 JavaScriptFrame* frame = it.frame();
147 SharedFunctionInfo* shared = frame->function()->shared();
148 stack.push_back(shared);
149
150 frames_captured++;
151 it.Advance();
152 }
153
154 if (frames_captured == 0) {
155 const char* name = nullptr;
156 switch (isolate_->current_vm_state()) {
157 case GC:
158 name = "(GC)";
159 break;
160 case COMPILER:
161 name = "(COMPILER)";
162 break;
163 case OTHER:
164 name = "(V8 API)";
165 break;
166 case EXTERNAL:
167 name = "(EXTERNAL)";
168 break;
169 case IDLE:
170 name = "(IDLE)";
171 break;
172 case JS:
173 name = "(JS)";
174 break;
175 }
176 return FindOrAddChildNode(node, name, v8::UnboundScript::kNoScriptId, 0);
177 }
178
179 // We need to process the stack in reverse order as the top of the stack is
180 // the first element in the list.
181 for (auto it = stack.rbegin(); it != stack.rend(); ++it) {
182 SharedFunctionInfo* shared = *it;
183 const char* name = this->names()->GetFunctionName(shared->DebugName());
184 int script_id = v8::UnboundScript::kNoScriptId;
185 if (shared->script()->IsScript()) {
186 Script* script = Script::cast(shared->script());
187 script_id = script->id();
188 }
189 node = FindOrAddChildNode(node, name, script_id, shared->start_position());
190 }
191 return node;
192}
193
194v8::AllocationProfile::Node* SamplingHeapProfiler::TranslateAllocationNode(
195 AllocationProfile* profile, SamplingHeapProfiler::AllocationNode* node,
196 const std::map<int, Script*>& scripts) {
197 Local<v8::String> script_name =
198 ToApiHandle<v8::String>(isolate_->factory()->InternalizeUtf8String(""));
199 int line = v8::AllocationProfile::kNoLineNumberInfo;
200 int column = v8::AllocationProfile::kNoColumnNumberInfo;
201 std::vector<v8::AllocationProfile::Allocation> allocations;
202 allocations.reserve(node->allocations_.size());
Ben Murdochda12d292016-06-02 14:46:10 +0100203 if (node->script_id_ != v8::UnboundScript::kNoScriptId &&
204 scripts.find(node->script_id_) != scripts.end()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100205 // Cannot use std::map<T>::at because it is not available on android.
206 auto non_const_scripts = const_cast<std::map<int, Script*>&>(scripts);
207 Script* script = non_const_scripts[node->script_id_];
Ben Murdochda12d292016-06-02 14:46:10 +0100208 if (script) {
209 if (script->name()->IsName()) {
210 Name* name = Name::cast(script->name());
211 script_name = ToApiHandle<v8::String>(
212 isolate_->factory()->InternalizeUtf8String(names_->GetName(name)));
213 }
214 Handle<Script> script_handle(script);
215 line = 1 + Script::GetLineNumber(script_handle, node->script_position_);
216 column =
217 1 + Script::GetColumnNumber(script_handle, node->script_position_);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100218 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100219 for (auto alloc : node->allocations_) {
220 allocations.push_back(ScaleSample(alloc.first, alloc.second));
221 }
222 }
223
224 profile->nodes().push_back(v8::AllocationProfile::Node(
225 {ToApiHandle<v8::String>(
226 isolate_->factory()->InternalizeUtf8String(node->name_)),
227 script_name, node->script_id_, node->script_position_, line, column,
228 std::vector<v8::AllocationProfile::Node*>(), allocations}));
229 v8::AllocationProfile::Node* current = &profile->nodes().back();
230 size_t child_len = node->children_.size();
231 // The children vector may have nodes appended to it during translation
232 // because the translation may allocate strings on the JS heap that have
233 // the potential to be sampled. We cache the length of the vector before
234 // iteration so that nodes appended to the vector during iteration are
235 // not processed.
236 for (size_t i = 0; i < child_len; i++) {
237 current->children.push_back(
238 TranslateAllocationNode(profile, node->children_[i], scripts));
239 }
240 return current;
241}
242
243v8::AllocationProfile* SamplingHeapProfiler::GetAllocationProfile() {
244 // To resolve positions to line/column numbers, we will need to look up
245 // scripts. Build a map to allow fast mapping from script id to script.
246 std::map<int, Script*> scripts;
247 {
248 Script::Iterator iterator(isolate_);
249 Script* script;
250 while ((script = iterator.Next())) {
251 scripts[script->id()] = script;
252 }
253 }
254
255 auto profile = new v8::internal::AllocationProfile();
256
257 TranslateAllocationNode(profile, &profile_root_, scripts);
258
259 return profile;
260}
261
262
263} // namespace internal
264} // namespace v8