blob: 4815f8264fd0ca985c819d7cfee8b1b9423a2e08 [file] [log] [blame]
Ben Murdochb0fe1622011-05-05 13:52:32 +01001// Copyright 2009-2010 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "heap-profiler.h"
Steve Block3ce2e202009-11-05 08:53:23 +000031#include "frames-inl.h"
32#include "global-handles.h"
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010033#include "profile-generator.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000034#include "string-stream.h"
35
36namespace v8 {
37namespace internal {
38
39
40#ifdef ENABLE_LOGGING_AND_PROFILING
41namespace {
42
// Clusterizer is a set of helper functions for converting
// object references into clusters.  A cluster is the aggregation key
// used by the constructor/retainer profiles (see JSObjectsCluster).
class Clusterizer : public AllStatic {
 public:
  // Fine-grained clusterization: plain Object/Array instances get
  // per-instance clusters (see the two-argument overload below).
  static JSObjectsCluster Clusterize(HeapObject* obj) {
    return Clusterize(obj, true);
  }
  // Clusterizes |obj| and accounts its size/count under the matching
  // entry of |tree|.
  static void InsertIntoTree(JSObjectsClusterTree* tree,
                             HeapObject* obj, bool fine_grain);
  // Accounts a single reference to |cluster| in |tree|; size 0 because
  // only the reference count matters for retainer profiles.
  static void InsertReferenceIntoTree(JSObjectsClusterTree* tree,
                                      const JSObjectsCluster& cluster) {
    InsertIntoTree(tree, cluster, 0);
  }

 private:
  static JSObjectsCluster Clusterize(HeapObject* obj, bool fine_grain);
  // "Network" size: object size plus non-shared satellite arrays.
  static int CalculateNetworkSize(JSObject* obj);
  // JS objects are measured with their satellites; everything else by
  // its plain heap size.
  static int GetObjectSize(HeapObject* obj) {
    return obj->IsJSObject() ?
        CalculateNetworkSize(JSObject::cast(obj)) : obj->Size();
  }
  static void InsertIntoTree(JSObjectsClusterTree* tree,
                             const JSObjectsCluster& cluster, int size);
};
67
68
69JSObjectsCluster Clusterizer::Clusterize(HeapObject* obj, bool fine_grain) {
70 if (obj->IsJSObject()) {
71 JSObject* js_obj = JSObject::cast(obj);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -080072 String* constructor = GetConstructorNameForHeapProfile(
73 JSObject::cast(js_obj));
Steve Blocka7e24c12009-10-30 11:49:00 +000074 // Differentiate Object and Array instances.
Steve Block44f0eee2011-05-26 01:26:41 +010075 if (fine_grain && (constructor == HEAP->Object_symbol() ||
76 constructor == HEAP->Array_symbol())) {
Steve Blocka7e24c12009-10-30 11:49:00 +000077 return JSObjectsCluster(constructor, obj);
78 } else {
79 return JSObjectsCluster(constructor);
80 }
81 } else if (obj->IsString()) {
Steve Block44f0eee2011-05-26 01:26:41 +010082 return JSObjectsCluster(HEAP->String_symbol());
Steve Blockd0582a62009-12-15 09:54:21 +000083 } else if (obj->IsJSGlobalPropertyCell()) {
84 return JSObjectsCluster(JSObjectsCluster::GLOBAL_PROPERTY);
85 } else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) {
86 return JSObjectsCluster(JSObjectsCluster::CODE);
Steve Blocka7e24c12009-10-30 11:49:00 +000087 }
88 return JSObjectsCluster();
89}
90
91
92void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
93 HeapObject* obj, bool fine_grain) {
94 JSObjectsCluster cluster = Clusterize(obj, fine_grain);
95 if (cluster.is_null()) return;
96 InsertIntoTree(tree, cluster, GetObjectSize(obj));
97}
98
99
100void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
101 const JSObjectsCluster& cluster, int size) {
102 JSObjectsClusterTree::Locator loc;
103 tree->Insert(cluster, &loc);
104 NumberAndSizeInfo number_and_size = loc.value();
105 number_and_size.increment_number(1);
106 number_and_size.increment_bytes(size);
107 loc.set_value(number_and_size);
108}
109
110
111int Clusterizer::CalculateNetworkSize(JSObject* obj) {
112 int size = obj->Size();
113 // If 'properties' and 'elements' are non-empty (thus, non-shared),
114 // take their size into account.
Steve Block44f0eee2011-05-26 01:26:41 +0100115 if (obj->properties() != HEAP->empty_fixed_array()) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000116 size += obj->properties()->Size();
117 }
Steve Block44f0eee2011-05-26 01:26:41 +0100118 if (obj->elements() != HEAP->empty_fixed_array()) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000119 size += obj->elements()->Size();
120 }
Steve Blockd0582a62009-12-15 09:54:21 +0000121 // For functions, also account non-empty context and literals sizes.
122 if (obj->IsJSFunction()) {
123 JSFunction* f = JSFunction::cast(obj);
124 if (f->unchecked_context()->IsContext()) {
125 size += f->context()->Size();
126 }
127 if (f->literals()->length() != 0) {
128 size += f->literals()->Size();
129 }
130 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000131 return size;
132}
133
134
135// A helper class for recording back references.
136class ReferencesExtractor : public ObjectVisitor {
137 public:
138 ReferencesExtractor(const JSObjectsCluster& cluster,
139 RetainerHeapProfile* profile)
140 : cluster_(cluster),
141 profile_(profile),
142 inside_array_(false) {
143 }
144
145 void VisitPointer(Object** o) {
Steve Blockd0582a62009-12-15 09:54:21 +0000146 if ((*o)->IsFixedArray() && !inside_array_) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000147 // Traverse one level deep for data members that are fixed arrays.
148 // This covers the case of 'elements' and 'properties' of JSObject,
149 // and function contexts.
150 inside_array_ = true;
151 FixedArray::cast(*o)->Iterate(this);
152 inside_array_ = false;
Steve Blockd0582a62009-12-15 09:54:21 +0000153 } else if ((*o)->IsHeapObject()) {
154 profile_->StoreReference(cluster_, HeapObject::cast(*o));
Steve Blocka7e24c12009-10-30 11:49:00 +0000155 }
156 }
157
158 void VisitPointers(Object** start, Object** end) {
159 for (Object** p = start; p < end; p++) VisitPointer(p);
160 }
161
162 private:
163 const JSObjectsCluster& cluster_;
164 RetainerHeapProfile* profile_;
165 bool inside_array_;
166};
167
168
169// A printer interface implementation for the Retainers profile.
170class RetainersPrinter : public RetainerHeapProfile::Printer {
171 public:
172 void PrintRetainers(const JSObjectsCluster& cluster,
173 const StringStream& retainers) {
174 HeapStringAllocator allocator;
175 StringStream stream(&allocator);
176 cluster.Print(&stream);
Steve Block44f0eee2011-05-26 01:26:41 +0100177 LOG(ISOLATE,
178 HeapSampleJSRetainersEvent(
Steve Blocka7e24c12009-10-30 11:49:00 +0000179 *(stream.ToCString()), *(retainers.ToCString())));
180 }
181};
182
183
184// Visitor for printing a cluster tree.
185class ClusterTreePrinter BASE_EMBEDDED {
186 public:
187 explicit ClusterTreePrinter(StringStream* stream) : stream_(stream) {}
188 void Call(const JSObjectsCluster& cluster,
189 const NumberAndSizeInfo& number_and_size) {
190 Print(stream_, cluster, number_and_size);
191 }
192 static void Print(StringStream* stream,
193 const JSObjectsCluster& cluster,
194 const NumberAndSizeInfo& number_and_size);
195
196 private:
197 StringStream* stream_;
198};
199
200
201void ClusterTreePrinter::Print(StringStream* stream,
202 const JSObjectsCluster& cluster,
203 const NumberAndSizeInfo& number_and_size) {
204 stream->Put(',');
205 cluster.Print(stream);
206 stream->Add(";%d", number_and_size.number());
207}
208
209
210// Visitor for printing a retainer tree.
211class SimpleRetainerTreePrinter BASE_EMBEDDED {
212 public:
213 explicit SimpleRetainerTreePrinter(RetainerHeapProfile::Printer* printer)
214 : printer_(printer) {}
215 void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
216
217 private:
218 RetainerHeapProfile::Printer* printer_;
219};
220
221
222void SimpleRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
223 JSObjectsClusterTree* tree) {
224 HeapStringAllocator allocator;
225 StringStream stream(&allocator);
226 ClusterTreePrinter retainers_printer(&stream);
227 tree->ForEach(&retainers_printer);
228 printer_->PrintRetainers(cluster, stream);
229}
230
231
232// Visitor for aggregating references count of equivalent clusters.
233class RetainersAggregator BASE_EMBEDDED {
234 public:
235 RetainersAggregator(ClustersCoarser* coarser, JSObjectsClusterTree* dest_tree)
236 : coarser_(coarser), dest_tree_(dest_tree) {}
237 void Call(const JSObjectsCluster& cluster,
238 const NumberAndSizeInfo& number_and_size);
239
240 private:
241 ClustersCoarser* coarser_;
242 JSObjectsClusterTree* dest_tree_;
243};
244
245
246void RetainersAggregator::Call(const JSObjectsCluster& cluster,
247 const NumberAndSizeInfo& number_and_size) {
248 JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
249 if (eq.is_null()) eq = cluster;
250 JSObjectsClusterTree::Locator loc;
251 dest_tree_->Insert(eq, &loc);
252 NumberAndSizeInfo aggregated_number = loc.value();
253 aggregated_number.increment_number(number_and_size.number());
254 loc.set_value(aggregated_number);
255}
256
257
258// Visitor for printing retainers tree. Aggregates equivalent retainer clusters.
259class AggregatingRetainerTreePrinter BASE_EMBEDDED {
260 public:
261 AggregatingRetainerTreePrinter(ClustersCoarser* coarser,
262 RetainerHeapProfile::Printer* printer)
263 : coarser_(coarser), printer_(printer) {}
264 void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
265
266 private:
267 ClustersCoarser* coarser_;
268 RetainerHeapProfile::Printer* printer_;
269};
270
271
272void AggregatingRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
273 JSObjectsClusterTree* tree) {
274 if (!coarser_->GetCoarseEquivalent(cluster).is_null()) return;
275 JSObjectsClusterTree dest_tree_;
276 RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
277 tree->ForEach(&retainers_aggregator);
278 HeapStringAllocator allocator;
279 StringStream stream(&allocator);
280 ClusterTreePrinter retainers_printer(&stream);
281 dest_tree_.ForEach(&retainers_printer);
282 printer_->PrintRetainers(cluster, stream);
283}
284
Steve Block791712a2010-08-27 10:21:07 +0100285} // namespace
286
Steve Blocka7e24c12009-10-30 11:49:00 +0000287
288// A helper class for building a retainers tree, that aggregates
289// all equivalent clusters.
Steve Block791712a2010-08-27 10:21:07 +0100290class RetainerTreeAggregator {
Steve Blocka7e24c12009-10-30 11:49:00 +0000291 public:
292 explicit RetainerTreeAggregator(ClustersCoarser* coarser)
293 : coarser_(coarser) {}
294 void Process(JSObjectsRetainerTree* input_tree) {
295 input_tree->ForEach(this);
296 }
297 void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
298 JSObjectsRetainerTree& output_tree() { return output_tree_; }
299
300 private:
301 ClustersCoarser* coarser_;
302 JSObjectsRetainerTree output_tree_;
303};
304
305
306void RetainerTreeAggregator::Call(const JSObjectsCluster& cluster,
307 JSObjectsClusterTree* tree) {
308 JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
309 if (eq.is_null()) return;
310 JSObjectsRetainerTree::Locator loc;
311 if (output_tree_.Insert(eq, &loc)) {
312 loc.set_value(new JSObjectsClusterTree());
313 }
314 RetainersAggregator retainers_aggregator(coarser_, loc.value());
315 tree->ForEach(&retainers_aggregator);
316}
317
Steve Blocka7e24c12009-10-30 11:49:00 +0000318
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100319HeapProfiler::HeapProfiler()
320 : snapshots_(new HeapSnapshotsCollection()),
321 next_snapshot_uid_(1) {
322}
323
324
// The profiler owns its snapshots collection.
HeapProfiler::~HeapProfiler() {
  delete snapshots_;
}
328
Steve Block44f0eee2011-05-26 01:26:41 +0100329
// Drops all accumulated snapshots by replacing the collection wholesale.
void HeapProfiler::ResetSnapshots() {
  delete snapshots_;
  snapshots_ = new HeapSnapshotsCollection();
}
334
335
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100336#endif // ENABLE_LOGGING_AND_PROFILING
337
// Lazily installs a HeapProfiler on the current isolate.  Idempotent:
// a second call is a no-op.  Compiled out without profiling support.
void HeapProfiler::Setup() {
#ifdef ENABLE_LOGGING_AND_PROFILING
  Isolate* isolate = Isolate::Current();
  if (isolate->heap_profiler() == NULL) {
    isolate->set_heap_profiler(new HeapProfiler());
  }
#endif
}
346
347
// Destroys the current isolate's HeapProfiler (deleting NULL is safe if
// Setup was never called) and clears the isolate's pointer to it.
void HeapProfiler::TearDown() {
#ifdef ENABLE_LOGGING_AND_PROFILING
  Isolate* isolate = Isolate::Current();
  delete isolate->heap_profiler();
  isolate->set_heap_profiler(NULL);
#endif
}
355
356
357#ifdef ENABLE_LOGGING_AND_PROFILING
358
Ben Murdochb0fe1622011-05-05 13:52:32 +0100359HeapSnapshot* HeapProfiler::TakeSnapshot(const char* name,
360 int type,
361 v8::ActivityControl* control) {
Steve Block44f0eee2011-05-26 01:26:41 +0100362 ASSERT(Isolate::Current()->heap_profiler() != NULL);
363 return Isolate::Current()->heap_profiler()->TakeSnapshotImpl(name,
364 type,
365 control);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100366}
367
368
Ben Murdochb0fe1622011-05-05 13:52:32 +0100369HeapSnapshot* HeapProfiler::TakeSnapshot(String* name,
370 int type,
371 v8::ActivityControl* control) {
Steve Block44f0eee2011-05-26 01:26:41 +0100372 ASSERT(Isolate::Current()->heap_profiler() != NULL);
373 return Isolate::Current()->heap_profiler()->TakeSnapshotImpl(name,
374 type,
375 control);
376}
377
378
// Registers |callback| as the RetainedObjectInfo provider for wrapper
// objects of |class_id|.  The callback list is grown on demand and
// NULL-filled for ids that have no callback.
void HeapProfiler::DefineWrapperClass(
    uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback) {
  ASSERT(class_id != v8::HeapProfiler::kPersistentHandleNoClassId);
  if (wrapper_callbacks_.length() <= class_id) {
    // Extend the list with NULL entries up to and including class_id.
    wrapper_callbacks_.AddBlock(
        NULL, class_id - wrapper_callbacks_.length() + 1);
  }
  wrapper_callbacks_[class_id] = callback;
}
388
389
// Invokes the registered callback for |class_id| on |wrapper|, or
// returns NULL when no callback was ever registered for that id.
// Note: a registered-but-NULL slot would be called; callers rely on
// DefineWrapperClass having stored a valid callback.
v8::RetainedObjectInfo* HeapProfiler::ExecuteWrapperClassCallback(
    uint16_t class_id, Object** wrapper) {
  if (wrapper_callbacks_.length() <= class_id) return NULL;
  return wrapper_callbacks_[class_id](
      class_id, Utils::ToLocal(Handle<Object>(wrapper)));
}
396
397
Ben Murdochb0fe1622011-05-05 13:52:32 +0100398HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name,
399 int type,
400 v8::ActivityControl* control) {
Steve Block791712a2010-08-27 10:21:07 +0100401 HeapSnapshot::Type s_type = static_cast<HeapSnapshot::Type>(type);
402 HeapSnapshot* result =
403 snapshots_->NewSnapshot(s_type, name, next_snapshot_uid_++);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100404 bool generation_completed = true;
Steve Block791712a2010-08-27 10:21:07 +0100405 switch (s_type) {
406 case HeapSnapshot::kFull: {
Steve Block44f0eee2011-05-26 01:26:41 +0100407 HEAP->CollectAllGarbage(true);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100408 HeapSnapshotGenerator generator(result, control);
409 generation_completed = generator.GenerateSnapshot();
Steve Block791712a2010-08-27 10:21:07 +0100410 break;
411 }
412 case HeapSnapshot::kAggregated: {
Steve Block44f0eee2011-05-26 01:26:41 +0100413 HEAP->CollectAllGarbage(true);
Steve Block791712a2010-08-27 10:21:07 +0100414 AggregatedHeapSnapshot agg_snapshot;
415 AggregatedHeapSnapshotGenerator generator(&agg_snapshot);
416 generator.GenerateSnapshot();
417 generator.FillHeapSnapshot(result);
418 break;
419 }
420 default:
421 UNREACHABLE();
422 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100423 if (!generation_completed) {
424 delete result;
425 result = NULL;
426 }
427 snapshots_->SnapshotGenerationFinished(result);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100428 return result;
429}
430
431
Ben Murdochb0fe1622011-05-05 13:52:32 +0100432HeapSnapshot* HeapProfiler::TakeSnapshotImpl(String* name,
433 int type,
434 v8::ActivityControl* control) {
Steve Block44f0eee2011-05-26 01:26:41 +0100435 return TakeSnapshotImpl(snapshots_->names()->GetName(name), type, control);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100436}
437
438
439int HeapProfiler::GetSnapshotsCount() {
Steve Block44f0eee2011-05-26 01:26:41 +0100440 HeapProfiler* profiler = Isolate::Current()->heap_profiler();
441 ASSERT(profiler != NULL);
442 return profiler->snapshots_->snapshots()->length();
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100443}
444
445
446HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
Steve Block44f0eee2011-05-26 01:26:41 +0100447 HeapProfiler* profiler = Isolate::Current()->heap_profiler();
448 ASSERT(profiler != NULL);
449 return profiler->snapshots_->snapshots()->at(index);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100450}
451
452
453HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
Steve Block44f0eee2011-05-26 01:26:41 +0100454 HeapProfiler* profiler = Isolate::Current()->heap_profiler();
455 ASSERT(profiler != NULL);
456 return profiler->snapshots_->GetSnapshot(uid);
457}
458
459
460void HeapProfiler::DeleteAllSnapshots() {
461 HeapProfiler* profiler = Isolate::Current()->heap_profiler();
462 ASSERT(profiler != NULL);
463 profiler->ResetSnapshots();
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100464}
465
466
Ben Murdoch3bec4d22010-07-22 14:51:16 +0100467void HeapProfiler::ObjectMoveEvent(Address from, Address to) {
Steve Block44f0eee2011-05-26 01:26:41 +0100468 snapshots_->ObjectMoveEvent(from, to);
Ben Murdoch3bec4d22010-07-22 14:51:16 +0100469}
470
471
Steve Blocka7e24c12009-10-30 11:49:00 +0000472const JSObjectsClusterTreeConfig::Key JSObjectsClusterTreeConfig::kNoKey;
473const JSObjectsClusterTreeConfig::Value JSObjectsClusterTreeConfig::kNoValue;
474
475
// The zone scope releases all tree nodes allocated during profiling
// when the profile object goes out of scope.
ConstructorHeapProfile::ConstructorHeapProfile()
    : zscope_(DELETE_ON_EXIT) {
}
479
480
// Tree ForEach callback: logs one HeapSampleJSConstructorEvent record
// per cluster with its instance count and byte total.
void ConstructorHeapProfile::Call(const JSObjectsCluster& cluster,
                                  const NumberAndSizeInfo& number_and_size) {
  HeapStringAllocator allocator;
  StringStream stream(&allocator);
  cluster.Print(&stream);
  LOG(ISOLATE,
      HeapSampleJSConstructorEvent(*(stream.ToCString()),
                                   number_and_size.number(),
                                   number_and_size.bytes()));
}
491
492
// Accounts one heap object in the per-constructor tree.  Coarse-grain
// clusterization (fine_grain == false): all Object/Array instances
// share a single bucket each.
void ConstructorHeapProfile::CollectStats(HeapObject* obj) {
  Clusterizer::InsertIntoTree(&js_objects_info_tree_, obj, false);
}
496
497
// Emits the collected per-constructor statistics via Call() above.
void ConstructorHeapProfile::PrintStats() {
  js_objects_info_tree_.ForEach(this);
}
501
502
Steve Block3ce2e202009-11-05 08:53:23 +0000503static const char* GetConstructorName(const char* name) {
504 return name[0] != '\0' ? name : "(anonymous)";
505}
506
507
Steve Block791712a2010-08-27 10:21:07 +0100508const char* JSObjectsCluster::GetSpecialCaseName() const {
509 if (constructor_ == FromSpecialCase(ROOTS)) {
510 return "(roots)";
511 } else if (constructor_ == FromSpecialCase(GLOBAL_PROPERTY)) {
512 return "(global property)";
513 } else if (constructor_ == FromSpecialCase(CODE)) {
514 return "(code)";
515 } else if (constructor_ == FromSpecialCase(SELF)) {
516 return "(self)";
517 }
518 return NULL;
519}
520
521
Steve Blocka7e24c12009-10-30 11:49:00 +0000522void JSObjectsCluster::Print(StringStream* accumulator) const {
523 ASSERT(!is_null());
Steve Block791712a2010-08-27 10:21:07 +0100524 const char* special_case_name = GetSpecialCaseName();
525 if (special_case_name != NULL) {
526 accumulator->Add(special_case_name);
Steve Blocka7e24c12009-10-30 11:49:00 +0000527 } else {
528 SmartPointer<char> s_name(
529 constructor_->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
Steve Block3ce2e202009-11-05 08:53:23 +0000530 accumulator->Add("%s", GetConstructorName(*s_name));
Steve Blocka7e24c12009-10-30 11:49:00 +0000531 if (instance_ != NULL) {
532 accumulator->Add(":%p", static_cast<void*>(instance_));
533 }
534 }
535}
536
537
538void JSObjectsCluster::DebugPrint(StringStream* accumulator) const {
539 if (!is_null()) {
540 Print(accumulator);
541 } else {
542 accumulator->Add("(null cluster)");
543 }
544}
545
546
// Wraps a cluster with an (initially empty) list of its back references.
inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
    const JSObjectsCluster& cluster_)
    : cluster(cluster_), refs(kInitialBackrefsListCapacity) {
}
551
552
553inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
554 const ClustersCoarser::ClusterBackRefs& src)
555 : cluster(src.cluster), refs(src.refs.capacity()) {
556 refs.AddAll(src.refs);
557}
558
559
// Copy assignment with self-assignment guard; clears and refills the
// refs list instead of reallocating.
inline ClustersCoarser::ClusterBackRefs&
    ClustersCoarser::ClusterBackRefs::operator=(
        const ClustersCoarser::ClusterBackRefs& src) {
  if (this == &src) return *this;
  cluster = src.cluster;
  refs.Clear();
  refs.AddAll(src.refs);
  return *this;
}
569
570
571inline int ClustersCoarser::ClusterBackRefs::Compare(
572 const ClustersCoarser::ClusterBackRefs& a,
573 const ClustersCoarser::ClusterBackRefs& b) {
574 int cmp = JSObjectsCluster::CompareConstructors(a.cluster, b.cluster);
575 if (cmp != 0) return cmp;
576 if (a.refs.length() < b.refs.length()) return -1;
577 if (a.refs.length() > b.refs.length()) return 1;
578 for (int i = 0; i < a.refs.length(); ++i) {
579 int cmp = JSObjectsCluster::Compare(a.refs[i], b.refs[i]);
580 if (cmp != 0) return cmp;
581 }
582 return 0;
583}
584
585
// current_pair_/current_set_/self_ are transient cursors used only
// while a Call() traversal is in flight; they start out cleared.
ClustersCoarser::ClustersCoarser()
    : zscope_(DELETE_ON_EXIT),
      sim_list_(ClustersCoarser::kInitialSimilarityListCapacity),
      current_pair_(NULL),
      current_set_(NULL),
      self_(NULL) {
}
593
594
// Retainer tree callback: collects the back references of |cluster|
// into a ClusterBackRefs record and appends it to the similarity list.
// Sets up the transient cursors for the nested ForEach, which invokes
// the NumberAndSizeInfo overload of Call() below.
void ClustersCoarser::Call(const JSObjectsCluster& cluster,
                           JSObjectsClusterTree* tree) {
  if (!cluster.can_be_coarsed()) return;
  ClusterBackRefs pair(cluster);
  ASSERT(current_pair_ == NULL);
  current_pair_ = &pair;
  // Used to de-duplicate coarse equivalents among the back references.
  current_set_ = new JSObjectsRetainerTree();
  self_ = &cluster;
  tree->ForEach(this);
  sim_list_.Add(pair);
  // Tear down the cursors so the next Call() starts clean.
  current_pair_ = NULL;
  current_set_ = NULL;
  self_ = NULL;
}
609
610
// Nested callback (one back reference of the cluster being processed):
// records the reference into current_pair_, folding self-references
// into a SELF marker and coalescing references that share a coarse
// equivalent.
void ClustersCoarser::Call(const JSObjectsCluster& cluster,
                           const NumberAndSizeInfo& number_and_size) {
  ASSERT(current_pair_ != NULL);
  ASSERT(current_set_ != NULL);
  ASSERT(self_ != NULL);
  JSObjectsRetainerTree::Locator loc;
  if (JSObjectsCluster::Compare(*self_, cluster) == 0) {
    // A cluster retaining itself is recorded as SELF, not by name.
    current_pair_->refs.Add(JSObjectsCluster(JSObjectsCluster::SELF));
    return;
  }
  JSObjectsCluster eq = GetCoarseEquivalent(cluster);
  if (!eq.is_null()) {
    // Record each coarse equivalent at most once per cluster.
    if (current_set_->Find(eq, &loc)) return;
    current_pair_->refs.Add(eq);
    current_set_->Insert(eq, &loc);
  } else {
    current_pair_->refs.Add(cluster);
  }
}
630
631
632void ClustersCoarser::Process(JSObjectsRetainerTree* tree) {
633 int last_eq_clusters = -1;
634 for (int i = 0; i < kMaxPassesCount; ++i) {
635 sim_list_.Clear();
636 const int curr_eq_clusters = DoProcess(tree);
637 // If no new cluster equivalents discovered, abort processing.
638 if (last_eq_clusters == curr_eq_clusters) break;
639 last_eq_clusters = curr_eq_clusters;
640 }
641}
642
643
// One coarsening pass: gather back-reference records for every cluster,
// sort each record's refs, sort the records themselves, then group
// adjacent equal records into equivalence classes.  Returns the number
// of equivalents found.
int ClustersCoarser::DoProcess(JSObjectsRetainerTree* tree) {
  tree->ForEach(this);
  sim_list_.Iterate(ClusterBackRefs::SortRefsIterator);
  sim_list_.Sort(ClusterBackRefsCmp);
  return FillEqualityTree();
}
650
651
652JSObjectsCluster ClustersCoarser::GetCoarseEquivalent(
653 const JSObjectsCluster& cluster) {
654 if (!cluster.can_be_coarsed()) return JSObjectsCluster();
655 EqualityTree::Locator loc;
656 return eq_tree_.Find(cluster, &loc) ? loc.value() : JSObjectsCluster();
657}
658
659
660bool ClustersCoarser::HasAnEquivalent(const JSObjectsCluster& cluster) {
661 // Return true for coarsible clusters that have a non-identical equivalent.
662 if (!cluster.can_be_coarsed()) return false;
663 JSObjectsCluster eq = GetCoarseEquivalent(cluster);
664 return !eq.is_null() && JSObjectsCluster::Compare(cluster, eq) != 0;
665}
666
667
// Scans the sorted similarity list and records equivalences in
// eq_tree_: within each run of records that Compare() as equal, every
// cluster (including, once, the run's representative) is mapped to the
// representative — the first cluster of the run.  Returns how many
// non-representative equivalences were added.
int ClustersCoarser::FillEqualityTree() {
  int eq_clusters_count = 0;
  int eq_to = 0;           // Index of the current run's representative.
  bool first_added = false;
  for (int i = 1; i < sim_list_.length(); ++i) {
    if (ClusterBackRefs::Compare(sim_list_[i], sim_list_[eq_to]) == 0) {
      EqualityTree::Locator loc;
      if (!first_added) {
        // Add self-equivalence, if we have more than one item in this
        // equivalence class.
        eq_tree_.Insert(sim_list_[eq_to].cluster, &loc);
        loc.set_value(sim_list_[eq_to].cluster);
        first_added = true;
      }
      eq_tree_.Insert(sim_list_[i].cluster, &loc);
      loc.set_value(sim_list_[eq_to].cluster);
      ++eq_clusters_count;
    } else {
      // A new run starts at position i.
      eq_to = i;
      first_added = false;
    }
  }
  return eq_clusters_count;
}
692
693
// Out-of-line storage for splay-tree config sentinels.
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoKey;
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoValue;
const JSObjectsRetainerTreeConfig::Key JSObjectsRetainerTreeConfig::kNoKey;
const JSObjectsRetainerTreeConfig::Value JSObjectsRetainerTreeConfig::kNoValue =
    NULL;
699
700
// Seeds the profile with references from the strong GC roots: every
// object directly reachable from a root is recorded as retained by the
// synthetic ROOTS cluster.
RetainerHeapProfile::RetainerHeapProfile()
    : zscope_(DELETE_ON_EXIT),
      aggregator_(NULL) {
  JSObjectsCluster roots(JSObjectsCluster::ROOTS);
  ReferencesExtractor extractor(roots, this);
  HEAP->IterateRoots(&extractor, VISIT_ONLY_STRONG);
}
708
709
Steve Block791712a2010-08-27 10:21:07 +0100710RetainerHeapProfile::~RetainerHeapProfile() {
711 delete aggregator_;
712}
713
714
Steve Blocka7e24c12009-10-30 11:49:00 +0000715void RetainerHeapProfile::StoreReference(const JSObjectsCluster& cluster,
716 HeapObject* ref) {
717 JSObjectsCluster ref_cluster = Clusterizer::Clusterize(ref);
Steve Blockd0582a62009-12-15 09:54:21 +0000718 if (ref_cluster.is_null()) return;
Steve Blocka7e24c12009-10-30 11:49:00 +0000719 JSObjectsRetainerTree::Locator ref_loc;
720 if (retainers_tree_.Insert(ref_cluster, &ref_loc)) {
721 ref_loc.set_value(new JSObjectsClusterTree());
722 }
723 JSObjectsClusterTree* referenced_by = ref_loc.value();
724 Clusterizer::InsertReferenceIntoTree(referenced_by, cluster);
725}
726
727
728void RetainerHeapProfile::CollectStats(HeapObject* obj) {
Steve Blockd0582a62009-12-15 09:54:21 +0000729 const JSObjectsCluster cluster = Clusterizer::Clusterize(obj);
730 if (cluster.is_null()) return;
731 ReferencesExtractor extractor(cluster, this);
732 obj->Iterate(&extractor);
Steve Blocka7e24c12009-10-30 11:49:00 +0000733}
734
735
Steve Block791712a2010-08-27 10:21:07 +0100736void RetainerHeapProfile::CoarseAndAggregate() {
737 coarser_.Process(&retainers_tree_);
738 ASSERT(aggregator_ == NULL);
739 aggregator_ = new RetainerTreeAggregator(&coarser_);
740 aggregator_->Process(&retainers_tree_);
741}
742
743
Steve Blocka7e24c12009-10-30 11:49:00 +0000744void RetainerHeapProfile::DebugPrintStats(
745 RetainerHeapProfile::Printer* printer) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000746 // Print clusters that have no equivalents, aggregating their retainers.
747 AggregatingRetainerTreePrinter agg_printer(&coarser_, printer);
748 retainers_tree_.ForEach(&agg_printer);
Steve Block791712a2010-08-27 10:21:07 +0100749 // Print clusters that have equivalents.
Steve Blocka7e24c12009-10-30 11:49:00 +0000750 SimpleRetainerTreePrinter s_printer(printer);
Steve Block791712a2010-08-27 10:21:07 +0100751 aggregator_->output_tree().ForEach(&s_printer);
Steve Blocka7e24c12009-10-30 11:49:00 +0000752}
753
754
755void RetainerHeapProfile::PrintStats() {
756 RetainersPrinter printer;
757 DebugPrintStats(&printer);
758}
759
760
761//
762// HeapProfiler class implementation.
763//
Steve Block3ce2e202009-11-05 08:53:23 +0000764static void StackWeakReferenceCallback(Persistent<Value> object,
765 void* trace) {
766 DeleteArray(static_cast<Address*>(trace));
767 object.Dispose();
768}
769
770
// Weak-root visitor: logs a HeapSampleJSProducerEvent pairing a JS
// object's constructor name with the stack trace recorded when the
// object was allocated.  Non-JS objects are skipped.
static void PrintProducerStackTrace(Object* obj, void* trace) {
  if (!obj->IsJSObject()) return;
  String* constructor = GetConstructorNameForHeapProfile(JSObject::cast(obj));
  SmartPointer<char> s_name(
      constructor->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
  LOG(ISOLATE,
      HeapSampleJSProducerEvent(GetConstructorName(*s_name),
                                reinterpret_cast<Address*>(trace)));
}
780
781
// Writes a complete heap sample to the log: overall heap stats,
// per-instance-type histogram, constructor and retainer profiles, and
// producer stack traces, bracketed by begin/end events.
void HeapProfiler::WriteSample() {
  Isolate* isolate = Isolate::Current();
  LOG(isolate, HeapSampleBeginEvent("Heap", "allocated"));
  LOG(isolate,
      HeapSampleStats(
          "Heap", "allocated", HEAP->CommittedMemory(), HEAP->SizeOfObjects()));

  // Build the aggregated snapshot (histogram + both profiles).
  AggregatedHeapSnapshot snapshot;
  AggregatedHeapSnapshotGenerator generator(&snapshot);
  generator.GenerateSnapshot();

  // Log one item per non-string instance type plus the lumped string
  // bucket; empty types are omitted.
  HistogramInfo* info = snapshot.info();
  for (int i = FIRST_NONSTRING_TYPE;
       i <= AggregatedHeapSnapshotGenerator::kAllStringsType;
       ++i) {
    if (info[i].bytes() > 0) {
      LOG(isolate,
          HeapSampleItemEvent(info[i].name(), info[i].number(),
                              info[i].bytes()));
    }
  }

  snapshot.js_cons_profile()->PrintStats();
  snapshot.js_retainer_profile()->PrintStats();

  // Dump producer stack traces attached to weak global handles.
  isolate->global_handles()->IterateWeakRoots(PrintProducerStackTrace,
                                              StackWeakReferenceCallback);

  LOG(isolate, HeapSampleEndEvent("Heap", "allocated"));
}
812
813
Steve Block791712a2010-08-27 10:21:07 +0100814AggregatedHeapSnapshot::AggregatedHeapSnapshot()
815 : info_(NewArray<HistogramInfo>(
816 AggregatedHeapSnapshotGenerator::kAllStringsType + 1)) {
817#define DEF_TYPE_NAME(name) info_[name].set_name(#name);
818 INSTANCE_TYPE_LIST(DEF_TYPE_NAME);
819#undef DEF_TYPE_NAME
820 info_[AggregatedHeapSnapshotGenerator::kAllStringsType].set_name(
821 "STRING_TYPE");
822}
823
824
// info_ was allocated with NewArray in the constructor.
AggregatedHeapSnapshot::~AggregatedHeapSnapshot() {
  DeleteArray(info_);
}
828
829
// The generator fills |agg_snapshot| (not owned) during
// GenerateSnapshot().
AggregatedHeapSnapshotGenerator::AggregatedHeapSnapshotGenerator(
    AggregatedHeapSnapshot* agg_snapshot)
    : agg_snapshot_(agg_snapshot) {
}
834
835
// Sums the per-string-type histogram slots into the synthetic
// kAllStringsType slot.  The individual string slots keep their values.
void AggregatedHeapSnapshotGenerator::CalculateStringsStats() {
  HistogramInfo* info = agg_snapshot_->info();
  HistogramInfo& strings = info[kAllStringsType];
  // Lump all the string types together.
#define INCREMENT_SIZE(type, size, name, camel_name)   \
  strings.increment_number(info[type].number());       \
  strings.increment_bytes(info[type].bytes());
  STRING_TYPE_LIST(INCREMENT_SIZE);
#undef INCREMENT_SIZE
}
846
847
848void AggregatedHeapSnapshotGenerator::CollectStats(HeapObject* obj) {
849 InstanceType type = obj->map()->instance_type();
850 ASSERT(0 <= type && type <= LAST_TYPE);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800851 agg_snapshot_->info()[type].increment_number(1);
852 agg_snapshot_->info()[type].increment_bytes(obj->Size());
Steve Block791712a2010-08-27 10:21:07 +0100853}
854
855
856void AggregatedHeapSnapshotGenerator::GenerateSnapshot() {
Steve Block1e0659c2011-05-24 12:43:12 +0100857 HeapIterator iterator(HeapIterator::kFilterUnreachable);
Steve Block791712a2010-08-27 10:21:07 +0100858 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
859 CollectStats(obj);
860 agg_snapshot_->js_cons_profile()->CollectStats(obj);
861 agg_snapshot_->js_retainer_profile()->CollectStats(obj);
862 }
863 CalculateStringsStats();
864 agg_snapshot_->js_retainer_profile()->CoarseAndAggregate();
865}
866
867
868class CountingConstructorHeapProfileIterator {
869 public:
870 CountingConstructorHeapProfileIterator()
871 : entities_count_(0), children_count_(0) {
872 }
873
874 void Call(const JSObjectsCluster& cluster,
875 const NumberAndSizeInfo& number_and_size) {
876 ++entities_count_;
877 children_count_ += number_and_size.number();
878 }
879
880 int entities_count() { return entities_count_; }
881 int children_count() { return children_count_; }
882
883 private:
884 int entities_count_;
885 int children_count_;
886};
887
888
889static HeapEntry* AddEntryFromAggregatedSnapshot(HeapSnapshot* snapshot,
890 int* root_child_index,
891 HeapEntry::Type type,
892 const char* name,
893 int count,
894 int size,
895 int children_count,
896 int retainers_count) {
897 HeapEntry* entry = snapshot->AddEntry(
898 type, name, count, size, children_count, retainers_count);
899 ASSERT(entry != NULL);
900 snapshot->root()->SetUnidirElementReference(*root_child_index,
901 *root_child_index + 1,
902 entry);
903 *root_child_index = *root_child_index + 1;
904 return entry;
905}
906
907
908class AllocatingConstructorHeapProfileIterator {
909 public:
910 AllocatingConstructorHeapProfileIterator(HeapSnapshot* snapshot,
911 int* root_child_index)
912 : snapshot_(snapshot),
913 root_child_index_(root_child_index) {
914 }
915
916 void Call(const JSObjectsCluster& cluster,
917 const NumberAndSizeInfo& number_and_size) {
918 const char* name = cluster.GetSpecialCaseName();
919 if (name == NULL) {
Steve Block44f0eee2011-05-26 01:26:41 +0100920 name = snapshot_->collection()->names()->GetFunctionName(
921 cluster.constructor());
Steve Block791712a2010-08-27 10:21:07 +0100922 }
923 AddEntryFromAggregatedSnapshot(snapshot_,
924 root_child_index_,
925 HeapEntry::kObject,
926 name,
927 number_and_size.number(),
928 number_and_size.bytes(),
929 0,
930 0);
931 }
932
933 private:
934 HeapSnapshot* snapshot_;
935 int* root_child_index_;
936};
937
938
939static HeapObject* ClusterAsHeapObject(const JSObjectsCluster& cluster) {
940 return cluster.can_be_coarsed() ?
941 reinterpret_cast<HeapObject*>(cluster.instance()) : cluster.constructor();
942}
943
944
945static JSObjectsCluster HeapObjectAsCluster(HeapObject* object) {
946 if (object->IsString()) {
947 return JSObjectsCluster(String::cast(object));
948 } else {
949 JSObject* js_obj = JSObject::cast(object);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800950 String* constructor = GetConstructorNameForHeapProfile(
951 JSObject::cast(js_obj));
Steve Block791712a2010-08-27 10:21:07 +0100952 return JSObjectsCluster(constructor, object);
953 }
954}
955
956
957class CountingRetainersIterator {
958 public:
959 CountingRetainersIterator(const JSObjectsCluster& child_cluster,
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100960 HeapEntriesAllocator* allocator,
Steve Block791712a2010-08-27 10:21:07 +0100961 HeapEntriesMap* map)
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100962 : child_(ClusterAsHeapObject(child_cluster)),
963 allocator_(allocator),
964 map_(map) {
Steve Block791712a2010-08-27 10:21:07 +0100965 if (map_->Map(child_) == NULL)
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100966 map_->Pair(child_, allocator_, HeapEntriesMap::kHeapEntryPlaceholder);
Steve Block791712a2010-08-27 10:21:07 +0100967 }
968
969 void Call(const JSObjectsCluster& cluster,
970 const NumberAndSizeInfo& number_and_size) {
971 if (map_->Map(ClusterAsHeapObject(cluster)) == NULL)
972 map_->Pair(ClusterAsHeapObject(cluster),
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100973 allocator_,
Steve Block791712a2010-08-27 10:21:07 +0100974 HeapEntriesMap::kHeapEntryPlaceholder);
975 map_->CountReference(ClusterAsHeapObject(cluster), child_);
976 }
977
978 private:
979 HeapObject* child_;
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100980 HeapEntriesAllocator* allocator_;
Steve Block791712a2010-08-27 10:21:07 +0100981 HeapEntriesMap* map_;
982};
983
984
985class AllocatingRetainersIterator {
986 public:
987 AllocatingRetainersIterator(const JSObjectsCluster& child_cluster,
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100988 HeapEntriesAllocator*,
Steve Block791712a2010-08-27 10:21:07 +0100989 HeapEntriesMap* map)
990 : child_(ClusterAsHeapObject(child_cluster)), map_(map) {
991 child_entry_ = map_->Map(child_);
992 ASSERT(child_entry_ != NULL);
993 }
994
995 void Call(const JSObjectsCluster& cluster,
996 const NumberAndSizeInfo& number_and_size) {
997 int child_index, retainer_index;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800998 map_->CountReference(ClusterAsHeapObject(cluster),
999 child_,
1000 &child_index,
1001 &retainer_index);
1002 map_->Map(ClusterAsHeapObject(cluster))->SetIndexedReference(
1003 HeapGraphEdge::kElement,
1004 child_index,
1005 number_and_size.number(),
1006 child_entry_,
1007 retainer_index);
Steve Block791712a2010-08-27 10:21:07 +01001008 }
1009
1010 private:
1011 HeapObject* child_;
1012 HeapEntriesMap* map_;
1013 HeapEntry* child_entry_;
1014};
1015
1016
// Iterates a retainers tree, optionally coarsening it through a
// ClustersCoarser first, and hands each cluster's retainers to a
// RetainersIterator (either the counting or the allocating one).
template<class RetainersIterator>
class AggregatingRetainerTreeIterator {
 public:
  explicit AggregatingRetainerTreeIterator(ClustersCoarser* coarser,
                                           HeapEntriesAllocator* allocator,
                                           HeapEntriesMap* map)
      : coarser_(coarser), allocator_(allocator), map_(map) {
  }

  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree) {
    // Skip clusters folded into a coarse equivalent; they are handled
    // via their representative.
    if (coarser_ != NULL &&
        !coarser_->GetCoarseEquivalent(cluster).is_null()) return;
    JSObjectsClusterTree* tree_to_iterate = tree;
    // dest_tree_ is zone-allocated; the zone scope releases it when
    // this call returns.
    ZoneScope zs(DELETE_ON_EXIT);
    JSObjectsClusterTree dest_tree_;
    if (coarser_ != NULL) {
      // Re-aggregate the retainers under their coarse clusters first.
      RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
      tree->ForEach(&retainers_aggregator);
      tree_to_iterate = &dest_tree_;
    }
    RetainersIterator iterator(cluster, allocator_, map_);
    tree_to_iterate->ForEach(&iterator);
  }

 private:
  ClustersCoarser* coarser_;
  HeapEntriesAllocator* allocator_;
  HeapEntriesMap* map_;
};
1046
1047
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001048class AggregatedRetainerTreeAllocator : public HeapEntriesAllocator {
Steve Block791712a2010-08-27 10:21:07 +01001049 public:
1050 AggregatedRetainerTreeAllocator(HeapSnapshot* snapshot,
1051 int* root_child_index)
1052 : snapshot_(snapshot), root_child_index_(root_child_index) {
1053 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001054 ~AggregatedRetainerTreeAllocator() { }
Steve Block791712a2010-08-27 10:21:07 +01001055
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001056 HeapEntry* AllocateEntry(
1057 HeapThing ptr, int children_count, int retainers_count) {
1058 HeapObject* obj = reinterpret_cast<HeapObject*>(ptr);
Steve Block791712a2010-08-27 10:21:07 +01001059 JSObjectsCluster cluster = HeapObjectAsCluster(obj);
1060 const char* name = cluster.GetSpecialCaseName();
1061 if (name == NULL) {
Steve Block44f0eee2011-05-26 01:26:41 +01001062 name = snapshot_->collection()->names()->GetFunctionName(
1063 cluster.constructor());
Steve Block791712a2010-08-27 10:21:07 +01001064 }
1065 return AddEntryFromAggregatedSnapshot(
1066 snapshot_, root_child_index_, HeapEntry::kObject, name,
1067 0, 0, children_count, retainers_count);
1068 }
1069
1070 private:
1071 HeapSnapshot* snapshot_;
1072 int* root_child_index_;
1073};
1074
1075
1076template<class Iterator>
1077void AggregatedHeapSnapshotGenerator::IterateRetainers(
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001078 HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map) {
Steve Block791712a2010-08-27 10:21:07 +01001079 RetainerHeapProfile* p = agg_snapshot_->js_retainer_profile();
1080 AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_1(
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001081 p->coarser(), allocator, entries_map);
Steve Block791712a2010-08-27 10:21:07 +01001082 p->retainers_tree()->ForEach(&agg_ret_iter_1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001083 AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_2(
1084 NULL, allocator, entries_map);
Steve Block791712a2010-08-27 10:21:07 +01001085 p->aggregator()->output_tree().ForEach(&agg_ret_iter_2);
1086}
1087
1088
// Converts the aggregated data into a regular HeapSnapshot in two
// passes: first count every entry/reference to size the snapshot's
// arrays exactly, then allocate the entries and fill in references.
// The pass order matters: AllocateEntries must see the final totals.
void AggregatedHeapSnapshotGenerator::FillHeapSnapshot(HeapSnapshot* snapshot) {
  // Count the number of entities.
  int histogram_entities_count = 0;
  int histogram_children_count = 0;
  int histogram_retainers_count = 0;
  // One entry per non-empty histogram bucket (strings are lumped
  // into kAllStringsType, hence the loop bound).
  for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
    if (agg_snapshot_->info()[i].bytes() > 0) {
      ++histogram_entities_count;
    }
  }
  // One entry per constructor cluster.
  CountingConstructorHeapProfileIterator counting_cons_iter;
  agg_snapshot_->js_cons_profile()->ForEach(&counting_cons_iter);
  histogram_entities_count += counting_cons_iter.entities_count();
  HeapEntriesMap entries_map;
  int root_child_index = 0;
  AggregatedRetainerTreeAllocator allocator(snapshot, &root_child_index);
  // Counting pass over retainers fills entries_map with placeholders.
  IterateRetainers<CountingRetainersIterator>(&allocator, &entries_map);
  histogram_entities_count += entries_map.entries_count();
  histogram_children_count += entries_map.total_children_count();
  histogram_retainers_count += entries_map.total_retainers_count();

  // Root entry references all other entries.
  histogram_children_count += histogram_entities_count;
  int root_children_count = histogram_entities_count;
  ++histogram_entities_count;

  // Allocate and fill entries in the snapshot, allocate references.
  snapshot->AllocateEntries(histogram_entities_count,
                            histogram_children_count,
                            histogram_retainers_count);
  snapshot->AddRootEntry(root_children_count);
  // Histogram buckets become hidden entries under the root.
  for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
    if (agg_snapshot_->info()[i].bytes() > 0) {
      AddEntryFromAggregatedSnapshot(snapshot,
                                     &root_child_index,
                                     HeapEntry::kHidden,
                                     agg_snapshot_->info()[i].name(),
                                     agg_snapshot_->info()[i].number(),
                                     agg_snapshot_->info()[i].bytes(),
                                     0,
                                     0);
    }
  }
  // Constructor clusters become object entries under the root.
  AllocatingConstructorHeapProfileIterator alloc_cons_iter(
      snapshot, &root_child_index);
  agg_snapshot_->js_cons_profile()->ForEach(&alloc_cons_iter);
  // Replace the retainer placeholders with real entries.
  entries_map.AllocateEntries();

  // Fill up references.
  IterateRetainers<AllocatingRetainersIterator>(&allocator, &entries_map);

  snapshot->SetDominatorsToSelf();
}
1142
1143
// Enables producer-stack logging; allocations recorded before Setup
// is called are silently ignored (see DoRecordJSObjectAllocation).
void ProducerHeapProfile::Setup() {
  can_log_ = true;
}
1147
Leon Clarkee46be812010-01-19 14:06:41 +00001148void ProducerHeapProfile::DoRecordJSObjectAllocation(Object* obj) {
1149 ASSERT(FLAG_log_producers);
1150 if (!can_log_) return;
Steve Block3ce2e202009-11-05 08:53:23 +00001151 int framesCount = 0;
1152 for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
1153 ++framesCount;
1154 }
1155 if (framesCount == 0) return;
1156 ++framesCount; // Reserve place for the terminator item.
1157 Vector<Address> stack(NewArray<Address>(framesCount), framesCount);
1158 int i = 0;
1159 for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
1160 stack[i++] = it.frame()->pc();
1161 }
1162 stack[i] = NULL;
Steve Block44f0eee2011-05-26 01:26:41 +01001163 Handle<Object> handle = isolate_->global_handles()->Create(obj);
1164 isolate_->global_handles()->MakeWeak(handle.location(),
1165 static_cast<void*>(stack.start()),
1166 StackWeakReferenceCallback);
Steve Block3ce2e202009-11-05 08:53:23 +00001167}
1168
1169
Steve Blocka7e24c12009-10-30 11:49:00 +00001170#endif // ENABLE_LOGGING_AND_PROFILING
1171
1172
1173} } // namespace v8::internal