blob: e47d66f984805def59210f6920d7618fcd1dce01 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "heap-profiler.h"
Steve Block3ce2e202009-11-05 08:53:23 +000031#include "frames-inl.h"
32#include "global-handles.h"
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010033#include "profile-generator.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000034#include "string-stream.h"
35
36namespace v8 {
37namespace internal {
38
39
40#ifdef ENABLE_LOGGING_AND_PROFILING
41namespace {
42
// Clusterizer is a set of helper functions for converting
// object references into clusters.
class Clusterizer : public AllStatic {
 public:
  // Fine-grained clusterization: Object/Array instances are kept distinct.
  static JSObjectsCluster Clusterize(HeapObject* obj) {
    return Clusterize(obj, true);
  }
  // Accounts |obj| (count + size) in |tree| under its cluster.
  static void InsertIntoTree(JSObjectsClusterTree* tree,
                             HeapObject* obj, bool fine_grain);
  // Records a reference to |cluster| in |tree|; only the reference count
  // matters, so size 0 is used.
  static void InsertReferenceIntoTree(JSObjectsClusterTree* tree,
                                      const JSObjectsCluster& cluster) {
    InsertIntoTree(tree, cluster, 0);
  }

 private:
  static JSObjectsCluster Clusterize(HeapObject* obj, bool fine_grain);
  static int CalculateNetworkSize(JSObject* obj);
  // JS objects are measured together with their owned sub-objects
  // ("network" size); everything else by its own size.
  static int GetObjectSize(HeapObject* obj) {
    return obj->IsJSObject() ?
        CalculateNetworkSize(JSObject::cast(obj)) : obj->Size();
  }
  static void InsertIntoTree(JSObjectsClusterTree* tree,
                             const JSObjectsCluster& cluster, int size);
};
67
68
69JSObjectsCluster Clusterizer::Clusterize(HeapObject* obj, bool fine_grain) {
70 if (obj->IsJSObject()) {
71 JSObject* js_obj = JSObject::cast(obj);
72 String* constructor = JSObject::cast(js_obj)->constructor_name();
73 // Differentiate Object and Array instances.
74 if (fine_grain && (constructor == Heap::Object_symbol() ||
75 constructor == Heap::Array_symbol())) {
76 return JSObjectsCluster(constructor, obj);
77 } else {
78 return JSObjectsCluster(constructor);
79 }
80 } else if (obj->IsString()) {
81 return JSObjectsCluster(Heap::String_symbol());
Steve Blockd0582a62009-12-15 09:54:21 +000082 } else if (obj->IsJSGlobalPropertyCell()) {
83 return JSObjectsCluster(JSObjectsCluster::GLOBAL_PROPERTY);
84 } else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) {
85 return JSObjectsCluster(JSObjectsCluster::CODE);
Steve Blocka7e24c12009-10-30 11:49:00 +000086 }
87 return JSObjectsCluster();
88}
89
90
91void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
92 HeapObject* obj, bool fine_grain) {
93 JSObjectsCluster cluster = Clusterize(obj, fine_grain);
94 if (cluster.is_null()) return;
95 InsertIntoTree(tree, cluster, GetObjectSize(obj));
96}
97
98
99void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
100 const JSObjectsCluster& cluster, int size) {
101 JSObjectsClusterTree::Locator loc;
102 tree->Insert(cluster, &loc);
103 NumberAndSizeInfo number_and_size = loc.value();
104 number_and_size.increment_number(1);
105 number_and_size.increment_bytes(size);
106 loc.set_value(number_and_size);
107}
108
109
// Computes the "network" size of a JS object: its own size plus the
// sizes of backing stores it exclusively owns (non-shared properties,
// elements, and for functions their context and literals arrays).
int Clusterizer::CalculateNetworkSize(JSObject* obj) {
  int size = obj->Size();
  // If 'properties' and 'elements' are non-empty (thus, non-shared),
  // take their size into account.
  if (obj->properties() != Heap::empty_fixed_array()) {
    size += obj->properties()->Size();
  }
  if (obj->elements() != Heap::empty_fixed_array()) {
    size += obj->elements()->Size();
  }
  // For functions, also account non-empty context and literals sizes.
  if (obj->IsJSFunction()) {
    JSFunction* f = JSFunction::cast(obj);
    if (f->unchecked_context()->IsContext()) {
      size += f->context()->Size();
    }
    if (f->literals()->length() != 0) {
      size += f->literals()->Size();
    }
  }
  return size;
}
132
133
// A helper class for recording back references.
// Visits an object's pointer fields and reports each referenced heap
// object to the profile as being retained by |cluster|.
class ReferencesExtractor : public ObjectVisitor {
 public:
  ReferencesExtractor(const JSObjectsCluster& cluster,
                      RetainerHeapProfile* profile)
      : cluster_(cluster),
        profile_(profile),
        inside_array_(false) {
  }

  void VisitPointer(Object** o) {
    if ((*o)->IsFixedArray() && !inside_array_) {
      // Traverse one level deep for data members that are fixed arrays.
      // This covers the case of 'elements' and 'properties' of JSObject,
      // and function contexts.
      inside_array_ = true;
      FixedArray::cast(*o)->Iterate(this);
      inside_array_ = false;
    } else if ((*o)->IsHeapObject()) {
      profile_->StoreReference(cluster_, HeapObject::cast(*o));
    }
  }

  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) VisitPointer(p);
  }

 private:
  const JSObjectsCluster& cluster_;   // The retaining cluster.
  RetainerHeapProfile* profile_;      // Where references are recorded.
  bool inside_array_;                 // Guards against recursing into
                                      // arrays more than one level deep.
};
166
167
// A printer interface implementation for the Retainers profile.
// Emits each cluster's retainers line through the logger.
class RetainersPrinter : public RetainerHeapProfile::Printer {
 public:
  void PrintRetainers(const JSObjectsCluster& cluster,
                      const StringStream& retainers) {
    HeapStringAllocator allocator;
    StringStream stream(&allocator);
    cluster.Print(&stream);
    LOG(HeapSampleJSRetainersEvent(
        *(stream.ToCString()), *(retainers.ToCString())));
  }
};
180
181
// Visitor for printing a cluster tree.
// Each visited entry is appended to the stream as ",<cluster>;<count>".
class ClusterTreePrinter BASE_EMBEDDED {
 public:
  explicit ClusterTreePrinter(StringStream* stream) : stream_(stream) {}
  // ForEach callback: delegates to the static Print below.
  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size) {
    Print(stream_, cluster, number_and_size);
  }
  static void Print(StringStream* stream,
                    const JSObjectsCluster& cluster,
                    const NumberAndSizeInfo& number_and_size);

 private:
  StringStream* stream_;
};
197
198
// Appends one ",<cluster>;<count>" record to |stream|.
void ClusterTreePrinter::Print(StringStream* stream,
                               const JSObjectsCluster& cluster,
                               const NumberAndSizeInfo& number_and_size) {
  stream->Put(',');
  cluster.Print(stream);
  stream->Add(";%d", number_and_size.number());
}
206
207
// Visitor for printing a retainer tree.
// Prints every cluster's retainers as-is, without coarsening.
class SimpleRetainerTreePrinter BASE_EMBEDDED {
 public:
  explicit SimpleRetainerTreePrinter(RetainerHeapProfile::Printer* printer)
      : printer_(printer) {}
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);

 private:
  RetainerHeapProfile::Printer* printer_;
};
218
219
220void SimpleRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
221 JSObjectsClusterTree* tree) {
222 HeapStringAllocator allocator;
223 StringStream stream(&allocator);
224 ClusterTreePrinter retainers_printer(&stream);
225 tree->ForEach(&retainers_printer);
226 printer_->PrintRetainers(cluster, stream);
227}
228
229
// Visitor for aggregating references count of equivalent clusters.
// Folds entries of a retainers tree into |dest_tree|, merging clusters
// that the coarser considers equivalent.
class RetainersAggregator BASE_EMBEDDED {
 public:
  RetainersAggregator(ClustersCoarser* coarser, JSObjectsClusterTree* dest_tree)
      : coarser_(coarser), dest_tree_(dest_tree) {}
  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size);

 private:
  ClustersCoarser* coarser_;
  JSObjectsClusterTree* dest_tree_;
};
242
243
244void RetainersAggregator::Call(const JSObjectsCluster& cluster,
245 const NumberAndSizeInfo& number_and_size) {
246 JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
247 if (eq.is_null()) eq = cluster;
248 JSObjectsClusterTree::Locator loc;
249 dest_tree_->Insert(eq, &loc);
250 NumberAndSizeInfo aggregated_number = loc.value();
251 aggregated_number.increment_number(number_and_size.number());
252 loc.set_value(aggregated_number);
253}
254
255
// Visitor for printing retainers tree. Aggregates equivalent retainer clusters.
class AggregatingRetainerTreePrinter BASE_EMBEDDED {
 public:
  AggregatingRetainerTreePrinter(ClustersCoarser* coarser,
                                 RetainerHeapProfile::Printer* printer)
      : coarser_(coarser), printer_(printer) {}
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);

 private:
  ClustersCoarser* coarser_;
  RetainerHeapProfile::Printer* printer_;
};
268
269
270void AggregatingRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
271 JSObjectsClusterTree* tree) {
272 if (!coarser_->GetCoarseEquivalent(cluster).is_null()) return;
273 JSObjectsClusterTree dest_tree_;
274 RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
275 tree->ForEach(&retainers_aggregator);
276 HeapStringAllocator allocator;
277 StringStream stream(&allocator);
278 ClusterTreePrinter retainers_printer(&stream);
279 dest_tree_.ForEach(&retainers_printer);
280 printer_->PrintRetainers(cluster, stream);
281}
282
Steve Block791712a2010-08-27 10:21:07 +0100283} // namespace
284
Steve Blocka7e24c12009-10-30 11:49:00 +0000285
// A helper class for building a retainers tree, that aggregates
// all equivalent clusters.
class RetainerTreeAggregator {
 public:
  explicit RetainerTreeAggregator(ClustersCoarser* coarser)
      : coarser_(coarser) {}
  // Walks |input_tree|, folding equivalent clusters into output_tree().
  void Process(JSObjectsRetainerTree* input_tree) {
    input_tree->ForEach(this);
  }
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
  // Result tree, populated by Process().
  JSObjectsRetainerTree& output_tree() { return output_tree_; }

 private:
  ClustersCoarser* coarser_;
  JSObjectsRetainerTree output_tree_;
};
302
303
// Merges the retainers of |cluster| into the output-tree entry of its
// coarse equivalent; clusters without an equivalent are skipped.
void RetainerTreeAggregator::Call(const JSObjectsCluster& cluster,
                                  JSObjectsClusterTree* tree) {
  JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
  if (eq.is_null()) return;
  JSObjectsRetainerTree::Locator loc;
  if (output_tree_.Insert(eq, &loc)) {
    // First time this equivalent is seen: create its retainers tree.
    loc.set_value(new JSObjectsClusterTree());
  }
  RetainersAggregator retainers_aggregator(coarser_, loc.value());
  tree->ForEach(&retainers_aggregator);
}
315
Steve Blocka7e24c12009-10-30 11:49:00 +0000316
// Process-wide singleton; created by Setup() and destroyed by TearDown().
HeapProfiler* HeapProfiler::singleton_ = NULL;
318
// Owns the snapshots collection; snapshot uids start at 1.
HeapProfiler::HeapProfiler()
    : snapshots_(new HeapSnapshotsCollection()),
      next_snapshot_uid_(1) {
}
323
324
HeapProfiler::~HeapProfiler() {
  delete snapshots_;  // Owned by this profiler (allocated in ctor).
}
328
329#endif // ENABLE_LOGGING_AND_PROFILING
330
// Creates the singleton on first call; a no-op afterwards (and when
// profiling support is compiled out).
void HeapProfiler::Setup() {
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (singleton_ == NULL) {
    singleton_ = new HeapProfiler();
  }
#endif
}
338
339
// Destroys the singleton; safe to call even if Setup() never ran
// (deleting NULL is a no-op).
void HeapProfiler::TearDown() {
#ifdef ENABLE_LOGGING_AND_PROFILING
  delete singleton_;
  singleton_ = NULL;
#endif
}
346
347
348#ifdef ENABLE_LOGGING_AND_PROFILING
349
// Static entry point; requires Setup() to have been called.
HeapSnapshot* HeapProfiler::TakeSnapshot(const char* name, int type) {
  ASSERT(singleton_ != NULL);
  return singleton_->TakeSnapshotImpl(name, type);
}
354
355
// Overload accepting a heap String as the snapshot name.
HeapSnapshot* HeapProfiler::TakeSnapshot(String* name, int type) {
  ASSERT(singleton_ != NULL);
  return singleton_->TakeSnapshotImpl(name, type);
}
360
361
// Takes a heap snapshot of the requested type. Forces a full GC first
// so the snapshot reflects only live objects.
HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name, int type) {
  Heap::CollectAllGarbage(true);
  HeapSnapshot::Type s_type = static_cast<HeapSnapshot::Type>(type);
  HeapSnapshot* result =
      snapshots_->NewSnapshot(s_type, name, next_snapshot_uid_++);
  switch (s_type) {
    case HeapSnapshot::kFull: {
      // Full object graph snapshot.
      HeapSnapshotGenerator generator(result);
      generator.GenerateSnapshot();
      break;
    }
    case HeapSnapshot::kAggregated: {
      // Per-constructor aggregated statistics, converted into a
      // HeapSnapshot afterwards.
      AggregatedHeapSnapshot agg_snapshot;
      AggregatedHeapSnapshotGenerator generator(&agg_snapshot);
      generator.GenerateSnapshot();
      generator.FillHeapSnapshot(result);
      break;
    }
    default:
      UNREACHABLE();
  }
  snapshots_->SnapshotGenerationFinished();
  return result;
}
386
387
// String-name overload: interns the name and delegates to the
// const char* implementation.
HeapSnapshot* HeapProfiler::TakeSnapshotImpl(String* name, int type) {
  return TakeSnapshotImpl(snapshots_->GetName(name), type);
}
391
392
// Number of snapshots taken so far.
int HeapProfiler::GetSnapshotsCount() {
  ASSERT(singleton_ != NULL);
  return singleton_->snapshots_->snapshots()->length();
}
397
398
// Returns the snapshot at position |index| in creation order.
HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
  ASSERT(singleton_ != NULL);
  return singleton_->snapshots_->snapshots()->at(index);
}
403
404
// Looks up a snapshot by its unique id.
HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
  ASSERT(singleton_ != NULL);
  return singleton_->snapshots_->GetSnapshot(uid);
}
409
410
// Notifies snapshots of an object relocation (e.g. during GC) so
// recorded addresses stay valid.
void HeapProfiler::ObjectMoveEvent(Address from, Address to) {
  ASSERT(singleton_ != NULL);
  singleton_->snapshots_->ObjectMoveEvent(from, to);
}
415
416
// Sentinel key/value definitions required by the splay-tree config.
const JSObjectsClusterTreeConfig::Key JSObjectsClusterTreeConfig::kNoKey;
const JSObjectsClusterTreeConfig::Value JSObjectsClusterTreeConfig::kNoValue;
419
420
// Zone allocations made by this profile are released when it dies.
ConstructorHeapProfile::ConstructorHeapProfile()
    : zscope_(DELETE_ON_EXIT) {
}
424
425
// ForEach callback: logs one per-constructor record (name, object
// count, total bytes).
void ConstructorHeapProfile::Call(const JSObjectsCluster& cluster,
                                  const NumberAndSizeInfo& number_and_size) {
  HeapStringAllocator allocator;
  StringStream stream(&allocator);
  cluster.Print(&stream);
  LOG(HeapSampleJSConstructorEvent(*(stream.ToCString()),
                                   number_and_size.number(),
                                   number_and_size.bytes()));
}
435
436
// Accounts |obj| in the per-constructor tree (coarse-grained: Object
// and Array instances are not distinguished per-instance).
void ConstructorHeapProfile::CollectStats(HeapObject* obj) {
  Clusterizer::InsertIntoTree(&js_objects_info_tree_, obj, false);
}
440
441
// Logs every collected per-constructor record via Call().
void ConstructorHeapProfile::PrintStats() {
  js_objects_info_tree_.ForEach(this);
}
445
446
// Returns a printable constructor name, substituting a placeholder for
// the empty (anonymous) constructor.
static const char* GetConstructorName(const char* name) {
  if (*name == '\0') return "(anonymous)";
  return name;
}
450
451
Steve Block791712a2010-08-27 10:21:07 +0100452const char* JSObjectsCluster::GetSpecialCaseName() const {
453 if (constructor_ == FromSpecialCase(ROOTS)) {
454 return "(roots)";
455 } else if (constructor_ == FromSpecialCase(GLOBAL_PROPERTY)) {
456 return "(global property)";
457 } else if (constructor_ == FromSpecialCase(CODE)) {
458 return "(code)";
459 } else if (constructor_ == FromSpecialCase(SELF)) {
460 return "(self)";
461 }
462 return NULL;
463}
464
465
// Prints the cluster: special-case name if any, otherwise the
// constructor name (plus instance address for per-instance clusters).
void JSObjectsCluster::Print(StringStream* accumulator) const {
  ASSERT(!is_null());
  const char* special_case_name = GetSpecialCaseName();
  if (special_case_name != NULL) {
    accumulator->Add(special_case_name);
  } else {
    SmartPointer<char> s_name(
        constructor_->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
    accumulator->Add("%s", GetConstructorName(*s_name));
    if (instance_ != NULL) {
      // Fine-grained cluster: identify the concrete instance.
      accumulator->Add(":%p", static_cast<void*>(instance_));
    }
  }
}
480
481
482void JSObjectsCluster::DebugPrint(StringStream* accumulator) const {
483 if (!is_null()) {
484 Print(accumulator);
485 } else {
486 accumulator->Add("(null cluster)");
487 }
488}
489
490
// Starts with an empty back-references list of a default capacity.
inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
    const JSObjectsCluster& cluster_)
    : cluster(cluster_), refs(kInitialBackrefsListCapacity) {
}
495
496
// Copy constructor: deep-copies the refs list.
inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
    const ClustersCoarser::ClusterBackRefs& src)
    : cluster(src.cluster), refs(src.refs.capacity()) {
  refs.AddAll(src.refs);
}
502
503
// Assignment: deep-copies the refs list; self-assignment safe.
inline ClustersCoarser::ClusterBackRefs&
    ClustersCoarser::ClusterBackRefs::operator=(
        const ClustersCoarser::ClusterBackRefs& src) {
  if (this == &src) return *this;
  cluster = src.cluster;
  refs.Clear();
  refs.AddAll(src.refs);
  return *this;
}
513
514
515inline int ClustersCoarser::ClusterBackRefs::Compare(
516 const ClustersCoarser::ClusterBackRefs& a,
517 const ClustersCoarser::ClusterBackRefs& b) {
518 int cmp = JSObjectsCluster::CompareConstructors(a.cluster, b.cluster);
519 if (cmp != 0) return cmp;
520 if (a.refs.length() < b.refs.length()) return -1;
521 if (a.refs.length() > b.refs.length()) return 1;
522 for (int i = 0; i < a.refs.length(); ++i) {
523 int cmp = JSObjectsCluster::Compare(a.refs[i], b.refs[i]);
524 if (cmp != 0) return cmp;
525 }
526 return 0;
527}
528
529
// current_pair_/current_set_/self_ are transient state used only while
// a Call() traversal is in progress.
ClustersCoarser::ClustersCoarser()
    : zscope_(DELETE_ON_EXIT),
      sim_list_(ClustersCoarser::kInitialSimilarityListCapacity),
      current_pair_(NULL),
      current_set_(NULL),
      self_(NULL) {
}
537
538
// Retainer-tree callback: collects the back-references of |cluster|
// into a ClusterBackRefs pair and appends it to the similarity list.
// Sets up transient state consumed by the NumberAndSizeInfo overload
// of Call() during the nested ForEach.
void ClustersCoarser::Call(const JSObjectsCluster& cluster,
                           JSObjectsClusterTree* tree) {
  if (!cluster.can_be_coarsed()) return;
  ClusterBackRefs pair(cluster);
  ASSERT(current_pair_ == NULL);
  current_pair_ = &pair;
  current_set_ = new JSObjectsRetainerTree();
  self_ = &cluster;
  tree->ForEach(this);
  sim_list_.Add(pair);
  current_pair_ = NULL;
  current_set_ = NULL;
  self_ = NULL;
}
553
554
// Cluster-tree callback (nested inside the overload above): records one
// back-reference of the current cluster, collapsing self-references and
// de-duplicating already-coarsened equivalents via current_set_.
void ClustersCoarser::Call(const JSObjectsCluster& cluster,
                           const NumberAndSizeInfo& number_and_size) {
  ASSERT(current_pair_ != NULL);
  ASSERT(current_set_ != NULL);
  ASSERT(self_ != NULL);
  JSObjectsRetainerTree::Locator loc;
  if (JSObjectsCluster::Compare(*self_, cluster) == 0) {
    // A reference to itself is recorded as the SELF special case.
    current_pair_->refs.Add(JSObjectsCluster(JSObjectsCluster::SELF));
    return;
  }
  JSObjectsCluster eq = GetCoarseEquivalent(cluster);
  if (!eq.is_null()) {
    // Record each coarse equivalent only once per cluster.
    if (current_set_->Find(eq, &loc)) return;
    current_pair_->refs.Add(eq);
    current_set_->Insert(eq, &loc);
  } else {
    current_pair_->refs.Add(cluster);
  }
}
574
575
// Iterates coarsening to a fixed point (bounded by kMaxPassesCount):
// stops once a pass discovers no new cluster equivalences.
void ClustersCoarser::Process(JSObjectsRetainerTree* tree) {
  int last_eq_clusters = -1;
  for (int i = 0; i < kMaxPassesCount; ++i) {
    sim_list_.Clear();
    const int curr_eq_clusters = DoProcess(tree);
    // If no new cluster equivalents discovered, abort processing.
    if (last_eq_clusters == curr_eq_clusters) break;
    last_eq_clusters = curr_eq_clusters;
  }
}
586
587
// One coarsening pass: gather back-references, sort them so equivalent
// clusters become adjacent, then rebuild the equality tree.
int ClustersCoarser::DoProcess(JSObjectsRetainerTree* tree) {
  tree->ForEach(this);
  sim_list_.Iterate(ClusterBackRefs::SortRefsIterator);
  sim_list_.Sort(ClusterBackRefsCmp);
  return FillEqualityTree();
}
594
595
596JSObjectsCluster ClustersCoarser::GetCoarseEquivalent(
597 const JSObjectsCluster& cluster) {
598 if (!cluster.can_be_coarsed()) return JSObjectsCluster();
599 EqualityTree::Locator loc;
600 return eq_tree_.Find(cluster, &loc) ? loc.value() : JSObjectsCluster();
601}
602
603
604bool ClustersCoarser::HasAnEquivalent(const JSObjectsCluster& cluster) {
605 // Return true for coarsible clusters that have a non-identical equivalent.
606 if (!cluster.can_be_coarsed()) return false;
607 JSObjectsCluster eq = GetCoarseEquivalent(cluster);
608 return !eq.is_null() && JSObjectsCluster::Compare(cluster, eq) != 0;
609}
610
611
// Scans the sorted similarity list and maps every member of a run of
// equal ClusterBackRefs to the run's first cluster. Returns the number
// of equivalences added (the first element of a class is counted only
// via its self-equivalence entry, which is not tallied).
int ClustersCoarser::FillEqualityTree() {
  int eq_clusters_count = 0;
  int eq_to = 0;              // Index of the representative of the run.
  bool first_added = false;   // Whether the representative's own entry exists.
  for (int i = 1; i < sim_list_.length(); ++i) {
    if (ClusterBackRefs::Compare(sim_list_[i], sim_list_[eq_to]) == 0) {
      EqualityTree::Locator loc;
      if (!first_added) {
        // Add self-equivalence, if we have more than one item in this
        // equivalence class.
        eq_tree_.Insert(sim_list_[eq_to].cluster, &loc);
        loc.set_value(sim_list_[eq_to].cluster);
        first_added = true;
      }
      eq_tree_.Insert(sim_list_[i].cluster, &loc);
      loc.set_value(sim_list_[eq_to].cluster);
      ++eq_clusters_count;
    } else {
      // Start of a new run: this element becomes the representative.
      eq_to = i;
      first_added = false;
    }
  }
  return eq_clusters_count;
}
636
637
// Sentinel key/value definitions for the equality and retainer trees.
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoKey;
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoValue;
const JSObjectsRetainerTreeConfig::Key JSObjectsRetainerTreeConfig::kNoKey;
const JSObjectsRetainerTreeConfig::Value JSObjectsRetainerTreeConfig::kNoValue =
    NULL;
643
644
// Seeds the profile by recording references from the strong roots,
// grouped under the special ROOTS cluster.
RetainerHeapProfile::RetainerHeapProfile()
    : zscope_(DELETE_ON_EXIT),
      aggregator_(NULL) {
  JSObjectsCluster roots(JSObjectsCluster::ROOTS);
  ReferencesExtractor extractor(roots, this);
  Heap::IterateRoots(&extractor, VISIT_ONLY_STRONG);
}
652
653
RetainerHeapProfile::~RetainerHeapProfile() {
  delete aggregator_;  // Allocated in CoarseAndAggregate(); may be NULL.
}
657
658
// Records that |cluster| references |ref|: the reference is stored in
// the retainers tree keyed by the referenced object's cluster.
void RetainerHeapProfile::StoreReference(const JSObjectsCluster& cluster,
                                         HeapObject* ref) {
  JSObjectsCluster ref_cluster = Clusterizer::Clusterize(ref);
  if (ref_cluster.is_null()) return;
  JSObjectsRetainerTree::Locator ref_loc;
  if (retainers_tree_.Insert(ref_cluster, &ref_loc)) {
    // First reference to this cluster: create its retainers tree.
    ref_loc.set_value(new JSObjectsClusterTree());
  }
  JSObjectsClusterTree* referenced_by = ref_loc.value();
  Clusterizer::InsertReferenceIntoTree(referenced_by, cluster);
}
670
671
// Records all references going out of |obj| under its cluster.
void RetainerHeapProfile::CollectStats(HeapObject* obj) {
  const JSObjectsCluster cluster = Clusterizer::Clusterize(obj);
  if (cluster.is_null()) return;
  ReferencesExtractor extractor(cluster, this);
  obj->Iterate(&extractor);
}
678
679
// Runs cluster coarsening over the collected retainers tree and builds
// the aggregated tree. Must be called once, before printing stats.
void RetainerHeapProfile::CoarseAndAggregate() {
  coarser_.Process(&retainers_tree_);
  ASSERT(aggregator_ == NULL);
  aggregator_ = new RetainerTreeAggregator(&coarser_);
  aggregator_->Process(&retainers_tree_);
}
686
687
Steve Blocka7e24c12009-10-30 11:49:00 +0000688void RetainerHeapProfile::DebugPrintStats(
689 RetainerHeapProfile::Printer* printer) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000690 // Print clusters that have no equivalents, aggregating their retainers.
691 AggregatingRetainerTreePrinter agg_printer(&coarser_, printer);
692 retainers_tree_.ForEach(&agg_printer);
Steve Block791712a2010-08-27 10:21:07 +0100693 // Print clusters that have equivalents.
Steve Blocka7e24c12009-10-30 11:49:00 +0000694 SimpleRetainerTreePrinter s_printer(printer);
Steve Block791712a2010-08-27 10:21:07 +0100695 aggregator_->output_tree().ForEach(&s_printer);
Steve Blocka7e24c12009-10-30 11:49:00 +0000696}
697
698
// Prints the profile through the logging printer.
void RetainerHeapProfile::PrintStats() {
  RetainersPrinter printer;
  DebugPrintStats(&printer);
}
703
704
705//
706// HeapProfiler class implementation.
707//
// Weak-handle callback: frees the producer stack trace attached to a
// collected object and disposes the handle.
static void StackWeakReferenceCallback(Persistent<Value> object,
                                       void* trace) {
  DeleteArray(static_cast<Address*>(trace));
  object.Dispose();
}
713
714
// Logs the allocation ("producer") stack trace of a live JS object
// together with its constructor name; non-JS objects are skipped.
static void PrintProducerStackTrace(Object* obj, void* trace) {
  if (!obj->IsJSObject()) return;
  String* constructor = JSObject::cast(obj)->constructor_name();
  SmartPointer<char> s_name(
      constructor->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
  LOG(HeapSampleJSProducerEvent(GetConstructorName(*s_name),
                                reinterpret_cast<Address*>(trace)));
}
723
724
// Writes a complete heap sample to the log: overall stats, per-type
// histogram, per-constructor and retainers profiles, and producer
// stack traces of weakly-referenced objects.
void HeapProfiler::WriteSample() {
  LOG(HeapSampleBeginEvent("Heap", "allocated"));
  LOG(HeapSampleStats(
      "Heap", "allocated", Heap::CommittedMemory(), Heap::SizeOfObjects()));

  AggregatedHeapSnapshot snapshot;
  AggregatedHeapSnapshotGenerator generator(&snapshot);
  generator.GenerateSnapshot();

  // Log one item per instance type with a non-zero footprint; string
  // types are reported lumped together as kAllStringsType.
  HistogramInfo* info = snapshot.info();
  for (int i = FIRST_NONSTRING_TYPE;
       i <= AggregatedHeapSnapshotGenerator::kAllStringsType;
       ++i) {
    if (info[i].bytes() > 0) {
      LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
                              info[i].bytes()));
    }
  }

  snapshot.js_cons_profile()->PrintStats();
  snapshot.js_retainer_profile()->PrintStats();

  GlobalHandles::IterateWeakRoots(PrintProducerStackTrace,
                                  StackWeakReferenceCallback);

  LOG(HeapSampleEndEvent("Heap", "allocated"));
}
752
753
// Allocates one histogram slot per instance type plus an extra slot
// (kAllStringsType) for all string types combined.
AggregatedHeapSnapshot::AggregatedHeapSnapshot()
    : info_(NewArray<HistogramInfo>(
        AggregatedHeapSnapshotGenerator::kAllStringsType + 1)) {
#define DEF_TYPE_NAME(name) info_[name].set_name(#name);
  INSTANCE_TYPE_LIST(DEF_TYPE_NAME);
#undef DEF_TYPE_NAME
  info_[AggregatedHeapSnapshotGenerator::kAllStringsType].set_name(
      "STRING_TYPE");
}
763
764
AggregatedHeapSnapshot::~AggregatedHeapSnapshot() {
  DeleteArray(info_);  // Matches NewArray in the constructor.
}
768
769
// Generator fills |agg_snapshot| (not owned) in GenerateSnapshot().
AggregatedHeapSnapshotGenerator::AggregatedHeapSnapshotGenerator(
    AggregatedHeapSnapshot* agg_snapshot)
    : agg_snapshot_(agg_snapshot) {
}
774
775
// Sums counts and bytes of every individual string type into the
// combined kAllStringsType histogram slot.
void AggregatedHeapSnapshotGenerator::CalculateStringsStats() {
  HistogramInfo* info = agg_snapshot_->info();
  HistogramInfo& strings = info[kAllStringsType];
  // Lump all the string types together.
#define INCREMENT_SIZE(type, size, name, camel_name) \
  strings.increment_number(info[type].number());     \
  strings.increment_bytes(info[type].bytes());
  STRING_TYPE_LIST(INCREMENT_SIZE);
#undef INCREMENT_SIZE
}
786
787
// Accounts |obj| in its instance type's histogram slot; free-list
// nodes are excluded since they are not live objects.
void AggregatedHeapSnapshotGenerator::CollectStats(HeapObject* obj) {
  InstanceType type = obj->map()->instance_type();
  ASSERT(0 <= type && type <= LAST_TYPE);
  if (!FreeListNode::IsFreeListNode(obj)) {
    agg_snapshot_->info()[type].increment_number(1);
    agg_snapshot_->info()[type].increment_bytes(obj->Size());
  }
}
796
797
// Walks the whole heap once, feeding each object to the histogram,
// constructor, and retainer profiles; then finalizes derived stats.
void AggregatedHeapSnapshotGenerator::GenerateSnapshot() {
  HeapIterator iterator;
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    CollectStats(obj);
    agg_snapshot_->js_cons_profile()->CollectStats(obj);
    agg_snapshot_->js_retainer_profile()->CollectStats(obj);
  }
  CalculateStringsStats();
  agg_snapshot_->js_retainer_profile()->CoarseAndAggregate();
}
808
809
// Counting pass over a constructor profile tree: tallies the number of
// clusters (entities) and the total object count (children).
class CountingConstructorHeapProfileIterator {
 public:
  CountingConstructorHeapProfileIterator()
      : entities_count_(0), children_count_(0) {
  }

  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size) {
    ++entities_count_;
    children_count_ += number_and_size.number();
  }

  // Number of distinct clusters visited.
  int entities_count() { return entities_count_; }
  // Total number of objects across all visited clusters.
  int children_count() { return children_count_; }

 private:
  int entities_count_;
  int children_count_;
};
829
830
// Adds an entry to |snapshot| and wires it as the next child of the
// root entry, advancing *root_child_index.
static HeapEntry* AddEntryFromAggregatedSnapshot(HeapSnapshot* snapshot,
                                                 int* root_child_index,
                                                 HeapEntry::Type type,
                                                 const char* name,
                                                 int count,
                                                 int size,
                                                 int children_count,
                                                 int retainers_count) {
  HeapEntry* entry = snapshot->AddEntry(
      type, name, count, size, children_count, retainers_count);
  ASSERT(entry != NULL);
  snapshot->root()->SetUnidirElementReference(*root_child_index,
                                              *root_child_index + 1,
                                              entry);
  *root_child_index = *root_child_index + 1;
  return entry;
}
848
849
// Allocating pass over a constructor profile tree: creates one snapshot
// entry per cluster, named after its constructor (or special case).
class AllocatingConstructorHeapProfileIterator {
 public:
  AllocatingConstructorHeapProfileIterator(HeapSnapshot* snapshot,
                                           int* root_child_index)
      : snapshot_(snapshot),
        root_child_index_(root_child_index) {
  }

  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size) {
    // Special-case clusters (roots, code, ...) carry their own name.
    const char* name = cluster.GetSpecialCaseName();
    if (name == NULL) {
      name = snapshot_->collection()->GetFunctionName(cluster.constructor());
    }
    AddEntryFromAggregatedSnapshot(snapshot_,
                                   root_child_index_,
                                   HeapEntry::kObject,
                                   name,
                                   number_and_size.number(),
                                   number_and_size.bytes(),
                                   0,
                                   0);
  }

 private:
  HeapSnapshot* snapshot_;
  int* root_child_index_;
};
878
879
880static HeapObject* ClusterAsHeapObject(const JSObjectsCluster& cluster) {
881 return cluster.can_be_coarsed() ?
882 reinterpret_cast<HeapObject*>(cluster.instance()) : cluster.constructor();
883}
884
885
886static JSObjectsCluster HeapObjectAsCluster(HeapObject* object) {
887 if (object->IsString()) {
888 return JSObjectsCluster(String::cast(object));
889 } else {
890 JSObject* js_obj = JSObject::cast(object);
891 String* constructor = JSObject::cast(js_obj)->constructor_name();
892 return JSObjectsCluster(constructor, object);
893 }
894}
895
896
// Counting pass over a cluster's retainers: ensures both the child and
// each retainer are registered in the entries map (with a placeholder
// entry) and counts the retainer->child references.
class CountingRetainersIterator {
 public:
  CountingRetainersIterator(const JSObjectsCluster& child_cluster,
                            HeapEntriesMap* map)
      : child_(ClusterAsHeapObject(child_cluster)), map_(map) {
    if (map_->Map(child_) == NULL)
      map_->Pair(child_, HeapEntriesMap::kHeapEntryPlaceholder);
  }

  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size) {
    if (map_->Map(ClusterAsHeapObject(cluster)) == NULL)
      map_->Pair(ClusterAsHeapObject(cluster),
                 HeapEntriesMap::kHeapEntryPlaceholder);
    map_->CountReference(ClusterAsHeapObject(cluster), child_);
  }

 private:
  HeapObject* child_;     // Representative object of the retained cluster.
  HeapEntriesMap* map_;
};
918
919
920class AllocatingRetainersIterator {
921 public:
922 AllocatingRetainersIterator(const JSObjectsCluster& child_cluster,
923 HeapEntriesMap* map)
924 : child_(ClusterAsHeapObject(child_cluster)), map_(map) {
925 child_entry_ = map_->Map(child_);
926 ASSERT(child_entry_ != NULL);
927 }
928
929 void Call(const JSObjectsCluster& cluster,
930 const NumberAndSizeInfo& number_and_size) {
931 int child_index, retainer_index;
932 map_->CountReference(ClusterAsHeapObject(cluster), child_,
933 &child_index, &retainer_index);
934 map_->Map(ClusterAsHeapObject(cluster))->SetElementReference(
935 child_index, number_and_size.number(), child_entry_, retainer_index);
936 }
937
938 private:
939 HeapObject* child_;
940 HeapEntriesMap* map_;
941 HeapEntry* child_entry_;
942};
943
944
945template<class RetainersIterator>
946class AggregatingRetainerTreeIterator {
947 public:
948 explicit AggregatingRetainerTreeIterator(ClustersCoarser* coarser,
949 HeapEntriesMap* map)
950 : coarser_(coarser), map_(map) {
951 }
952
953 void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree) {
954 if (coarser_ != NULL &&
955 !coarser_->GetCoarseEquivalent(cluster).is_null()) return;
956 JSObjectsClusterTree* tree_to_iterate = tree;
957 ZoneScope zs(DELETE_ON_EXIT);
958 JSObjectsClusterTree dest_tree_;
959 if (coarser_ != NULL) {
960 RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
961 tree->ForEach(&retainers_aggregator);
962 tree_to_iterate = &dest_tree_;
963 }
964 RetainersIterator iterator(cluster, map_);
965 tree_to_iterate->ForEach(&iterator);
966 }
967
968 private:
969 ClustersCoarser* coarser_;
970 HeapEntriesMap* map_;
971};
972
973
974class AggregatedRetainerTreeAllocator {
975 public:
976 AggregatedRetainerTreeAllocator(HeapSnapshot* snapshot,
977 int* root_child_index)
978 : snapshot_(snapshot), root_child_index_(root_child_index) {
979 }
980
981 HeapEntry* GetEntry(
982 HeapObject* obj, int children_count, int retainers_count) {
983 JSObjectsCluster cluster = HeapObjectAsCluster(obj);
984 const char* name = cluster.GetSpecialCaseName();
985 if (name == NULL) {
986 name = snapshot_->collection()->GetFunctionName(cluster.constructor());
987 }
988 return AddEntryFromAggregatedSnapshot(
989 snapshot_, root_child_index_, HeapEntry::kObject, name,
990 0, 0, children_count, retainers_count);
991 }
992
993 private:
994 HeapSnapshot* snapshot_;
995 int* root_child_index_;
996};
997
998
999template<class Iterator>
1000void AggregatedHeapSnapshotGenerator::IterateRetainers(
1001 HeapEntriesMap* entries_map) {
1002 RetainerHeapProfile* p = agg_snapshot_->js_retainer_profile();
1003 AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_1(
1004 p->coarser(), entries_map);
1005 p->retainers_tree()->ForEach(&agg_ret_iter_1);
1006 AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_2(NULL, entries_map);
1007 p->aggregator()->output_tree().ForEach(&agg_ret_iter_2);
1008}
1009
1010
// Converts the aggregated snapshot into a regular HeapSnapshot in two
// phases: first count entries/children/retainers so storage can be
// allocated up front, then allocate and fill entries and references.
// The counting and filling phases must visit sources in the same order.
void AggregatedHeapSnapshotGenerator::FillHeapSnapshot(HeapSnapshot* snapshot) {
  // Count the number of entities.
  int histogram_entities_count = 0;
  int histogram_children_count = 0;
  int histogram_retainers_count = 0;
  // One entry per histogram type that actually has live bytes.
  for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
    if (agg_snapshot_->info()[i].bytes() > 0) {
      ++histogram_entities_count;
    }
  }
  // One entry per constructor cluster.
  CountingConstructorHeapProfileIterator counting_cons_iter;
  agg_snapshot_->js_cons_profile()->ForEach(&counting_cons_iter);
  histogram_entities_count += counting_cons_iter.entities_count();
  // Count retainer-graph entries and references via placeholder pass.
  HeapEntriesMap entries_map;
  IterateRetainers<CountingRetainersIterator>(&entries_map);
  histogram_entities_count += entries_map.entries_count();
  histogram_children_count += entries_map.total_children_count();
  histogram_retainers_count += entries_map.total_retainers_count();

  // Root entry references all other entries.
  histogram_children_count += histogram_entities_count;
  int root_children_count = histogram_entities_count;
  ++histogram_entities_count;  // The root entry itself.

  // Allocate and fill entries in the snapshot, allocate references.
  snapshot->AllocateEntries(histogram_entities_count,
                            histogram_children_count,
                            histogram_retainers_count);
  snapshot->AddEntry(HeapSnapshot::kInternalRootObject,
                     root_children_count,
                     0);
  // Tracks the next free child slot under the root; shared by all
  // entry-adding helpers below.
  int root_child_index = 0;
  // Histogram entries, in the same order as the counting loop above.
  for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
    if (agg_snapshot_->info()[i].bytes() > 0) {
      AddEntryFromAggregatedSnapshot(snapshot,
                                     &root_child_index,
                                     HeapEntry::kInternal,
                                     agg_snapshot_->info()[i].name(),
                                     agg_snapshot_->info()[i].number(),
                                     agg_snapshot_->info()[i].bytes(),
                                     0,
                                     0);
    }
  }
  // Constructor-cluster entries.
  AllocatingConstructorHeapProfileIterator alloc_cons_iter(
      snapshot, &root_child_index);
  agg_snapshot_->js_cons_profile()->ForEach(&alloc_cons_iter);
  // Replace retainer-graph placeholders with real entries.
  AggregatedRetainerTreeAllocator allocator(snapshot, &root_child_index);
  entries_map.UpdateEntries(&allocator);

  // Fill up references.
  IterateRetainers<AllocatingRetainersIterator>(&entries_map);
}
1064
1065
Steve Block3ce2e202009-11-05 08:53:23 +00001066bool ProducerHeapProfile::can_log_ = false;
1067
1068void ProducerHeapProfile::Setup() {
1069 can_log_ = true;
1070}
1071
Leon Clarkee46be812010-01-19 14:06:41 +00001072void ProducerHeapProfile::DoRecordJSObjectAllocation(Object* obj) {
1073 ASSERT(FLAG_log_producers);
1074 if (!can_log_) return;
Steve Block3ce2e202009-11-05 08:53:23 +00001075 int framesCount = 0;
1076 for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
1077 ++framesCount;
1078 }
1079 if (framesCount == 0) return;
1080 ++framesCount; // Reserve place for the terminator item.
1081 Vector<Address> stack(NewArray<Address>(framesCount), framesCount);
1082 int i = 0;
1083 for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
1084 stack[i++] = it.frame()->pc();
1085 }
1086 stack[i] = NULL;
1087 Handle<Object> handle = GlobalHandles::Create(obj);
1088 GlobalHandles::MakeWeak(handle.location(),
1089 static_cast<void*>(stack.start()),
1090 StackWeakReferenceCallback);
1091}
1092
1093
Steve Blocka7e24c12009-10-30 11:49:00 +00001094#endif // ENABLE_LOGGING_AND_PROFILING
1095
1096
1097} } // namespace v8::internal