// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "heap-profiler.h"
#include "frames-inl.h"
#include "global-handles.h"
#include "profile-generator.h"
#include "string-stream.h"

namespace v8 {
namespace internal {


#ifdef ENABLE_LOGGING_AND_PROFILING
namespace {

// Clusterizer is a set of helper functions for converting
// object references into clusters.
class Clusterizer : public AllStatic {
 public:
  static JSObjectsCluster Clusterize(HeapObject* obj) {
    return Clusterize(obj, true);
  }
  static void InsertIntoTree(JSObjectsClusterTree* tree,
                             HeapObject* obj, bool fine_grain);
  static void InsertReferenceIntoTree(JSObjectsClusterTree* tree,
                                      const JSObjectsCluster& cluster) {
    InsertIntoTree(tree, cluster, 0);
  }

 private:
  static JSObjectsCluster Clusterize(HeapObject* obj, bool fine_grain);
  static int CalculateNetworkSize(JSObject* obj);
  static int GetObjectSize(HeapObject* obj) {
    return obj->IsJSObject() ?
        CalculateNetworkSize(JSObject::cast(obj)) : obj->Size();
  }
  static void InsertIntoTree(JSObjectsClusterTree* tree,
                             const JSObjectsCluster& cluster, int size);
};

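// The cluster key is the object's constructor name; with |fine_grain| set,
// plain Object and Array instances also carry their identity so individual
// instances can be told apart. Strings, global property cells, and
// code-related objects go into dedicated special-case clusters.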
JSObjectsCluster Clusterizer::Clusterize(HeapObject* obj, bool fine_grain) {
  if (obj->IsJSObject()) {
    JSObject* js_obj = JSObject::cast(obj);
    String* constructor = JSObject::cast(js_obj)->constructor_name();
    // Differentiate Object and Array instances.
    if (fine_grain && (constructor == Heap::Object_symbol() ||
                       constructor == Heap::Array_symbol())) {
      return JSObjectsCluster(constructor, obj);
    } else {
      return JSObjectsCluster(constructor);
    }
  } else if (obj->IsString()) {
    return JSObjectsCluster(Heap::String_symbol());
  } else if (obj->IsJSGlobalPropertyCell()) {
    return JSObjectsCluster(JSObjectsCluster::GLOBAL_PROPERTY);
  } else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) {
    return JSObjectsCluster(JSObjectsCluster::CODE);
  }
  return JSObjectsCluster();
}


void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
                                 HeapObject* obj, bool fine_grain) {
  JSObjectsCluster cluster = Clusterize(obj, fine_grain);
  if (cluster.is_null()) return;
  InsertIntoTree(tree, cluster, GetObjectSize(obj));
}


void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
                                 const JSObjectsCluster& cluster, int size) {
  JSObjectsClusterTree::Locator loc;
  tree->Insert(cluster, &loc);
  NumberAndSizeInfo number_and_size = loc.value();
  number_and_size.increment_number(1);
  number_and_size.increment_bytes(size);
  loc.set_value(number_and_size);
}

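// The "network" size of a JSObject is its own size plus the sizes of the
// backing stores it owns exclusively: non-empty 'properties' and 'elements'
// arrays and, for functions, a non-empty context and literals array.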
int Clusterizer::CalculateNetworkSize(JSObject* obj) {
  int size = obj->Size();
  // If 'properties' and 'elements' are non-empty (thus, non-shared),
  // take their size into account.
  if (obj->properties() != Heap::empty_fixed_array()) {
    size += obj->properties()->Size();
  }
  if (obj->elements() != Heap::empty_fixed_array()) {
    size += obj->elements()->Size();
  }
  // For functions, also account for non-empty context and literals sizes.
  if (obj->IsJSFunction()) {
    JSFunction* f = JSFunction::cast(obj);
    if (f->unchecked_context()->IsContext()) {
      size += f->context()->Size();
    }
    if (f->literals()->length() != 0) {
      size += f->literals()->Size();
    }
  }
  return size;
}


// A helper class for recording back references.
class ReferencesExtractor : public ObjectVisitor {
 public:
  ReferencesExtractor(const JSObjectsCluster& cluster,
                      RetainerHeapProfile* profile)
      : cluster_(cluster),
        profile_(profile),
        inside_array_(false) {
  }

  void VisitPointer(Object** o) {
    if ((*o)->IsFixedArray() && !inside_array_) {
      // Traverse one level deep for data members that are fixed arrays.
      // This covers the case of 'elements' and 'properties' of JSObject,
      // and function contexts.
      inside_array_ = true;
      FixedArray::cast(*o)->Iterate(this);
      inside_array_ = false;
    } else if ((*o)->IsHeapObject()) {
      profile_->StoreReference(cluster_, HeapObject::cast(*o));
    }
  }

  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) VisitPointer(p);
  }

 private:
  const JSObjectsCluster& cluster_;
  RetainerHeapProfile* profile_;
  bool inside_array_;
};


// A printer interface implementation for the Retainers profile.
class RetainersPrinter : public RetainerHeapProfile::Printer {
 public:
  void PrintRetainers(const JSObjectsCluster& cluster,
                      const StringStream& retainers) {
    HeapStringAllocator allocator;
    StringStream stream(&allocator);
    cluster.Print(&stream);
    LOG(HeapSampleJSRetainersEvent(
        *(stream.ToCString()), *(retainers.ToCString())));
  }
};


// Visitor for printing a cluster tree.
class ClusterTreePrinter BASE_EMBEDDED {
 public:
  explicit ClusterTreePrinter(StringStream* stream) : stream_(stream) {}
  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size) {
    Print(stream_, cluster, number_and_size);
  }
  static void Print(StringStream* stream,
                    const JSObjectsCluster& cluster,
                    const NumberAndSizeInfo& number_and_size);

 private:
  StringStream* stream_;
};


void ClusterTreePrinter::Print(StringStream* stream,
                               const JSObjectsCluster& cluster,
                               const NumberAndSizeInfo& number_and_size) {
  stream->Put(',');
  cluster.Print(stream);
  stream->Add(";%d", number_and_size.number());
}


// Visitor for printing a retainer tree.
class SimpleRetainerTreePrinter BASE_EMBEDDED {
 public:
  explicit SimpleRetainerTreePrinter(RetainerHeapProfile::Printer* printer)
      : printer_(printer) {}
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);

 private:
  RetainerHeapProfile::Printer* printer_;
};


void SimpleRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
                                     JSObjectsClusterTree* tree) {
  HeapStringAllocator allocator;
  StringStream stream(&allocator);
  ClusterTreePrinter retainers_printer(&stream);
  tree->ForEach(&retainers_printer);
  printer_->PrintRetainers(cluster, stream);
}

// Visitor for aggregating the reference counts of equivalent clusters.
class RetainersAggregator BASE_EMBEDDED {
 public:
  RetainersAggregator(ClustersCoarser* coarser, JSObjectsClusterTree* dest_tree)
      : coarser_(coarser), dest_tree_(dest_tree) {}
  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size);

 private:
  ClustersCoarser* coarser_;
  JSObjectsClusterTree* dest_tree_;
};


void RetainersAggregator::Call(const JSObjectsCluster& cluster,
                               const NumberAndSizeInfo& number_and_size) {
  JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
  if (eq.is_null()) eq = cluster;
  JSObjectsClusterTree::Locator loc;
  dest_tree_->Insert(eq, &loc);
  NumberAndSizeInfo aggregated_number = loc.value();
  aggregated_number.increment_number(number_and_size.number());
  loc.set_value(aggregated_number);
}

// Visitor for printing a retainers tree. Aggregates equivalent retainer
// clusters.
class AggregatingRetainerTreePrinter BASE_EMBEDDED {
 public:
  AggregatingRetainerTreePrinter(ClustersCoarser* coarser,
                                 RetainerHeapProfile::Printer* printer)
      : coarser_(coarser), printer_(printer) {}
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);

 private:
  ClustersCoarser* coarser_;
  RetainerHeapProfile::Printer* printer_;
};


void AggregatingRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
                                          JSObjectsClusterTree* tree) {
  if (!coarser_->GetCoarseEquivalent(cluster).is_null()) return;
  JSObjectsClusterTree dest_tree_;
  RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
  tree->ForEach(&retainers_aggregator);
  HeapStringAllocator allocator;
  StringStream stream(&allocator);
  ClusterTreePrinter retainers_printer(&stream);
  dest_tree_.ForEach(&retainers_printer);
  printer_->PrintRetainers(cluster, stream);
}

// A helper class for building a retainers tree that aggregates
// all equivalent clusters.
class RetainerTreeAggregator BASE_EMBEDDED {
 public:
  explicit RetainerTreeAggregator(ClustersCoarser* coarser)
      : coarser_(coarser) {}
  void Process(JSObjectsRetainerTree* input_tree) {
    input_tree->ForEach(this);
  }
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
  JSObjectsRetainerTree& output_tree() { return output_tree_; }

 private:
  ClustersCoarser* coarser_;
  JSObjectsRetainerTree output_tree_;
};


void RetainerTreeAggregator::Call(const JSObjectsCluster& cluster,
                                  JSObjectsClusterTree* tree) {
  JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
  if (eq.is_null()) return;
  JSObjectsRetainerTree::Locator loc;
  if (output_tree_.Insert(eq, &loc)) {
    loc.set_value(new JSObjectsClusterTree());
  }
  RetainersAggregator retainers_aggregator(coarser_, loc.value());
  tree->ForEach(&retainers_aggregator);
}

}  // namespace


HeapProfiler* HeapProfiler::singleton_ = NULL;

HeapProfiler::HeapProfiler()
    : snapshots_(new HeapSnapshotsCollection()),
      next_snapshot_uid_(1) {
}


HeapProfiler::~HeapProfiler() {
  delete snapshots_;
}

#endif  // ENABLE_LOGGING_AND_PROFILING

void HeapProfiler::Setup() {
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (singleton_ == NULL) {
    singleton_ = new HeapProfiler();
  }
#endif
}


void HeapProfiler::TearDown() {
#ifdef ENABLE_LOGGING_AND_PROFILING
  delete singleton_;
  singleton_ = NULL;
#endif
}


#ifdef ENABLE_LOGGING_AND_PROFILING

HeapSnapshot* HeapProfiler::TakeSnapshot(const char* name) {
  ASSERT(singleton_ != NULL);
  return singleton_->TakeSnapshotImpl(name);
}


HeapSnapshot* HeapProfiler::TakeSnapshot(String* name) {
  ASSERT(singleton_ != NULL);
  return singleton_->TakeSnapshotImpl(name);
}

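// A full GC is forced first so the snapshot describes only live objects;
// the generator then fills in the snapshot registered in the collection.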
HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name) {
  Heap::CollectAllGarbage(true);
  HeapSnapshot* result = snapshots_->NewSnapshot(name, next_snapshot_uid_++);
  HeapSnapshotGenerator generator(result);
  generator.GenerateSnapshot();
  snapshots_->SnapshotGenerationFinished();
  return result;
}


HeapSnapshot* HeapProfiler::TakeSnapshotImpl(String* name) {
  return TakeSnapshotImpl(snapshots_->GetName(name));
}


int HeapProfiler::GetSnapshotsCount() {
  ASSERT(singleton_ != NULL);
  return singleton_->snapshots_->snapshots()->length();
}


HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
  ASSERT(singleton_ != NULL);
  return singleton_->snapshots_->snapshots()->at(index);
}


HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
  ASSERT(singleton_ != NULL);
  return singleton_->snapshots_->GetSnapshot(uid);
}


void HeapProfiler::ObjectMoveEvent(Address from, Address to) {
  ASSERT(singleton_ != NULL);
  singleton_->snapshots_->ObjectMoveEvent(from, to);
}

const JSObjectsClusterTreeConfig::Key JSObjectsClusterTreeConfig::kNoKey;
const JSObjectsClusterTreeConfig::Value JSObjectsClusterTreeConfig::kNoValue;


ConstructorHeapProfile::ConstructorHeapProfile()
    : zscope_(DELETE_ON_EXIT) {
}


void ConstructorHeapProfile::Call(const JSObjectsCluster& cluster,
                                  const NumberAndSizeInfo& number_and_size) {
  HeapStringAllocator allocator;
  StringStream stream(&allocator);
  cluster.Print(&stream);
  LOG(HeapSampleJSConstructorEvent(*(stream.ToCString()),
                                   number_and_size.number(),
                                   number_and_size.bytes()));
}


void ConstructorHeapProfile::CollectStats(HeapObject* obj) {
  Clusterizer::InsertIntoTree(&js_objects_info_tree_, obj, false);
}


void ConstructorHeapProfile::PrintStats() {
  js_objects_info_tree_.ForEach(this);
}


static const char* GetConstructorName(const char* name) {
  return name[0] != '\0' ? name : "(anonymous)";
}


void JSObjectsCluster::Print(StringStream* accumulator) const {
  ASSERT(!is_null());
  if (constructor_ == FromSpecialCase(ROOTS)) {
    accumulator->Add("(roots)");
  } else if (constructor_ == FromSpecialCase(GLOBAL_PROPERTY)) {
    accumulator->Add("(global property)");
  } else if (constructor_ == FromSpecialCase(CODE)) {
    accumulator->Add("(code)");
  } else if (constructor_ == FromSpecialCase(SELF)) {
    accumulator->Add("(self)");
  } else {
    SmartPointer<char> s_name(
        constructor_->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
    accumulator->Add("%s", GetConstructorName(*s_name));
    if (instance_ != NULL) {
      accumulator->Add(":%p", static_cast<void*>(instance_));
    }
  }
}


void JSObjectsCluster::DebugPrint(StringStream* accumulator) const {
  if (!is_null()) {
    Print(accumulator);
  } else {
    accumulator->Add("(null cluster)");
  }
}


inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
    const JSObjectsCluster& cluster_)
    : cluster(cluster_), refs(kInitialBackrefsListCapacity) {
}


inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
    const ClustersCoarser::ClusterBackRefs& src)
    : cluster(src.cluster), refs(src.refs.capacity()) {
  refs.AddAll(src.refs);
}


inline ClustersCoarser::ClusterBackRefs&
    ClustersCoarser::ClusterBackRefs::operator=(
        const ClustersCoarser::ClusterBackRefs& src) {
  if (this == &src) return *this;
  cluster = src.cluster;
  refs.Clear();
  refs.AddAll(src.refs);
  return *this;
}

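// Orders back-reference records by their clusters' constructors first,
// then by the number of referencing clusters, and finally by comparing
// the referencing clusters element-wise.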
inline int ClustersCoarser::ClusterBackRefs::Compare(
    const ClustersCoarser::ClusterBackRefs& a,
    const ClustersCoarser::ClusterBackRefs& b) {
  int cmp = JSObjectsCluster::CompareConstructors(a.cluster, b.cluster);
  if (cmp != 0) return cmp;
  if (a.refs.length() < b.refs.length()) return -1;
  if (a.refs.length() > b.refs.length()) return 1;
  for (int i = 0; i < a.refs.length(); ++i) {
    int cmp = JSObjectsCluster::Compare(a.refs[i], b.refs[i]);
    if (cmp != 0) return cmp;
  }
  return 0;
}


ClustersCoarser::ClustersCoarser()
    : zscope_(DELETE_ON_EXIT),
      sim_list_(ClustersCoarser::kInitialSimilarityListCapacity),
      current_pair_(NULL),
      current_set_(NULL),
      self_(NULL) {
}


void ClustersCoarser::Call(const JSObjectsCluster& cluster,
                           JSObjectsClusterTree* tree) {
  if (!cluster.can_be_coarsed()) return;
  ClusterBackRefs pair(cluster);
  ASSERT(current_pair_ == NULL);
  current_pair_ = &pair;
  current_set_ = new JSObjectsRetainerTree();
  self_ = &cluster;
  tree->ForEach(this);
  sim_list_.Add(pair);
  current_pair_ = NULL;
  current_set_ = NULL;
  self_ = NULL;
}


void ClustersCoarser::Call(const JSObjectsCluster& cluster,
                           const NumberAndSizeInfo& number_and_size) {
  ASSERT(current_pair_ != NULL);
  ASSERT(current_set_ != NULL);
  ASSERT(self_ != NULL);
  JSObjectsRetainerTree::Locator loc;
  if (JSObjectsCluster::Compare(*self_, cluster) == 0) {
    current_pair_->refs.Add(JSObjectsCluster(JSObjectsCluster::SELF));
    return;
  }
  JSObjectsCluster eq = GetCoarseEquivalent(cluster);
  if (!eq.is_null()) {
    if (current_set_->Find(eq, &loc)) return;
    current_pair_->refs.Add(eq);
    current_set_->Insert(eq, &loc);
  } else {
    current_pair_->refs.Add(cluster);
  }
}

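// Runs coarsening passes until a fixed point is reached: a pass that
// discovers no new cluster equivalences (or reaching kMaxPassesCount)
// stops the iteration.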
void ClustersCoarser::Process(JSObjectsRetainerTree* tree) {
  int last_eq_clusters = -1;
  for (int i = 0; i < kMaxPassesCount; ++i) {
    sim_list_.Clear();
    const int curr_eq_clusters = DoProcess(tree);
    // If no new cluster equivalents discovered, abort processing.
    if (last_eq_clusters == curr_eq_clusters) break;
    last_eq_clusters = curr_eq_clusters;
  }
}


int ClustersCoarser::DoProcess(JSObjectsRetainerTree* tree) {
  tree->ForEach(this);
  sim_list_.Iterate(ClusterBackRefs::SortRefsIterator);
  sim_list_.Sort(ClusterBackRefsCmp);
  return FillEqualityTree();
}


JSObjectsCluster ClustersCoarser::GetCoarseEquivalent(
    const JSObjectsCluster& cluster) {
  if (!cluster.can_be_coarsed()) return JSObjectsCluster();
  EqualityTree::Locator loc;
  return eq_tree_.Find(cluster, &loc) ? loc.value() : JSObjectsCluster();
}


bool ClustersCoarser::HasAnEquivalent(const JSObjectsCluster& cluster) {
  // Return true for coarsible clusters that have a non-identical equivalent.
  if (!cluster.can_be_coarsed()) return false;
  JSObjectsCluster eq = GetCoarseEquivalent(cluster);
  return !eq.is_null() && JSObjectsCluster::Compare(cluster, eq) != 0;
}

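// Walks the sorted similarity list and maps every run of clusters with
// identical back-reference profiles onto the first cluster of the run,
// recording the mapping in eq_tree_. Returns the number of clusters that
// were mapped onto another cluster.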
int ClustersCoarser::FillEqualityTree() {
  int eq_clusters_count = 0;
  int eq_to = 0;
  bool first_added = false;
  for (int i = 1; i < sim_list_.length(); ++i) {
    if (ClusterBackRefs::Compare(sim_list_[i], sim_list_[eq_to]) == 0) {
      EqualityTree::Locator loc;
      if (!first_added) {
        // Add self-equivalence, if we have more than one item in this
        // equivalence class.
        eq_tree_.Insert(sim_list_[eq_to].cluster, &loc);
        loc.set_value(sim_list_[eq_to].cluster);
        first_added = true;
      }
      eq_tree_.Insert(sim_list_[i].cluster, &loc);
      loc.set_value(sim_list_[eq_to].cluster);
      ++eq_clusters_count;
    } else {
      eq_to = i;
      first_added = false;
    }
  }
  return eq_clusters_count;
}


const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoKey;
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoValue;
const JSObjectsRetainerTreeConfig::Key JSObjectsRetainerTreeConfig::kNoKey;
const JSObjectsRetainerTreeConfig::Value JSObjectsRetainerTreeConfig::kNoValue =
    NULL;


RetainerHeapProfile::RetainerHeapProfile()
    : zscope_(DELETE_ON_EXIT) {
  JSObjectsCluster roots(JSObjectsCluster::ROOTS);
  ReferencesExtractor extractor(roots, this);
  Heap::IterateRoots(&extractor, VISIT_ONLY_STRONG);
}

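// Records that |cluster| holds a reference to |ref|. The retainers tree is
// keyed by the referenced object's cluster; each entry accumulates the
// clusters (with counts) that point at it.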
void RetainerHeapProfile::StoreReference(const JSObjectsCluster& cluster,
                                         HeapObject* ref) {
  JSObjectsCluster ref_cluster = Clusterizer::Clusterize(ref);
  if (ref_cluster.is_null()) return;
  JSObjectsRetainerTree::Locator ref_loc;
  if (retainers_tree_.Insert(ref_cluster, &ref_loc)) {
    ref_loc.set_value(new JSObjectsClusterTree());
  }
  JSObjectsClusterTree* referenced_by = ref_loc.value();
  Clusterizer::InsertReferenceIntoTree(referenced_by, cluster);
}


void RetainerHeapProfile::CollectStats(HeapObject* obj) {
  const JSObjectsCluster cluster = Clusterizer::Clusterize(obj);
  if (cluster.is_null()) return;
  ReferencesExtractor extractor(cluster, this);
  obj->Iterate(&extractor);
}


void RetainerHeapProfile::DebugPrintStats(
    RetainerHeapProfile::Printer* printer) {
  coarser_.Process(&retainers_tree_);
  // Print clusters that have no equivalents, aggregating their retainers.
  AggregatingRetainerTreePrinter agg_printer(&coarser_, printer);
  retainers_tree_.ForEach(&agg_printer);
  // Now aggregate clusters that have equivalents...
  RetainerTreeAggregator aggregator(&coarser_);
  aggregator.Process(&retainers_tree_);
  // ...and print them.
  SimpleRetainerTreePrinter s_printer(printer);
  aggregator.output_tree().ForEach(&s_printer);
}


void RetainerHeapProfile::PrintStats() {
  RetainersPrinter printer;
  DebugPrintStats(&printer);
}


//
// HeapProfiler class implementation.
//
void HeapProfiler::CollectStats(HeapObject* obj, HistogramInfo* info) {
  InstanceType type = obj->map()->instance_type();
  ASSERT(0 <= type && type <= LAST_TYPE);
  if (!FreeListNode::IsFreeListNode(obj)) {
    info[type].increment_number(1);
    info[type].increment_bytes(obj->Size());
  }
}


static void StackWeakReferenceCallback(Persistent<Value> object,
                                       void* trace) {
  DeleteArray(static_cast<Address*>(trace));
  object.Dispose();
}


static void PrintProducerStackTrace(Object* obj, void* trace) {
  if (!obj->IsJSObject()) return;
  String* constructor = JSObject::cast(obj)->constructor_name();
  SmartPointer<char> s_name(
      constructor->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
  LOG(HeapSampleJSProducerEvent(GetConstructorName(*s_name),
                                reinterpret_cast<Address*>(trace)));
}

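// Writes a complete heap sample to the log: per-instance-type totals (with
// all string types lumped together), the constructor and retainer profiles
// collected during a single heap walk, and the producer stack traces
// recorded for JS objects, bracketed by begin/end sample events.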
void HeapProfiler::WriteSample() {
  LOG(HeapSampleBeginEvent("Heap", "allocated"));
  LOG(HeapSampleStats(
      "Heap", "allocated", Heap::CommittedMemory(), Heap::SizeOfObjects()));

  HistogramInfo info[LAST_TYPE+1];
#define DEF_TYPE_NAME(name) info[name].set_name(#name);
  INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
#undef DEF_TYPE_NAME

  ConstructorHeapProfile js_cons_profile;
  RetainerHeapProfile js_retainer_profile;
  HeapIterator iterator;
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    CollectStats(obj, info);
    js_cons_profile.CollectStats(obj);
    js_retainer_profile.CollectStats(obj);
  }

  // Lump all the string types together.
  int string_number = 0;
  int string_bytes = 0;
#define INCREMENT_SIZE(type, size, name, camel_name)   \
    string_number += info[type].number();              \
    string_bytes += info[type].bytes();
  STRING_TYPE_LIST(INCREMENT_SIZE)
#undef INCREMENT_SIZE
  if (string_bytes > 0) {
    LOG(HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
  }

  for (int i = FIRST_NONSTRING_TYPE; i <= LAST_TYPE; ++i) {
    if (info[i].bytes() > 0) {
      LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
                              info[i].bytes()));
    }
  }

  js_cons_profile.PrintStats();
  js_retainer_profile.PrintStats();

  GlobalHandles::IterateWeakRoots(PrintProducerStackTrace,
                                  StackWeakReferenceCallback);

  LOG(HeapSampleEndEvent("Heap", "allocated"));
}


bool ProducerHeapProfile::can_log_ = false;

void ProducerHeapProfile::Setup() {
  can_log_ = true;
}

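// Captures the current JavaScript stack as a NULL-terminated array of
// frame PCs and attaches it to |obj| via a weak global handle; the array
// is released by StackWeakReferenceCallback when the object dies.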
void ProducerHeapProfile::DoRecordJSObjectAllocation(Object* obj) {
  ASSERT(FLAG_log_producers);
  if (!can_log_) return;
  int framesCount = 0;
  for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
    ++framesCount;
  }
  if (framesCount == 0) return;
  ++framesCount;  // Reserve space for the terminator item.
  Vector<Address> stack(NewArray<Address>(framesCount), framesCount);
  int i = 0;
  for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
    stack[i++] = it.frame()->pc();
  }
  stack[i] = NULL;
  Handle<Object> handle = GlobalHandles::Create(obj);
  GlobalHandles::MakeWeak(handle.location(),
                          static_cast<void*>(stack.start()),
                          StackWeakReferenceCallback);
}


#endif  // ENABLE_LOGGING_AND_PROFILING


} }  // namespace v8::internal