blob: 07b631fa7332d8de6bc1b50f6987aae4d74e58cd [file] [log] [blame]
Ben Murdochb0fe1622011-05-05 13:52:32 +01001// Copyright 2009-2010 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "heap-profiler.h"
Steve Block3ce2e202009-11-05 08:53:23 +000031#include "frames-inl.h"
32#include "global-handles.h"
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010033#include "profile-generator.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000034#include "string-stream.h"
35
36namespace v8 {
37namespace internal {
38
39
40#ifdef ENABLE_LOGGING_AND_PROFILING
41namespace {
42
43// Clusterizer is a set of helper functions for converting
44// object references into clusters.
// Clusterizer is a set of helper functions for converting
// object references into clusters.
//
// A "cluster" groups heap objects by constructor (coarse grain) or by
// constructor + instance for Object/Array (fine grain); sizes recorded in
// the trees are "network" sizes that include owned backing stores.
class Clusterizer : public AllStatic {
 public:
  // Fine-grained clusterization (differentiates Object/Array instances).
  static JSObjectsCluster Clusterize(HeapObject* obj) {
    return Clusterize(obj, true);
  }
  // Clusterizes |obj| and accumulates its size into |tree|.
  static void InsertIntoTree(JSObjectsClusterTree* tree,
                             HeapObject* obj, bool fine_grain);
  // Records a reference to |cluster| in |tree| with zero size
  // (only the reference count matters for retainer profiles).
  static void InsertReferenceIntoTree(JSObjectsClusterTree* tree,
                                      const JSObjectsCluster& cluster) {
    InsertIntoTree(tree, cluster, 0);
  }

 private:
  static JSObjectsCluster Clusterize(HeapObject* obj, bool fine_grain);
  static int CalculateNetworkSize(JSObject* obj);
  // JS objects get their aggregate ("network") size; other heap objects
  // just their own size.
  static int GetObjectSize(HeapObject* obj) {
    return obj->IsJSObject() ?
        CalculateNetworkSize(JSObject::cast(obj)) : obj->Size();
  }
  static void InsertIntoTree(JSObjectsClusterTree* tree,
                             const JSObjectsCluster& cluster, int size);
};
67
68
69JSObjectsCluster Clusterizer::Clusterize(HeapObject* obj, bool fine_grain) {
70 if (obj->IsJSObject()) {
71 JSObject* js_obj = JSObject::cast(obj);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -080072 String* constructor = GetConstructorNameForHeapProfile(
73 JSObject::cast(js_obj));
Steve Blocka7e24c12009-10-30 11:49:00 +000074 // Differentiate Object and Array instances.
75 if (fine_grain && (constructor == Heap::Object_symbol() ||
76 constructor == Heap::Array_symbol())) {
77 return JSObjectsCluster(constructor, obj);
78 } else {
79 return JSObjectsCluster(constructor);
80 }
81 } else if (obj->IsString()) {
82 return JSObjectsCluster(Heap::String_symbol());
Steve Blockd0582a62009-12-15 09:54:21 +000083 } else if (obj->IsJSGlobalPropertyCell()) {
84 return JSObjectsCluster(JSObjectsCluster::GLOBAL_PROPERTY);
85 } else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) {
86 return JSObjectsCluster(JSObjectsCluster::CODE);
Steve Blocka7e24c12009-10-30 11:49:00 +000087 }
88 return JSObjectsCluster();
89}
90
91
92void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
93 HeapObject* obj, bool fine_grain) {
94 JSObjectsCluster cluster = Clusterize(obj, fine_grain);
95 if (cluster.is_null()) return;
96 InsertIntoTree(tree, cluster, GetObjectSize(obj));
97}
98
99
100void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
101 const JSObjectsCluster& cluster, int size) {
102 JSObjectsClusterTree::Locator loc;
103 tree->Insert(cluster, &loc);
104 NumberAndSizeInfo number_and_size = loc.value();
105 number_and_size.increment_number(1);
106 number_and_size.increment_bytes(size);
107 loc.set_value(number_and_size);
108}
109
110
// Computes the "network" size of a JS object: its own size plus the size
// of backing stores it exclusively owns (non-shared properties/elements
// arrays, and for functions their context and literals).
int Clusterizer::CalculateNetworkSize(JSObject* obj) {
  int size = obj->Size();
  // If 'properties' and 'elements' are non-empty (thus, non-shared),
  // take their size into account.
  if (obj->properties() != Heap::empty_fixed_array()) {
    size += obj->properties()->Size();
  }
  if (obj->elements() != Heap::empty_fixed_array()) {
    size += obj->elements()->Size();
  }
  // For functions, also account non-empty context and literals sizes.
  if (obj->IsJSFunction()) {
    JSFunction* f = JSFunction::cast(obj);
    // unchecked_context() may hold a non-Context placeholder; only count
    // it once it is a real Context.
    if (f->unchecked_context()->IsContext()) {
      size += f->context()->Size();
    }
    if (f->literals()->length() != 0) {
      size += f->literals()->Size();
    }
  }
  return size;
}
133
134
135// A helper class for recording back references.
// A helper class for recording back references.
//
// Visits all pointers of one object and reports each referenced heap
// object to the retainer profile as being retained by |cluster|.
class ReferencesExtractor : public ObjectVisitor {
 public:
  ReferencesExtractor(const JSObjectsCluster& cluster,
                      RetainerHeapProfile* profile)
      : cluster_(cluster),
        profile_(profile),
        inside_array_(false) {
  }

  void VisitPointer(Object** o) {
    // |inside_array_| limits the recursion to a single level.
    if ((*o)->IsFixedArray() && !inside_array_) {
      // Traverse one level deep for data members that are fixed arrays.
      // This covers the case of 'elements' and 'properties' of JSObject,
      // and function contexts.
      inside_array_ = true;
      FixedArray::cast(*o)->Iterate(this);
      inside_array_ = false;
    } else if ((*o)->IsHeapObject()) {
      profile_->StoreReference(cluster_, HeapObject::cast(*o));
    }
  }

  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) VisitPointer(p);
  }

 private:
  const JSObjectsCluster& cluster_;  // The retaining cluster.
  RetainerHeapProfile* profile_;     // Where references are recorded.
  bool inside_array_;                // True while iterating a FixedArray.
};
167
168
169// A printer interface implementation for the Retainers profile.
170class RetainersPrinter : public RetainerHeapProfile::Printer {
171 public:
172 void PrintRetainers(const JSObjectsCluster& cluster,
173 const StringStream& retainers) {
174 HeapStringAllocator allocator;
175 StringStream stream(&allocator);
176 cluster.Print(&stream);
177 LOG(HeapSampleJSRetainersEvent(
178 *(stream.ToCString()), *(retainers.ToCString())));
179 }
180};
181
182
183// Visitor for printing a cluster tree.
// Visitor for printing a cluster tree.
// Each Call() appends one ",<cluster>;<count>" item to the stream.
class ClusterTreePrinter BASE_EMBEDDED {
 public:
  explicit ClusterTreePrinter(StringStream* stream) : stream_(stream) {}
  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size) {
    Print(stream_, cluster, number_and_size);
  }
  static void Print(StringStream* stream,
                    const JSObjectsCluster& cluster,
                    const NumberAndSizeInfo& number_and_size);

 private:
  StringStream* stream_;  // Not owned.
};
198
199
// Appends one ",<cluster>;<reference count>" item to |stream|.
void ClusterTreePrinter::Print(StringStream* stream,
                               const JSObjectsCluster& cluster,
                               const NumberAndSizeInfo& number_and_size) {
  stream->Put(',');
  cluster.Print(stream);
  stream->Add(";%d", number_and_size.number());
}
207
208
209// Visitor for printing a retainer tree.
// Visitor for printing a retainer tree.
// Prints each cluster's retainers as-is, without any aggregation.
class SimpleRetainerTreePrinter BASE_EMBEDDED {
 public:
  explicit SimpleRetainerTreePrinter(RetainerHeapProfile::Printer* printer)
      : printer_(printer) {}
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);

 private:
  RetainerHeapProfile::Printer* printer_;  // Not owned.
};
219
220
221void SimpleRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
222 JSObjectsClusterTree* tree) {
223 HeapStringAllocator allocator;
224 StringStream stream(&allocator);
225 ClusterTreePrinter retainers_printer(&stream);
226 tree->ForEach(&retainers_printer);
227 printer_->PrintRetainers(cluster, stream);
228}
229
230
231// Visitor for aggregating references count of equivalent clusters.
// Visitor for aggregating references count of equivalent clusters.
// Folds each visited cluster's count into its coarse-equivalent node in
// |dest_tree| (byte sizes are not aggregated here).
class RetainersAggregator BASE_EMBEDDED {
 public:
  RetainersAggregator(ClustersCoarser* coarser, JSObjectsClusterTree* dest_tree)
      : coarser_(coarser), dest_tree_(dest_tree) {}
  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size);

 private:
  ClustersCoarser* coarser_;       // Supplies coarse equivalents.
  JSObjectsClusterTree* dest_tree_;  // Aggregation target; not owned.
};
243
244
245void RetainersAggregator::Call(const JSObjectsCluster& cluster,
246 const NumberAndSizeInfo& number_and_size) {
247 JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
248 if (eq.is_null()) eq = cluster;
249 JSObjectsClusterTree::Locator loc;
250 dest_tree_->Insert(eq, &loc);
251 NumberAndSizeInfo aggregated_number = loc.value();
252 aggregated_number.increment_number(number_and_size.number());
253 loc.set_value(aggregated_number);
254}
255
256
257// Visitor for printing retainers tree. Aggregates equivalent retainer clusters.
// Visitor for printing retainers tree. Aggregates equivalent retainer clusters.
// Only clusters without a coarse equivalent are printed; their retainers
// are first merged by equivalence class.
class AggregatingRetainerTreePrinter BASE_EMBEDDED {
 public:
  AggregatingRetainerTreePrinter(ClustersCoarser* coarser,
                                 RetainerHeapProfile::Printer* printer)
      : coarser_(coarser), printer_(printer) {}
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);

 private:
  ClustersCoarser* coarser_;               // Supplies coarse equivalents.
  RetainerHeapProfile::Printer* printer_;  // Not owned.
};
269
270
271void AggregatingRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
272 JSObjectsClusterTree* tree) {
273 if (!coarser_->GetCoarseEquivalent(cluster).is_null()) return;
274 JSObjectsClusterTree dest_tree_;
275 RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
276 tree->ForEach(&retainers_aggregator);
277 HeapStringAllocator allocator;
278 StringStream stream(&allocator);
279 ClusterTreePrinter retainers_printer(&stream);
280 dest_tree_.ForEach(&retainers_printer);
281 printer_->PrintRetainers(cluster, stream);
282}
283
Steve Block791712a2010-08-27 10:21:07 +0100284} // namespace
285
Steve Blocka7e24c12009-10-30 11:49:00 +0000286
287// A helper class for building a retainers tree, that aggregates
288// all equivalent clusters.
// A helper class for building a retainers tree, that aggregates
// all equivalent clusters.
class RetainerTreeAggregator {
 public:
  explicit RetainerTreeAggregator(ClustersCoarser* coarser)
      : coarser_(coarser) {}
  // Walks |input_tree| and populates output_tree() with the aggregated
  // view (only clusters that have coarse equivalents).
  void Process(JSObjectsRetainerTree* input_tree) {
    input_tree->ForEach(this);
  }
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
  JSObjectsRetainerTree& output_tree() { return output_tree_; }

 private:
  ClustersCoarser* coarser_;           // Supplies coarse equivalents.
  JSObjectsRetainerTree output_tree_;  // Aggregation result.
};
303
304
// Merges |cluster|'s retainers into the output-tree node of its coarse
// equivalent; clusters without an equivalent are skipped entirely.
void RetainerTreeAggregator::Call(const JSObjectsCluster& cluster,
                                  JSObjectsClusterTree* tree) {
  JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
  if (eq.is_null()) return;
  JSObjectsRetainerTree::Locator loc;
  if (output_tree_.Insert(eq, &loc)) {
    // First time this equivalence class is seen: create its cluster tree.
    // NOTE(review): allocated with raw 'new'; presumably reclaimed by the
    // profile's zone scope — confirm ownership before changing.
    loc.set_value(new JSObjectsClusterTree());
  }
  RetainersAggregator retainers_aggregator(coarser_, loc.value());
  tree->ForEach(&retainers_aggregator);
}
316
Steve Blocka7e24c12009-10-30 11:49:00 +0000317
// Lazily-created singleton instance; managed by Setup()/TearDown().
HeapProfiler* HeapProfiler::singleton_ = NULL;

// Snapshot UIDs start at 1 (0 is reserved as "no snapshot").
HeapProfiler::HeapProfiler()
    : snapshots_(new HeapSnapshotsCollection()),
      next_snapshot_uid_(1) {
}


HeapProfiler::~HeapProfiler() {
  delete snapshots_;
}
329
330#endif // ENABLE_LOGGING_AND_PROFILING
331
332void HeapProfiler::Setup() {
333#ifdef ENABLE_LOGGING_AND_PROFILING
334 if (singleton_ == NULL) {
335 singleton_ = new HeapProfiler();
336 }
337#endif
338}
339
340
// Destroys the profiler singleton; safe to call when Setup() never ran
// (deleting NULL is a no-op).
void HeapProfiler::TearDown() {
#ifdef ENABLE_LOGGING_AND_PROFILING
  delete singleton_;
  singleton_ = NULL;
#endif
}
347
348
349#ifdef ENABLE_LOGGING_AND_PROFILING
350
// Static entry point: delegates to the singleton (which must exist).
HeapSnapshot* HeapProfiler::TakeSnapshot(const char* name,
                                         int type,
                                         v8::ActivityControl* control) {
  ASSERT(singleton_ != NULL);
  return singleton_->TakeSnapshotImpl(name, type, control);
}
357
358
// Overload taking the snapshot name as a heap String.
HeapSnapshot* HeapProfiler::TakeSnapshot(String* name,
                                         int type,
                                         v8::ActivityControl* control) {
  ASSERT(singleton_ != NULL);
  return singleton_->TakeSnapshotImpl(name, type, control);
}
365
366
// Creates a snapshot of the requested type, returning NULL if generation
// was aborted (e.g. cancelled through |control|).
HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name,
                                             int type,
                                             v8::ActivityControl* control) {
  HeapSnapshot::Type s_type = static_cast<HeapSnapshot::Type>(type);
  HeapSnapshot* result =
      snapshots_->NewSnapshot(s_type, name, next_snapshot_uid_++);
  bool generation_completed = true;
  switch (s_type) {
    case HeapSnapshot::kFull: {
      // GC first so the snapshot only contains live objects.
      Heap::CollectAllGarbage(true);
      HeapSnapshotGenerator generator(result, control);
      // Only full snapshots can be aborted mid-generation.
      generation_completed = generator.GenerateSnapshot();
      break;
    }
    case HeapSnapshot::kAggregated: {
      Heap::CollectAllGarbage(true);
      AggregatedHeapSnapshot agg_snapshot;
      AggregatedHeapSnapshotGenerator generator(&agg_snapshot);
      generator.GenerateSnapshot();
      generator.FillHeapSnapshot(result);
      break;
    }
    default:
      UNREACHABLE();
  }
  if (!generation_completed) {
    // Aborted: discard the partially built snapshot.
    delete result;
    result = NULL;
  }
  // Notify the collection even on abort (result may be NULL here).
  snapshots_->SnapshotGenerationFinished(result);
  return result;
}
399
400
// String-named variant: resolves the name through the snapshots'
// name table, then delegates to the const char* implementation.
HeapSnapshot* HeapProfiler::TakeSnapshotImpl(String* name,
                                             int type,
                                             v8::ActivityControl* control) {
  return TakeSnapshotImpl(snapshots_->GetName(name), type, control);
}
406
407
// Returns the number of snapshots taken so far.
int HeapProfiler::GetSnapshotsCount() {
  ASSERT(singleton_ != NULL);
  return singleton_->snapshots_->snapshots()->length();
}
412
413
// Returns the snapshot at position |index| in taking order.
HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
  ASSERT(singleton_ != NULL);
  return singleton_->snapshots_->snapshots()->at(index);
}
418
419
// Looks up a snapshot by its unique id (assigned at creation time).
HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
  ASSERT(singleton_ != NULL);
  return singleton_->snapshots_->GetSnapshot(uid);
}
424
425
// Forwards GC object relocations to the snapshots collection so stored
// object ids stay valid across moves.
void HeapProfiler::ObjectMoveEvent(Address from, Address to) {
  ASSERT(singleton_ != NULL);
  singleton_->snapshots_->ObjectMoveEvent(from, to);
}
430
431
// Out-of-line storage for the cluster tree config's sentinel key/value
// (declared in the header).
const JSObjectsClusterTreeConfig::Key JSObjectsClusterTreeConfig::kNoKey;
const JSObjectsClusterTreeConfig::Value JSObjectsClusterTreeConfig::kNoValue;
434
435
// The zone scope reclaims all tree nodes allocated while profiling.
ConstructorHeapProfile::ConstructorHeapProfile()
    : zscope_(DELETE_ON_EXIT) {
}
439
440
441void ConstructorHeapProfile::Call(const JSObjectsCluster& cluster,
442 const NumberAndSizeInfo& number_and_size) {
443 HeapStringAllocator allocator;
444 StringStream stream(&allocator);
445 cluster.Print(&stream);
446 LOG(HeapSampleJSConstructorEvent(*(stream.ToCString()),
447 number_and_size.number(),
448 number_and_size.bytes()));
449}
450
451
// Records |obj| using coarse-grained clustering (no per-instance split).
void ConstructorHeapProfile::CollectStats(HeapObject* obj) {
  Clusterizer::InsertIntoTree(&js_objects_info_tree_, obj, false);
}
455
456
// Emits one log line per recorded cluster (via Call()).
void ConstructorHeapProfile::PrintStats() {
  js_objects_info_tree_.ForEach(this);
}
460
461
// Substitutes a readable placeholder for an empty constructor name.
static const char* GetConstructorName(const char* name) {
  if (name[0] == '\0') return "(anonymous)";
  return name;
}
465
466
// Returns a human-readable name for special-case clusters, or NULL when
// the cluster represents an ordinary constructor.
const char* JSObjectsCluster::GetSpecialCaseName() const {
  if (constructor_ == FromSpecialCase(ROOTS)) {
    return "(roots)";
  } else if (constructor_ == FromSpecialCase(GLOBAL_PROPERTY)) {
    return "(global property)";
  } else if (constructor_ == FromSpecialCase(CODE)) {
    return "(code)";
  } else if (constructor_ == FromSpecialCase(SELF)) {
    return "(self)";
  }
  return NULL;
}
479
480
// Prints the cluster's name: the special-case label if it has one,
// otherwise the constructor name (plus instance address for fine-grained
// clusters). Must not be called on a null cluster.
void JSObjectsCluster::Print(StringStream* accumulator) const {
  ASSERT(!is_null());
  const char* special_case_name = GetSpecialCaseName();
  if (special_case_name != NULL) {
    accumulator->Add(special_case_name);
  } else {
    SmartPointer<char> s_name(
        constructor_->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
    accumulator->Add("%s", GetConstructorName(*s_name));
    if (instance_ != NULL) {
      // Fine-grained cluster: append the instance address.
      accumulator->Add(":%p", static_cast<void*>(instance_));
    }
  }
}
495
496
497void JSObjectsCluster::DebugPrint(StringStream* accumulator) const {
498 if (!is_null()) {
499 Print(accumulator);
500 } else {
501 accumulator->Add("(null cluster)");
502 }
503}
504
505
// Creates an empty back-references record for |cluster_|.
inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
    const JSObjectsCluster& cluster_)
    : cluster(cluster_), refs(kInitialBackrefsListCapacity) {
}
510
511
// Copy constructor: deep-copies the refs list.
inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
    const ClustersCoarser::ClusterBackRefs& src)
    : cluster(src.cluster), refs(src.refs.capacity()) {
  refs.AddAll(src.refs);
}
517
518
// Assignment: deep-copies the refs list; self-assignment safe.
inline ClustersCoarser::ClusterBackRefs&
    ClustersCoarser::ClusterBackRefs::operator=(
        const ClustersCoarser::ClusterBackRefs& src) {
  if (this == &src) return *this;
  cluster = src.cluster;
  refs.Clear();
  refs.AddAll(src.refs);
  return *this;
}
528
529
530inline int ClustersCoarser::ClusterBackRefs::Compare(
531 const ClustersCoarser::ClusterBackRefs& a,
532 const ClustersCoarser::ClusterBackRefs& b) {
533 int cmp = JSObjectsCluster::CompareConstructors(a.cluster, b.cluster);
534 if (cmp != 0) return cmp;
535 if (a.refs.length() < b.refs.length()) return -1;
536 if (a.refs.length() > b.refs.length()) return 1;
537 for (int i = 0; i < a.refs.length(); ++i) {
538 int cmp = JSObjectsCluster::Compare(a.refs[i], b.refs[i]);
539 if (cmp != 0) return cmp;
540 }
541 return 0;
542}
543
544
// The current_* members are transient per-Call state; they are non-NULL
// only while a tree is being visited.
ClustersCoarser::ClustersCoarser()
    : zscope_(DELETE_ON_EXIT),
      sim_list_(ClustersCoarser::kInitialSimilarityListCapacity),
      current_pair_(NULL),
      current_set_(NULL),
      self_(NULL) {
}
552
553
// Retainer-tree visitor: collects the back-reference signature of each
// coarsible cluster into sim_list_. Sets up transient state consumed by
// the NumberAndSizeInfo overload of Call() during tree->ForEach().
void ClustersCoarser::Call(const JSObjectsCluster& cluster,
                           JSObjectsClusterTree* tree) {
  if (!cluster.can_be_coarsed()) return;
  ClusterBackRefs pair(cluster);
  ASSERT(current_pair_ == NULL);  // Calls must not nest.
  current_pair_ = &pair;
  // Tracks equivalents already recorded, to de-duplicate references.
  current_set_ = new JSObjectsRetainerTree();
  self_ = &cluster;
  tree->ForEach(this);
  sim_list_.Add(pair);
  current_pair_ = NULL;
  current_set_ = NULL;
  self_ = NULL;
}
568
569
// Cluster-tree visitor (invoked from the overload above): appends one
// back reference to the current pair. Self references are canonicalized
// to the SELF cluster; known equivalents are added at most once.
void ClustersCoarser::Call(const JSObjectsCluster& cluster,
                           const NumberAndSizeInfo& number_and_size) {
  ASSERT(current_pair_ != NULL);
  ASSERT(current_set_ != NULL);
  ASSERT(self_ != NULL);
  JSObjectsRetainerTree::Locator loc;
  if (JSObjectsCluster::Compare(*self_, cluster) == 0) {
    current_pair_->refs.Add(JSObjectsCluster(JSObjectsCluster::SELF));
    return;
  }
  JSObjectsCluster eq = GetCoarseEquivalent(cluster);
  if (!eq.is_null()) {
    // Record each coarse equivalent only once per pair.
    if (current_set_->Find(eq, &loc)) return;
    current_pair_->refs.Add(eq);
    current_set_->Insert(eq, &loc);
  } else {
    current_pair_->refs.Add(cluster);
  }
}
589
590
591void ClustersCoarser::Process(JSObjectsRetainerTree* tree) {
592 int last_eq_clusters = -1;
593 for (int i = 0; i < kMaxPassesCount; ++i) {
594 sim_list_.Clear();
595 const int curr_eq_clusters = DoProcess(tree);
596 // If no new cluster equivalents discovered, abort processing.
597 if (last_eq_clusters == curr_eq_clusters) break;
598 last_eq_clusters = curr_eq_clusters;
599 }
600}
601
602
// One coarsening pass: gather back-reference signatures, sort them, and
// rebuild the equality tree. Returns the number of equivalents found.
int ClustersCoarser::DoProcess(JSObjectsRetainerTree* tree) {
  tree->ForEach(this);
  sim_list_.Iterate(ClusterBackRefs::SortRefsIterator);
  sim_list_.Sort(ClusterBackRefsCmp);
  return FillEqualityTree();
}
609
610
611JSObjectsCluster ClustersCoarser::GetCoarseEquivalent(
612 const JSObjectsCluster& cluster) {
613 if (!cluster.can_be_coarsed()) return JSObjectsCluster();
614 EqualityTree::Locator loc;
615 return eq_tree_.Find(cluster, &loc) ? loc.value() : JSObjectsCluster();
616}
617
618
619bool ClustersCoarser::HasAnEquivalent(const JSObjectsCluster& cluster) {
620 // Return true for coarsible clusters that have a non-identical equivalent.
621 if (!cluster.can_be_coarsed()) return false;
622 JSObjectsCluster eq = GetCoarseEquivalent(cluster);
623 return !eq.is_null() && JSObjectsCluster::Compare(cluster, eq) != 0;
624}
625
626
// Scans the sorted sim_list_ for runs of equal back-reference signatures
// and maps every member of a run to the run's first cluster in eq_tree_.
// Returns the number of non-representative equivalents recorded.
int ClustersCoarser::FillEqualityTree() {
  int eq_clusters_count = 0;
  int eq_to = 0;           // Index of the current run's representative.
  bool first_added = false;  // Whether the representative itself is mapped.
  for (int i = 1; i < sim_list_.length(); ++i) {
    if (ClusterBackRefs::Compare(sim_list_[i], sim_list_[eq_to]) == 0) {
      EqualityTree::Locator loc;
      if (!first_added) {
        // Add self-equivalence, if we have more than one item in this
        // equivalence class.
        eq_tree_.Insert(sim_list_[eq_to].cluster, &loc);
        loc.set_value(sim_list_[eq_to].cluster);
        first_added = true;
      }
      eq_tree_.Insert(sim_list_[i].cluster, &loc);
      loc.set_value(sim_list_[eq_to].cluster);
      ++eq_clusters_count;
    } else {
      // Run ended: start a new candidate run at |i|.
      eq_to = i;
      first_added = false;
    }
  }
  return eq_clusters_count;
}
651
652
// Out-of-line definitions of the tree configs' sentinel keys/values
// (declared in the header).
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoKey;
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoValue;
const JSObjectsRetainerTreeConfig::Key JSObjectsRetainerTreeConfig::kNoKey;
const JSObjectsRetainerTreeConfig::Value JSObjectsRetainerTreeConfig::kNoValue =
    NULL;
658
659
// Seeds the retainers tree with references from the strong GC roots; the
// aggregator is built later by CoarseAndAggregate().
RetainerHeapProfile::RetainerHeapProfile()
    : zscope_(DELETE_ON_EXIT),
      aggregator_(NULL) {
  JSObjectsCluster roots(JSObjectsCluster::ROOTS);
  ReferencesExtractor extractor(roots, this);
  Heap::IterateRoots(&extractor, VISIT_ONLY_STRONG);
}
667
668
// aggregator_ is heap-allocated in CoarseAndAggregate(); may be NULL.
RetainerHeapProfile::~RetainerHeapProfile() {
  delete aggregator_;
}
672
673
Steve Blocka7e24c12009-10-30 11:49:00 +0000674void RetainerHeapProfile::StoreReference(const JSObjectsCluster& cluster,
675 HeapObject* ref) {
676 JSObjectsCluster ref_cluster = Clusterizer::Clusterize(ref);
Steve Blockd0582a62009-12-15 09:54:21 +0000677 if (ref_cluster.is_null()) return;
Steve Blocka7e24c12009-10-30 11:49:00 +0000678 JSObjectsRetainerTree::Locator ref_loc;
679 if (retainers_tree_.Insert(ref_cluster, &ref_loc)) {
680 ref_loc.set_value(new JSObjectsClusterTree());
681 }
682 JSObjectsClusterTree* referenced_by = ref_loc.value();
683 Clusterizer::InsertReferenceIntoTree(referenced_by, cluster);
684}
685
686
// Extracts and records all outgoing references of |obj|; objects that
// map to the null cluster are ignored.
void RetainerHeapProfile::CollectStats(HeapObject* obj) {
  const JSObjectsCluster cluster = Clusterizer::Clusterize(obj);
  if (cluster.is_null()) return;
  ReferencesExtractor extractor(cluster, this);
  obj->Iterate(&extractor);
}
693
694
// Coarsens the retainers tree and builds the aggregated view. Must run
// exactly once, after all CollectStats() calls and before printing.
void RetainerHeapProfile::CoarseAndAggregate() {
  coarser_.Process(&retainers_tree_);
  ASSERT(aggregator_ == NULL);
  aggregator_ = new RetainerTreeAggregator(&coarser_);
  aggregator_->Process(&retainers_tree_);
}
701
702
Steve Blocka7e24c12009-10-30 11:49:00 +0000703void RetainerHeapProfile::DebugPrintStats(
704 RetainerHeapProfile::Printer* printer) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000705 // Print clusters that have no equivalents, aggregating their retainers.
706 AggregatingRetainerTreePrinter agg_printer(&coarser_, printer);
707 retainers_tree_.ForEach(&agg_printer);
Steve Block791712a2010-08-27 10:21:07 +0100708 // Print clusters that have equivalents.
Steve Blocka7e24c12009-10-30 11:49:00 +0000709 SimpleRetainerTreePrinter s_printer(printer);
Steve Block791712a2010-08-27 10:21:07 +0100710 aggregator_->output_tree().ForEach(&s_printer);
Steve Blocka7e24c12009-10-30 11:49:00 +0000711}
712
713
// Prints the profile to the log via the default retainers printer.
void RetainerHeapProfile::PrintStats() {
  RetainersPrinter printer;
  DebugPrintStats(&printer);
}
718
719
720//
721// HeapProfiler class implementation.
722//
// Weak-handle callback: frees the allocation stack trace attached to the
// handle and disposes the handle itself.
static void StackWeakReferenceCallback(Persistent<Value> object,
                                       void* trace) {
  DeleteArray(static_cast<Address*>(trace));
  object.Dispose();
}
728
729
// Logs a producer event for a weakly-held JS object: its constructor name
// plus the allocation stack trace stored alongside the handle.
static void PrintProducerStackTrace(Object* obj, void* trace) {
  if (!obj->IsJSObject()) return;
  String* constructor = GetConstructorNameForHeapProfile(JSObject::cast(obj));
  SmartPointer<char> s_name(
      constructor->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
  LOG(HeapSampleJSProducerEvent(GetConstructorName(*s_name),
                                reinterpret_cast<Address*>(trace)));
}
738
739
// Writes a complete heap sample to the log: overall stats, a per-type
// histogram, constructor and retainer profiles, and producer stack traces
// for weakly-held objects.
void HeapProfiler::WriteSample() {
  LOG(HeapSampleBeginEvent("Heap", "allocated"));
  LOG(HeapSampleStats(
      "Heap", "allocated", Heap::CommittedMemory(), Heap::SizeOfObjects()));

  AggregatedHeapSnapshot snapshot;
  AggregatedHeapSnapshotGenerator generator(&snapshot);
  generator.GenerateSnapshot();

  // Emit one item per instance type with a non-zero footprint; the last
  // slot (kAllStringsType) lumps all string types together.
  HistogramInfo* info = snapshot.info();
  for (int i = FIRST_NONSTRING_TYPE;
       i <= AggregatedHeapSnapshotGenerator::kAllStringsType;
       ++i) {
    if (info[i].bytes() > 0) {
      LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
                              info[i].bytes()));
    }
  }

  snapshot.js_cons_profile()->PrintStats();
  snapshot.js_retainer_profile()->PrintStats();

  GlobalHandles::IterateWeakRoots(PrintProducerStackTrace,
                                  StackWeakReferenceCallback);

  LOG(HeapSampleEndEvent("Heap", "allocated"));
}
767
768
// Allocates one histogram slot per instance type plus a trailing slot
// that aggregates all string types, and names each slot.
AggregatedHeapSnapshot::AggregatedHeapSnapshot()
    : info_(NewArray<HistogramInfo>(
        AggregatedHeapSnapshotGenerator::kAllStringsType + 1)) {
#define DEF_TYPE_NAME(name) info_[name].set_name(#name);
  INSTANCE_TYPE_LIST(DEF_TYPE_NAME);
#undef DEF_TYPE_NAME
  info_[AggregatedHeapSnapshotGenerator::kAllStringsType].set_name(
      "STRING_TYPE");
}
778
779
// Releases the histogram array allocated in the constructor.
AggregatedHeapSnapshot::~AggregatedHeapSnapshot() {
  DeleteArray(info_);
}
783
784
// The generator fills |agg_snapshot| (not owned) during GenerateSnapshot().
AggregatedHeapSnapshotGenerator::AggregatedHeapSnapshotGenerator(
    AggregatedHeapSnapshot* agg_snapshot)
    : agg_snapshot_(agg_snapshot) {
}
789
790
// Sums the counts/bytes of every concrete string type into the combined
// kAllStringsType histogram slot.
void AggregatedHeapSnapshotGenerator::CalculateStringsStats() {
  HistogramInfo* info = agg_snapshot_->info();
  HistogramInfo& strings = info[kAllStringsType];
  // Lump all the string types together.
#define INCREMENT_SIZE(type, size, name, camel_name) \
  strings.increment_number(info[type].number());     \
  strings.increment_bytes(info[type].bytes());
  STRING_TYPE_LIST(INCREMENT_SIZE);
#undef INCREMENT_SIZE
}
801
802
// Tallies one heap object into the per-instance-type histogram.
void AggregatedHeapSnapshotGenerator::CollectStats(HeapObject* obj) {
  InstanceType type = obj->map()->instance_type();
  // NOTE(review): '0 <= type' is trivially true if InstanceType is
  // unsigned; kept as-is for documentation value.
  ASSERT(0 <= type && type <= LAST_TYPE);
  agg_snapshot_->info()[type].increment_number(1);
  agg_snapshot_->info()[type].increment_bytes(obj->Size());
}
809
810
// Walks all reachable heap objects, feeding the type histogram and both
// JS profiles, then finalizes the string stats and the retainer profile.
void AggregatedHeapSnapshotGenerator::GenerateSnapshot() {
  HeapIterator iterator(HeapIterator::kFilterUnreachable);
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    CollectStats(obj);
    agg_snapshot_->js_cons_profile()->CollectStats(obj);
    agg_snapshot_->js_retainer_profile()->CollectStats(obj);
  }
  CalculateStringsStats();
  agg_snapshot_->js_retainer_profile()->CoarseAndAggregate();
}
821
822
823class CountingConstructorHeapProfileIterator {
824 public:
825 CountingConstructorHeapProfileIterator()
826 : entities_count_(0), children_count_(0) {
827 }
828
829 void Call(const JSObjectsCluster& cluster,
830 const NumberAndSizeInfo& number_and_size) {
831 ++entities_count_;
832 children_count_ += number_and_size.number();
833 }
834
835 int entities_count() { return entities_count_; }
836 int children_count() { return children_count_; }
837
838 private:
839 int entities_count_;
840 int children_count_;
841};
842
843
// Adds an entry to |snapshot| and links it to the root via the next
// free unidirectional element slot, advancing |*root_child_index|.
static HeapEntry* AddEntryFromAggregatedSnapshot(HeapSnapshot* snapshot,
                                                 int* root_child_index,
                                                 HeapEntry::Type type,
                                                 const char* name,
                                                 int count,
                                                 int size,
                                                 int children_count,
                                                 int retainers_count) {
  HeapEntry* entry = snapshot->AddEntry(
      type, name, count, size, children_count, retainers_count);
  ASSERT(entry != NULL);
  // Element indices are 1-based, hence the +1.
  snapshot->root()->SetUnidirElementReference(*root_child_index,
                                              *root_child_index + 1,
                                              entry);
  *root_child_index = *root_child_index + 1;
  return entry;
}
861
862
// Creates one leaf snapshot entry per constructor cluster, attached to
// the snapshot root (no children or retainers of their own).
class AllocatingConstructorHeapProfileIterator {
 public:
  AllocatingConstructorHeapProfileIterator(HeapSnapshot* snapshot,
                                           int* root_child_index)
      : snapshot_(snapshot),
        root_child_index_(root_child_index) {
  }

  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size) {
    // Use the special-case label when there is one; otherwise resolve
    // the constructor name through the collection's name table.
    const char* name = cluster.GetSpecialCaseName();
    if (name == NULL) {
      name = snapshot_->collection()->GetFunctionName(cluster.constructor());
    }
    AddEntryFromAggregatedSnapshot(snapshot_,
                                   root_child_index_,
                                   HeapEntry::kObject,
                                   name,
                                   number_and_size.number(),
                                   number_and_size.bytes(),
                                   0,
                                   0);
  }

 private:
  HeapSnapshot* snapshot_;      // Not owned.
  int* root_child_index_;       // Next free root element slot.
};
891
892
893static HeapObject* ClusterAsHeapObject(const JSObjectsCluster& cluster) {
894 return cluster.can_be_coarsed() ?
895 reinterpret_cast<HeapObject*>(cluster.instance()) : cluster.constructor();
896}
897
898
899static JSObjectsCluster HeapObjectAsCluster(HeapObject* object) {
900 if (object->IsString()) {
901 return JSObjectsCluster(String::cast(object));
902 } else {
903 JSObject* js_obj = JSObject::cast(object);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800904 String* constructor = GetConstructorNameForHeapProfile(
905 JSObject::cast(js_obj));
Steve Block791712a2010-08-27 10:21:07 +0100906 return JSObjectsCluster(constructor, object);
907 }
908}
909
910
911class CountingRetainersIterator {
912 public:
913 CountingRetainersIterator(const JSObjectsCluster& child_cluster,
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100914 HeapEntriesAllocator* allocator,
Steve Block791712a2010-08-27 10:21:07 +0100915 HeapEntriesMap* map)
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100916 : child_(ClusterAsHeapObject(child_cluster)),
917 allocator_(allocator),
918 map_(map) {
Steve Block791712a2010-08-27 10:21:07 +0100919 if (map_->Map(child_) == NULL)
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100920 map_->Pair(child_, allocator_, HeapEntriesMap::kHeapEntryPlaceholder);
Steve Block791712a2010-08-27 10:21:07 +0100921 }
922
923 void Call(const JSObjectsCluster& cluster,
924 const NumberAndSizeInfo& number_and_size) {
925 if (map_->Map(ClusterAsHeapObject(cluster)) == NULL)
926 map_->Pair(ClusterAsHeapObject(cluster),
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100927 allocator_,
Steve Block791712a2010-08-27 10:21:07 +0100928 HeapEntriesMap::kHeapEntryPlaceholder);
929 map_->CountReference(ClusterAsHeapObject(cluster), child_);
930 }
931
932 private:
933 HeapObject* child_;
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100934 HeapEntriesAllocator* allocator_;
Steve Block791712a2010-08-27 10:21:07 +0100935 HeapEntriesMap* map_;
936};
937
938
939class AllocatingRetainersIterator {
940 public:
941 AllocatingRetainersIterator(const JSObjectsCluster& child_cluster,
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100942 HeapEntriesAllocator*,
Steve Block791712a2010-08-27 10:21:07 +0100943 HeapEntriesMap* map)
944 : child_(ClusterAsHeapObject(child_cluster)), map_(map) {
945 child_entry_ = map_->Map(child_);
946 ASSERT(child_entry_ != NULL);
947 }
948
949 void Call(const JSObjectsCluster& cluster,
950 const NumberAndSizeInfo& number_and_size) {
951 int child_index, retainer_index;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800952 map_->CountReference(ClusterAsHeapObject(cluster),
953 child_,
954 &child_index,
955 &retainer_index);
956 map_->Map(ClusterAsHeapObject(cluster))->SetIndexedReference(
957 HeapGraphEdge::kElement,
958 child_index,
959 number_and_size.number(),
960 child_entry_,
961 retainer_index);
Steve Block791712a2010-08-27 10:21:07 +0100962 }
963
964 private:
965 HeapObject* child_;
966 HeapEntriesMap* map_;
967 HeapEntry* child_entry_;
968};
969
970
// Walks a retainer tree and runs RetainersIterator over each cluster's
// retainers. When a coarser is supplied, clusters that have a coarse
// equivalent are skipped and retainers are re-keyed through the
// coarser before iteration.
template<class RetainersIterator>
class AggregatingRetainerTreeIterator {
 public:
  explicit AggregatingRetainerTreeIterator(ClustersCoarser* coarser,
                                           HeapEntriesAllocator* allocator,
                                           HeapEntriesMap* map)
      : coarser_(coarser), allocator_(allocator), map_(map) {
  }

  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree) {
    // Clusters with a coarse equivalent are covered by the pass over
    // the aggregator's output tree (coarser_ == NULL), so skip them.
    if (coarser_ != NULL &&
        !coarser_->GetCoarseEquivalent(cluster).is_null()) return;
    JSObjectsClusterTree* tree_to_iterate = tree;
    // NOTE(review): dest_tree_ appears to allocate in the zone guarded
    // by zs (DELETE_ON_EXIT) — keep this declaration order; confirm
    // against ZoneScope/JSObjectsClusterTree definitions.
    ZoneScope zs(DELETE_ON_EXIT);
    JSObjectsClusterTree dest_tree_;
    if (coarser_ != NULL) {
      // Aggregate retainers by their coarse equivalents into a
      // temporary tree, then iterate that instead of the original.
      RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
      tree->ForEach(&retainers_aggregator);
      tree_to_iterate = &dest_tree_;
    }
    RetainersIterator iterator(cluster, allocator_, map_);
    tree_to_iterate->ForEach(&iterator);
  }

 private:
  ClustersCoarser* coarser_;
  HeapEntriesAllocator* allocator_;
  HeapEntriesMap* map_;
};
1000
1001
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001002class AggregatedRetainerTreeAllocator : public HeapEntriesAllocator {
Steve Block791712a2010-08-27 10:21:07 +01001003 public:
1004 AggregatedRetainerTreeAllocator(HeapSnapshot* snapshot,
1005 int* root_child_index)
1006 : snapshot_(snapshot), root_child_index_(root_child_index) {
1007 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001008 ~AggregatedRetainerTreeAllocator() { }
Steve Block791712a2010-08-27 10:21:07 +01001009
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001010 HeapEntry* AllocateEntry(
1011 HeapThing ptr, int children_count, int retainers_count) {
1012 HeapObject* obj = reinterpret_cast<HeapObject*>(ptr);
Steve Block791712a2010-08-27 10:21:07 +01001013 JSObjectsCluster cluster = HeapObjectAsCluster(obj);
1014 const char* name = cluster.GetSpecialCaseName();
1015 if (name == NULL) {
1016 name = snapshot_->collection()->GetFunctionName(cluster.constructor());
1017 }
1018 return AddEntryFromAggregatedSnapshot(
1019 snapshot_, root_child_index_, HeapEntry::kObject, name,
1020 0, 0, children_count, retainers_count);
1021 }
1022
1023 private:
1024 HeapSnapshot* snapshot_;
1025 int* root_child_index_;
1026};
1027
1028
1029template<class Iterator>
1030void AggregatedHeapSnapshotGenerator::IterateRetainers(
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001031 HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map) {
Steve Block791712a2010-08-27 10:21:07 +01001032 RetainerHeapProfile* p = agg_snapshot_->js_retainer_profile();
1033 AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_1(
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001034 p->coarser(), allocator, entries_map);
Steve Block791712a2010-08-27 10:21:07 +01001035 p->retainers_tree()->ForEach(&agg_ret_iter_1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001036 AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_2(
1037 NULL, allocator, entries_map);
Steve Block791712a2010-08-27 10:21:07 +01001038 p->aggregator()->output_tree().ForEach(&agg_ret_iter_2);
1039}
1040
1041
// Converts the aggregated snapshot into a regular HeapSnapshot. Works
// in two passes: the first pass counts entries, children and retainers
// so the snapshot arrays can be sized exactly; the second pass
// allocates the entries and fills in the references. Both passes must
// visit exactly the same items, in the same order.
void AggregatedHeapSnapshotGenerator::FillHeapSnapshot(HeapSnapshot* snapshot) {
  // Count the number of entities.
  int histogram_entities_count = 0;
  int histogram_children_count = 0;
  int histogram_retainers_count = 0;
  // One entry per non-empty instance-type histogram bucket.
  for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
    if (agg_snapshot_->info()[i].bytes() > 0) {
      ++histogram_entities_count;
    }
  }
  // One entry per constructor cluster.
  CountingConstructorHeapProfileIterator counting_cons_iter;
  agg_snapshot_->js_cons_profile()->ForEach(&counting_cons_iter);
  histogram_entities_count += counting_cons_iter.entities_count();
  HeapEntriesMap entries_map;
  int root_child_index = 0;
  AggregatedRetainerTreeAllocator allocator(snapshot, &root_child_index);
  // Counting pass over retainers: populates entries_map with
  // placeholder entries and tallies reference counts.
  IterateRetainers<CountingRetainersIterator>(&allocator, &entries_map);
  histogram_entities_count += entries_map.entries_count();
  histogram_children_count += entries_map.total_children_count();
  histogram_retainers_count += entries_map.total_retainers_count();

  // Root entry references all other entries.
  histogram_children_count += histogram_entities_count;
  int root_children_count = histogram_entities_count;
  ++histogram_entities_count;

  // Allocate and fill entries in the snapshot, allocate references.
  snapshot->AllocateEntries(histogram_entities_count,
                            histogram_children_count,
                            histogram_retainers_count);
  snapshot->AddRootEntry(root_children_count);
  // Histogram buckets first — mirrors the counting loop above.
  for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
    if (agg_snapshot_->info()[i].bytes() > 0) {
      AddEntryFromAggregatedSnapshot(snapshot,
                                     &root_child_index,
                                     HeapEntry::kHidden,
                                     agg_snapshot_->info()[i].name(),
                                     agg_snapshot_->info()[i].number(),
                                     agg_snapshot_->info()[i].bytes(),
                                     0,
                                     0);
    }
  }
  // Then one entry per constructor cluster.
  AllocatingConstructorHeapProfileIterator alloc_cons_iter(
      snapshot, &root_child_index);
  agg_snapshot_->js_cons_profile()->ForEach(&alloc_cons_iter);
  // Replace the placeholders recorded in the counting pass with real
  // entries produced by the allocator.
  entries_map.AllocateEntries();

  // Fill up references.
  IterateRetainers<AllocatingRetainersIterator>(&allocator, &entries_map);

  snapshot->SetDominatorsToSelf();
}
1095
1096
// Producer profiling is disabled until Setup() flips this flag.
bool ProducerHeapProfile::can_log_ = false;

// Enables recording of JS object allocation sites.
void ProducerHeapProfile::Setup() {
  can_log_ = true;
}
1102
// Records the current JS stack (frame PCs, NULL-terminated) for a
// newly allocated object and attaches it as weak-callback data to a
// global handle, so the allocation site can be reported when the
// object dies. Does nothing before Setup() or when no JS frames exist.
void ProducerHeapProfile::DoRecordJSObjectAllocation(Object* obj) {
  ASSERT(FLAG_log_producers);
  if (!can_log_) return;
  // First walk: count the JS frames to size the PC array.
  int framesCount = 0;
  for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
    ++framesCount;
  }
  if (framesCount == 0) return;
  ++framesCount; // Reserve place for the terminator item.
  Vector<Address> stack(NewArray<Address>(framesCount), framesCount);
  // Second walk: capture each frame's PC.
  int i = 0;
  for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
    stack[i++] = it.frame()->pc();
  }
  stack[i] = NULL;  // Terminator item.
  Handle<Object> handle = GlobalHandles::Create(obj);
  // NOTE(review): the NewArray buffer is passed to the weak callback as
  // its parameter; presumably StackWeakReferenceCallback frees it —
  // confirm, otherwise this leaks per recorded allocation.
  GlobalHandles::MakeWeak(handle.location(),
                          static_cast<void*>(stack.start()),
                          StackWeakReferenceCallback);
}
1123
1124
Steve Blocka7e24c12009-10-30 11:49:00 +00001125#endif // ENABLE_LOGGING_AND_PROFILING
1126
1127
1128} } // namespace v8::internal