// Copyright 2013 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "heap-snapshot-generator-inl.h"
31
32#include "heap-profiler.h"
33#include "debug.h"
34
35namespace v8 {
36namespace internal {
37
38
// Constructs a *named* edge between the entries at the given indexes in the
// snapshot's entry list. Indexes are stored instead of pointers because the
// entry list may be reallocated while the snapshot is being built; the index
// is resolved later by ReplaceToIndexWithEntry().
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  // Only these edge kinds carry a name; the others carry a numeric index
  // (see the overload below).
  ASSERT(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut);
}
49
50
// Constructs an *indexed* edge (element, hidden or weak reference) between
// the entries at the given indexes in the snapshot's entry list.
HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  // Only these edge kinds carry a numeric index; named kinds use the
  // other constructor.
  ASSERT(type == kElement || type == kHidden || type == kWeak);
}
58
59
// Resolves the stored destination index into a direct HeapEntry pointer.
// Safe only once the snapshot's entry list will no longer be reallocated
// (called from HeapSnapshot::FillChildren).
void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}
63
64
// Sentinel entry index meaning "no entry has been allocated yet".
const int HeapEntry::kNoEntry = -1;
66
// Creates a snapshot node for one heap object.
// children_index_ starts at -1 and is assigned later by
// HeapSnapshot::FillChildren once all edges are known.
HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     int self_size)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      id_(id),
      snapshot_(snapshot),
      name_(name) { }
79
80
81void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
82 const char* name,
83 HeapEntry* entry) {
84 HeapGraphEdge edge(type, name, this->index(), entry->index());
85 snapshot_->edges().Add(edge);
86 ++children_count_;
87}
88
89
90void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
91 int index,
92 HeapEntry* entry) {
93 HeapGraphEdge edge(type, index, this->index(), entry->index());
94 snapshot_->edges().Add(edge);
95 ++children_count_;
96}
97
98
// Looks the live heap object with this entry's snapshot id back up in the
// heap. May return an empty handle if the object has died since the
// snapshot was taken (see HeapSnapshotsCollection::FindHeapObjectById).
Handle<HeapObject> HeapEntry::GetHeapObject() {
  return snapshot_->collection()->FindHeapObjectById(id());
}
102
103
// Debug-prints this entry (size, id, type/name) and then recursively its
// children, indenting two columns per level. |prefix| and |edge_name|
// describe the edge through which this entry was reached. Recursion stops
// when max_depth is exhausted. String names are printed quoted, truncated
// to ~40 characters, with newlines escaped as "\n".
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  // %u in the format below assumes id() fits in an unsigned.
  STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
  OS::Print("%6d @%6u %*c %s%s: ",
            self_size(), id(), indent, ' ', prefix, edge_name);
  if (type() != kString) {
    OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        OS::Print("%c", *c);
      else
        OS::Print("\\n");  // Keep the dump one entry per line.
      ++c;
    }
    OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    // Scratch buffer for indexed edges; named edges overwrite edge_name
    // below (this local intentionally shadows the parameter).
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      default:
        OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}
163
164
165const char* HeapEntry::TypeAsString() {
166 switch (type()) {
167 case kHidden: return "/hidden/";
168 case kObject: return "/object/";
169 case kClosure: return "/closure/";
170 case kString: return "/string/";
171 case kCode: return "/code/";
172 case kArray: return "/array/";
173 case kRegExp: return "/regexp/";
174 case kHeapNumber: return "/number/";
175 case kNative: return "/native/";
176 case kSynthetic: return "/synthetic/";
177 default: return "???";
178 }
179}
180
181
// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

// Expected sizes of the snapshot data structures, per pointer width.
// These values are pinned by STATIC_CHECKs in the constructors below, so
// any accidental growth of a snapshot object breaks the build rather than
// silently bloating snapshots.
template <size_t ptr_size> struct SnapshotSizeConstants;

// 32-bit targets.
template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 24;
  static const int kExpectedHeapSnapshotsCollectionSize = 100;
  static const int kExpectedHeapSnapshotSize = 136;
  static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};

// 64-bit targets.
template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 32;
  static const int kExpectedHeapSnapshotsCollectionSize = 152;
  static const int kExpectedHeapSnapshotSize = 168;
  static const uint64_t kMaxSerializableSnapshotRawSize =
      static_cast<uint64_t>(6000) * MB;
};

}  // namespace
206
// Creates an empty snapshot owned by |collection|. All special-entry
// indexes start out as kNoEntry; they are filled in by the Add*Entry
// methods as the snapshot is populated.
HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
                           HeapSnapshot::Type type,
                           const char* title,
                           unsigned uid)
    : collection_(collection),
      type_(type),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      natives_root_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  // Keep snapshot nodes/edges small: fail the build if their size drifts
  // from the expected per-platform values (see SnapshotSizeConstants).
  STATIC_CHECK(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_CHECK(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}
229
230
// Unregisters this snapshot from its owning collection and destroys it.
// Note: self-deleting -- the snapshot must be heap-allocated and must not
// be touched after this call.
void HeapSnapshot::Delete() {
  collection_->RemoveSnapshot(this);
  delete this;
}
235
236
// Records the highest object id assigned so far, so the snapshot can later
// tell which objects were allocated after it was taken.
void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = collection_->last_assigned_id();
}
240
241
242HeapEntry* HeapSnapshot::AddRootEntry() {
243 ASSERT(root_index_ == HeapEntry::kNoEntry);
244 ASSERT(entries_.is_empty()); // Root entry must be the first one.
245 HeapEntry* entry = AddEntry(HeapEntry::kObject,
246 "",
247 HeapObjectsMap::kInternalRootObjectId,
248 0);
249 root_index_ = entry->index();
250 ASSERT(root_index_ == 0);
251 return entry;
252}
253
254
255HeapEntry* HeapSnapshot::AddGcRootsEntry() {
256 ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
257 HeapEntry* entry = AddEntry(HeapEntry::kObject,
258 "(GC roots)",
259 HeapObjectsMap::kGcRootsObjectId,
260 0);
261 gc_roots_index_ = entry->index();
262 return entry;
263}
264
265
266HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
267 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
268 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
269 HeapEntry* entry = AddEntry(
270 HeapEntry::kObject,
271 VisitorSynchronization::kTagNames[tag],
272 HeapObjectsMap::GetNthGcSubrootId(tag),
273 0);
274 gc_subroot_indexes_[tag] = entry->index();
275 return entry;
276}
277
278
279HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
280 const char* name,
281 SnapshotObjectId id,
282 int size) {
283 HeapEntry entry(this, type, name, id, size);
284 entries_.Add(entry);
285 return &entries_.last();
286}
287
288
289void HeapSnapshot::FillChildren() {
290 ASSERT(children().is_empty());
291 children().Allocate(edges().length());
292 int children_index = 0;
293 for (int i = 0; i < entries().length(); ++i) {
294 HeapEntry* entry = &entries()[i];
295 children_index = entry->set_children_index(children_index);
296 }
297 ASSERT(edges().length() == children_index);
298 for (int i = 0; i < edges().length(); ++i) {
299 HeapGraphEdge* edge = &edges()[i];
300 edge->ReplaceToIndexWithEntry(this);
301 edge->from()->add_child(edge);
302 }
303}
304
305
306class FindEntryById {
307 public:
308 explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
309 int operator()(HeapEntry* const* entry) {
310 if ((*entry)->id() == id_) return 0;
311 return (*entry)->id() < id_ ? -1 : 1;
312 }
313 private:
314 SnapshotObjectId id_;
315};
316
317
318HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
319 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
320 // Perform a binary search by id.
321 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
322 if (index == -1)
323 return NULL;
324 return entries_by_id->at(index);
325}
326
327
// Three-way comparator ordering pointed-to items by their id(); used to
// sort the entries list for binary search.
template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() < (*entry2_ptr)->id()) return -1;
  if ((*entry1_ptr)->id() > (*entry2_ptr)->id()) return 1;
  return 0;
}
334
335
336List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
337 if (sorted_entries_.is_empty()) {
338 sorted_entries_.Allocate(entries_.length());
339 for (int i = 0; i < entries_.length(); ++i) {
340 sorted_entries_[i] = &entries_[i];
341 }
342 sorted_entries_.Sort(SortByIds);
343 }
344 return &sorted_entries_;
345}
346
347
// Debug helper: dumps the snapshot graph starting at the root entry,
// descending at most max_depth levels.
void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}
351
352
// Approximates the memory held by a List: header plus element storage.
// Counts length(), not capacity, so spare capacity is not included.
template<typename T, class P>
static size_t GetMemoryUsedByList(const List<T, P>& list) {
  return list.length() * sizeof(T) + sizeof(list);
}
357
358
// Approximate memory footprint of this snapshot: the object itself plus
// its entry, edge, child and sorted-entry lists.
size_t HeapSnapshot::RawSnapshotSize() const {
  // Keep sizeof(HeapSnapshot) in sync with the per-platform expectation.
  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
               sizeof(HeapSnapshot));  // NOLINT
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}
369
370
// We split IDs on evens for embedder (native) objects (see
// HeapObjectsMap::GenerateId, which shifts left by one) and odds for
// V8 heap objects (the well-known ids below start at 1 and advance by
// kObjectIdStep).
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
// One subroot id is reserved per VisitorSynchronization sync tag.
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
381
// Tracks address -> snapshot-object-id for objects in |heap|, surviving
// object moves (see MoveObject) and GCs (see RemoveDeadEntries).
HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in the HashMap we see no difference between two
  // cases: it has an entry with NULL as the value, or it has created
  // a new entry on the fly with NULL as the default value.
  // With such a dummy element we have a guarantee that all entries_map_
  // entries will have a value field greater than 0.
  // This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}
395
396
// Called once a snapshot has been fully generated: drops map entries for
// objects that were not seen (i.e. have died) during generation.
void HeapObjectsMap::SnapshotGenerationFinished() {
  RemoveDeadEntries();
}
400
401
// Notifies the map that the object at |from| moved to |to|, preserving its
// id. The statement order matters: |from| must be removed from the hash
// map before |to| is looked up, in case |to| aliases an existing slot.
void HeapObjectsMap::MoveObject(Address from, Address to) {
  ASSERT(to != NULL);
  ASSERT(from != NULL);
  if (from == to) return;
  void* from_value = entries_map_.Remove(from, AddressHash(from));
  // Unknown address: nothing to move.
  if (from_value == NULL) return;
  int from_entry_info_index =
      static_cast<int>(reinterpret_cast<intptr_t>(from_value));
  entries_.at(from_entry_info_index).addr = to;
  HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
  if (to_entry->value != NULL) {
    int to_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
    // Without this operation we will have two EntryInfo's with the same
    // value in addr field. It is bad because later at RemoveDeadEntries
    // one of this entry will be removed with the corresponding entries_map_
    // entry.
    entries_.at(to_entry_info_index).addr = NULL;
  }
  // Rebind the destination address to the moved object's EntryInfo,
  // keeping its original id.
  to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
}
423
424
425SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
426 HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
427 if (entry == NULL) return 0;
428 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
429 EntryInfo& entry_info = entries_.at(entry_index);
430 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
431 return entry_info.id;
432}
433
434
435SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
436 unsigned int size) {
437 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
438 HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
439 if (entry->value != NULL) {
440 int entry_index =
441 static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
442 EntryInfo& entry_info = entries_.at(entry_index);
443 entry_info.accessed = true;
444 entry_info.size = size;
445 return entry_info.id;
446 }
447 entry->value = reinterpret_cast<void*>(entries_.length());
448 SnapshotObjectId id = next_id_;
449 next_id_ += kObjectIdStep;
450 entries_.Add(EntryInfo(id, addr, size));
451 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
452 return id;
453}
454
455
// Stops heap-stats tracking by discarding the recorded time intervals.
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}
459
460void HeapObjectsMap::UpdateHeapObjectsMap() {
461 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
462 "HeapSnapshotsCollection::UpdateHeapObjectsMap");
463 HeapIterator iterator(heap_);
464 for (HeapObject* obj = iterator.next();
465 obj != NULL;
466 obj = iterator.next()) {
467 FindOrAddEntry(obj->address(), obj->Size());
468 }
469 RemoveDeadEntries();
470}
471
472
// Streams per-time-interval heap statistics updates to |stream|.
// entries_ is ordered by ascending id, so a single cursor walk buckets
// every entry into the interval whose boundary id it precedes. Only
// intervals whose count or size changed are emitted; updates are flushed
// in chunks of the stream's preferred size. Returns the last assigned id
// (also used as the early-exit value if the stream aborts).
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  // Open a new interval ending at the current id watermark.
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  ASSERT(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    // Accumulate all entries whose id falls before this interval's bound.
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      // Note: the assignments inside the call update the cached interval
      // stats while building the update record.
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  ASSERT(entry_info == end_entry_info);
  // Flush whatever did not fill a whole chunk.
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}
517
518
// Compacts entries_ in place, keeping only entries whose objects were
// marked accessed since the last sweep, and rewires the hash map to the
// entries' new indexes. Entry 0 is the reserved dummy and is always kept.
void HeapObjectsMap::RemoveDeadEntries() {
  ASSERT(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      // Reset the liveness flag for the next tracking round.
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, AddressHash(entry_info.addr), false);
      ASSERT(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      // Dead entry; addr may be NULL if MoveObject aliased it away.
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  // Invariant: entries_ is exactly the map's occupancy plus the dummy.
  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}
546
547
// Computes a synthetic snapshot id for an embedder-provided (native)
// object from its hash, label, and (if available) element count. The
// final left shift keeps native ids in the even id space (V8 heap object
// ids are odd -- see the comment above kInternalRootObjectId).
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           HEAP->HashSeed());
  intptr_t element_count = info->GetElementCount();
  // -1 means the embedder did not report an element count.
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
560
561
// Approximate memory footprint of the map: the object itself, the hash
// table's backing store, and the entry/time-interval lists.
size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}
569
570
// Owns all snapshots taken for |heap| plus the shared address -> id map.
// Object-move tracking stays off until the first snapshot is requested
// (see NewSnapshot).
HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap)
    : is_tracking_objects_(false),
      snapshots_uids_(HeapSnapshotsMatch),
      token_enumerator_(new TokenEnumerator()),
      ids_(heap) {
}
577
578
// List::Iterate callback used by the destructor to free owned snapshots.
static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
  delete *snapshot_ptr;
}
582
583
// Frees the owned token enumerator and every snapshot still registered.
HeapSnapshotsCollection::~HeapSnapshotsCollection() {
  delete token_enumerator_;
  snapshots_.Iterate(DeleteHeapSnapshot);
}
588
589
// Allocates a new, empty snapshot. The snapshot is not registered with
// this collection until SnapshotGenerationFinished is called with it.
HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
                                                   const char* name,
                                                   unsigned uid) {
  is_tracking_objects_ = true;  // Start watching for heap objects moves.
  return new HeapSnapshot(this, type, name, uid);
}
596
597
// Finalizes snapshot generation: prunes dead entries from the id map and,
// if generation succeeded (|snapshot| non-NULL), registers the snapshot
// in both the list and the uid lookup table.
void HeapSnapshotsCollection::SnapshotGenerationFinished(
    HeapSnapshot* snapshot) {
  ids_.SnapshotGenerationFinished();
  if (snapshot != NULL) {
    snapshots_.Add(snapshot);
    // The uid itself serves as the hash for the uid table.
    HashMap::Entry* entry =
        snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
                               static_cast<uint32_t>(snapshot->uid()),
                               true);
    ASSERT(entry->value == NULL);
    entry->value = snapshot;
  }
}
611
612
613HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
614 HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
615 static_cast<uint32_t>(uid),
616 false);
617 return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
618}
619
620
621void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
622 snapshots_.RemoveElement(snapshot);
623 unsigned uid = snapshot->uid();
624 snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
625 static_cast<uint32_t>(uid));
626}
627
628
629Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
630 SnapshotObjectId id) {
631 // First perform a full GC in order to avoid dead objects.
632 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
633 "HeapSnapshotsCollection::FindHeapObjectById");
634 AssertNoAllocation no_allocation;
635 HeapObject* object = NULL;
636 HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
637 // Make sure that object with the given id is still reachable.
638 for (HeapObject* obj = iterator.next();
639 obj != NULL;
640 obj = iterator.next()) {
641 if (ids_.FindEntry(obj->address()) == id) {
642 ASSERT(object == NULL);
643 object = obj;
644 // Can't break -- kFilterUnreachable requires full heap traversal.
645 }
646 }
647 return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
648}
649
650
// Approximate memory footprint of the collection: itself, the name and id
// tables, the uid hash table's backing store, and every owned snapshot.
size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
  // Keep sizeof(HeapSnapshotsCollection) in sync with expectations.
  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
      kExpectedHeapSnapshotsCollectionSize ==
      sizeof(HeapSnapshotsCollection));  // NOLINT
  size_t size = sizeof(*this);
  size += names_.GetUsedMemorySize();
  size += ids_.GetUsedMemorySize();
  size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
  size += GetMemoryUsedByList(snapshots_);
  for (int i = 0; i < snapshots_.length(); ++i) {
    size += snapshots_[i]->RawSnapshotSize();
  }
  return size;
}
665
666
// Maps arbitrary heap "things" (opaque pointers) to snapshot entry indexes.
HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch) {
}
670
671
672int HeapEntriesMap::Map(HeapThing thing) {
673 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
674 if (cache_entry == NULL) return HeapEntry::kNoEntry;
675 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
676}
677
678
679void HeapEntriesMap::Pair(HeapThing thing, int entry) {
680 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
681 ASSERT(cache_entry->value == NULL);
682 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
683}
684
685
// A set of heap objects, optionally carrying a string tag per object.
HeapObjectsSet::HeapObjectsSet()
    : entries_(HeapEntriesMap::HeapThingsMatch) {
}
689
690
// Removes all objects (and their tags) from the set.
void HeapObjectsSet::Clear() {
  entries_.Clear();
}
694
695
696bool HeapObjectsSet::Contains(Object* obj) {
697 if (!obj->IsHeapObject()) return false;
698 HeapObject* object = HeapObject::cast(obj);
699 return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
700}
701
702
703void HeapObjectsSet::Insert(Object* obj) {
704 if (!obj->IsHeapObject()) return;
705 HeapObject* object = HeapObject::cast(obj);
706 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
707}
708
709
710const char* HeapObjectsSet::GetTag(Object* obj) {
711 HeapObject* object = HeapObject::cast(obj);
712 HashMap::Entry* cache_entry =
713 entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
714 return cache_entry != NULL
715 ? reinterpret_cast<const char*>(cache_entry->value)
716 : NULL;
717}
718
719
720void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
721 if (!obj->IsHeapObject()) return;
722 HeapObject* object = HeapObject::cast(obj);
723 HashMap::Entry* cache_entry =
724 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
725 cache_entry->value = const_cast<char*>(tag);
726}
727
728
// Fake HeapObject* values used to represent the synthetic root entries in
// the uniform HeapThing machinery. They are just the well-known snapshot
// ids cast to pointers and are never dereferenced; subroot "objects" are
// the contiguous range [kFirstGcSubrootObject, kLastGcSubrootObject).
HeapObject* const V8HeapExplorer::kInternalRootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
741
742
// Explores the V8 heap proper, producing entries and edges for |snapshot|.
// |progress| receives progress callbacks; |resolver| lets the embedder
// name global objects. filler_ is injected later by the generator.
V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(Isolate::Current()->heap()),
      snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}
754
755
// Nothing to release: all referenced objects are owned elsewhere.
V8HeapExplorer::~V8HeapExplorer() {
}
758
759
// HeapEntriesAllocator interface: a HeapThing from this explorer is always
// a HeapObject* (possibly one of the fake root pointers, see AddEntry).
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}
763
764
// Creates the snapshot entry for |object|, choosing its type and display
// name from the object's kind. Handles the fake root/GC-root/subroot
// pointers first (they are ids cast to pointers, not real objects -- see
// the kInternalRootObject definitions above).
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object == kInternalRootObject) {
    snapshot_->AddRootEntry();
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
    HeapEntry* entry = snapshot_->AddGcRootsEntry();
    return entry;
  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
    HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
    return entry;
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    // Bound functions carry no useful name of their own.
    const char* name = shared->bound() ? "native_bind" :
        collection_->names()->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    collection_->names()->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = collection_->names()->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      // Append the embedder-provided tag, if any, to the global's name.
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = collection_->names()->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    return AddEntry(object,
                    HeapEntry::kString,
                    collection_->names()->GetName(String::cast(object)));
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    collection_->names()->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? collection_->names()->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    // Check NativeContext before Context: every NativeContext is a Context.
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  // Everything else is internal VM data.
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
828
829
830HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
831 HeapEntry::Type type,
832 const char* name) {
833 int object_size = object->Size();
834 SnapshotObjectId object_id =
835 collection_->GetObjectId(object->address(), object_size);
836 return snapshot_->AddEntry(type, name, object_id, object_size);
837}
838
839
// Root visitor that creates one snapshot entry per non-empty GC subroot
// category. Synchronize() is called by the heap between categories; a
// category is non-empty iff the pointer count advanced since the last tag.
class GcSubrootsEnumerator : public ObjectVisitor {
 public:
  GcSubrootsEnumerator(
      SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
      : filler_(filler),
        explorer_(explorer),
        previous_object_count_(0),
        object_count_(0) {
  }
  void VisitPointers(Object** start, Object** end) {
    // Only counts pointers; entries are created in Synchronize().
    object_count_ += end - start;
  }
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Skip empty subroots.
    if (previous_object_count_ != object_count_) {
      previous_object_count_ = object_count_;
      filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
    }
  }
 private:
  SnapshotFillerInterface* filler_;
  V8HeapExplorer* explorer_;
  intptr_t previous_object_count_;
  intptr_t object_count_;
};
865
866
// Creates the synthetic entries: the snapshot root, the "(GC roots)"
// entry, and one entry per non-empty GC subroot category.
void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
  GcSubrootsEnumerator enumerator(filler, this);
  heap_->IterateRoots(&enumerator, VISIT_ALL);
}
873
874
// Produces a "system / ..." display name for internal VM objects based on
// their instance type. Maps of string types get a per-type label; struct
// types get their struct name via the STRUCT_LIST macro expansion.
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}
895
896
897int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
898 int objects_count = 0;
899 for (HeapObject* obj = iterator->next();
900 obj != NULL;
901 obj = iterator->next()) {
902 objects_count++;
903 }
904 return objects_count;
905}
906
907
// Visits all pointer fields of |parent_obj| and reports each one as a
// hidden (indexed) reference, skipping fields that were already reported
// as named references. The skip marker is a pointer-tagging trick:
// MarkVisitedField ORs kFailureTag into the byte at the field's address,
// making the stored value look like a Failure; CheckVisitedAndUnmark
// detects that and restores the proper heap object tag.
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(1) {  // Hidden reference indexes are 1-based.
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
    }
  }
  // Marks the field at |offset| inside |obj| as already reported.
  // A negative offset means "no such field" and is ignored.
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    ASSERT(!Memory::Object_at(field)->IsFailure());
    ASSERT(Memory::Object_at(field)->IsHeapObject());
    // NOTE: writes only the byte at the field's address to set the tag.
    *field |= kFailureTag;
  }

 private:
  // Returns true (and removes the mark) if the field was marked by
  // MarkVisitedField above.
  bool CheckVisitedAndUnmark(Object** field) {
    if ((*field)->IsFailure()) {
      intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
      *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};
947
948
// Extracts all outgoing references of |obj| into the snapshot: first the
// type-specific named references, then (for most types) a generic sweep
// over all remaining pointer fields as hidden references. The named
// extractors mark the fields they report (see IndexedReferencesExtractor)
// so the generic sweep does not duplicate them.
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* heap_entry = GetEntry(obj);
  if (heap_entry == NULL) return;  // No interest in this object.
  int entry = heap_entry->index();

  bool extract_indexed_refs = true;
  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsJSGlobalPropertyCell()) {
    ExtractJSGlobalPropertyCellReferences(
        entry, JSGlobalPropertyCell::cast(obj));
    // Property cells are fully covered by the named extractor.
    extract_indexed_refs = false;
  }
  if (extract_indexed_refs) {
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  }
}
984
985
986void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
987 // We need to reference JS global objects from snapshot's root.
988 // We use JSGlobalProxy because this is what embedder (e.g. browser)
989 // uses for the global object.
990 Object* object = proxy->map()->prototype();
991 bool is_debug_object = false;
992#ifdef ENABLE_DEBUGGER_SUPPORT
993 is_debug_object = object->IsGlobalObject() &&
994 Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
995#endif
996 if (!is_debug_object) {
997 SetUserGlobalReference(object);
998 }
999}
1000
1001
// Extracts references specific to JSObject and its subtypes (JSFunction,
// GlobalObject): closure/property/element/internal-field references, the
// prototype, and function- or global-specific named slots.
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        // The slot holds the prototype object itself.
        SetPropertyReference(
            obj, entry,
            heap_->prototype_symbol(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        // The slot holds the initial map; report the map's prototype.
        SetPropertyReference(
            obj, entry,
            heap_->prototype_symbol(), js_fun->prototype());
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->unchecked_context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->unchecked_context(),
                         JSFunction::kContextOffset);
    // The fields past kNonWeakFieldsEndOffset are reported as weak edges.
    for (int i = JSFunction::kNonWeakFieldsEndOffset;
         i < JSFunction::kSize;
         i += kPointerSize) {
      SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
    }
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
                         GlobalObject::kGlobalReceiverOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}
1070
1071
1072void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1073 if (string->IsConsString()) {
1074 ConsString* cs = ConsString::cast(string);
1075 SetInternalReference(cs, entry, "first", cs->first(),
1076 ConsString::kFirstOffset);
1077 SetInternalReference(cs, entry, "second", cs->second(),
1078 ConsString::kSecondOffset);
1079 } else if (string->IsSlicedString()) {
1080 SlicedString* ss = SlicedString::cast(string);
1081 SetInternalReference(ss, entry, "parent", ss->parent(),
1082 SlicedString::kParentOffset);
1083 }
1084}
1085
1086
// Emits named references for context slots.  The fixed header slots are
// reported for every context; native contexts additionally get all
// NATIVE_CONTEXT_FIELDS, with the trailing weak slots reported as weak.
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  SetInternalReference(context, entry, #name, context->get(Context::index), \
                       FixedArray::OffsetOfElementAt(Context::index));
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    // Give readable names to well-known per-context caches.
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
#undef EXTRACT_CONTEXT_FIELD
    // Slots from FIRST_WEAK_SLOT onwards hold weakly-referenced values.
    for (int i = Context::FIRST_WEAK_SLOT;
         i < Context::NATIVE_CONTEXT_SLOTS;
         ++i) {
      SetWeakReference(context, entry, i, context->get(i),
                       FixedArray::OffsetOfElementAt(i));
    }
  }
}
1111
1112
// Emits the named references held by a Map: prototype, constructor,
// back pointer / transition array, descriptors, and code cache.
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();

    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(transitions->back_pointer_storage(), "(back pointer)");
    // The back pointer field lives inside the transition array; passing
    // `transitions` as the parent marks that field as visited so the
    // Iterate() below does not report it again.
    SetInternalReference(transitions, entry,
                         "backpointer", back_pointer,
                         TransitionArray::kBackPointerStorageOffset);
    // Attribute the transition array's remaining pointer fields to the
    // map's entry as hidden references.
    IndexedReferencesExtractor transitions_refs(this, transitions, entry);
    transitions->Iterate(&transitions_refs);

    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions,
                         Map::kTransitionsOrBackPointerOffset);
  } else {
    // No transition array: the shared slot holds the back pointer itself.
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "backpointer", back_pointer,
                         Map::kTransitionsOrBackPointerOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);

  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
}
1151
1152
1153void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1154 int entry, SharedFunctionInfo* shared) {
1155 HeapObject* obj = shared;
1156 SetInternalReference(obj, entry,
1157 "name", shared->name(),
1158 SharedFunctionInfo::kNameOffset);
1159 TagObject(shared->code(), "(code)");
1160 SetInternalReference(obj, entry,
1161 "code", shared->code(),
1162 SharedFunctionInfo::kCodeOffset);
1163 TagObject(shared->scope_info(), "(function scope info)");
1164 SetInternalReference(obj, entry,
1165 "scope_info", shared->scope_info(),
1166 SharedFunctionInfo::kScopeInfoOffset);
1167 SetInternalReference(obj, entry,
1168 "instance_class_name", shared->instance_class_name(),
1169 SharedFunctionInfo::kInstanceClassNameOffset);
1170 SetInternalReference(obj, entry,
1171 "script", shared->script(),
1172 SharedFunctionInfo::kScriptOffset);
1173 TagObject(shared->construct_stub(), "(code)");
1174 SetInternalReference(obj, entry,
1175 "construct_stub", shared->construct_stub(),
1176 SharedFunctionInfo::kConstructStubOffset);
1177 SetInternalReference(obj, entry,
1178 "function_data", shared->function_data(),
1179 SharedFunctionInfo::kFunctionDataOffset);
1180 SetInternalReference(obj, entry,
1181 "debug_info", shared->debug_info(),
1182 SharedFunctionInfo::kDebugInfoOffset);
1183 SetInternalReference(obj, entry,
1184 "inferred_name", shared->inferred_name(),
1185 SharedFunctionInfo::kInferredNameOffset);
1186 SetInternalReference(obj, entry,
1187 "this_property_assignments",
1188 shared->this_property_assignments(),
1189 SharedFunctionInfo::kThisPropertyAssignmentsOffset);
1190 SetWeakReference(obj, entry,
1191 1, shared->initial_map(),
1192 SharedFunctionInfo::kInitialMapOffset);
1193}
1194
1195
1196void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1197 HeapObject* obj = script;
1198 SetInternalReference(obj, entry,
1199 "source", script->source(),
1200 Script::kSourceOffset);
1201 SetInternalReference(obj, entry,
1202 "name", script->name(),
1203 Script::kNameOffset);
1204 SetInternalReference(obj, entry,
1205 "data", script->data(),
1206 Script::kDataOffset);
1207 SetInternalReference(obj, entry,
1208 "context_data", script->context_data(),
1209 Script::kContextOffset);
1210 TagObject(script->line_ends(), "(script line ends)");
1211 SetInternalReference(obj, entry,
1212 "line_ends", script->line_ends(),
1213 Script::kLineEndsOffset);
1214}
1215
1216
1217void V8HeapExplorer::ExtractCodeCacheReferences(
1218 int entry, CodeCache* code_cache) {
1219 TagObject(code_cache->default_cache(), "(default code cache)");
1220 SetInternalReference(code_cache, entry,
1221 "default_cache", code_cache->default_cache(),
1222 CodeCache::kDefaultCacheOffset);
1223 TagObject(code_cache->normal_type_cache(), "(code type cache)");
1224 SetInternalReference(code_cache, entry,
1225 "type_cache", code_cache->normal_type_cache(),
1226 CodeCache::kNormalTypeCacheOffset);
1227}
1228
1229
1230void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
1231 TagObject(code->relocation_info(), "(code relocation info)");
1232 SetInternalReference(code, entry,
1233 "relocation_info", code->relocation_info(),
1234 Code::kRelocationInfoOffset);
1235 SetInternalReference(code, entry,
1236 "handler_table", code->handler_table(),
1237 Code::kHandlerTableOffset);
1238 TagObject(code->deoptimization_data(), "(code deopt data)");
1239 SetInternalReference(code, entry,
1240 "deoptimization_data", code->deoptimization_data(),
1241 Code::kDeoptimizationDataOffset);
1242 if (code->kind() == Code::FUNCTION) {
1243 SetInternalReference(code, entry,
1244 "type_feedback_info", code->type_feedback_info(),
1245 Code::kTypeFeedbackInfoOffset);
1246 }
1247 SetInternalReference(code, entry,
1248 "gc_metadata", code->gc_metadata(),
1249 Code::kGCMetadataOffset);
1250}
1251
1252
// A property cell exposes only its value.  The caller (ExtractReferences)
// also suppresses the indexed-reference sweep for cells.
void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
    int entry, JSGlobalPropertyCell* cell) {
  SetInternalReference(cell, entry, "value", cell->value());
}
1257
1258
// For a JSFunction, reports either its bound values (bound functions) or
// the context-allocated locals of its declaration scope.  Non-functions
// are ignored.
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    // Bound function: report bound this, target, and each bound argument.
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = collection_->names()->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  } else {
    Context* context = func->context()->declaration_context();
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetClosureReference(js_obj, entry, local_name, context->get(idx));
    }

    // Add function variable (the named function expression binding).
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetClosureReference(js_obj, entry, name, context->get(idx));
      }
    }
  }
}
1299
1300
// Emits property edges for the own properties of js_obj, handling both
// fast (descriptor-based) and slow (dictionary) property storage.  The
// hidden-properties backing store is reported as an internal reference
// instead of a user-visible property.
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < descs->number_of_descriptors(); i++) {
      // Skip descriptors beyond this map's own descriptor count.
      if (descs->GetDetails(i).descriptor_index() > real_size) continue;
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);

          String* k = descs->GetKey(i);
          if (index < js_obj->map()->inobject_properties()) {
            // In-object property: the exact field offset is known.
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_symbol()) {
              SetPropertyReference(
                  js_obj, entry,
                  k, value,
                  NULL,
                  js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(
                  js_obj, entry,
                  "hidden_properties", value,
                  js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            // Property stored in the out-of-object backing array.
            Object* value = js_obj->FastPropertyAt(index);
            if (k != heap_->hidden_symbol()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        case CONSTANT_FUNCTION:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstantFunction(i));
          break;
        case CALLBACKS: {
          Object* callback_obj = descs->GetValue(i);
          if (callback_obj->IsAccessorPair()) {
            AccessorPair* accessors = AccessorPair::cast(callback_obj);
            // NOTE(review): getter()/setter() return tagged Object*, which
            // are presumably never literally NULL - confirm this NULL test
            // is the intended "accessor absent" check.
            if (Object* getter = accessors->getter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   getter, "get-%s");
            }
            if (Object* setter = accessors->setter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   setter, "set-%s");
            }
          }
          break;
        }
        case NORMAL:  // only in slow mode
        case HANDLER:  // only in lookup results, not in descriptors
        case INTERCEPTOR:  // only in lookup results, not in descriptors
          break;
        case TRANSITION:
        case NONEXISTENT:
          UNREACHABLE();
          break;
      }
    }
  } else {
    StringDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsJSGlobalPropertyCell()
            ? JSGlobalPropertyCell::cast(target)->value()
            : target;
        if (k != heap_->hidden_symbol()) {
          SetPropertyReference(js_obj, entry, String::cast(k), value);
        } else {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
        }
      }
    }
  }
}
1389
1390
1391void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1392 if (js_obj->HasFastObjectElements()) {
1393 FixedArray* elements = FixedArray::cast(js_obj->elements());
1394 int length = js_obj->IsJSArray() ?
1395 Smi::cast(JSArray::cast(js_obj)->length())->value() :
1396 elements->length();
1397 for (int i = 0; i < length; ++i) {
1398 if (!elements->get(i)->IsTheHole()) {
1399 SetElementReference(js_obj, entry, i, elements->get(i));
1400 }
1401 }
1402 } else if (js_obj->HasDictionaryElements()) {
1403 SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1404 int length = dictionary->Capacity();
1405 for (int i = 0; i < length; ++i) {
1406 Object* k = dictionary->KeyAt(i);
1407 if (dictionary->IsKey(k)) {
1408 ASSERT(k->IsNumber());
1409 uint32_t index = static_cast<uint32_t>(k->Number());
1410 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1411 }
1412 }
1413 }
1414}
1415
1416
1417void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1418 int length = js_obj->GetInternalFieldCount();
1419 for (int i = 0; i < length; ++i) {
1420 Object* o = js_obj->GetInternalField(i);
1421 SetInternalReference(
1422 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1423 }
1424}
1425
1426
// Computes a display name for an object's constructor.  Instances of
// binding objects report "Object" as their constructor; for those we look
// up an own "constructor" property and, if it is a function with a
// non-empty name, use that name instead.
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_symbol();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_symbol()) {
    // Look up an immediate "constructor" property, if it is a function,
    // return its name. This is for instances of binding objects, which
    // have prototype constructor type "Object".
    Object* constructor_prop = NULL;
    LookupResult result(heap->isolate());
    object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
    if (!result.IsFound()) return object->constructor_name();

    constructor_prop = result.GetLazyValue();
    if (constructor_prop->IsJSFunction()) {
      Object* maybe_name =
          JSFunction::cast(constructor_prop)->shared()->name();
      if (maybe_name->IsString()) {
        String* name = String::cast(maybe_name);
        if (name->length() > 0) return name;
      }
    }
  }
  return object->constructor_name();
}
1452
1453
1454HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1455 if (!obj->IsHeapObject()) return NULL;
1456 return filler_->FindOrAddEntry(obj, this);
1457}
1458
1459
// Collects the heap's root pointers in two passes - strong roots only,
// then all roots - and replays them into the snapshot.  A root present in
// the all-roots list but absent from the strong list is reported as weak.
// Synchronization tags record which root category each reference belongs
// to.
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  // Marks where in all_references_ the roots of a given category end.
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  RootsReferencesExtractor()
      : collecting_all_references_(false),
        previous_reference_count_(0) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  // Switches from the strong-roots pass to the all-roots pass.
  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  // Emits gc-root and gc-subroot edges.  Relies on strong_references_
  // being a subsequence of all_references_ (both passes visit roots in
  // the same order).
  void FillReferences(V8HeapExplorer* explorer) {
    ASSERT(strong_references_.length() <= all_references_.length());
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    int strong_index = 0, all_index = 0, tags_index = 0;
    while (all_index < all_references_.length()) {
      if (strong_index < strong_references_.length() &&
          strong_references_[strong_index] == all_references_[all_index]) {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        false,
                                        all_references_[all_index++]);
        ++strong_index;
      } else {
        // Not among the strong roots => report as weak.
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        true,
                                        all_references_[all_index++]);
      }
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  // Called between root categories; records a category boundary (only
  // during the all-roots pass, and only if the category was non-empty).
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
};
1522
1523
// Walks the whole heap, extracting references for every reachable object,
// then extracts GC root references.  Returns false if the embedder's
// progress callback requested interruption.
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);

  filler_ = filler;
  bool interrupted = false;

  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  // Two passes over the roots: strong only, then all (see
  // RootsReferencesExtractor for how the two lists are merged).
  SetRootGcRootsReference();
  RootsReferencesExtractor extractor;
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);
  filler_ = NULL;
  return progress_->ProgressReport(true);
}
1554
1555
1556bool V8HeapExplorer::IsEssentialObject(Object* object) {
1557 return object->IsHeapObject()
1558 && !object->IsOddball()
1559 && object != heap_->empty_byte_array()
1560 && object != heap_->empty_fixed_array()
1561 && object != heap_->empty_descriptor_array()
1562 && object != heap_->fixed_array_map()
1563 && object != heap_->global_property_cell_map()
1564 && object != heap_->shared_function_info_map()
1565 && object != heap_->free_space_map()
1566 && object != heap_->one_pointer_filler_map()
1567 && object != heap_->two_pointer_filler_map();
1568}
1569
1570
1571void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
1572 int parent_entry,
1573 String* reference_name,
1574 Object* child_obj) {
1575 HeapEntry* child_entry = GetEntry(child_obj);
1576 if (child_entry != NULL) {
1577 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1578 parent_entry,
1579 collection_->names()->GetName(reference_name),
1580 child_entry);
1581 }
1582}
1583
1584
1585void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1586 int parent_entry,
1587 const char* reference_name,
1588 Object* child_obj) {
1589 HeapEntry* child_entry = GetEntry(child_obj);
1590 if (child_entry != NULL) {
1591 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1592 parent_entry,
1593 reference_name,
1594 child_entry);
1595 }
1596}
1597
1598
1599void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1600 int parent_entry,
1601 int index,
1602 Object* child_obj) {
1603 HeapEntry* child_entry = GetEntry(child_obj);
1604 if (child_entry != NULL) {
1605 filler_->SetIndexedReference(HeapGraphEdge::kElement,
1606 parent_entry,
1607 index,
1608 child_entry);
1609 }
1610}
1611
1612
1613void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1614 int parent_entry,
1615 const char* reference_name,
1616 Object* child_obj,
1617 int field_offset) {
1618 HeapEntry* child_entry = GetEntry(child_obj);
1619 if (child_entry == NULL) return;
1620 if (IsEssentialObject(child_obj)) {
1621 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1622 parent_entry,
1623 reference_name,
1624 child_entry);
1625 }
1626 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1627}
1628
1629
1630void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1631 int parent_entry,
1632 int index,
1633 Object* child_obj,
1634 int field_offset) {
1635 HeapEntry* child_entry = GetEntry(child_obj);
1636 if (child_entry == NULL) return;
1637 if (IsEssentialObject(child_obj)) {
1638 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1639 parent_entry,
1640 collection_->names()->GetName(index),
1641 child_entry);
1642 }
1643 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1644}
1645
1646
1647void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1648 int parent_entry,
1649 int index,
1650 Object* child_obj) {
1651 HeapEntry* child_entry = GetEntry(child_obj);
1652 if (child_entry != NULL && IsEssentialObject(child_obj)) {
1653 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1654 parent_entry,
1655 index,
1656 child_entry);
1657 }
1658}
1659
1660
1661void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1662 int parent_entry,
1663 int index,
1664 Object* child_obj,
1665 int field_offset) {
1666 HeapEntry* child_entry = GetEntry(child_obj);
1667 if (child_entry != NULL) {
1668 filler_->SetIndexedReference(HeapGraphEdge::kWeak,
1669 parent_entry,
1670 index,
1671 child_entry);
1672 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1673 }
1674}
1675
1676
1677void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
1678 int parent_entry,
1679 String* reference_name,
1680 Object* child_obj,
1681 const char* name_format_string,
1682 int field_offset) {
1683 HeapEntry* child_entry = GetEntry(child_obj);
1684 if (child_entry != NULL) {
1685 HeapGraphEdge::Type type = reference_name->length() > 0 ?
1686 HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
1687 const char* name = name_format_string != NULL ?
1688 collection_->names()->GetFormatted(
1689 name_format_string,
1690 *reference_name->ToCString(DISALLOW_NULLS,
1691 ROBUST_STRING_TRAVERSAL)) :
1692 collection_->names()->GetName(reference_name);
1693
1694 filler_->SetNamedReference(type,
1695 parent_entry,
1696 name,
1697 child_entry);
1698 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1699 }
1700}
1701
1702
// Links the synthetic "(GC roots)" entry to the snapshot's root entry.
void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}
1709
1710
// Adds a shortcut edge from the snapshot root to a user global object.
// The child is expected to already be representable by an entry.
void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  ASSERT(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}
1719
1720
// Links the "(GC roots)" entry to the synthetic subroot entry for the
// given root category tag.
void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->gc_roots()->index(),
      snapshot_->gc_subroot(tag));
}
1727
1728
1729void V8HeapExplorer::SetGcSubrootReference(
1730 VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
1731 HeapEntry* child_entry = GetEntry(child_obj);
1732 if (child_entry != NULL) {
1733 const char* name = GetStrongGcSubrootName(child_obj);
1734 if (name != NULL) {
1735 filler_->SetNamedReference(
1736 HeapGraphEdge::kInternal,
1737 snapshot_->gc_subroot(tag)->index(),
1738 name,
1739 child_entry);
1740 } else {
1741 filler_->SetIndexedAutoIndexReference(
1742 is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
1743 snapshot_->gc_subroot(tag)->index(),
1744 child_entry);
1745 }
1746 }
1747}
1748
1749
// Returns a human-readable name if `object` is one of the strong GC roots
// (root-list entries, struct maps, or symbols), NULL otherwise.  The name
// table is built lazily on first use.
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define SYMBOL_NAME(name, str) NAME_ENTRY(name)
    SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}
1767
1768
1769void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
1770 if (IsEssentialObject(obj)) {
1771 HeapEntry* entry = GetEntry(obj);
1772 if (entry->name()[0] == '\0') {
1773 entry->set_name(tag);
1774 }
1775 }
1776}
1777
1778
1779class GlobalObjectsEnumerator : public ObjectVisitor {
1780 public:
1781 virtual void VisitPointers(Object** start, Object** end) {
1782 for (Object** p = start; p < end; p++) {
1783 if ((*p)->IsNativeContext()) {
1784 Context* context = Context::cast(*p);
1785 JSObject* proxy = context->global_proxy();
1786 if (proxy->IsJSGlobalProxy()) {
1787 Object* global = proxy->map()->prototype();
1788 if (global->IsJSGlobalObject()) {
1789 objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
1790 }
1791 }
1792 }
1793 }
1794 }
1795 int count() { return objects_.length(); }
1796 Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
1797
1798 private:
1799 List<Handle<JSGlobalObject> > objects_;
1800};
1801
1802
// Modifies heap. Must not be run during heap traversal.
// Asks the embedder's name resolver for a URL/name for every global
// object and records it in objects_tags_.  Resolver calls may allocate,
// so all names are gathered first; the tag map is then filled under
// AssertNoAllocation using raw pointers.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = Isolate::Current();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      HandleScope scope(isolate);
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }

  AssertNoAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}
1828
1829
// Forwards global handles that carry an embedder class id to the
// NativeObjectsExplorer.  Plain pointer visits are not expected on this
// iteration path.
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;
};
1844
1845
// Allocates snapshot entries for native (embedder) objects described by
// v8::RetainedObjectInfo; every entry produced by one allocator instance
// shares the same entry type (e.g. kNative or kSynthetic).
class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  HeapEntry::Type entries_type_;
};
1861
1862
1863HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
1864 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
1865 intptr_t elements = info->GetElementCount();
1866 intptr_t size = info->GetSizeInBytes();
1867 const char* name = elements != -1
1868 ? collection_->names()->GetFormatted(
1869 "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
1870 : collection_->names()->GetCopy(info->GetLabel());
1871 return snapshot_->AddEntry(
1872 entries_type_,
1873 name,
1874 HeapObjectsMap::GenerateId(info),
1875 size != -1 ? static_cast<int>(size) : 0);
1876}
1877
1878
// Sets up the explorer for native (embedder) objects.  Two allocators
// are created: one for synthetic grouping entries and one for entries
// representing the native objects themselves.
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
1893
1894
1895NativeObjectsExplorer::~NativeObjectsExplorer() {
1896 for (HashMap::Entry* p = objects_by_info_.Start();
1897 p != NULL;
1898 p = objects_by_info_.Next(p)) {
1899 v8::RetainedObjectInfo* info =
1900 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
1901 info->Dispose();
1902 List<HeapObject*>* objects =
1903 reinterpret_cast<List<HeapObject*>* >(p->value);
1904 delete objects;
1905 }
1906 for (HashMap::Entry* p = native_groups_.Start();
1907 p != NULL;
1908 p = native_groups_.Next(p)) {
1909 v8::RetainedObjectInfo* info =
1910 reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
1911 info->Dispose();
1912 }
1913 delete synthetic_entries_allocator_;
1914 delete native_entries_allocator_;
1915}
1916
1917
// Returns the number of distinct embedder-provided RetainedObjectInfo
// objects. Queries the embedder first; FillRetainedObjects is a no-op on
// repeated calls, so this is cheap after the first invocation.
int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}
1922
1923
1924void NativeObjectsExplorer::FillRetainedObjects() {
1925 if (embedder_queried_) return;
1926 Isolate* isolate = Isolate::Current();
1927 const GCType major_gc_type = kGCTypeMarkSweepCompact;
1928 // Record objects that are joined into ObjectGroups.
1929 isolate->heap()->CallGCPrologueCallbacks(major_gc_type);
1930 List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
1931 for (int i = 0; i < groups->length(); ++i) {
1932 ObjectGroup* group = groups->at(i);
1933 if (group->info_ == NULL) continue;
1934 List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
1935 for (size_t j = 0; j < group->length_; ++j) {
1936 HeapObject* obj = HeapObject::cast(*group->objects_[j]);
1937 list->Add(obj);
1938 in_groups_.Insert(obj);
1939 }
1940 group->info_ = NULL; // Acquire info object ownership.
1941 }
1942 isolate->global_handles()->RemoveObjectGroups();
1943 isolate->heap()->CallGCEpilogueCallbacks(major_gc_type);
1944 // Record objects that are not in ObjectGroups, but have class ID.
1945 GlobalHandlesExtractor extractor(this);
1946 isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
1947 embedder_queried_ = true;
1948}
1949
1950void NativeObjectsExplorer::FillImplicitReferences() {
1951 Isolate* isolate = Isolate::Current();
1952 List<ImplicitRefGroup*>* groups =
1953 isolate->global_handles()->implicit_ref_groups();
1954 for (int i = 0; i < groups->length(); ++i) {
1955 ImplicitRefGroup* group = groups->at(i);
1956 HeapObject* parent = *group->parent_;
1957 int parent_entry =
1958 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
1959 ASSERT(parent_entry != HeapEntry::kNoEntry);
1960 Object*** children = group->children_;
1961 for (size_t j = 0; j < group->length_; ++j) {
1962 Object* child = *children[j];
1963 HeapEntry* child_entry =
1964 filler_->FindOrAddEntry(child, native_entries_allocator_);
1965 filler_->SetNamedReference(
1966 HeapGraphEdge::kInternal,
1967 parent_entry,
1968 "native",
1969 child_entry);
1970 }
1971 }
1972 isolate->global_handles()->RemoveImplicitRefGroups();
1973}
1974
1975List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
1976 v8::RetainedObjectInfo* info) {
1977 HashMap::Entry* entry =
1978 objects_by_info_.Lookup(info, InfoHash(info), true);
1979 if (entry->value != NULL) {
1980 info->Dispose();
1981 } else {
1982 entry->value = new List<HeapObject*>(4);
1983 }
1984 return reinterpret_cast<List<HeapObject*>* >(entry->value);
1985}
1986
1987
1988bool NativeObjectsExplorer::IterateAndExtractReferences(
1989 SnapshotFillerInterface* filler) {
1990 filler_ = filler;
1991 FillRetainedObjects();
1992 FillImplicitReferences();
1993 if (EstimateObjectsCount() > 0) {
1994 for (HashMap::Entry* p = objects_by_info_.Start();
1995 p != NULL;
1996 p = objects_by_info_.Next(p)) {
1997 v8::RetainedObjectInfo* info =
1998 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
1999 SetNativeRootReference(info);
2000 List<HeapObject*>* objects =
2001 reinterpret_cast<List<HeapObject*>* >(p->value);
2002 for (int i = 0; i < objects->length(); ++i) {
2003 SetWrapperNativeReferences(objects->at(i), info);
2004 }
2005 }
2006 SetRootNativeRootsReference();
2007 }
2008 filler_ = NULL;
2009 return true;
2010}
2011
2012
2013class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2014 public:
2015 explicit NativeGroupRetainedObjectInfo(const char* label)
2016 : disposed_(false),
2017 hash_(reinterpret_cast<intptr_t>(label)),
2018 label_(label) {
2019 }
2020
2021 virtual ~NativeGroupRetainedObjectInfo() {}
2022 virtual void Dispose() {
2023 CHECK(!disposed_);
2024 disposed_ = true;
2025 delete this;
2026 }
2027 virtual bool IsEquivalent(RetainedObjectInfo* other) {
2028 return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2029 }
2030 virtual intptr_t GetHash() { return hash_; }
2031 virtual const char* GetLabel() { return label_; }
2032
2033 private:
2034 bool disposed_;
2035 intptr_t hash_;
2036 const char* label_;
2037};
2038
2039
2040NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2041 const char* label) {
2042 const char* label_copy = collection_->names()->GetCopy(label);
2043 uint32_t hash = StringHasher::HashSequentialString(
2044 label_copy,
2045 static_cast<int>(strlen(label_copy)),
2046 HEAP->HashSeed());
2047 HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2048 hash, true);
2049 if (entry->value == NULL) {
2050 entry->value = new NativeGroupRetainedObjectInfo(label);
2051 }
2052 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2053}
2054
2055
2056void NativeObjectsExplorer::SetNativeRootReference(
2057 v8::RetainedObjectInfo* info) {
2058 HeapEntry* child_entry =
2059 filler_->FindOrAddEntry(info, native_entries_allocator_);
2060 ASSERT(child_entry != NULL);
2061 NativeGroupRetainedObjectInfo* group_info =
2062 FindOrAddGroupInfo(info->GetGroupLabel());
2063 HeapEntry* group_entry =
2064 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2065 filler_->SetNamedAutoIndexReference(
2066 HeapGraphEdge::kInternal,
2067 group_entry->index(),
2068 child_entry);
2069}
2070
2071
2072void NativeObjectsExplorer::SetWrapperNativeReferences(
2073 HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2074 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2075 ASSERT(wrapper_entry != NULL);
2076 HeapEntry* info_entry =
2077 filler_->FindOrAddEntry(info, native_entries_allocator_);
2078 ASSERT(info_entry != NULL);
2079 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2080 wrapper_entry->index(),
2081 "native",
2082 info_entry);
2083 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2084 info_entry->index(),
2085 wrapper_entry);
2086}
2087
2088
2089void NativeObjectsExplorer::SetRootNativeRootsReference() {
2090 for (HashMap::Entry* entry = native_groups_.Start();
2091 entry;
2092 entry = native_groups_.Next(entry)) {
2093 NativeGroupRetainedObjectInfo* group_info =
2094 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2095 HeapEntry* group_entry =
2096 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2097 ASSERT(group_entry != NULL);
2098 filler_->SetIndexedAutoIndexReference(
2099 HeapGraphEdge::kElement,
2100 snapshot_->root()->index(),
2101 group_entry);
2102 }
2103}
2104
2105
2106void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2107 if (in_groups_.Contains(*p)) return;
2108 Isolate* isolate = Isolate::Current();
2109 v8::RetainedObjectInfo* info =
2110 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2111 if (info == NULL) return;
2112 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2113}
2114
2115
2116class SnapshotFiller : public SnapshotFillerInterface {
2117 public:
2118 explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
2119 : snapshot_(snapshot),
2120 collection_(snapshot->collection()),
2121 entries_(entries) { }
2122 HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
2123 HeapEntry* entry = allocator->AllocateEntry(ptr);
2124 entries_->Pair(ptr, entry->index());
2125 return entry;
2126 }
2127 HeapEntry* FindEntry(HeapThing ptr) {
2128 int index = entries_->Map(ptr);
2129 return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
2130 }
2131 HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
2132 HeapEntry* entry = FindEntry(ptr);
2133 return entry != NULL ? entry : AddEntry(ptr, allocator);
2134 }
2135 void SetIndexedReference(HeapGraphEdge::Type type,
2136 int parent,
2137 int index,
2138 HeapEntry* child_entry) {
2139 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2140 parent_entry->SetIndexedReference(type, index, child_entry);
2141 }
2142 void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
2143 int parent,
2144 HeapEntry* child_entry) {
2145 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2146 int index = parent_entry->children_count() + 1;
2147 parent_entry->SetIndexedReference(type, index, child_entry);
2148 }
2149 void SetNamedReference(HeapGraphEdge::Type type,
2150 int parent,
2151 const char* reference_name,
2152 HeapEntry* child_entry) {
2153 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2154 parent_entry->SetNamedReference(type, reference_name, child_entry);
2155 }
2156 void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
2157 int parent,
2158 HeapEntry* child_entry) {
2159 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2160 int index = parent_entry->children_count() + 1;
2161 parent_entry->SetNamedReference(
2162 type,
2163 collection_->names()->GetName(index),
2164 child_entry);
2165 }
2166
2167 private:
2168 HeapSnapshot* snapshot_;
2169 HeapSnapshotsCollection* collection_;
2170 HeapEntriesMap* entries_;
2171};
2172
2173
HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),  // May be NULL; then progress is not reported.
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}
2185
2186
// Builds the snapshot: tags global objects, forces the heap into a
// stable, iterable state via full GCs, then extracts nodes and edges from
// both the V8 heap and the embedder's native objects. Returns false if
// the embedder aborted via the ActivityControl.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = Isolate::Current()->heap();
  // No space may be left in a partially (conservatively) swept state.
  CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
  CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
  CHECK(!debug_heap->code_space()->was_swept_conservatively());
  CHECK(!debug_heap->cell_space()->was_swept_conservatively());
  CHECK(!debug_heap->map_space()->was_swept_conservatively());
#endif

  // The following code uses heap iterators, so we want the heap to be
  // stable. It should follow TagGlobalObjects as that can allocate.
  AssertNoAllocation no_alloc;

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  SetProgressTotal(1);  // 1 pass.

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  // Report completion; the embedder may still abort at this point.
  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
2233
2234
// Advances the progress counter by one processed object; reported to the
// embedder via ProgressReport.
void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}
2238
2239
2240bool HeapSnapshotGenerator::ProgressReport(bool force) {
2241 const int kProgressReportGranularity = 10000;
2242 if (control_ != NULL
2243 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2244 return
2245 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2246 v8::ActivityControl::kContinue;
2247 }
2248 return true;
2249}
2250
2251
2252void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2253 if (control_ == NULL) return;
2254 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2255 progress_total_ = iterations_count * (
2256 v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2257 dom_explorer_.EstimateObjectsCount());
2258 progress_counter_ = 0;
2259}
2260
2261
2262bool HeapSnapshotGenerator::FillReferences() {
2263 SnapshotFiller filler(snapshot_, &entries_);
2264 v8_heap_explorer_.AddRootEntries(&filler);
2265 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2266 && dom_explorer_.IterateAndExtractReferences(&filler);
2267}
2268
2269
// Maximum number of decimal characters (including a possible minus sign)
// needed to print an integer of the given byte width.
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;    // "-2147483648"
  static const int kUnsigned = 10;  // "4294967295"
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;    // "-9223372036854775808"
  static const int kUnsigned = 20;  // "18446744073709551615"
};
2279
2280
// Buffers serializer output into fixed-size chunks and hands each full
// chunk to the embedder-provided v8::OutputStream. Once the stream
// requests an abort, all further output is silently dropped.
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    ASSERT(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    ASSERT(c != '\0');
    ASSERT(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  // Appends the first n characters of s, flushing chunks as they fill.
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    ASSERT(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      // Copy as much as fits into the current chunk.
      int s_chunk_size = Min(
          chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      ASSERT(s_chunk_size > 0);
      memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  // Flushes any buffered output and signals end-of-stream (no-op after
  // an abort).
  void Finalize() {
    if (aborted_) return;
    ASSERT(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      // Fast path: print straight into the current chunk.
      int result = OS::SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      ASSERT(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      // Not enough room left: print into a temporary buffer and append
      // via AddString, which can split across chunk boundaries.
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = OS::SNPrintF(buffer, format, n);
      USE(result);
      ASSERT(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    ASSERT(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;  // Not owned.
  int chunk_size_;            // Embedder-chosen chunk size; asserted > 0.
  ScopedVector<char> chunk_;  // Accumulation buffer of chunk_size_ bytes.
  int chunk_pos_;             // Number of valid bytes in chunk_.
  bool aborted_;              // Set when the stream requested an abort.
};
2364
2365
// Number of fields per serialized edge record: type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// Number of fields per serialized node record:
// type, name, id, self_size, children_index.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
2370
2371void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2372 ASSERT(writer_ == NULL);
2373 writer_ = new OutputStreamWriter(stream);
2374
2375 HeapSnapshot* original_snapshot = NULL;
2376 if (snapshot_->RawSnapshotSize() >=
2377 SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
2378 // The snapshot is too big. Serialize a fake snapshot.
2379 original_snapshot = snapshot_;
2380 snapshot_ = CreateFakeSnapshot();
2381 }
2382
2383 SerializeImpl();
2384
2385 delete writer_;
2386 writer_ = NULL;
2387
2388 if (original_snapshot != NULL) {
2389 delete snapshot_;
2390 snapshot_ = original_snapshot;
2391 }
2392}
2393
2394
// Builds a minimal placeholder snapshot whose only non-root node is a
// string entry explaining that the real snapshot exceeded the size limit.
// The caller owns (and must delete) the returned snapshot.
HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
  HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
                                          HeapSnapshot::kFull,
                                          snapshot_->title(),
                                          snapshot_->uid());
  result->AddRootEntry();
  const char* text = snapshot_->collection()->names()->GetFormatted(
      "The snapshot is too big. "
      "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
      "Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
      SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
      (snapshot_->RawSnapshotSize() + MB - 1) / MB);  // Round up to whole MB.
  HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
  result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
  result->FillChildren();
  return result;
}
2412
2413
// Emits the snapshot as one JSON object with four sections:
// {"snapshot":{...},"nodes":[...],"edges":[...],"strings":[...]}.
// Serialization stops between sections once the writer reports an abort.
void HeapSnapshotJSONSerializer::SerializeImpl() {
  ASSERT(0 == snapshot_->root()->index());  // Root must be the first node.
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}
2436
2437
2438int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2439 HashMap::Entry* cache_entry = strings_.Lookup(
2440 const_cast<char*>(s), ObjectHash(s), true);
2441 if (cache_entry->value == NULL) {
2442 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2443 }
2444 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2445}
2446
2447
2448static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
2449 int number_of_digits = 0;
2450 unsigned t = value;
2451 do {
2452 ++number_of_digits;
2453 } while (t /= 10);
2454
2455 buffer_pos += number_of_digits;
2456 int result = buffer_pos;
2457 do {
2458 int last_digit = value % 10;
2459 buffer[--buffer_pos] = '0' + last_digit;
2460 value /= 10;
2461 } while (value);
2462 return result;
2463}
2464
2465
2466void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2467 bool first_edge) {
2468 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2469 static const int kBufferSize =
2470 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2471 EmbeddedVector<char, kBufferSize> buffer;
2472 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2473 || edge->type() == HeapGraphEdge::kHidden
2474 || edge->type() == HeapGraphEdge::kWeak
2475 ? edge->index() : GetStringId(edge->name());
2476 int buffer_pos = 0;
2477 if (!first_edge) {
2478 buffer[buffer_pos++] = ',';
2479 }
2480 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2481 buffer[buffer_pos++] = ',';
2482 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2483 buffer[buffer_pos++] = ',';
2484 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2485 buffer[buffer_pos++] = '\n';
2486 buffer[buffer_pos++] = '\0';
2487 writer_->AddString(buffer.start());
2488}
2489
2490
2491void HeapSnapshotJSONSerializer::SerializeEdges() {
2492 List<HeapGraphEdge*>& edges = snapshot_->children();
2493 for (int i = 0; i < edges.length(); ++i) {
2494 ASSERT(i == 0 ||
2495 edges[i - 1]->from()->index() <= edges[i]->from()->index());
2496 SerializeEdge(edges[i], i == 0);
2497 if (writer_->aborted()) return;
2498 }
2499}
2500
2501
2502void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2503 // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
2504 static const int kBufferSize =
2505 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2506 + 5 + 1 + 1;
2507 EmbeddedVector<char, kBufferSize> buffer;
2508 int buffer_pos = 0;
2509 if (entry_index(entry) != 0) {
2510 buffer[buffer_pos++] = ',';
2511 }
2512 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2513 buffer[buffer_pos++] = ',';
2514 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2515 buffer[buffer_pos++] = ',';
2516 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2517 buffer[buffer_pos++] = ',';
2518 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2519 buffer[buffer_pos++] = ',';
2520 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2521 buffer[buffer_pos++] = '\n';
2522 buffer[buffer_pos++] = '\0';
2523 writer_->AddString(buffer.start());
2524}
2525
2526
2527void HeapSnapshotJSONSerializer::SerializeNodes() {
2528 List<HeapEntry>& entries = snapshot_->entries();
2529 for (int i = 0; i < entries.length(); ++i) {
2530 SerializeNode(&entries[i]);
2531 if (writer_->aborted()) return;
2532 }
2533}
2534
2535
// Writes the "snapshot" meta object: title, uid, a self-describing "meta"
// layout for node/edge records (field names and type enumerations), and
// the total node and edge counts.
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"title\":\"");
  writer_->AddString(snapshot_->title());
  writer_->AddString("\"");
  writer_->AddString(",\"uid\":");
  writer_->AddNumber(snapshot_->uid());
  writer_->AddString(",\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
      JSON_S("node_fields") ":" JSON_A(
          JSON_S("type") ","
          JSON_S("name") ","
          JSON_S("id") ","
          JSON_S("self_size") ","
          JSON_S("edge_count")) ","
      JSON_S("node_types") ":" JSON_A(
          JSON_A(
              JSON_S("hidden") ","
              JSON_S("array") ","
              JSON_S("string") ","
              JSON_S("object") ","
              JSON_S("code") ","
              JSON_S("closure") ","
              JSON_S("regexp") ","
              JSON_S("number") ","
              JSON_S("native") ","
              JSON_S("synthetic")) ","
          JSON_S("string") ","
          JSON_S("number") ","
          JSON_S("number") ","
          JSON_S("number") ","
          JSON_S("number") ","
          JSON_S("number")) ","
      JSON_S("edge_fields") ":" JSON_A(
          JSON_S("type") ","
          JSON_S("name_or_index") ","
          JSON_S("to_node")) ","
      JSON_S("edge_types") ":" JSON_A(
          JSON_A(
              JSON_S("context") ","
              JSON_S("element") ","
              JSON_S("property") ","
              JSON_S("internal") ","
              JSON_S("hidden") ","
              JSON_S("shortcut") ","
              JSON_S("weak")) ","
          JSON_S("string_or_number") ","
          JSON_S("node"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
}
2596
2597
2598static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2599 static const char hex_chars[] = "0123456789ABCDEF";
2600 w->AddString("\\u");
2601 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2602 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2603 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2604 w->AddCharacter(hex_chars[u & 0xf]);
2605}
2606
// Writes |s| as a double-quoted JSON string (preceded by a newline),
// escaping control characters and converting non-ASCII UTF-8 sequences
// into \uXXXX escapes.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          // Printable ASCII is emitted as-is.
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          // Take at most 4 bytes (the longest UTF-8 sequence), stopping
          // at the terminator.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            // NOTE(review): WriteUChar emits only 4 hex digits, so code
            // points above U+FFFF would be truncated rather than written
            // as a surrogate pair — confirm whether such input can occur.
            WriteUChar(writer_, c);
            ASSERT(cursor != 0);
            // |cursor| is the number of bytes consumed; the loop's ++s
            // accounts for one of them.
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
2655
2656
2657void HeapSnapshotJSONSerializer::SerializeStrings() {
2658 List<HashMap::Entry*> sorted_strings;
2659 SortHashMap(&strings_, &sorted_strings);
2660 writer_->AddString("\"<dummy>\"");
2661 for (int i = 0; i < sorted_strings.length(); ++i) {
2662 writer_->AddCharacter(',');
2663 SerializeString(
2664 reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
2665 if (writer_->aborted()) return;
2666 }
2667}
2668
2669
// Comparator for pointers to hash map entries: orders by the numeric id
// stored in the entry's |value| slot (used to sort strings by string id).
template<typename T>
inline static int SortUsingEntryValue(const T* x, const T* y) {
  uintptr_t lhs = reinterpret_cast<uintptr_t>((*x)->value);
  uintptr_t rhs = reinterpret_cast<uintptr_t>((*y)->value);
  if (lhs == rhs) return 0;
  return lhs < rhs ? -1 : 1;
}
2682
2683
2684void HeapSnapshotJSONSerializer::SortHashMap(
2685 HashMap* map, List<HashMap::Entry*>* sorted_entries) {
2686 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
2687 sorted_entries->Add(p);
2688 sorted_entries->Sort(SortUsingEntryValue);
2689}
2690
2691} } // namespace v8::internal