// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "heap-snapshot-generator-inl.h"

#include "heap-profiler.h"
#include "debug.h"

namespace v8 {
namespace internal {
37
38
39HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
40 : type_(type),
41 from_index_(from),
42 to_index_(to),
43 name_(name) {
44 ASSERT(type == kContextVariable
45 || type == kProperty
46 || type == kInternal
47 || type == kShortcut);
48}
49
50
51HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
52 : type_(type),
53 from_index_(from),
54 to_index_(to),
55 index_(index) {
56 ASSERT(type == kElement || type == kHidden || type == kWeak);
57}
58
59
60void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
61 to_entry_ = &snapshot->entries()[to_index_];
62}
63
64
65const int HeapEntry::kNoEntry = -1;
66
67HeapEntry::HeapEntry(HeapSnapshot* snapshot,
68 Type type,
69 const char* name,
70 SnapshotObjectId id,
71 int self_size)
72 : type_(type),
73 children_count_(0),
74 children_index_(-1),
75 self_size_(self_size),
76 id_(id),
77 snapshot_(snapshot),
78 name_(name) { }
79
80
81void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
82 const char* name,
83 HeapEntry* entry) {
84 HeapGraphEdge edge(type, name, this->index(), entry->index());
85 snapshot_->edges().Add(edge);
86 ++children_count_;
87}
88
89
90void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
91 int index,
92 HeapEntry* entry) {
93 HeapGraphEdge edge(type, index, this->index(), entry->index());
94 snapshot_->edges().Add(edge);
95 ++children_count_;
96}
97
98
99Handle<HeapObject> HeapEntry::GetHeapObject() {
100 return snapshot_->collection()->FindHeapObjectById(id());
101}
102
103
104void HeapEntry::Print(
105 const char* prefix, const char* edge_name, int max_depth, int indent) {
106 STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
107 OS::Print("%6d @%6u %*c %s%s: ",
108 self_size(), id(), indent, ' ', prefix, edge_name);
109 if (type() != kString) {
110 OS::Print("%s %.40s\n", TypeAsString(), name_);
111 } else {
112 OS::Print("\"");
113 const char* c = name_;
114 while (*c && (c - name_) <= 40) {
115 if (*c != '\n')
116 OS::Print("%c", *c);
117 else
118 OS::Print("\\n");
119 ++c;
120 }
121 OS::Print("\"\n");
122 }
123 if (--max_depth == 0) return;
124 Vector<HeapGraphEdge*> ch = children();
125 for (int i = 0; i < ch.length(); ++i) {
126 HeapGraphEdge& edge = *ch[i];
127 const char* edge_prefix = "";
128 EmbeddedVector<char, 64> index;
129 const char* edge_name = index.start();
130 switch (edge.type()) {
131 case HeapGraphEdge::kContextVariable:
132 edge_prefix = "#";
133 edge_name = edge.name();
134 break;
135 case HeapGraphEdge::kElement:
136 OS::SNPrintF(index, "%d", edge.index());
137 break;
138 case HeapGraphEdge::kInternal:
139 edge_prefix = "$";
140 edge_name = edge.name();
141 break;
142 case HeapGraphEdge::kProperty:
143 edge_name = edge.name();
144 break;
145 case HeapGraphEdge::kHidden:
146 edge_prefix = "$";
147 OS::SNPrintF(index, "%d", edge.index());
148 break;
149 case HeapGraphEdge::kShortcut:
150 edge_prefix = "^";
151 edge_name = edge.name();
152 break;
153 case HeapGraphEdge::kWeak:
154 edge_prefix = "w";
155 OS::SNPrintF(index, "%d", edge.index());
156 break;
157 default:
158 OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
159 }
160 edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
161 }
162}
163
164
165const char* HeapEntry::TypeAsString() {
166 switch (type()) {
167 case kHidden: return "/hidden/";
168 case kObject: return "/object/";
169 case kClosure: return "/closure/";
170 case kString: return "/string/";
171 case kCode: return "/code/";
172 case kArray: return "/array/";
173 case kRegExp: return "/regexp/";
174 case kHeapNumber: return "/number/";
175 case kNative: return "/native/";
176 case kSynthetic: return "/synthetic/";
    case kContext: return "/context/";
    default: return "???";
179 }
180}
181
182
183// It is very important to keep objects that form a heap snapshot
184// as small as possible.
185namespace { // Avoid littering the global namespace.
186
187template <size_t ptr_size> struct SnapshotSizeConstants;
188
189template <> struct SnapshotSizeConstants<4> {
190 static const int kExpectedHeapGraphEdgeSize = 12;
191 static const int kExpectedHeapEntrySize = 24;
192 static const int kExpectedHeapSnapshotsCollectionSize = 100;
193 static const int kExpectedHeapSnapshotSize = 136;
194 static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
195};
196
197template <> struct SnapshotSizeConstants<8> {
198 static const int kExpectedHeapGraphEdgeSize = 24;
199 static const int kExpectedHeapEntrySize = 32;
200 static const int kExpectedHeapSnapshotsCollectionSize = 152;
201 static const int kExpectedHeapSnapshotSize = 168;
202 static const uint64_t kMaxSerializableSnapshotRawSize =
203 static_cast<uint64_t>(6000) * MB;
204};
205
206} // namespace
207
208HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
209 HeapSnapshot::Type type,
210 const char* title,
211 unsigned uid)
212 : collection_(collection),
213 type_(type),
214 title_(title),
215 uid_(uid),
216 root_index_(HeapEntry::kNoEntry),
217 gc_roots_index_(HeapEntry::kNoEntry),
218 natives_root_index_(HeapEntry::kNoEntry),
219 max_snapshot_js_object_id_(0) {
220 STATIC_CHECK(
221 sizeof(HeapGraphEdge) ==
222 SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
223 STATIC_CHECK(
224 sizeof(HeapEntry) ==
225 SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
226 for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
227 gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
228 }
229}
230
231
232void HeapSnapshot::Delete() {
233 collection_->RemoveSnapshot(this);
234 delete this;
235}
236
237
238void HeapSnapshot::RememberLastJSObjectId() {
239 max_snapshot_js_object_id_ = collection_->last_assigned_id();
240}
241
242
243HeapEntry* HeapSnapshot::AddRootEntry() {
244 ASSERT(root_index_ == HeapEntry::kNoEntry);
245 ASSERT(entries_.is_empty()); // Root entry must be the first one.
246 HeapEntry* entry = AddEntry(HeapEntry::kObject,
247 "",
248 HeapObjectsMap::kInternalRootObjectId,
249 0);
250 root_index_ = entry->index();
251 ASSERT(root_index_ == 0);
252 return entry;
253}
254
255
256HeapEntry* HeapSnapshot::AddGcRootsEntry() {
257 ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
258 HeapEntry* entry = AddEntry(HeapEntry::kObject,
259 "(GC roots)",
260 HeapObjectsMap::kGcRootsObjectId,
261 0);
262 gc_roots_index_ = entry->index();
263 return entry;
264}
265
266
267HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
268 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
269 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
270 HeapEntry* entry = AddEntry(
271 HeapEntry::kObject,
272 VisitorSynchronization::kTagNames[tag],
273 HeapObjectsMap::GetNthGcSubrootId(tag),
274 0);
275 gc_subroot_indexes_[tag] = entry->index();
276 return entry;
277}
278
279
280HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
281 const char* name,
282 SnapshotObjectId id,
283 int size) {
284 HeapEntry entry(this, type, name, id, size);
285 entries_.Add(entry);
286 return &entries_.last();
287}
288
289
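// Distributes edges to their source entries: allocates the flat children
// array, lets every entry claim its slice via set_children_index(), then
// resolves each edge's to_index_ into a direct HeapEntry pointer while
// registering the edge with its from() entry.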
290void HeapSnapshot::FillChildren() {
291 ASSERT(children().is_empty());
292 children().Allocate(edges().length());
293 int children_index = 0;
294 for (int i = 0; i < entries().length(); ++i) {
295 HeapEntry* entry = &entries()[i];
296 children_index = entry->set_children_index(children_index);
297 }
298 ASSERT(edges().length() == children_index);
299 for (int i = 0; i < edges().length(); ++i) {
300 HeapGraphEdge* edge = &edges()[i];
301 edge->ReplaceToIndexWithEntry(this);
302 edge->from()->add_child(edge);
303 }
304}
305
306
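// Comparison functor used by GetEntryById for the binary search over the
// id-sorted entries list.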
307class FindEntryById {
308 public:
309 explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
310 int operator()(HeapEntry* const* entry) {
311 if ((*entry)->id() == id_) return 0;
312 return (*entry)->id() < id_ ? -1 : 1;
313 }
314 private:
315 SnapshotObjectId id_;
316};
317
318
319HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
320 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
321 // Perform a binary search by id.
322 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
323 if (index == -1)
324 return NULL;
325 return entries_by_id->at(index);
326}
327
328
329template<class T>
330static int SortByIds(const T* entry1_ptr,
331 const T* entry2_ptr) {
332 if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
333 return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
334}
335
336
337List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
338 if (sorted_entries_.is_empty()) {
339 sorted_entries_.Allocate(entries_.length());
340 for (int i = 0; i < entries_.length(); ++i) {
341 sorted_entries_[i] = &entries_[i];
342 }
343 sorted_entries_.Sort(SortByIds);
344 }
345 return &sorted_entries_;
346}
347
348
349void HeapSnapshot::Print(int max_depth) {
350 root()->Print("", "", max_depth, 0);
351}
352
353
354template<typename T, class P>
355static size_t GetMemoryUsedByList(const List<T, P>& list) {
356 return list.length() * sizeof(T) + sizeof(list);
357}
358
359
360size_t HeapSnapshot::RawSnapshotSize() const {
361 STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
362 sizeof(HeapSnapshot)); // NOLINT
363 return
364 sizeof(*this) +
365 GetMemoryUsedByList(entries_) +
366 GetMemoryUsedByList(edges_) +
367 GetMemoryUsedByList(children_) +
368 GetMemoryUsedByList(sorted_entries_);
369}
370
371
// We split IDs: even IDs are used for embedder objects (see
// HeapObjectsMap::GenerateId) and odd IDs for V8 heap objects.
374const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
375const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
376 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
377const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
378 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
379const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
380 HeapObjectsMap::kGcRootsFirstSubrootId +
381 VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
382
HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in the HashMap we see no difference between two
  // cases: it has an entry with NULL as the value, or it has just created
  // a new entry on the fly with NULL as the default value.
  // With this dummy element we have a guarantee that all entries_map_ entries
  // will have a value field greater than 0, a fact that is used in the
  // MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}
396
397
398void HeapObjectsMap::SnapshotGenerationFinished() {
399 RemoveDeadEntries();
400}
401
402
void HeapObjectsMap::MoveObject(Address from, Address to) {
  ASSERT(to != NULL);
  ASSERT(from != NULL);
  if (from == to) return;
  void* from_value = entries_map_.Remove(from, AddressHash(from));
  if (from_value == NULL) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(to, AddressHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
    if (to_entry->value != NULL) {
      // We found an existing entry for the to address, belonging to an old
      // object. Without this operation we would have two EntryInfos with the
      // same addr field, and later RemoveDeadEntries would remove one of
      // them together with the corresponding entries_map_ entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    to_entry->value = from_value;
  }
}
436
437
438SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
439 HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
440 if (entry == NULL) return 0;
441 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
442 EntryInfo& entry_info = entries_.at(entry_index);
443 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
444 return entry_info.id;
445}
446
447
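// Returns the id already assigned to the object at |addr|, refreshing the
// recorded size and marking the entry as live; otherwise assigns the next
// available id (advancing next_id_ by kObjectIdStep) and records a new entry.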
448SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
449 unsigned int size) {
450 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
451 HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
452 if (entry->value != NULL) {
453 int entry_index =
454 static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
455 EntryInfo& entry_info = entries_.at(entry_index);
456 entry_info.accessed = true;
457 entry_info.size = size;
458 return entry_info.id;
459 }
460 entry->value = reinterpret_cast<void*>(entries_.length());
461 SnapshotObjectId id = next_id_;
462 next_id_ += kObjectIdStep;
463 entries_.Add(EntryInfo(id, addr, size));
464 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
465 return id;
466}
467
468
469void HeapObjectsMap::StopHeapObjectsTracking() {
470 time_intervals_.Clear();
471}
472
473void HeapObjectsMap::UpdateHeapObjectsMap() {
474 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
475 "HeapSnapshotsCollection::UpdateHeapObjectsMap");
476 HeapIterator iterator(heap_);
477 for (HeapObject* obj = iterator.next();
478 obj != NULL;
479 obj = iterator.next()) {
480 FindOrAddEntry(obj->address(), obj->Size());
481 }
482 RemoveDeadEntries();
483}
484
485
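// Re-scans the heap (UpdateHeapObjectsMap), opens a new time interval, and
// streams a v8::HeapStatsUpdate record for every interval whose object count
// or total size changed, flushing in chunks of the stream's preferred size.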
486SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
487 UpdateHeapObjectsMap();
488 time_intervals_.Add(TimeInterval(next_id_));
  int preferred_chunk_size = stream->GetChunkSize();
490 List<v8::HeapStatsUpdate> stats_buffer;
491 ASSERT(!entries_.is_empty());
492 EntryInfo* entry_info = &entries_.first();
493 EntryInfo* end_entry_info = &entries_.last() + 1;
494 for (int time_interval_index = 0;
495 time_interval_index < time_intervals_.length();
496 ++time_interval_index) {
497 TimeInterval& time_interval = time_intervals_[time_interval_index];
498 SnapshotObjectId time_interval_id = time_interval.id;
499 uint32_t entries_size = 0;
500 EntryInfo* start_entry_info = entry_info;
501 while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
502 entries_size += entry_info->size;
503 ++entry_info;
504 }
505 uint32_t entries_count =
506 static_cast<uint32_t>(entry_info - start_entry_info);
507 if (time_interval.count != entries_count ||
508 time_interval.size != entries_size) {
509 stats_buffer.Add(v8::HeapStatsUpdate(
510 time_interval_index,
511 time_interval.count = entries_count,
512 time_interval.size = entries_size));
      if (stats_buffer.length() >= preferred_chunk_size) {
514 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
515 &stats_buffer.first(), stats_buffer.length());
516 if (result == OutputStream::kAbort) return last_assigned_id();
517 stats_buffer.Clear();
518 }
519 }
520 }
521 ASSERT(entry_info == end_entry_info);
522 if (!stats_buffer.is_empty()) {
523 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
524 &stats_buffer.first(), stats_buffer.length());
525 if (result == OutputStream::kAbort) return last_assigned_id();
526 }
527 stream->EndOfStream();
528 return last_assigned_id();
529}
530
531
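// Compacts entries_: entries that were not marked as accessed during the
// last heap traversal are dropped (and unregistered from entries_map_),
// while surviving entries are re-pointed to their new indices.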
532void HeapObjectsMap::RemoveDeadEntries() {
533 ASSERT(entries_.length() > 0 &&
534 entries_.at(0).id == 0 &&
535 entries_.at(0).addr == NULL);
536 int first_free_entry = 1;
537 for (int i = 1; i < entries_.length(); ++i) {
538 EntryInfo& entry_info = entries_.at(i);
539 if (entry_info.accessed) {
540 if (first_free_entry != i) {
541 entries_.at(first_free_entry) = entry_info;
542 }
543 entries_.at(first_free_entry).accessed = false;
544 HashMap::Entry* entry = entries_map_.Lookup(
545 entry_info.addr, AddressHash(entry_info.addr), false);
546 ASSERT(entry);
547 entry->value = reinterpret_cast<void*>(first_free_entry);
548 ++first_free_entry;
549 } else {
550 if (entry_info.addr) {
551 entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
552 }
553 }
554 }
555 entries_.Rewind(first_free_entry);
556 ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
557 entries_map_.occupancy());
558}
559
560
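// Derives an id for an embedder-provided object from its hash, label, and
// element count; the final left shift keeps these ids even, disjoint from
// the odd ids handed out to heap objects.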
561SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
562 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
563 const char* label = info->GetLabel();
564 id ^= StringHasher::HashSequentialString(label,
565 static_cast<int>(strlen(label)),
566 HEAP->HashSeed());
567 intptr_t element_count = info->GetElementCount();
568 if (element_count != -1)
569 id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
570 v8::internal::kZeroHashSeed);
571 return id << 1;
572}
573
574
575size_t HeapObjectsMap::GetUsedMemorySize() const {
576 return
577 sizeof(*this) +
578 sizeof(HashMap::Entry) * entries_map_.capacity() +
579 GetMemoryUsedByList(entries_) +
580 GetMemoryUsedByList(time_intervals_);
581}
582
583
584HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap)
585 : is_tracking_objects_(false),
586 snapshots_uids_(HeapSnapshotsMatch),
587 token_enumerator_(new TokenEnumerator()),
588 ids_(heap) {
589}
590
591
592static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
593 delete *snapshot_ptr;
594}
595
596
597HeapSnapshotsCollection::~HeapSnapshotsCollection() {
598 delete token_enumerator_;
599 snapshots_.Iterate(DeleteHeapSnapshot);
600}
601
602
603HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
604 const char* name,
605 unsigned uid) {
606 is_tracking_objects_ = true; // Start watching for heap objects moves.
607 return new HeapSnapshot(this, type, name, uid);
608}
609
610
611void HeapSnapshotsCollection::SnapshotGenerationFinished(
612 HeapSnapshot* snapshot) {
613 ids_.SnapshotGenerationFinished();
614 if (snapshot != NULL) {
615 snapshots_.Add(snapshot);
616 HashMap::Entry* entry =
617 snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
618 static_cast<uint32_t>(snapshot->uid()),
619 true);
620 ASSERT(entry->value == NULL);
621 entry->value = snapshot;
622 }
623}
624
625
626HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
627 HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
628 static_cast<uint32_t>(uid),
629 false);
630 return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
631}
632
633
634void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
635 snapshots_.RemoveElement(snapshot);
636 unsigned uid = snapshot->uid();
637 snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
638 static_cast<uint32_t>(uid));
639}
640
641
642Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
643 SnapshotObjectId id) {
644 // First perform a full GC in order to avoid dead objects.
645 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
646 "HeapSnapshotsCollection::FindHeapObjectById");
647 AssertNoAllocation no_allocation;
648 HeapObject* object = NULL;
649 HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
  // Make sure that the object with the given id is still reachable.
651 for (HeapObject* obj = iterator.next();
652 obj != NULL;
653 obj = iterator.next()) {
654 if (ids_.FindEntry(obj->address()) == id) {
655 ASSERT(object == NULL);
656 object = obj;
657 // Can't break -- kFilterUnreachable requires full heap traversal.
658 }
659 }
660 return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
661}
662
663
664size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
665 STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
666 kExpectedHeapSnapshotsCollectionSize ==
667 sizeof(HeapSnapshotsCollection)); // NOLINT
668 size_t size = sizeof(*this);
669 size += names_.GetUsedMemorySize();
670 size += ids_.GetUsedMemorySize();
671 size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
672 size += GetMemoryUsedByList(snapshots_);
673 for (int i = 0; i < snapshots_.length(); ++i) {
674 size += snapshots_[i]->RawSnapshotSize();
675 }
676 return size;
677}
678
679
680HeapEntriesMap::HeapEntriesMap()
681 : entries_(HeapThingsMatch) {
682}
683
684
685int HeapEntriesMap::Map(HeapThing thing) {
686 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
687 if (cache_entry == NULL) return HeapEntry::kNoEntry;
688 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
689}
690
691
692void HeapEntriesMap::Pair(HeapThing thing, int entry) {
693 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
694 ASSERT(cache_entry->value == NULL);
695 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
696}
697
698
699HeapObjectsSet::HeapObjectsSet()
700 : entries_(HeapEntriesMap::HeapThingsMatch) {
701}
702
703
704void HeapObjectsSet::Clear() {
705 entries_.Clear();
706}
707
708
709bool HeapObjectsSet::Contains(Object* obj) {
710 if (!obj->IsHeapObject()) return false;
711 HeapObject* object = HeapObject::cast(obj);
712 return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
713}
714
715
716void HeapObjectsSet::Insert(Object* obj) {
717 if (!obj->IsHeapObject()) return;
718 HeapObject* object = HeapObject::cast(obj);
719 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
720}
721
722
723const char* HeapObjectsSet::GetTag(Object* obj) {
724 HeapObject* object = HeapObject::cast(obj);
725 HashMap::Entry* cache_entry =
726 entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
727 return cache_entry != NULL
728 ? reinterpret_cast<const char*>(cache_entry->value)
729 : NULL;
730}
731
732
733void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
734 if (!obj->IsHeapObject()) return;
735 HeapObject* object = HeapObject::cast(obj);
736 HashMap::Entry* cache_entry =
737 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
738 cache_entry->value = const_cast<char*>(tag);
739}
740
741
742HeapObject* const V8HeapExplorer::kInternalRootObject =
743 reinterpret_cast<HeapObject*>(
744 static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
745HeapObject* const V8HeapExplorer::kGcRootsObject =
746 reinterpret_cast<HeapObject*>(
747 static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
748HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
749 reinterpret_cast<HeapObject*>(
750 static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
751HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
752 reinterpret_cast<HeapObject*>(
753 static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
754
755
756V8HeapExplorer::V8HeapExplorer(
757 HeapSnapshot* snapshot,
758 SnapshottingProgressReportingInterface* progress,
759 v8::HeapProfiler::ObjectNameResolver* resolver)
760 : heap_(Isolate::Current()->heap()),
761 snapshot_(snapshot),
762 collection_(snapshot_->collection()),
763 progress_(progress),
764 filler_(NULL),
765 global_object_name_resolver_(resolver) {
766}
767
768
769V8HeapExplorer::~V8HeapExplorer() {
770}
771
772
773HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
774 return AddEntry(reinterpret_cast<HeapObject*>(ptr));
775}
776
777
778HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
779 if (object == kInternalRootObject) {
780 snapshot_->AddRootEntry();
781 return snapshot_->root();
782 } else if (object == kGcRootsObject) {
783 HeapEntry* entry = snapshot_->AddGcRootsEntry();
784 return entry;
785 } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
786 HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
787 return entry;
788 } else if (object->IsJSFunction()) {
789 JSFunction* func = JSFunction::cast(object);
790 SharedFunctionInfo* shared = func->shared();
791 const char* name = shared->bound() ? "native_bind" :
792 collection_->names()->GetName(String::cast(shared->name()));
793 return AddEntry(object, HeapEntry::kClosure, name);
794 } else if (object->IsJSRegExp()) {
795 JSRegExp* re = JSRegExp::cast(object);
796 return AddEntry(object,
797 HeapEntry::kRegExp,
798 collection_->names()->GetName(re->Pattern()));
799 } else if (object->IsJSObject()) {
800 const char* name = collection_->names()->GetName(
801 GetConstructorName(JSObject::cast(object)));
802 if (object->IsJSGlobalObject()) {
803 const char* tag = objects_tags_.GetTag(object);
804 if (tag != NULL) {
805 name = collection_->names()->GetFormatted("%s / %s", name, tag);
806 }
807 }
808 return AddEntry(object, HeapEntry::kObject, name);
809 } else if (object->IsString()) {
810 return AddEntry(object,
811 HeapEntry::kString,
812 collection_->names()->GetName(String::cast(object)));
813 } else if (object->IsCode()) {
814 return AddEntry(object, HeapEntry::kCode, "");
815 } else if (object->IsSharedFunctionInfo()) {
816 String* name = String::cast(SharedFunctionInfo::cast(object)->name());
817 return AddEntry(object,
818 HeapEntry::kCode,
819 collection_->names()->GetName(name));
820 } else if (object->IsScript()) {
821 Object* name = Script::cast(object)->name();
822 return AddEntry(object,
823 HeapEntry::kCode,
824 name->IsString()
825 ? collection_->names()->GetName(String::cast(name))
826 : "");
827 } else if (object->IsNativeContext()) {
828 return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
829 } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kContext, "system / Context");
  } else if (object->IsFixedArray() ||
832 object->IsFixedDoubleArray() ||
833 object->IsByteArray() ||
834 object->IsExternalArray()) {
835 return AddEntry(object, HeapEntry::kArray, "");
836 } else if (object->IsHeapNumber()) {
837 return AddEntry(object, HeapEntry::kHeapNumber, "number");
838 }
839 return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
840}
841
842
843HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
844 HeapEntry::Type type,
845 const char* name) {
846 int object_size = object->Size();
847 SnapshotObjectId object_id =
848 collection_->GetObjectId(object->address(), object_size);
849 return snapshot_->AddEntry(type, name, object_id, object_size);
850}
851
852
853class GcSubrootsEnumerator : public ObjectVisitor {
854 public:
855 GcSubrootsEnumerator(
856 SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
857 : filler_(filler),
858 explorer_(explorer),
859 previous_object_count_(0),
860 object_count_(0) {
861 }
862 void VisitPointers(Object** start, Object** end) {
863 object_count_ += end - start;
864 }
865 void Synchronize(VisitorSynchronization::SyncTag tag) {
866 // Skip empty subroots.
867 if (previous_object_count_ != object_count_) {
868 previous_object_count_ = object_count_;
869 filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
870 }
871 }
872 private:
873 SnapshotFillerInterface* filler_;
874 V8HeapExplorer* explorer_;
875 intptr_t previous_object_count_;
876 intptr_t object_count_;
877};
878
879
880void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
881 filler->AddEntry(kInternalRootObject, this);
882 filler->AddEntry(kGcRootsObject, this);
883 GcSubrootsEnumerator enumerator(filler, this);
884 heap_->IterateRoots(&enumerator, VISIT_ALL);
885}
886
887
888const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
889 switch (object->map()->instance_type()) {
890 case MAP_TYPE:
891 switch (Map::cast(object)->instance_type()) {
892#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
893 case instance_type: return "system / Map (" #Name ")";
894 STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
895#undef MAKE_STRING_MAP_CASE
896 default: return "system / Map";
897 }
898 case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
899 case FOREIGN_TYPE: return "system / Foreign";
900 case ODDBALL_TYPE: return "system / Oddball";
901#define MAKE_STRUCT_CASE(NAME, Name, name) \
902 case NAME##_TYPE: return "system / "#Name;
903 STRUCT_LIST(MAKE_STRUCT_CASE)
904#undef MAKE_STRUCT_CASE
905 default: return "system";
906 }
907}
908
909
910int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
911 int objects_count = 0;
912 for (HeapObject* obj = iterator->next();
913 obj != NULL;
914 obj = iterator->next()) {
915 objects_count++;
916 }
917 return objects_count;
918}
919
920
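// Reports every pointer field of an object as a hidden (indexed) reference,
// except fields that were already claimed as named references: those are
// temporarily tagged by MarkVisitedField (using the failure tag bit) and are
// unmarked and skipped in VisitPointers.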
921class IndexedReferencesExtractor : public ObjectVisitor {
922 public:
923 IndexedReferencesExtractor(V8HeapExplorer* generator,
924 HeapObject* parent_obj,
925 int parent)
926 : generator_(generator),
927 parent_obj_(parent_obj),
928 parent_(parent),
929 next_index_(1) {
930 }
931 void VisitPointers(Object** start, Object** end) {
932 for (Object** p = start; p < end; p++) {
933 if (CheckVisitedAndUnmark(p)) continue;
934 generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
935 }
936 }
937 static void MarkVisitedField(HeapObject* obj, int offset) {
938 if (offset < 0) return;
939 Address field = obj->address() + offset;
940 ASSERT(!Memory::Object_at(field)->IsFailure());
941 ASSERT(Memory::Object_at(field)->IsHeapObject());
942 *field |= kFailureTag;
943 }
944
945 private:
946 bool CheckVisitedAndUnmark(Object** field) {
947 if ((*field)->IsFailure()) {
948 intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
949 *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
950 ASSERT((*field)->IsHeapObject());
951 return true;
952 }
953 return false;
954 }
955 V8HeapExplorer* generator_;
956 HeapObject* parent_obj_;
957 int parent_;
958 int next_index_;
959};
960
961
962void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
963 HeapEntry* heap_entry = GetEntry(obj);
964 if (heap_entry == NULL) return; // No interest in this object.
965 int entry = heap_entry->index();
966
967 bool extract_indexed_refs = true;
968 if (obj->IsJSGlobalProxy()) {
969 ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
970 } else if (obj->IsJSObject()) {
971 ExtractJSObjectReferences(entry, JSObject::cast(obj));
972 } else if (obj->IsString()) {
973 ExtractStringReferences(entry, String::cast(obj));
974 } else if (obj->IsContext()) {
975 ExtractContextReferences(entry, Context::cast(obj));
976 } else if (obj->IsMap()) {
977 ExtractMapReferences(entry, Map::cast(obj));
978 } else if (obj->IsSharedFunctionInfo()) {
979 ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
980 } else if (obj->IsScript()) {
981 ExtractScriptReferences(entry, Script::cast(obj));
982 } else if (obj->IsCodeCache()) {
983 ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
984 } else if (obj->IsCode()) {
985 ExtractCodeReferences(entry, Code::cast(obj));
986 } else if (obj->IsJSGlobalPropertyCell()) {
987 ExtractJSGlobalPropertyCellReferences(
988 entry, JSGlobalPropertyCell::cast(obj));
989 extract_indexed_refs = false;
990 }
991 if (extract_indexed_refs) {
992 SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
993 IndexedReferencesExtractor refs_extractor(this, obj, entry);
994 obj->Iterate(&refs_extractor);
995 }
996}
997
998
999void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
  // We need to reference JS global objects from the snapshot's root.
  // We use JSGlobalProxy because this is what the embedder (e.g. a browser)
  // uses for the global object.
1003 Object* object = proxy->map()->prototype();
1004 bool is_debug_object = false;
1005#ifdef ENABLE_DEBUGGER_SUPPORT
1006 is_debug_object = object->IsGlobalObject() &&
1007 Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
1008#endif
1009 if (!is_debug_object) {
1010 SetUserGlobalReference(object);
1011 }
1012}
1013
1014
1015void V8HeapExplorer::ExtractJSObjectReferences(
1016 int entry, JSObject* js_obj) {
1017 HeapObject* obj = js_obj;
1018 ExtractClosureReferences(js_obj, entry);
1019 ExtractPropertyReferences(js_obj, entry);
1020 ExtractElementReferences(js_obj, entry);
1021 ExtractInternalReferences(js_obj, entry);
1022 SetPropertyReference(
      obj, entry, heap_->proto_string(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
1025 JSFunction* js_fun = JSFunction::cast(js_obj);
1026 Object* proto_or_map = js_fun->prototype_or_initial_map();
1027 if (!proto_or_map->IsTheHole()) {
1028 if (!proto_or_map->IsMap()) {
1029 SetPropertyReference(
1030 obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
1033 JSFunction::kPrototypeOrInitialMapOffset);
1034 } else {
1035 SetPropertyReference(
1036 obj, entry,
            heap_->prototype_string(), js_fun->prototype());
      }
1039 }
1040 SharedFunctionInfo* shared_info = js_fun->shared();
1041 // JSFunction has either bindings or literals and never both.
1042 bool bound = shared_info->bound();
1043 TagObject(js_fun->literals_or_bindings(),
1044 bound ? "(function bindings)" : "(function literals)");
1045 SetInternalReference(js_fun, entry,
1046 bound ? "bindings" : "literals",
1047 js_fun->literals_or_bindings(),
1048 JSFunction::kLiteralsOffset);
1049 TagObject(shared_info, "(shared function info)");
1050 SetInternalReference(js_fun, entry,
1051 "shared", shared_info,
1052 JSFunction::kSharedFunctionInfoOffset);
1053 TagObject(js_fun->unchecked_context(), "(context)");
1054 SetInternalReference(js_fun, entry,
1055 "context", js_fun->unchecked_context(),
1056 JSFunction::kContextOffset);
1057 for (int i = JSFunction::kNonWeakFieldsEndOffset;
1058 i < JSFunction::kSize;
1059 i += kPointerSize) {
1060 SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
1061 }
1062 } else if (obj->IsGlobalObject()) {
1063 GlobalObject* global_obj = GlobalObject::cast(obj);
1064 SetInternalReference(global_obj, entry,
1065 "builtins", global_obj->builtins(),
1066 GlobalObject::kBuiltinsOffset);
1067 SetInternalReference(global_obj, entry,
1068 "native_context", global_obj->native_context(),
1069 GlobalObject::kNativeContextOffset);
1070 SetInternalReference(global_obj, entry,
1071 "global_receiver", global_obj->global_receiver(),
1072 GlobalObject::kGlobalReceiverOffset);
1073 }
1074 TagObject(js_obj->properties(), "(object properties)");
1075 SetInternalReference(obj, entry,
1076 "properties", js_obj->properties(),
1077 JSObject::kPropertiesOffset);
1078 TagObject(js_obj->elements(), "(object elements)");
1079 SetInternalReference(obj, entry,
1080 "elements", js_obj->elements(),
1081 JSObject::kElementsOffset);
1082}
1083
1084
1085void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1086 if (string->IsConsString()) {
1087 ConsString* cs = ConsString::cast(string);
1088 SetInternalReference(cs, entry, "first", cs->first(),
1089 ConsString::kFirstOffset);
1090 SetInternalReference(cs, entry, "second", cs->second(),
1091 ConsString::kSecondOffset);
1092 } else if (string->IsSlicedString()) {
1093 SlicedString* ss = SlicedString::cast(string);
1094 SetInternalReference(ss, entry, "parent", ss->parent(),
1095 SlicedString::kParentOffset);
1096 }
1097}
1098
1099
1100void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
1102 ScopeInfo* scope_info = context->closure()->shared()->scope_info();
1103 // Add context allocated locals.
1104 int context_locals = scope_info->ContextLocalCount();
1105 for (int i = 0; i < context_locals; ++i) {
1106 String* local_name = scope_info->ContextLocalName(i);
1107 int idx = Context::MIN_CONTEXT_SLOTS + i;
1108 SetContextReference(context, entry, local_name, context->get(idx),
1109 Context::OffsetOfElementAt(idx));
1110 }
1111 if (scope_info->HasFunctionName()) {
1112 String* name = scope_info->FunctionName();
1113 VariableMode mode;
1114 int idx = scope_info->FunctionContextSlotIndex(name, &mode);
1115 if (idx >= 0) {
1116 SetContextReference(context, entry, name, context->get(idx),
1117 Context::OffsetOfElementAt(idx));
1118 }
1119 }
1120 }
1121
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
1123 SetInternalReference(context, entry, #name, context->get(Context::index), \
1124 FixedArray::OffsetOfElementAt(Context::index));
1125 EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1126 EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1127 EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
1128 EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
1129 if (context->IsNativeContext()) {
1130 TagObject(context->jsfunction_result_caches(),
1131 "(context func. result caches)");
1132 TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1133 TagObject(context->runtime_context(), "(runtime context)");
1134 TagObject(context->embedder_data(), "(context data)");
1135 NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
1136#undef EXTRACT_CONTEXT_FIELD
1137 for (int i = Context::FIRST_WEAK_SLOT;
1138 i < Context::NATIVE_CONTEXT_SLOTS;
1139 ++i) {
1140 SetWeakReference(context, entry, i, context->get(i),
1141 FixedArray::OffsetOfElementAt(i));
1142 }
1143 }
1144}
1145
1146
1147void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1148 SetInternalReference(map, entry,
1149 "prototype", map->prototype(), Map::kPrototypeOffset);
1150 SetInternalReference(map, entry,
1151 "constructor", map->constructor(),
1152 Map::kConstructorOffset);
1153 if (map->HasTransitionArray()) {
1154 TransitionArray* transitions = map->transitions();
1155
1156 Object* back_pointer = transitions->back_pointer_storage();
1157 TagObject(transitions->back_pointer_storage(), "(back pointer)");
1158 SetInternalReference(transitions, entry,
1159 "backpointer", back_pointer,
1160 TransitionArray::kBackPointerStorageOffset);
1161 IndexedReferencesExtractor transitions_refs(this, transitions, entry);
1162 transitions->Iterate(&transitions_refs);
1163
1164 TagObject(transitions, "(transition array)");
1165 SetInternalReference(map, entry,
1166 "transitions", transitions,
1167 Map::kTransitionsOrBackPointerOffset);
1168 } else {
1169 Object* back_pointer = map->GetBackPointer();
1170 TagObject(back_pointer, "(back pointer)");
1171 SetInternalReference(map, entry,
1172 "backpointer", back_pointer,
1173 Map::kTransitionsOrBackPointerOffset);
1174 }
1175 DescriptorArray* descriptors = map->instance_descriptors();
1176 TagObject(descriptors, "(map descriptors)");
1177 SetInternalReference(map, entry,
1178 "descriptors", descriptors,
1179 Map::kDescriptorsOffset);
1180
1181 SetInternalReference(map, entry,
1182 "code_cache", map->code_cache(),
1183 Map::kCodeCacheOffset);
1184}
1185
1186
1187void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1188 int entry, SharedFunctionInfo* shared) {
1189 HeapObject* obj = shared;
1190 SetInternalReference(obj, entry,
1191 "name", shared->name(),
1192 SharedFunctionInfo::kNameOffset);
1193 TagObject(shared->code(), "(code)");
1194 SetInternalReference(obj, entry,
1195 "code", shared->code(),
1196 SharedFunctionInfo::kCodeOffset);
1197 TagObject(shared->scope_info(), "(function scope info)");
1198 SetInternalReference(obj, entry,
1199 "scope_info", shared->scope_info(),
1200 SharedFunctionInfo::kScopeInfoOffset);
1201 SetInternalReference(obj, entry,
1202 "instance_class_name", shared->instance_class_name(),
1203 SharedFunctionInfo::kInstanceClassNameOffset);
1204 SetInternalReference(obj, entry,
1205 "script", shared->script(),
1206 SharedFunctionInfo::kScriptOffset);
1207 TagObject(shared->construct_stub(), "(code)");
1208 SetInternalReference(obj, entry,
1209 "construct_stub", shared->construct_stub(),
1210 SharedFunctionInfo::kConstructStubOffset);
1211 SetInternalReference(obj, entry,
1212 "function_data", shared->function_data(),
1213 SharedFunctionInfo::kFunctionDataOffset);
1214 SetInternalReference(obj, entry,
1215 "debug_info", shared->debug_info(),
1216 SharedFunctionInfo::kDebugInfoOffset);
1217 SetInternalReference(obj, entry,
1218 "inferred_name", shared->inferred_name(),
1219 SharedFunctionInfo::kInferredNameOffset);
1220 SetInternalReference(obj, entry,
1221 "this_property_assignments",
1222 shared->this_property_assignments(),
1223 SharedFunctionInfo::kThisPropertyAssignmentsOffset);
1224 SetWeakReference(obj, entry,
1225 1, shared->initial_map(),
1226 SharedFunctionInfo::kInitialMapOffset);
1227}
1228
1229
1230void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1231 HeapObject* obj = script;
1232 SetInternalReference(obj, entry,
1233 "source", script->source(),
1234 Script::kSourceOffset);
1235 SetInternalReference(obj, entry,
1236 "name", script->name(),
1237 Script::kNameOffset);
1238 SetInternalReference(obj, entry,
1239 "data", script->data(),
1240 Script::kDataOffset);
1241 SetInternalReference(obj, entry,
1242 "context_data", script->context_data(),
1243 Script::kContextOffset);
1244 TagObject(script->line_ends(), "(script line ends)");
1245 SetInternalReference(obj, entry,
1246 "line_ends", script->line_ends(),
1247 Script::kLineEndsOffset);
1248}
1249
1250
1251void V8HeapExplorer::ExtractCodeCacheReferences(
1252 int entry, CodeCache* code_cache) {
1253 TagObject(code_cache->default_cache(), "(default code cache)");
1254 SetInternalReference(code_cache, entry,
1255 "default_cache", code_cache->default_cache(),
1256 CodeCache::kDefaultCacheOffset);
1257 TagObject(code_cache->normal_type_cache(), "(code type cache)");
1258 SetInternalReference(code_cache, entry,
1259 "type_cache", code_cache->normal_type_cache(),
1260 CodeCache::kNormalTypeCacheOffset);
1261}
1262
1263
1264void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
1265 TagObject(code->relocation_info(), "(code relocation info)");
1266 SetInternalReference(code, entry,
1267 "relocation_info", code->relocation_info(),
1268 Code::kRelocationInfoOffset);
1269 SetInternalReference(code, entry,
1270 "handler_table", code->handler_table(),
1271 Code::kHandlerTableOffset);
1272 TagObject(code->deoptimization_data(), "(code deopt data)");
1273 SetInternalReference(code, entry,
1274 "deoptimization_data", code->deoptimization_data(),
1275 Code::kDeoptimizationDataOffset);
1276 if (code->kind() == Code::FUNCTION) {
1277 SetInternalReference(code, entry,
1278 "type_feedback_info", code->type_feedback_info(),
1279 Code::kTypeFeedbackInfoOffset);
1280 }
1281 SetInternalReference(code, entry,
1282 "gc_metadata", code->gc_metadata(),
1283 Code::kGCMetadataOffset);
1284}
1285
1286
1287void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
1288 int entry, JSGlobalPropertyCell* cell) {
1289 SetInternalReference(cell, entry, "value", cell->value());
1290}
1291
1292
1293void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
1294 if (!js_obj->IsJSFunction()) return;
1295
1296 JSFunction* func = JSFunction::cast(js_obj);
1297 if (func->shared()->bound()) {
1298 FixedArray* bindings = func->function_bindings();
1299 SetNativeBindReference(js_obj, entry, "bound_this",
1300 bindings->get(JSFunction::kBoundThisIndex));
1301 SetNativeBindReference(js_obj, entry, "bound_function",
1302 bindings->get(JSFunction::kBoundFunctionIndex));
1303 for (int i = JSFunction::kBoundArgumentsStartIndex;
1304 i < bindings->length(); i++) {
1305 const char* reference_name = collection_->names()->GetFormatted(
1306 "bound_argument_%d",
1307 i - JSFunction::kBoundArgumentsStartIndex);
1308 SetNativeBindReference(js_obj, entry, reference_name,
1309 bindings->get(i));
1310 }
  }
1312}
1313
1314
1315void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
1316 if (js_obj->HasFastProperties()) {
1317 DescriptorArray* descs = js_obj->map()->instance_descriptors();
1318 int real_size = js_obj->map()->NumberOfOwnDescriptors();
1319 for (int i = 0; i < descs->number_of_descriptors(); i++) {
1320 if (descs->GetDetails(i).descriptor_index() > real_size) continue;
1321 switch (descs->GetType(i)) {
1322 case FIELD: {
1323 int index = descs->GetFieldIndex(i);
1324
          Name* k = descs->GetKey(i);
          if (index < js_obj->map()->inobject_properties()) {
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(
1330 js_obj, entry,
1331 k, value,
1332 NULL,
1333 js_obj->GetInObjectPropertyOffset(index));
1334 } else {
1335 TagObject(value, "(hidden properties)");
1336 SetInternalReference(
1337 js_obj, entry,
1338 "hidden_properties", value,
1339 js_obj->GetInObjectPropertyOffset(index));
1340 }
1341 } else {
1342 Object* value = js_obj->FastPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(js_obj, entry, k, value);
1345 } else {
1346 TagObject(value, "(hidden properties)");
1347 SetInternalReference(js_obj, entry, "hidden_properties", value);
1348 }
1349 }
1350 break;
1351 }
1352 case CONSTANT_FUNCTION:
1353 SetPropertyReference(
1354 js_obj, entry,
1355 descs->GetKey(i), descs->GetConstantFunction(i));
1356 break;
1357 case CALLBACKS: {
1358 Object* callback_obj = descs->GetValue(i);
1359 if (callback_obj->IsAccessorPair()) {
1360 AccessorPair* accessors = AccessorPair::cast(callback_obj);
1361 if (Object* getter = accessors->getter()) {
1362 SetPropertyReference(js_obj, entry, descs->GetKey(i),
1363 getter, "get-%s");
1364 }
1365 if (Object* setter = accessors->setter()) {
1366 SetPropertyReference(js_obj, entry, descs->GetKey(i),
1367 setter, "set-%s");
1368 }
1369 }
1370 break;
1371 }
1372 case NORMAL: // only in slow mode
1373 case HANDLER: // only in lookup results, not in descriptors
1374 case INTERCEPTOR: // only in lookup results, not in descriptors
1375 break;
1376 case TRANSITION:
1377 case NONEXISTENT:
1378 UNREACHABLE();
1379 break;
1380 }
1381 }
1382 } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
1385 for (int i = 0; i < length; ++i) {
1386 Object* k = dictionary->KeyAt(i);
1387 if (dictionary->IsKey(k)) {
1388 Object* target = dictionary->ValueAt(i);
1389 // We assume that global objects can only have slow properties.
1390 Object* value = target->IsJSGlobalPropertyCell()
1391 ? JSGlobalPropertyCell::cast(target)->value()
1392 : target;
        if (k != heap_->hidden_string()) {
          SetPropertyReference(js_obj, entry, String::cast(k), value);
1395 } else {
1396 TagObject(value, "(hidden properties)");
1397 SetInternalReference(js_obj, entry, "hidden_properties", value);
1398 }
1399 }
1400 }
1401 }
1402}
1403
1404
1405void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1406 if (js_obj->HasFastObjectElements()) {
1407 FixedArray* elements = FixedArray::cast(js_obj->elements());
1408 int length = js_obj->IsJSArray() ?
1409 Smi::cast(JSArray::cast(js_obj)->length())->value() :
1410 elements->length();
1411 for (int i = 0; i < length; ++i) {
1412 if (!elements->get(i)->IsTheHole()) {
1413 SetElementReference(js_obj, entry, i, elements->get(i));
1414 }
1415 }
1416 } else if (js_obj->HasDictionaryElements()) {
1417 SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1418 int length = dictionary->Capacity();
1419 for (int i = 0; i < length; ++i) {
1420 Object* k = dictionary->KeyAt(i);
1421 if (dictionary->IsKey(k)) {
1422 ASSERT(k->IsNumber());
1423 uint32_t index = static_cast<uint32_t>(k->Number());
1424 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1425 }
1426 }
1427 }
1428}
1429
1430
1431void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1432 int length = js_obj->GetInternalFieldCount();
1433 for (int i = 0; i < length; ++i) {
1434 Object* o = js_obj->GetInternalField(i);
1435 SetInternalReference(
1436 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1437 }
1438}
1439
1440
1441String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1442 Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_string();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_string()) {
    // Look up an immediate "constructor" property; if it is a function,
1447 // return its name. This is for instances of binding objects, which
1448 // have prototype constructor type "Object".
1449 Object* constructor_prop = NULL;
1450 LookupResult result(heap->isolate());
    object->LocalLookupRealNamedProperty(heap->constructor_string(), &result);
    if (!result.IsFound()) return object->constructor_name();
1453
1454 constructor_prop = result.GetLazyValue();
1455 if (constructor_prop->IsJSFunction()) {
1456 Object* maybe_name =
1457 JSFunction::cast(constructor_prop)->shared()->name();
1458 if (maybe_name->IsString()) {
1459 String* name = String::cast(maybe_name);
1460 if (name->length() > 0) return name;
1461 }
1462 }
1463 }
1464 return object->constructor_name();
1465}
1466
1467
1468HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1469 if (!obj->IsHeapObject()) return NULL;
1470 return filler_->FindOrAddEntry(obj, this);
1471}
1472
1473
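// Collects GC roots in two passes: strong roots only, then all roots.
// FillReferences walks both lists in parallel and, for references that do
// not appear in the strong list, passes true (non-strong) when adding the
// GC subroot edge.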
1474class RootsReferencesExtractor : public ObjectVisitor {
1475 private:
1476 struct IndexTag {
1477 IndexTag(int index, VisitorSynchronization::SyncTag tag)
1478 : index(index), tag(tag) { }
1479 int index;
1480 VisitorSynchronization::SyncTag tag;
1481 };
1482
1483 public:
1484 RootsReferencesExtractor()
1485 : collecting_all_references_(false),
1486 previous_reference_count_(0) {
1487 }
1488
1489 void VisitPointers(Object** start, Object** end) {
1490 if (collecting_all_references_) {
1491 for (Object** p = start; p < end; p++) all_references_.Add(*p);
1492 } else {
1493 for (Object** p = start; p < end; p++) strong_references_.Add(*p);
1494 }
1495 }
1496
1497 void SetCollectingAllReferences() { collecting_all_references_ = true; }
1498
1499 void FillReferences(V8HeapExplorer* explorer) {
1500 ASSERT(strong_references_.length() <= all_references_.length());
1501 for (int i = 0; i < reference_tags_.length(); ++i) {
1502 explorer->SetGcRootsReference(reference_tags_[i].tag);
1503 }
1504 int strong_index = 0, all_index = 0, tags_index = 0;
1505 while (all_index < all_references_.length()) {
1506 if (strong_index < strong_references_.length() &&
1507 strong_references_[strong_index] == all_references_[all_index]) {
1508 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1509 false,
1510 all_references_[all_index++]);
1511 ++strong_index;
1512 } else {
1513 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1514 true,
1515 all_references_[all_index++]);
1516 }
1517 if (reference_tags_[tags_index].index == all_index) ++tags_index;
1518 }
1519 }
1520
1521 void Synchronize(VisitorSynchronization::SyncTag tag) {
1522 if (collecting_all_references_ &&
1523 previous_reference_count_ != all_references_.length()) {
1524 previous_reference_count_ = all_references_.length();
1525 reference_tags_.Add(IndexTag(previous_reference_count_, tag));
1526 }
1527 }
1528
1529 private:
1530 bool collecting_all_references_;
1531 List<Object*> strong_references_;
1532 List<Object*> all_references_;
1533 int previous_reference_count_;
1534 List<IndexTag> reference_tags_;
1535};
1536
1537
1538bool V8HeapExplorer::IterateAndExtractReferences(
1539 SnapshotFillerInterface* filler) {
1540 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
1541
1542 filler_ = filler;
1543 bool interrupted = false;
1544
1545 // Heap iteration with filtering must be finished in any case.
1546 for (HeapObject* obj = iterator.next();
1547 obj != NULL;
1548 obj = iterator.next(), progress_->ProgressStep()) {
1549 if (!interrupted) {
1550 ExtractReferences(obj);
1551 if (!progress_->ProgressReport(false)) interrupted = true;
1552 }
1553 }
1554 if (interrupted) {
1555 filler_ = NULL;
1556 return false;
1557 }
1558
1559 SetRootGcRootsReference();
1560 RootsReferencesExtractor extractor;
1561 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1562 extractor.SetCollectingAllReferences();
1563 heap_->IterateRoots(&extractor, VISIT_ALL);
1564 extractor.FillReferences(this);
1565 filler_ = NULL;
1566 return progress_->ProgressReport(true);
1567}
1568
1569
1570bool V8HeapExplorer::IsEssentialObject(Object* object) {
1571 return object->IsHeapObject()
1572 && !object->IsOddball()
1573 && object != heap_->empty_byte_array()
1574 && object != heap_->empty_fixed_array()
1575 && object != heap_->empty_descriptor_array()
1576 && object != heap_->fixed_array_map()
1577 && object != heap_->global_property_cell_map()
1578 && object != heap_->shared_function_info_map()
1579 && object != heap_->free_space_map()
1580 && object != heap_->one_pointer_filler_map()
1581 && object != heap_->two_pointer_filler_map();
1582}
1583
1584
svenpanne@chromium.org2bda5432013-03-15 12:39:50 +00001585void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
ulan@chromium.org2e04b582013-02-21 14:06:02 +00001586 int parent_entry,
1587 String* reference_name,
svenpanne@chromium.org2bda5432013-03-15 12:39:50 +00001588 Object* child_obj,
1589 int field_offset) {
ulan@chromium.org2e04b582013-02-21 14:06:02 +00001590 HeapEntry* child_entry = GetEntry(child_obj);
1591 if (child_entry != NULL) {
1592 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1593 parent_entry,
1594 collection_->names()->GetName(reference_name),
1595 child_entry);
1596 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1597 }
1598}
1599
1600
1601void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1602 int parent_entry,
1603 const char* reference_name,
1604 Object* child_obj) {
1605 HeapEntry* child_entry = GetEntry(child_obj);
1606 if (child_entry != NULL) {
1607 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1608 parent_entry,
1609 reference_name,
1610 child_entry);
1611 }
1612}
1613
1614
1615void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1616 int parent_entry,
1617 int index,
1618 Object* child_obj) {
1619 HeapEntry* child_entry = GetEntry(child_obj);
1620 if (child_entry != NULL) {
1621 filler_->SetIndexedReference(HeapGraphEdge::kElement,
1622 parent_entry,
1623 index,
1624 child_entry);
1625 }
1626}
1627
1628
1629void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1630 int parent_entry,
1631 const char* reference_name,
1632 Object* child_obj,
1633 int field_offset) {
1634 HeapEntry* child_entry = GetEntry(child_obj);
1635 if (child_entry == NULL) return;
1636 if (IsEssentialObject(child_obj)) {
1637 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1638 parent_entry,
1639 reference_name,
1640 child_entry);
1641 }
1642 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1643}
1644
1645
1646void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1647 int parent_entry,
1648 int index,
1649 Object* child_obj,
1650 int field_offset) {
1651 HeapEntry* child_entry = GetEntry(child_obj);
1652 if (child_entry == NULL) return;
1653 if (IsEssentialObject(child_obj)) {
1654 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1655 parent_entry,
1656 collection_->names()->GetName(index),
1657 child_entry);
1658 }
1659 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1660}
1661
1662
1663void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1664 int parent_entry,
1665 int index,
1666 Object* child_obj) {
1667 HeapEntry* child_entry = GetEntry(child_obj);
1668 if (child_entry != NULL && IsEssentialObject(child_obj)) {
1669 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1670 parent_entry,
1671 index,
1672 child_entry);
1673 }
1674}
1675
1676
1677void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1678 int parent_entry,
1679 int index,
1680 Object* child_obj,
1681 int field_offset) {
1682 HeapEntry* child_entry = GetEntry(child_obj);
1683 if (child_entry != NULL) {
1684 filler_->SetIndexedReference(HeapGraphEdge::kWeak,
1685 parent_entry,
1686 index,
1687 child_entry);
1688 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1689 }
1690}
1691
1692
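// Properties named by a symbol or a non-empty string become "property"
// edges; an empty string name demotes the edge to "internal". String names
// may additionally be decorated with an optional format string.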
1693void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
1694 int parent_entry,
1695 Name* reference_name,
1696 Object* child_obj,
1697 const char* name_format_string,
1698 int field_offset) {
1699 HeapEntry* child_entry = GetEntry(child_obj);
1700 if (child_entry != NULL) {
1701 HeapGraphEdge::Type type =
1702 reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
1703 ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
1704 const char* name = name_format_string != NULL && reference_name->IsString()
1705 ? collection_->names()->GetFormatted(
1706 name_format_string,
1707 *String::cast(reference_name)->ToCString(
1708 DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)) :
1709 collection_->names()->GetName(reference_name);
1710
1711 filler_->SetNamedReference(type,
1712 parent_entry,
1713 name,
1714 child_entry);
1715 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1716 }
1717}
1718
1719
1720void V8HeapExplorer::SetRootGcRootsReference() {
1721 filler_->SetIndexedAutoIndexReference(
1722 HeapGraphEdge::kElement,
1723 snapshot_->root()->index(),
1724 snapshot_->gc_roots());
1725}
1726
1727
1728void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
1729 HeapEntry* child_entry = GetEntry(child_obj);
1730 ASSERT(child_entry != NULL);
1731 filler_->SetNamedAutoIndexReference(
1732 HeapGraphEdge::kShortcut,
1733 snapshot_->root()->index(),
1734 child_entry);
1735}
1736
1737
1738void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
1739 filler_->SetIndexedAutoIndexReference(
1740 HeapGraphEdge::kElement,
1741 snapshot_->gc_roots()->index(),
1742 snapshot_->gc_subroot(tag));
1743}
1744
1745
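// Subroot children that are known strong roots get a named internal edge;
// everything else gets an auto-indexed edge, weak or element depending on
// how the reference was reached.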
1746void V8HeapExplorer::SetGcSubrootReference(
1747 VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
1748 HeapEntry* child_entry = GetEntry(child_obj);
1749 if (child_entry != NULL) {
1750 const char* name = GetStrongGcSubrootName(child_obj);
1751 if (name != NULL) {
1752 filler_->SetNamedReference(
1753 HeapGraphEdge::kInternal,
1754 snapshot_->gc_subroot(tag)->index(),
1755 name,
1756 child_entry);
1757 } else {
1758 filler_->SetIndexedAutoIndexReference(
1759 is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
1760 snapshot_->gc_subroot(tag)->index(),
1761 child_entry);
1762 }
1763 }
1764}
1765
1766
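// Lazily builds a tag map from every strong root, struct map and
// internalized string to its name, then looks the object up in that map.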
1767const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
1768 if (strong_gc_subroot_names_.is_empty()) {
1769#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
1770#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
1771 STRONG_ROOT_LIST(ROOT_NAME)
1772#undef ROOT_NAME
1773#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
1774 STRUCT_LIST(STRUCT_MAP_NAME)
1775#undef STRUCT_MAP_NAME
1776#define STRING_NAME(name, str) NAME_ENTRY(name)
1777 INTERNALIZED_STRING_LIST(STRING_NAME)
1778#undef STRING_NAME
1779#undef NAME_ENTRY
1780 CHECK(!strong_gc_subroot_names_.is_empty());
1781 }
1782 return strong_gc_subroot_names_.GetTag(object);
1783}
1784
1785
1786void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
1787 if (IsEssentialObject(obj)) {
1788 HeapEntry* entry = GetEntry(obj);
1789 if (entry->name()[0] == '\0') {
1790 entry->set_name(tag);
1791 }
1792 }
1793}
1794
1795
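// Collects the JSGlobalObject behind every native context encountered while
// the global handles are iterated.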
1796class GlobalObjectsEnumerator : public ObjectVisitor {
1797 public:
1798 virtual void VisitPointers(Object** start, Object** end) {
1799 for (Object** p = start; p < end; p++) {
1800 if ((*p)->IsNativeContext()) {
1801 Context* context = Context::cast(*p);
1802 JSObject* proxy = context->global_proxy();
1803 if (proxy->IsJSGlobalProxy()) {
1804 Object* global = proxy->map()->prototype();
1805 if (global->IsJSGlobalObject()) {
1806 objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
1807 }
1808 }
1809 }
1810 }
1811 }
1812 int count() { return objects_.length(); }
1813 Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
1814
1815 private:
1816 List<Handle<JSGlobalObject> > objects_;
1817};
1818
1819
1820// Modifies heap. Must not be run during heap traversal.
1821void V8HeapExplorer::TagGlobalObjects() {
1822 Isolate* isolate = Isolate::Current();
1823 HandleScope scope(isolate);
1824 GlobalObjectsEnumerator enumerator;
1825 isolate->global_handles()->IterateAllRoots(&enumerator);
1826 const char** urls = NewArray<const char*>(enumerator.count());
1827 for (int i = 0, l = enumerator.count(); i < l; ++i) {
1828 if (global_object_name_resolver_) {
1829 HandleScope scope(isolate);
1830 Handle<JSGlobalObject> global_obj = enumerator.at(i);
1831 urls[i] = global_object_name_resolver_->GetName(
1832 Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
1833 } else {
1834 urls[i] = NULL;
1835 }
1836 }
1837
1838 AssertNoAllocation no_allocation;
1839 for (int i = 0, l = enumerator.count(); i < l; ++i) {
1840 objects_tags_.SetTag(*enumerator.at(i), urls[i]);
1841 }
1842
1843 DeleteArray(urls);
1844}
1845
1846
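// Routes global handles that carry a wrapper class id to
// NativeObjectsExplorer::VisitSubtreeWrapper.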
1847class GlobalHandlesExtractor : public ObjectVisitor {
1848 public:
1849 explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
1850 : explorer_(explorer) {}
1851 virtual ~GlobalHandlesExtractor() {}
1852 virtual void VisitPointers(Object** start, Object** end) {
1853 UNREACHABLE();
1854 }
1855 virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
1856 explorer_->VisitSubtreeWrapper(p, class_id);
1857 }
1858 private:
1859 NativeObjectsExplorer* explorer_;
1860};
1861
1862
1863class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
1864 public:
1865 BasicHeapEntriesAllocator(
1866 HeapSnapshot* snapshot,
1867 HeapEntry::Type entries_type)
1868 : snapshot_(snapshot),
1869 collection_(snapshot_->collection()),
1870 entries_type_(entries_type) {
1871 }
1872 virtual HeapEntry* AllocateEntry(HeapThing ptr);
1873 private:
1874 HeapSnapshot* snapshot_;
1875 HeapSnapshotsCollection* collection_;
1876 HeapEntry::Type entries_type_;
1877};
1878
1879
1880HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
1881 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
1882 intptr_t elements = info->GetElementCount();
1883 intptr_t size = info->GetSizeInBytes();
1884 const char* name = elements != -1
1885 ? collection_->names()->GetFormatted(
1886 "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
1887 : collection_->names()->GetCopy(info->GetLabel());
1888 return snapshot_->AddEntry(
1889 entries_type_,
1890 name,
1891 HeapObjectsMap::GenerateId(info),
1892 size != -1 ? static_cast<int>(size) : 0);
1893}
1894
1895
1896NativeObjectsExplorer::NativeObjectsExplorer(
1897 HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
1898 : snapshot_(snapshot),
1899 collection_(snapshot_->collection()),
1900 progress_(progress),
1901 embedder_queried_(false),
1902 objects_by_info_(RetainedInfosMatch),
1903 native_groups_(StringsMatch),
1904 filler_(NULL) {
1905 synthetic_entries_allocator_ =
1906 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
1907 native_entries_allocator_ =
1908 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
1909}
1910
1911
1912NativeObjectsExplorer::~NativeObjectsExplorer() {
1913 for (HashMap::Entry* p = objects_by_info_.Start();
1914 p != NULL;
1915 p = objects_by_info_.Next(p)) {
1916 v8::RetainedObjectInfo* info =
1917 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
1918 info->Dispose();
1919 List<HeapObject*>* objects =
1920 reinterpret_cast<List<HeapObject*>* >(p->value);
1921 delete objects;
1922 }
1923 for (HashMap::Entry* p = native_groups_.Start();
1924 p != NULL;
1925 p = native_groups_.Next(p)) {
1926 v8::RetainedObjectInfo* info =
1927 reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
1928 info->Dispose();
1929 }
1930 delete synthetic_entries_allocator_;
1931 delete native_entries_allocator_;
1932}
1933
1934
1935int NativeObjectsExplorer::EstimateObjectsCount() {
1936 FillRetainedObjects();
1937 return objects_by_info_.occupancy();
1938}
1939
1940
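// Queries the embedder once: records the objects joined into ObjectGroups
// (the query is wrapped in GC prologue/epilogue callbacks) and then the
// global handles that carry a wrapper class id.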
1941void NativeObjectsExplorer::FillRetainedObjects() {
1942 if (embedder_queried_) return;
1943 Isolate* isolate = Isolate::Current();
1944 const GCType major_gc_type = kGCTypeMarkSweepCompact;
1945 // Record objects that are joined into ObjectGroups.
1946 isolate->heap()->CallGCPrologueCallbacks(major_gc_type);
1947 List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
1948 for (int i = 0; i < groups->length(); ++i) {
1949 ObjectGroup* group = groups->at(i);
1950 if (group->info_ == NULL) continue;
1951 List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
1952 for (size_t j = 0; j < group->length_; ++j) {
1953 HeapObject* obj = HeapObject::cast(*group->objects_[j]);
1954 list->Add(obj);
1955 in_groups_.Insert(obj);
1956 }
1957 group->info_ = NULL; // Acquire info object ownership.
1958 }
1959 isolate->global_handles()->RemoveObjectGroups();
1960 isolate->heap()->CallGCEpilogueCallbacks(major_gc_type);
1961 // Record objects that are not in ObjectGroups but have a wrapper class ID.
1962 GlobalHandlesExtractor extractor(this);
1963 isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
1964 embedder_queried_ = true;
1965}
1966
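// Turns each embedder-supplied implicit reference group into "native"
// internal edges from the parent's entry to every child entry.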
1967void NativeObjectsExplorer::FillImplicitReferences() {
1968 Isolate* isolate = Isolate::Current();
1969 List<ImplicitRefGroup*>* groups =
1970 isolate->global_handles()->implicit_ref_groups();
1971 for (int i = 0; i < groups->length(); ++i) {
1972 ImplicitRefGroup* group = groups->at(i);
1973 HeapObject* parent = *group->parent_;
1974 int parent_entry =
1975 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
1976 ASSERT(parent_entry != HeapEntry::kNoEntry);
1977 Object*** children = group->children_;
1978 for (size_t j = 0; j < group->length_; ++j) {
1979 Object* child = *children[j];
1980 HeapEntry* child_entry =
1981 filler_->FindOrAddEntry(child, native_entries_allocator_);
1982 filler_->SetNamedReference(
1983 HeapGraphEdge::kInternal,
1984 parent_entry,
1985 "native",
1986 child_entry);
1987 }
1988 }
1989 isolate->global_handles()->RemoveImplicitRefGroups();
1990}
1991
1992List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
1993 v8::RetainedObjectInfo* info) {
1994 HashMap::Entry* entry =
1995 objects_by_info_.Lookup(info, InfoHash(info), true);
1996 if (entry->value != NULL) {
1997 info->Dispose();
1998 } else {
1999 entry->value = new List<HeapObject*>(4);
2000 }
2001 return reinterpret_cast<List<HeapObject*>* >(entry->value);
2002}
2003
2004
2005bool NativeObjectsExplorer::IterateAndExtractReferences(
2006 SnapshotFillerInterface* filler) {
2007 filler_ = filler;
2008 FillRetainedObjects();
2009 FillImplicitReferences();
2010 if (EstimateObjectsCount() > 0) {
2011 for (HashMap::Entry* p = objects_by_info_.Start();
2012 p != NULL;
2013 p = objects_by_info_.Next(p)) {
2014 v8::RetainedObjectInfo* info =
2015 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2016 SetNativeRootReference(info);
2017 List<HeapObject*>* objects =
2018 reinterpret_cast<List<HeapObject*>* >(p->value);
2019 for (int i = 0; i < objects->length(); ++i) {
2020 SetWrapperNativeReferences(objects->at(i), info);
2021 }
2022 }
2023 SetRootNativeRootsReference();
2024 }
2025 filler_ = NULL;
2026 return true;
2027}
2028
2029
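// Synthetic RetainedObjectInfo used to group native entries under a single
// labelled group node; the label pointer doubles as the hash value.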
2030class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2031 public:
2032 explicit NativeGroupRetainedObjectInfo(const char* label)
2033 : disposed_(false),
2034 hash_(reinterpret_cast<intptr_t>(label)),
2035 label_(label) {
2036 }
2037
2038 virtual ~NativeGroupRetainedObjectInfo() {}
2039 virtual void Dispose() {
2040 CHECK(!disposed_);
2041 disposed_ = true;
2042 delete this;
2043 }
2044 virtual bool IsEquivalent(RetainedObjectInfo* other) {
2045 return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2046 }
2047 virtual intptr_t GetHash() { return hash_; }
2048 virtual const char* GetLabel() { return label_; }
2049
2050 private:
2051 bool disposed_;
2052 intptr_t hash_;
2053 const char* label_;
2054};
2055
2056
2057NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2058 const char* label) {
2059 const char* label_copy = collection_->names()->GetCopy(label);
2060 uint32_t hash = StringHasher::HashSequentialString(
2061 label_copy,
2062 static_cast<int>(strlen(label_copy)),
2063 HEAP->HashSeed());
2064 HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2065 hash, true);
2066 if (entry->value == NULL) {
2067 entry->value = new NativeGroupRetainedObjectInfo(label);
2068 }
2069 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2070}
2071
2072
2073void NativeObjectsExplorer::SetNativeRootReference(
2074 v8::RetainedObjectInfo* info) {
2075 HeapEntry* child_entry =
2076 filler_->FindOrAddEntry(info, native_entries_allocator_);
2077 ASSERT(child_entry != NULL);
2078 NativeGroupRetainedObjectInfo* group_info =
2079 FindOrAddGroupInfo(info->GetGroupLabel());
2080 HeapEntry* group_entry =
2081 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2082 filler_->SetNamedAutoIndexReference(
2083 HeapGraphEdge::kInternal,
2084 group_entry->index(),
2085 child_entry);
2086}
2087
2088
2089void NativeObjectsExplorer::SetWrapperNativeReferences(
2090 HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2091 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2092 ASSERT(wrapper_entry != NULL);
2093 HeapEntry* info_entry =
2094 filler_->FindOrAddEntry(info, native_entries_allocator_);
2095 ASSERT(info_entry != NULL);
2096 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2097 wrapper_entry->index(),
2098 "native",
2099 info_entry);
2100 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2101 info_entry->index(),
2102 wrapper_entry);
2103}
2104
2105
2106void NativeObjectsExplorer::SetRootNativeRootsReference() {
2107 for (HashMap::Entry* entry = native_groups_.Start();
2108 entry;
2109 entry = native_groups_.Next(entry)) {
2110 NativeGroupRetainedObjectInfo* group_info =
2111 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2112 HeapEntry* group_entry =
2113 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2114 ASSERT(group_entry != NULL);
2115 filler_->SetIndexedAutoIndexReference(
2116 HeapGraphEdge::kElement,
2117 snapshot_->root()->index(),
2118 group_entry);
2119 }
2120}
2121
2122
2123void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2124 if (in_groups_.Contains(*p)) return;
2125 Isolate* isolate = Isolate::Current();
2126 v8::RetainedObjectInfo* info =
2127 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2128 if (info == NULL) return;
2129 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2130}
2131
2132
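// Bridges the explorers and the snapshot: resolves HeapThings to snapshot
// entries through a HeapEntriesMap and forwards reference registration to
// the parent HeapEntry.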
2133class SnapshotFiller : public SnapshotFillerInterface {
2134 public:
2135 explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
2136 : snapshot_(snapshot),
2137 collection_(snapshot->collection()),
2138 entries_(entries) { }
2139 HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
2140 HeapEntry* entry = allocator->AllocateEntry(ptr);
2141 entries_->Pair(ptr, entry->index());
2142 return entry;
2143 }
2144 HeapEntry* FindEntry(HeapThing ptr) {
2145 int index = entries_->Map(ptr);
2146 return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
2147 }
2148 HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
2149 HeapEntry* entry = FindEntry(ptr);
2150 return entry != NULL ? entry : AddEntry(ptr, allocator);
2151 }
2152 void SetIndexedReference(HeapGraphEdge::Type type,
2153 int parent,
2154 int index,
2155 HeapEntry* child_entry) {
2156 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2157 parent_entry->SetIndexedReference(type, index, child_entry);
2158 }
2159 void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
2160 int parent,
2161 HeapEntry* child_entry) {
2162 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2163 int index = parent_entry->children_count() + 1;
2164 parent_entry->SetIndexedReference(type, index, child_entry);
2165 }
2166 void SetNamedReference(HeapGraphEdge::Type type,
2167 int parent,
2168 const char* reference_name,
2169 HeapEntry* child_entry) {
2170 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2171 parent_entry->SetNamedReference(type, reference_name, child_entry);
2172 }
2173 void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
2174 int parent,
2175 HeapEntry* child_entry) {
2176 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2177 int index = parent_entry->children_count() + 1;
2178 parent_entry->SetNamedReference(
2179 type,
2180 collection_->names()->GetName(index),
2181 child_entry);
2182 }
2183
2184 private:
2185 HeapSnapshot* snapshot_;
2186 HeapSnapshotsCollection* collection_;
2187 HeapEntriesMap* entries_;
2188};
2189
2190
2191HeapSnapshotGenerator::HeapSnapshotGenerator(
2192 HeapSnapshot* snapshot,
2193 v8::ActivityControl* control,
2194 v8::HeapProfiler::ObjectNameResolver* resolver,
2195 Heap* heap)
2196 : snapshot_(snapshot),
2197 control_(control),
2198 v8_heap_explorer_(snapshot_, this, resolver),
2199 dom_explorer_(snapshot_, this),
2200 heap_(heap) {
2201}
2202
2203
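// Builds the snapshot in a single pass: tag global objects, force two full
// GCs so that only reachable objects remain, then extract references and
// fill in the children arrays while allocation is disallowed.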
2204bool HeapSnapshotGenerator::GenerateSnapshot() {
2205 v8_heap_explorer_.TagGlobalObjects();
2206
2207 // TODO(1562) Profiler assumes that any object that is in the heap after
2208 // full GC is reachable from the root when computing dominators.
2209 // This is not true for weakly reachable objects.
2210 // As a temporary solution we call GC twice.
2211 Isolate::Current()->heap()->CollectAllGarbage(
2212 Heap::kMakeHeapIterableMask,
2213 "HeapSnapshotGenerator::GenerateSnapshot");
2214 Isolate::Current()->heap()->CollectAllGarbage(
2215 Heap::kMakeHeapIterableMask,
2216 "HeapSnapshotGenerator::GenerateSnapshot");
2217
2218#ifdef VERIFY_HEAP
2219 Heap* debug_heap = Isolate::Current()->heap();
2220 CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
2221 CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
2222 CHECK(!debug_heap->code_space()->was_swept_conservatively());
2223 CHECK(!debug_heap->cell_space()->was_swept_conservatively());
2224 CHECK(!debug_heap->map_space()->was_swept_conservatively());
2225#endif
2226
2227 // The following code uses heap iterators, so we want the heap to be
2228 // stable. It should follow TagGlobalObjects as that can allocate.
2229 AssertNoAllocation no_alloc;
2230
2231#ifdef VERIFY_HEAP
2232 debug_heap->Verify();
2233#endif
2234
2235 SetProgressTotal(1); // 1 pass.
2236
2237#ifdef VERIFY_HEAP
2238 debug_heap->Verify();
2239#endif
2240
2241 if (!FillReferences()) return false;
2242
2243 snapshot_->FillChildren();
2244 snapshot_->RememberLastJSObjectId();
2245
2246 progress_counter_ = progress_total_;
2247 if (!ProgressReport(true)) return false;
2248 return true;
2249}
2250
2251
2252void HeapSnapshotGenerator::ProgressStep() {
2253 ++progress_counter_;
2254}
2255
2256
2257bool HeapSnapshotGenerator::ProgressReport(bool force) {
2258 const int kProgressReportGranularity = 10000;
2259 if (control_ != NULL
2260 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2261 return
2262 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2263 v8::ActivityControl::kContinue;
2264 }
2265 return true;
2266}
2267
2268
2269void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2270 if (control_ == NULL) return;
2271 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2272 progress_total_ = iterations_count * (
2273 v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2274 dom_explorer_.EstimateObjectsCount());
2275 progress_counter_ = 0;
2276}
2277
2278
2279bool HeapSnapshotGenerator::FillReferences() {
2280 SnapshotFiller filler(snapshot_, &entries_);
2281 v8_heap_explorer_.AddRootEntries(&filler);
2282 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2283 && dom_explorer_.IterateAndExtractReferences(&filler);
2284}
2285
2286
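// Upper bounds on the number of decimal digits (including a sign) needed
// for integers of the given byte width; used to size formatting buffers.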
2287template<int bytes> struct MaxDecimalDigitsIn;
2288template<> struct MaxDecimalDigitsIn<4> {
2289 static const int kSigned = 11;
2290 static const int kUnsigned = 10;
2291};
2292template<> struct MaxDecimalDigitsIn<8> {
2293 static const int kSigned = 20;
2294 static const int kUnsigned = 20;
2295};
2296
2297
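// Buffers serializer output in chunks of the size requested by the
// embedder's v8::OutputStream and flushes each chunk as soon as it is full.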
2298class OutputStreamWriter {
2299 public:
2300 explicit OutputStreamWriter(v8::OutputStream* stream)
2301 : stream_(stream),
2302 chunk_size_(stream->GetChunkSize()),
2303 chunk_(chunk_size_),
2304 chunk_pos_(0),
2305 aborted_(false) {
2306 ASSERT(chunk_size_ > 0);
2307 }
2308 bool aborted() { return aborted_; }
2309 void AddCharacter(char c) {
2310 ASSERT(c != '\0');
2311 ASSERT(chunk_pos_ < chunk_size_);
2312 chunk_[chunk_pos_++] = c;
2313 MaybeWriteChunk();
2314 }
2315 void AddString(const char* s) {
2316 AddSubstring(s, StrLength(s));
2317 }
2318 void AddSubstring(const char* s, int n) {
2319 if (n <= 0) return;
2320 ASSERT(static_cast<size_t>(n) <= strlen(s));
2321 const char* s_end = s + n;
2322 while (s < s_end) {
2323 int s_chunk_size = Min(
2324 chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2325 ASSERT(s_chunk_size > 0);
2326 memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2327 s += s_chunk_size;
2328 chunk_pos_ += s_chunk_size;
2329 MaybeWriteChunk();
2330 }
2331 }
2332 void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2333 void Finalize() {
2334 if (aborted_) return;
2335 ASSERT(chunk_pos_ < chunk_size_);
2336 if (chunk_pos_ != 0) {
2337 WriteChunk();
2338 }
2339 stream_->EndOfStream();
2340 }
2341
2342 private:
2343 template<typename T>
2344 void AddNumberImpl(T n, const char* format) {
2345 // Buffer for the longest value plus trailing \0
2346 static const int kMaxNumberSize =
2347 MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2348 if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2349 int result = OS::SNPrintF(
2350 chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2351 ASSERT(result != -1);
2352 chunk_pos_ += result;
2353 MaybeWriteChunk();
2354 } else {
2355 EmbeddedVector<char, kMaxNumberSize> buffer;
2356 int result = OS::SNPrintF(buffer, format, n);
2357 USE(result);
2358 ASSERT(result != -1);
2359 AddString(buffer.start());
2360 }
2361 }
2362 void MaybeWriteChunk() {
2363 ASSERT(chunk_pos_ <= chunk_size_);
2364 if (chunk_pos_ == chunk_size_) {
2365 WriteChunk();
2366 }
2367 }
2368 void WriteChunk() {
2369 if (aborted_) return;
2370 if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2371 v8::OutputStream::kAbort) aborted_ = true;
2372 chunk_pos_ = 0;
2373 }
2374
2375 v8::OutputStream* stream_;
2376 int chunk_size_;
2377 ScopedVector<char> chunk_;
2378 int chunk_pos_;
2379 bool aborted_;
2380};
2381
2382
2383// type, name|index, to_node.
2384const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2385// type, name, id, self_size, edge_count.
2386const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
2387
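// Entry point for JSON serialization. If the snapshot exceeds the
// serializable size limit it is temporarily swapped for a tiny placeholder
// snapshot that carries only an explanatory message.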
2388void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2389 ASSERT(writer_ == NULL);
2390 writer_ = new OutputStreamWriter(stream);
2391
2392 HeapSnapshot* original_snapshot = NULL;
2393 if (snapshot_->RawSnapshotSize() >=
2394 SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
2395 // The snapshot is too big. Serialize a fake snapshot.
2396 original_snapshot = snapshot_;
2397 snapshot_ = CreateFakeSnapshot();
2398 }
2399
2400 SerializeImpl();
2401
2402 delete writer_;
2403 writer_ = NULL;
2404
2405 if (original_snapshot != NULL) {
2406 delete snapshot_;
2407 snapshot_ = original_snapshot;
2408 }
2409}
2410
2411
2412HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
2413 HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
2414 HeapSnapshot::kFull,
2415 snapshot_->title(),
2416 snapshot_->uid());
2417 result->AddRootEntry();
2418 const char* text = snapshot_->collection()->names()->GetFormatted(
2419 "The snapshot is too big. "
2420 "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
2421 "Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
2422 SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
2423 (snapshot_->RawSnapshotSize() + MB - 1) / MB);
2424 HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
2425 result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
2426 result->FillChildren();
2427 return result;
2428}
2429
2430
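// Streams the four top-level JSON sections in order, giving the output the
// overall shape (illustrative sketch only):
//   {"snapshot":{...},"nodes":[...],"edges":[...],"strings":[...]}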
2431void HeapSnapshotJSONSerializer::SerializeImpl() {
2432 ASSERT(0 == snapshot_->root()->index());
2433 writer_->AddCharacter('{');
2434 writer_->AddString("\"snapshot\":{");
2435 SerializeSnapshot();
2436 if (writer_->aborted()) return;
2437 writer_->AddString("},\n");
2438 writer_->AddString("\"nodes\":[");
2439 SerializeNodes();
2440 if (writer_->aborted()) return;
2441 writer_->AddString("],\n");
2442 writer_->AddString("\"edges\":[");
2443 SerializeEdges();
2444 if (writer_->aborted()) return;
2445 writer_->AddString("],\n");
2446 writer_->AddString("\"strings\":[");
2447 SerializeStrings();
2448 if (writer_->aborted()) return;
2449 writer_->AddCharacter(']');
2450 writer_->AddCharacter('}');
2451 writer_->Finalize();
2452}
2453
2454
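// Interns |s| in the string table and returns its id. SerializeStrings()
// later writes the table sorted by id, after a "<dummy>" placeholder.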
2455int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2456 HashMap::Entry* cache_entry = strings_.Lookup(
2457 const_cast<char*>(s), ObjectHash(s), true);
2458 if (cache_entry->value == NULL) {
2459 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2460 }
2461 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2462}
2463
2464
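// Writes the decimal digits of |value| into |buffer| starting at
// |buffer_pos| and returns the position just past the last digit written.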
2465static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
2466 int number_of_digits = 0;
2467 unsigned t = value;
2468 do {
2469 ++number_of_digits;
2470 } while (t /= 10);
2471
2472 buffer_pos += number_of_digits;
2473 int result = buffer_pos;
2474 do {
2475 int last_digit = value % 10;
2476 buffer[--buffer_pos] = '0' + last_digit;
2477 value /= 10;
2478 } while (value);
2479 return result;
2480}
2481
2482
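// Writes one edge as "type,name_or_index,to_node", matching the
// "edge_fields" meta description; to_node identifies the target node by its
// position in the flat "nodes" array.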
2483void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2484 bool first_edge) {
2485 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2486 static const int kBufferSize =
2487 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2488 EmbeddedVector<char, kBufferSize> buffer;
2489 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2490 || edge->type() == HeapGraphEdge::kHidden
2491 || edge->type() == HeapGraphEdge::kWeak
2492 ? edge->index() : GetStringId(edge->name());
2493 int buffer_pos = 0;
2494 if (!first_edge) {
2495 buffer[buffer_pos++] = ',';
2496 }
2497 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2498 buffer[buffer_pos++] = ',';
2499 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2500 buffer[buffer_pos++] = ',';
2501 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2502 buffer[buffer_pos++] = '\n';
2503 buffer[buffer_pos++] = '\0';
2504 writer_->AddString(buffer.start());
2505}
2506
2507
2508void HeapSnapshotJSONSerializer::SerializeEdges() {
2509 List<HeapGraphEdge*>& edges = snapshot_->children();
2510 for (int i = 0; i < edges.length(); ++i) {
2511 ASSERT(i == 0 ||
2512 edges[i - 1]->from()->index() <= edges[i]->from()->index());
2513 SerializeEdge(edges[i], i == 0);
2514 if (writer_->aborted()) return;
2515 }
2516}
2517
2518
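// Writes one node as "type,name,id,self_size,edge_count", matching the
// "node_fields" meta description emitted by SerializeSnapshot().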
2519void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2520 // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
2521 static const int kBufferSize =
2522 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2523 + 5 + 1 + 1;
2524 EmbeddedVector<char, kBufferSize> buffer;
2525 int buffer_pos = 0;
2526 if (entry_index(entry) != 0) {
2527 buffer[buffer_pos++] = ',';
2528 }
2529 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2530 buffer[buffer_pos++] = ',';
2531 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2532 buffer[buffer_pos++] = ',';
2533 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2534 buffer[buffer_pos++] = ',';
2535 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2536 buffer[buffer_pos++] = ',';
2537 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2538 buffer[buffer_pos++] = '\n';
2539 buffer[buffer_pos++] = '\0';
2540 writer_->AddString(buffer.start());
2541}
2542
2543
2544void HeapSnapshotJSONSerializer::SerializeNodes() {
2545 List<HeapEntry>& entries = snapshot_->entries();
2546 for (int i = 0; i < entries.length(); ++i) {
2547 SerializeNode(&entries[i]);
2548 if (writer_->aborted()) return;
2549 }
2550}
2551
2552
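// Writes the "snapshot" header: title, uid, the meta description of the
// node and edge field layouts, and the node and edge counts.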
2553void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2554 writer_->AddString("\"title\":\"");
2555 writer_->AddString(snapshot_->title());
2556 writer_->AddString("\"");
2557 writer_->AddString(",\"uid\":");
2558 writer_->AddNumber(snapshot_->uid());
2559 writer_->AddString(",\"meta\":");
2560 // The object describing the node and edge serialization layout.
2561 // We use a set of macros to improve readability.
2562#define JSON_A(s) "[" s "]"
2563#define JSON_O(s) "{" s "}"
2564#define JSON_S(s) "\"" s "\""
2565 writer_->AddString(JSON_O(
2566 JSON_S("node_fields") ":" JSON_A(
2567 JSON_S("type") ","
2568 JSON_S("name") ","
2569 JSON_S("id") ","
2570 JSON_S("self_size") ","
2571 JSON_S("edge_count")) ","
2572 JSON_S("node_types") ":" JSON_A(
2573 JSON_A(
2574 JSON_S("hidden") ","
2575 JSON_S("array") ","
2576 JSON_S("string") ","
2577 JSON_S("object") ","
2578 JSON_S("code") ","
2579 JSON_S("closure") ","
2580 JSON_S("regexp") ","
2581 JSON_S("number") ","
2582 JSON_S("native") ","
2583 JSON_S("synthetic") ","
2584 JSON_S("context")) ","
2585 JSON_S("string") ","
2586 JSON_S("number") ","
2587 JSON_S("number") ","
2588 JSON_S("number") ","
2589 JSON_S("number") ","
2590 JSON_S("number")) ","
2591 JSON_S("edge_fields") ":" JSON_A(
2592 JSON_S("type") ","
2593 JSON_S("name_or_index") ","
2594 JSON_S("to_node")) ","
2595 JSON_S("edge_types") ":" JSON_A(
2596 JSON_A(
2597 JSON_S("context") ","
2598 JSON_S("element") ","
2599 JSON_S("property") ","
2600 JSON_S("internal") ","
2601 JSON_S("hidden") ","
2602 JSON_S("shortcut") ","
2603 JSON_S("weak")) ","
2604 JSON_S("string_or_number") ","
2605 JSON_S("node"))));
2606#undef JSON_S
2607#undef JSON_O
2608#undef JSON_A
2609 writer_->AddString(",\"node_count\":");
2610 writer_->AddNumber(snapshot_->entries().length());
2611 writer_->AddString(",\"edge_count\":");
2612 writer_->AddNumber(snapshot_->edges().length());
2613}
2614
2615
2616static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2617 static const char hex_chars[] = "0123456789ABCDEF";
2618 w->AddString("\\u");
2619 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2620 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2621 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2622 w->AddCharacter(hex_chars[u & 0xf]);
2623}
2624
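// Emits a JSON string literal: printable ASCII passes through, control
// characters are \u-escaped, and multi-byte UTF-8 sequences are decoded and
// re-emitted as \u escapes (undecodable bytes become '?').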
2625void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
2626 writer_->AddCharacter('\n');
2627 writer_->AddCharacter('\"');
2628 for ( ; *s != '\0'; ++s) {
2629 switch (*s) {
2630 case '\b':
2631 writer_->AddString("\\b");
2632 continue;
2633 case '\f':
2634 writer_->AddString("\\f");
2635 continue;
2636 case '\n':
2637 writer_->AddString("\\n");
2638 continue;
2639 case '\r':
2640 writer_->AddString("\\r");
2641 continue;
2642 case '\t':
2643 writer_->AddString("\\t");
2644 continue;
2645 case '\"':
2646 case '\\':
2647 writer_->AddCharacter('\\');
2648 writer_->AddCharacter(*s);
2649 continue;
2650 default:
2651 if (*s > 31 && *s < 128) {
2652 writer_->AddCharacter(*s);
2653 } else if (*s <= 31) {
2654 // Special character with no dedicated literal.
2655 WriteUChar(writer_, *s);
2656 } else {
2657 // Convert a UTF-8 sequence into a \u UTF-16 escape.
2658 unsigned length = 1, cursor = 0;
2659 for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
2660 unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
2661 if (c != unibrow::Utf8::kBadChar) {
2662 WriteUChar(writer_, c);
2663 ASSERT(cursor != 0);
2664 s += cursor - 1;
2665 } else {
2666 writer_->AddCharacter('?');
2667 }
2668 }
2669 }
2670 }
2671 writer_->AddCharacter('\"');
2672}
2673
2674
2675void HeapSnapshotJSONSerializer::SerializeStrings() {
2676 List<HashMap::Entry*> sorted_strings;
2677 SortHashMap(&strings_, &sorted_strings);
2678 writer_->AddString("\"<dummy>\"");
2679 for (int i = 0; i < sorted_strings.length(); ++i) {
2680 writer_->AddCharacter(',');
2681 SerializeString(
2682 reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
2683 if (writer_->aborted()) return;
2684 }
2685}
2686
2687
2688template<typename T>
2689inline static int SortUsingEntryValue(const T* x, const T* y) {
2690 uintptr_t x_uint = reinterpret_cast<uintptr_t>((*x)->value);
2691 uintptr_t y_uint = reinterpret_cast<uintptr_t>((*y)->value);
2692 if (x_uint > y_uint) {
2693 return 1;
2694 } else if (x_uint == y_uint) {
2695 return 0;
2696 } else {
2697 return -1;
2698 }
2699}
2700
2701
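// Sorts the string table entries by their assigned ids so that strings are
// serialized in the order in which the ids were handed out.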
2702void HeapSnapshotJSONSerializer::SortHashMap(
2703 HashMap* map, List<HashMap::Entry*>* sorted_entries) {
2704 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
2705 sorted_entries->Add(p);
2706 sorted_entries->Sort(SortUsingEntryValue);
2707}
2708
2709} } // namespace v8::internal