blob: 824e50793da84575242b0a1b3d560b8fb8d88000 [file] [log] [blame]
ulan@chromium.org2e04b582013-02-21 14:06:02 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "heap-snapshot-generator-inl.h"
31
32#include "heap-profiler.h"
33#include "debug.h"
34
35namespace v8 {
36namespace internal {
37
38
// Creates a named edge between the entries at indexes |from| and |to| in
// the snapshot's entry list. Indexes are turned into direct pointers later,
// see ReplaceToIndexWithEntry. |name| must outlive the edge (it normally
// comes from the interned names table).
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  // Only these edge kinds carry a string name; the others use an index.
  ASSERT(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut);
}
49
50
// Creates an indexed (unnamed) edge between the entries at indexes |from|
// and |to| in the snapshot's entry list.
HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  // Only these edge kinds carry a numeric index; the others use a name.
  ASSERT(type == kElement || type == kHidden || type == kWeak);
}
58
59
// Resolves the stored to-index into a direct HeapEntry pointer. Must only
// be called once the snapshot's entry list will no longer be reallocated
// (pointers into a growing list would dangle).
void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}
63
64
// Sentinel index meaning "no entry recorded".
const int HeapEntry::kNoEntry = -1;

// Creates a snapshot node. children_index_ is assigned later by
// HeapSnapshot::FillChildren via set_children_index.
HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     int self_size)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      id_(id),
      snapshot_(snapshot),
      name_(name) { }
79
80
81void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
82 const char* name,
83 HeapEntry* entry) {
84 HeapGraphEdge edge(type, name, this->index(), entry->index());
85 snapshot_->edges().Add(edge);
86 ++children_count_;
87}
88
89
90void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
91 int index,
92 HeapEntry* entry) {
93 HeapGraphEdge edge(type, index, this->index(), entry->index());
94 snapshot_->edges().Add(edge);
95 ++children_count_;
96}
97
98
// Maps this snapshot node back to the live heap object with the same
// snapshot id. May return an empty handle if the object has since died
// (see HeapSnapshotsCollection::FindHeapObjectById).
Handle<HeapObject> HeapEntry::GetHeapObject() {
  return snapshot_->collection()->FindHeapObjectById(id());
}
102
103
// Debug-prints this entry and, recursively, its children up to |max_depth|
// levels. |prefix|/|edge_name| describe the edge that led here. String
// entries are printed escaped and truncated to 40 chars.
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  // id() is printed with %u below, so its width must match unsigned.
  STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
  OS::Print("%6d @%6u %*c %s%s: ",
            self_size(), id(), indent, ' ', prefix, edge_name);
  if (type() != kString) {
    OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    OS::Print("\"");
    // Print at most ~40 chars of the string, escaping newlines.
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        OS::Print("%c", *c);
      else
        OS::Print("\\n");
      ++c;
    }
    OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    // Scratch buffer for numeric edge labels; edge_name (shadowing the
    // parameter) points into it for the indexed edge kinds below, so the
    // buffer must stay alive until the recursive Print call.
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      default:
        OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}
163
164
165const char* HeapEntry::TypeAsString() {
166 switch (type()) {
167 case kHidden: return "/hidden/";
168 case kObject: return "/object/";
169 case kClosure: return "/closure/";
170 case kString: return "/string/";
171 case kCode: return "/code/";
172 case kArray: return "/array/";
173 case kRegExp: return "/regexp/";
174 case kHeapNumber: return "/number/";
175 case kNative: return "/native/";
176 case kSynthetic: return "/synthetic/";
177 default: return "???";
178 }
179}
180
181
// It is very important to keep objects that form a heap snapshot
// as small as possible.
// The expected sizes below are verified with STATIC_CHECKs in the
// HeapSnapshot constructor and the GetUsedMemorySize/RawSnapshotSize
// methods, so any accidental growth of these structs breaks the build.
namespace { // Avoid littering the global namespace.

template <size_t ptr_size> struct SnapshotSizeConstants;

// 32-bit pointer layout.
template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 24;
  static const int kExpectedHeapSnapshotsCollectionSize = 100;
  static const int kExpectedHeapSnapshotSize = 136;
  static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};

// 64-bit pointer layout.
template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 32;
  static const int kExpectedHeapSnapshotsCollectionSize = 152;
  static const int kExpectedHeapSnapshotSize = 168;
  // 6000 MB overflows 32-bit size_t, hence uint64_t here.
  static const uint64_t kMaxSerializableSnapshotRawSize =
      static_cast<uint64_t>(6000) * MB;
};

} // namespace
206
// Creates an empty snapshot owned by |collection|. All special-entry
// indexes start out as kNoEntry; they are filled in by the Add*Entry
// methods during snapshot generation.
HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
                           HeapSnapshot::Type type,
                           const char* title,
                           unsigned uid)
    : collection_(collection),
      type_(type),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      natives_root_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  // Guard against accidental growth of the per-edge/per-entry structs;
  // snapshots hold millions of them (see SnapshotSizeConstants above).
  STATIC_CHECK(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_CHECK(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}
229
230
// Unregisters this snapshot from its collection and destroys it.
// NOTE: self-deleting -- the object is invalid after this call.
void HeapSnapshot::Delete() {
  collection_->RemoveSnapshot(this);
  delete this;
}
235
236
// Captures the highest object id assigned so far, so the snapshot can
// later tell which objects were allocated after it was taken.
void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = collection_->last_assigned_id();
}
240
241
// Creates the synthetic root entry. Must be called exactly once, before
// any other entry is added: serialization relies on the root being at
// index 0.
HeapEntry* HeapSnapshot::AddRootEntry() {
  ASSERT(root_index_ == HeapEntry::kNoEntry);
  ASSERT(entries_.is_empty()); // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kObject,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0);
  root_index_ = entry->index();
  ASSERT(root_index_ == 0);
  return entry;
}
253
254
// Creates the synthetic "(GC roots)" entry. Must be called at most once.
HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kObject,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}
264
265
266HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
267 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
268 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
269 HeapEntry* entry = AddEntry(
270 HeapEntry::kObject,
271 VisitorSynchronization::kTagNames[tag],
272 HeapObjectsMap::GetNthGcSubrootId(tag),
273 0);
274 gc_subroot_indexes_[tag] = entry->index();
275 return entry;
276}
277
278
// Appends a new entry to the snapshot and returns a pointer to it.
// NOTE(review): the returned pointer is into the growable entries_ list,
// so it is only stable until the next Add -- callers appear to use it
// immediately or store entry->index() instead.
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  int size) {
  HeapEntry entry(this, type, name, id, size);
  entries_.Add(entry);
  return &entries_.last();
}
287
288
// Second pass of snapshot construction: partitions the flat edge list into
// per-entry child ranges and resolves edge target indexes into pointers.
// Entries recorded their child counts while edges were being added; here
// each entry claims a contiguous slice of children_ based on that count.
void HeapSnapshot::FillChildren() {
  ASSERT(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  // Assign each entry its [start, start + children_count_) window.
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  ASSERT(edges().length() == children_index);
  // Now that entries_ will no longer grow, indexes can become pointers.
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}
304
305
306class FindEntryById {
307 public:
308 explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
309 int operator()(HeapEntry* const* entry) {
310 if ((*entry)->id() == id_) return 0;
311 return (*entry)->id() < id_ ? -1 : 1;
312 }
313 private:
314 SnapshotObjectId id_;
315};
316
317
318HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
319 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
320 // Perform a binary search by id.
321 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
322 if (index == -1)
323 return NULL;
324 return entries_by_id->at(index);
325}
326
327
// qsort-style comparator ordering pointed-to items by ascending id().
template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() < (*entry2_ptr)->id()) return -1;
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return 1;
}
334
335
// Returns the entry list sorted by id, building it lazily on first use.
// NOTE(review): the cache is never invalidated, so this must only be
// called after entries_ has stopped growing -- confirm with callers.
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}
346
347
// Debug-prints the whole snapshot graph starting at the root entry.
void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}
351
352
353template<typename T, class P>
354static size_t GetMemoryUsedByList(const List<T, P>& list) {
355 return list.length() * sizeof(T) + sizeof(list);
356}
357
358
// Returns the approximate memory held by this snapshot: the object itself
// plus its entry, edge, child and sorted-entry lists.
size_t HeapSnapshot::RawSnapshotSize() const {
  // Keep the expected-size constant in sync with the real struct size.
  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
               sizeof(HeapSnapshot)); // NOLINT
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}
369
370
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
// (GenerateId always shifts left by one, producing even ids; the ids
// below start at 1 and advance by kObjectIdStep, keeping them odd --
// assuming kObjectIdStep is even, see its declaration.)
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
// One reserved id per GC subroot tag; real objects start after these.
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
381
// Tracks the address -> (id, size) mapping for live heap objects across
// GCs. entries_map_ maps addresses to indexes into the entries_ list.
HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do lookup in HashMap we see no difference between two cases:
  // it has an entry with NULL as the value or it has created
  // a new entry on the fly with NULL as the default value.
  // With such dummy element we have a guarantee that all entries_map_
  // entries will have the value field greater than 0.
  // This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}
395
396
// Called after a snapshot completes: drops entries whose objects were not
// seen during generation (i.e. have died).
void HeapObjectsMap::SnapshotGenerationFinished() {
  RemoveDeadEntries();
}
400
401
// Updates the tracking map when the GC moves an object from |from| to
// |to|, preserving its snapshot id. Handles collisions with whatever was
// previously tracked at |to|.
void HeapObjectsMap::MoveObject(Address from, Address to) {
  ASSERT(to != NULL);
  ASSERT(from != NULL);
  if (from == to) return;
  void* from_value = entries_map_.Remove(from, AddressHash(from));
  if (from_value == NULL) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(to, AddressHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      // NULL addr marks the EntryInfo dead; RemoveDeadEntries skips it.
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    // Insert-or-find the destination slot for the moved object.
    HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
    if (to_entry->value != NULL) {
      // We found the existing entry with to address for an old object.
      // Without this operation we will have two EntryInfo's with the same
      // value in addr field. It is bad because later at RemoveDeadEntries
      // one of this entry will be removed with the corresponding entries_map_
      // entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    to_entry->value = from_value;
  }
}
435
436
// Returns the snapshot id tracked for |addr|, or 0 if the address is not
// tracked. (0 is never a valid id -- real ids start at 1.)
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  // entries_ holds one dummy element beyond what entries_map_ tracks.
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}
445
446
// Returns the id tracked for |addr|, allocating a fresh id (and recording
// the size) if the address is not yet tracked. Marks existing entries as
// accessed so RemoveDeadEntries keeps them.
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size) {
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = true;
    entry_info.size = size;
    return entry_info.id;
  }
  // New address: the map stores the index the EntryInfo is about to get,
  // so the value must be captured before the Add below.
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size));
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
466
467
// Ends heap-stats tracking by discarding the recorded time intervals.
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}
471
// Brings the map up to date with the current heap: forces a GC so the heap
// is iterable, registers every live object, then drops dead entries.
void HeapObjectsMap::UpdateHeapObjectsMap() {
  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapSnapshotsCollection::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
  }
  RemoveDeadEntries();
}
483
484
// Streams incremental heap statistics to |stream|: for each recorded time
// interval, recomputes its live object count/size and emits an update only
// for intervals that changed. Relies on entries_ being sorted by ascending
// id (ids are assigned monotonically and RemoveDeadEntries preserves
// order), so one forward scan covers all intervals. Returns the last
// assigned id, also used as the early-out value if the stream aborts.
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  // Open a new interval starting at the current id watermark.
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  ASSERT(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    // Sum sizes of all entries whose ids fall before this interval's end.
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      // Cache the fresh values in the interval while emitting the update.
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  ASSERT(entry_info == end_entry_info);
  // Flush whatever updates did not fill a whole chunk.
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}
529
530
// Compacts entries_ in place, keeping only entries that were accessed
// since the last sweep (plus the dummy element at index 0), and rewrites
// entries_map_ values to the entries' new indexes. Entries with a NULL
// addr were invalidated by MoveObject and have no map entry to remove.
void HeapObjectsMap::RemoveDeadEntries() {
  ASSERT(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      // Slide the survivor down to the next free slot.
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      // Reset the flag so the next sweep requires a fresh access.
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, AddressHash(entry_info.addr), false);
      ASSERT(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}
558
559
// Computes a stable id for an embedder-provided object by hashing its
// hash, label and (if available) element count. The final left shift makes
// the id even, keeping embedder ids disjoint from the odd ids handed out
// by FindOrAddEntry (see the comment at kInternalRootObjectId).
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           HEAP->HashSeed());
  intptr_t element_count = info->GetElementCount();
  // -1 means "count unknown" per the RetainedObjectInfo contract.
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
572
573
574size_t HeapObjectsMap::GetUsedMemorySize() const {
575 return
576 sizeof(*this) +
577 sizeof(HashMap::Entry) * entries_map_.capacity() +
578 GetMemoryUsedByList(entries_) +
579 GetMemoryUsedByList(time_intervals_);
580}
581
582
// Owns all snapshots taken for a heap plus the shared object-id map.
// Object-move tracking stays off until the first snapshot is requested.
HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap)
    : is_tracking_objects_(false),
      snapshots_uids_(HeapSnapshotsMatch),
      token_enumerator_(new TokenEnumerator()),
      ids_(heap) {
}
589
590
// List::Iterate callback used by the collection destructor below.
static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
  delete *snapshot_ptr;
}
594
595
// Destroys the owned token enumerator and every registered snapshot.
HeapSnapshotsCollection::~HeapSnapshotsCollection() {
  delete token_enumerator_;
  snapshots_.Iterate(DeleteHeapSnapshot);
}
600
601
// Creates a new (unregistered) snapshot; it is added to the collection
// only when SnapshotGenerationFinished is called with it.
HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
                                                   const char* name,
                                                   unsigned uid) {
  is_tracking_objects_ = true; // Start watching for heap objects moves.
  return new HeapSnapshot(this, type, name, uid);
}
608
609
// Finalizes a snapshot run: prunes dead ids, and (if generation was not
// cancelled, i.e. |snapshot| is non-NULL) registers the snapshot in both
// the list and the uid lookup table.
void HeapSnapshotsCollection::SnapshotGenerationFinished(
    HeapSnapshot* snapshot) {
  ids_.SnapshotGenerationFinished();
  if (snapshot != NULL) {
    snapshots_.Add(snapshot);
    HashMap::Entry* entry =
        snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
                               static_cast<uint32_t>(snapshot->uid()),
                               true);
    ASSERT(entry->value == NULL); // uids are expected to be unique.
    entry->value = snapshot;
  }
}
623
624
625HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
626 HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
627 static_cast<uint32_t>(uid),
628 false);
629 return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
630}
631
632
// Unregisters |snapshot| from the list and the uid table. Does not delete
// it -- see HeapSnapshot::Delete, which calls this before self-deleting.
void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
  snapshots_.RemoveElement(snapshot);
  unsigned uid = snapshot->uid();
  snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
                         static_cast<uint32_t>(uid));
}
639
640
// Finds the live heap object with the given snapshot id, or returns an
// empty handle if it is no longer reachable. Expensive: triggers a full GC
// and then walks the entire heap.
Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
    SnapshotObjectId id) {
  // First perform a full GC in order to avoid dead objects.
  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapSnapshotsCollection::FindHeapObjectById");
  AssertNoAllocation no_allocation;
  HeapObject* object = NULL;
  HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
  // Make sure that object with the given id is still reachable.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    if (ids_.FindEntry(obj->address()) == id) {
      ASSERT(object == NULL); // ids are unique, so at most one match.
      object = obj;
      // Can't break -- kFilterUnreachable requires full heap traversal.
    }
  }
  return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
}
661
662
// Approximates memory used by the whole collection: itself, the interned
// names, the id map, the uid table, and every owned snapshot.
size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
  // Keep the expected-size constant in sync with the real struct size.
  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
               kExpectedHeapSnapshotsCollectionSize ==
               sizeof(HeapSnapshotsCollection)); // NOLINT
  size_t size = sizeof(*this);
  size += names_.GetUsedMemorySize();
  size += ids_.GetUsedMemorySize();
  size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
  size += GetMemoryUsedByList(snapshots_);
  for (int i = 0; i < snapshots_.length(); ++i) {
    size += snapshots_[i]->RawSnapshotSize();
  }
  return size;
}
677
678
// Maps opaque HeapThing pointers to snapshot entry indexes.
HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch) {
}
682
683
684int HeapEntriesMap::Map(HeapThing thing) {
685 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
686 if (cache_entry == NULL) return HeapEntry::kNoEntry;
687 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
688}
689
690
// Records the entry index for |thing|. Each thing may be paired only once.
void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  ASSERT(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}
696
697
// A set of heap objects, optionally carrying a string tag per object.
HeapObjectsSet::HeapObjectsSet()
    : entries_(HeapEntriesMap::HeapThingsMatch) {
}
701
702
// Removes all objects (and their tags) from the set.
void HeapObjectsSet::Clear() {
  entries_.Clear();
}
706
707
708bool HeapObjectsSet::Contains(Object* obj) {
709 if (!obj->IsHeapObject()) return false;
710 HeapObject* object = HeapObject::cast(obj);
711 return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
712}
713
714
// Adds |obj| to the set; non-heap values (e.g. Smis) are ignored.
void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  // Lookup with insert=true creates the entry; no value is needed.
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}
720
721
722const char* HeapObjectsSet::GetTag(Object* obj) {
723 HeapObject* object = HeapObject::cast(obj);
724 HashMap::Entry* cache_entry =
725 entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
726 return cache_entry != NULL
727 ? reinterpret_cast<const char*>(cache_entry->value)
728 : NULL;
729}
730
731
// Associates |tag| with |obj|, inserting the object if necessary.
// |tag| must outlive the set; only the pointer is stored.
void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}
739
740
// Sentinel pseudo-HeapObject pointers used as keys for the synthetic
// entries (root, GC roots, GC subroots). They are forged from the
// reserved snapshot object ids, so a GC-subroot sentinel for tag n is
// kFirstGcSubrootObject + n * kObjectIdStep (see GetNthGcSubrootObject).
// NOTE(review): this assumes real heap objects never live at these low
// addresses -- presumably safe on V8 heaps, but confirm per platform.
HeapObject* const V8HeapExplorer::kInternalRootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
753
754
// Walks the V8 heap and fills |snapshot| with entries and edges.
// |progress| gets periodic callbacks; |resolver| (may be NULL) lets the
// embedder name global objects. filler_ is injected later by the
// generator before extraction starts.
V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(Isolate::Current()->heap()),
      snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}
766
767
// Nothing to release: the explorer owns none of its pointers.
V8HeapExplorer::~V8HeapExplorer() {
}
770
771
// HeapEntriesAllocator interface: HeapThing values produced by this
// explorer are HeapObject pointers (or the sentinel pseudo-objects).
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}
775
776
// Creates the snapshot entry for |object|, choosing the entry type and a
// display name from the object's runtime type. Branch order matters:
// sentinel pseudo-objects must be handled before any method is called on
// them, and more specific type checks (e.g. IsNativeContext) must precede
// broader ones (IsContext).
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object == kInternalRootObject) {
    snapshot_->AddRootEntry();
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
    HeapEntry* entry = snapshot_->AddGcRootsEntry();
    return entry;
  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
    HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
    return entry;
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        collection_->names()->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    collection_->names()->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = collection_->names()->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      // Append the embedder-provided tag, if any, to the global's name.
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = collection_->names()->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    return AddEntry(object,
                    HeapEntry::kString,
                    collection_->names()->GetName(String::cast(object)));
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    collection_->names()->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? collection_->names()->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  // Everything else is a VM-internal object.
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
840
841
842HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
843 HeapEntry::Type type,
844 const char* name) {
845 int object_size = object->Size();
846 SnapshotObjectId object_id =
847 collection_->GetObjectId(object->address(), object_size);
848 return snapshot_->AddEntry(type, name, object_id, object_size);
849}
850
851
// Root visitor that creates one snapshot entry per non-empty GC subroot.
// It counts pointers between Synchronize() calls; a subroot whose count
// did not advance since the previous tag contained no objects and is
// skipped.
class GcSubrootsEnumerator : public ObjectVisitor {
 public:
  GcSubrootsEnumerator(
      SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
      : filler_(filler),
        explorer_(explorer),
        previous_object_count_(0),
        object_count_(0) {
  }
  void VisitPointers(Object** start, Object** end) {
    object_count_ += end - start;
  }
  // Called by the heap iterator at the end of each subroot group.
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Skip empty subroots.
    if (previous_object_count_ != object_count_) {
      previous_object_count_ = object_count_;
      filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
    }
  }
 private:
  SnapshotFillerInterface* filler_;
  V8HeapExplorer* explorer_;
  intptr_t previous_object_count_;
  intptr_t object_count_;
};
877
878
// Registers the synthetic root entries (root, GC roots, and one entry per
// non-empty GC subroot) with the filler before heap traversal begins.
void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
  GcSubrootsEnumerator enumerator(filler, this);
  heap_->IterateRoots(&enumerator, VISIT_ALL);
}
885
886
// Returns a "system / ..." display name for VM-internal objects, derived
// from the instance type. Maps of strings get the string subtype spelled
// out; struct types are expanded via the STRUCT_LIST X-macro.
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      // For a Map, report the instance type of the objects it describes.
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}
907
908
909int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
910 int objects_count = 0;
911 for (HeapObject* obj = iterator->next();
912 obj != NULL;
913 obj = iterator->next()) {
914 objects_count++;
915 }
916 return objects_count;
917}
918
919
// Visitor that records a hidden (indexed) edge for every pointer field of
// |parent_obj| that was not already reported as a named reference. Named
// extractors tag visited fields in place by OR-ing kFailureTag into the
// stored pointer (see MarkVisitedField); this visitor detects the tag,
// restores the original pointer, and skips the field.
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(1) {
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
    }
  }
  // Temporarily turns the field at |offset| into a Failure-tagged value so
  // VisitPointers above can tell it has already been handled. A negative
  // offset means "no such field" and is ignored.
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    ASSERT(!Memory::Object_at(field)->IsFailure());
    ASSERT(Memory::Object_at(field)->IsHeapObject());
    *field |= kFailureTag;
  }

 private:
  // Undoes MarkVisitedField's tagging; returns true if the field was
  // tagged (i.e. already reported elsewhere).
  bool CheckVisitedAndUnmark(Object** field) {
    if ((*field)->IsFailure()) {
      intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
      *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};
959
960
// Extracts all outgoing edges of |obj|: first the type-specific named
// references, then (for most types) the remaining pointer fields as
// hidden indexed references via IndexedReferencesExtractor.
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* heap_entry = GetEntry(obj);
  if (heap_entry == NULL) return; // No interest in this object.
  int entry = heap_entry->index();

  bool extract_indexed_refs = true;
  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsJSGlobalPropertyCell()) {
    ExtractJSGlobalPropertyCellReferences(
        entry, JSGlobalPropertyCell::cast(obj));
    // Cells are fully covered by the named extraction above.
    extract_indexed_refs = false;
  }
  if (extract_indexed_refs) {
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  }
}
996
997
void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
  // We need to reference JS global objects from snapshot's root.
  // We use JSGlobalProxy because this is what embedder (e.g. browser)
  // uses for the global object.
  Object* object = proxy->map()->prototype();
  bool is_debug_object = false;
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Globals created for the debugger are internal machinery; keep them out
  // of the user-visible snapshot roots.
  is_debug_object = object->IsGlobalObject() &&
      Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
#endif
  if (!is_debug_object) {
    SetUserGlobalReference(object);
  }
}
1012
1013
// Records references held by a JSObject: closure bindings, named properties,
// elements, embedder internal fields, the prototype chain, and — for
// functions and global objects — their type-specific internal slots.
// The order of the Set*Reference calls determines edge order in the
// snapshot, so it is kept stable.
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->proto_string(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    // The "prototype" slot holds either the prototype object itself or the
    // initial map (from which the prototype can be read).
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->unchecked_context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->unchecked_context(),
                         JSFunction::kContextOffset);
    // Fields past kNonWeakFieldsEndOffset are treated as weak by the GC;
    // report them as weak edges, indexed by their byte offset.
    for (int i = JSFunction::kNonWeakFieldsEndOffset;
         i < JSFunction::kSize;
         i += kPointerSize) {
      SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
    }
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
                         GlobalObject::kGlobalReceiverOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}
1082
1083
1084void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1085 if (string->IsConsString()) {
1086 ConsString* cs = ConsString::cast(string);
1087 SetInternalReference(cs, entry, "first", cs->first(),
1088 ConsString::kFirstOffset);
1089 SetInternalReference(cs, entry, "second", cs->second(),
1090 ConsString::kSecondOffset);
1091 } else if (string->IsSlicedString()) {
1092 SlicedString* ss = SlicedString::cast(string);
1093 SetInternalReference(ss, entry, "parent", ss->parent(),
1094 SlicedString::kParentOffset);
1095 }
1096}
1097
1098
// Records references held by a Context: named context-allocated locals (for
// declaration contexts), the fixed header slots (closure, previous,
// extension, global), and — for native contexts — all native-context fields
// plus the trailing weak slots.
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    // A named function expression also stores itself in its own context.
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

// Emits one internal reference for a fixed context slot, named after the
// field.  Also expanded below via NATIVE_CONTEXT_FIELDS for native contexts.
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  SetInternalReference(context, entry, #name, context->get(Context::index), \
                       FixedArray::OffsetOfElementAt(Context::index));
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
#undef EXTRACT_CONTEXT_FIELD
    // The slots past FIRST_WEAK_SLOT are weak; report them as weak edges.
    for (int i = Context::FIRST_WEAK_SLOT;
         i < Context::NATIVE_CONTEXT_SLOTS;
         ++i) {
      SetWeakReference(context, entry, i, context->get(i),
                       FixedArray::OffsetOfElementAt(i));
    }
  }
}
1144
1145
// Records references held by a Map: prototype, constructor, the transitions
// array or back pointer (the kTransitionsOrBackPointerOffset slot holds one
// or the other), descriptors, and the code cache.
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();

    // The back pointer lives inside the transition array in this case.
    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(transitions->back_pointer_storage(), "(back pointer)");
    SetInternalReference(transitions, entry,
                         "backpointer", back_pointer,
                         TransitionArray::kBackPointerStorageOffset);
    // Report the remaining transition-array fields as hidden references of
    // the map's entry.
    IndexedReferencesExtractor transitions_refs(this, transitions, entry);
    transitions->Iterate(&transitions_refs);

    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions,
                         Map::kTransitionsOrBackPointerOffset);
  } else {
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "backpointer", back_pointer,
                         Map::kTransitionsOrBackPointerOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);

  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
}
1184
1185
// Records references held by a SharedFunctionInfo: name, code objects,
// scope info, script, and the various metadata slots.  The initial map is
// reported as a weak edge, matching its GC treatment.
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  TagObject(shared->code(), "(code)");
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  TagObject(shared->construct_stub(), "(code)");
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "this_property_assignments",
                       shared->this_property_assignments(),
                       SharedFunctionInfo::kThisPropertyAssignmentsOffset);
  SetWeakReference(obj, entry,
                   1, shared->initial_map(),
                   SharedFunctionInfo::kInitialMapOffset);
}
1227
1228
// Records references held by a Script: source, name, data, context data and
// the cached line-ends array.
void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "data", script->data(),
                       Script::kDataOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}
1248
1249
// Records the two caches held by a CodeCache: the default cache and the
// cache for normal-type (non-monomorphic) entries.
void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}
1261
1262
// Records references held by a Code object: relocation info, handler table,
// deopt data, type feedback (for regular function code only), and GC
// metadata.
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  // Only FUNCTION-kind code carries type feedback info in this slot.
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
}
1284
1285
// A property cell holds a single value; record it as an internal reference.
// (The caller skips the generic field pass for cells.)
void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
    int entry, JSGlobalPropertyCell* cell) {
  SetInternalReference(cell, entry, "value", cell->value());
}
1290
1291
1292void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
1293 if (!js_obj->IsJSFunction()) return;
1294
1295 JSFunction* func = JSFunction::cast(js_obj);
1296 if (func->shared()->bound()) {
1297 FixedArray* bindings = func->function_bindings();
1298 SetNativeBindReference(js_obj, entry, "bound_this",
1299 bindings->get(JSFunction::kBoundThisIndex));
1300 SetNativeBindReference(js_obj, entry, "bound_function",
1301 bindings->get(JSFunction::kBoundFunctionIndex));
1302 for (int i = JSFunction::kBoundArgumentsStartIndex;
1303 i < bindings->length(); i++) {
1304 const char* reference_name = collection_->names()->GetFormatted(
1305 "bound_argument_%d",
1306 i - JSFunction::kBoundArgumentsStartIndex);
1307 SetNativeBindReference(js_obj, entry, reference_name,
1308 bindings->get(i));
1309 }
ulan@chromium.org2e04b582013-02-21 14:06:02 +00001310 }
1311}
1312
1313
// Records named-property references.  Fast-properties objects are walked via
// their descriptor array (switching on descriptor kind); slow-properties
// objects via their name dictionary.  The hidden-properties backing store is
// reported as an internal reference instead of a property edge.
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < descs->number_of_descriptors(); i++) {
      // Skip descriptors not owned by this map (shared descriptor arrays).
      if (descs->GetDetails(i).descriptor_index() > real_size) continue;
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);

          Name* k = descs->GetKey(i);
          // The field lives either in the object itself or in the
          // out-of-object properties backing store.
          if (index < js_obj->map()->inobject_properties()) {
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(
                  js_obj, entry,
                  k, value,
                  NULL,
                  js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(
                  js_obj, entry,
                  "hidden_properties", value,
                  js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            Object* value = js_obj->FastPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        case CONSTANT_FUNCTION:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstantFunction(i));
          break;
        case CALLBACKS: {
          // Accessor pairs contribute separate "get-" and "set-" edges.
          Object* callback_obj = descs->GetValue(i);
          if (callback_obj->IsAccessorPair()) {
            AccessorPair* accessors = AccessorPair::cast(callback_obj);
            if (Object* getter = accessors->getter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   getter, "get-%s");
            }
            if (Object* setter = accessors->setter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   setter, "set-%s");
            }
          }
          break;
        }
        case NORMAL:  // only in slow mode
        case HANDLER:  // only in lookup results, not in descriptors
        case INTERCEPTOR:  // only in lookup results, not in descriptors
          break;
        case TRANSITION:
        case NONEXISTENT:
          UNREACHABLE();
          break;
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsJSGlobalPropertyCell()
            ? JSGlobalPropertyCell::cast(target)->value()
            : target;
        if (k != heap_->hidden_string()) {
          SetPropertyReference(js_obj, entry, String::cast(k), value);
        } else {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
        }
      }
    }
  }
}
1402
1403
1404void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1405 if (js_obj->HasFastObjectElements()) {
1406 FixedArray* elements = FixedArray::cast(js_obj->elements());
1407 int length = js_obj->IsJSArray() ?
1408 Smi::cast(JSArray::cast(js_obj)->length())->value() :
1409 elements->length();
1410 for (int i = 0; i < length; ++i) {
1411 if (!elements->get(i)->IsTheHole()) {
1412 SetElementReference(js_obj, entry, i, elements->get(i));
1413 }
1414 }
1415 } else if (js_obj->HasDictionaryElements()) {
1416 SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1417 int length = dictionary->Capacity();
1418 for (int i = 0; i < length; ++i) {
1419 Object* k = dictionary->KeyAt(i);
1420 if (dictionary->IsKey(k)) {
1421 ASSERT(k->IsNumber());
1422 uint32_t index = static_cast<uint32_t>(k->Number());
1423 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1424 }
1425 }
1426 }
1427}
1428
1429
1430void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1431 int length = js_obj->GetInternalFieldCount();
1432 for (int i = 0; i < length; ++i) {
1433 Object* o = js_obj->GetInternalField(i);
1434 SetInternalReference(
1435 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1436 }
1437}
1438
1439
// Returns the best available constructor name for an object.  Functions are
// always labeled "(closure)"; objects whose map-level constructor name is
// just "Object" get a second chance via their own "constructor" property.
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_string();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_string()) {
    // Look up an immediate "constructor" property, if it is a function,
    // return its name. This is for instances of binding objects, which
    // have prototype constructor type "Object".
    Object* constructor_prop = NULL;
    LookupResult result(heap->isolate());
    object->LocalLookupRealNamedProperty(heap->constructor_string(), &result);
    if (!result.IsFound()) return object->constructor_name();

    constructor_prop = result.GetLazyValue();
    if (constructor_prop->IsJSFunction()) {
      Object* maybe_name =
          JSFunction::cast(constructor_prop)->shared()->name();
      if (maybe_name->IsString()) {
        String* name = String::cast(maybe_name);
        // Only use the function's name if it is non-empty.
        if (name->length() > 0) return name;
      }
    }
  }
  return object->constructor_name();
}
1465
1466
1467HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1468 if (!obj->IsHeapObject()) return NULL;
1469 return filler_->FindOrAddEntry(obj, this);
1470}
1471
1472
// Collects root references in two passes (strong-only, then all) and later
// replays them into the snapshot, classifying each reference as strong or
// weak by diffing the two lists.  Synchronize() records where each root
// category (SyncTag) ends within the all-references list.
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  // Marks the end index (exclusive) of a root category in all_references_.
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  RootsReferencesExtractor()
      : collecting_all_references_(false),
        previous_reference_count_(0) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  // Switches from the strong-only pass to the all-references pass.
  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  // Replays the collected references into the snapshot.  A reference that
  // also appears (at the same relative position) in the strong list is
  // emitted as strong; everything else is emitted as weak.
  void FillReferences(V8HeapExplorer* explorer) {
    ASSERT(strong_references_.length() <= all_references_.length());
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    int strong_index = 0, all_index = 0, tags_index = 0;
    while (all_index < all_references_.length()) {
      if (strong_index < strong_references_.length() &&
          strong_references_[strong_index] == all_references_[all_index]) {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        false,
                                        all_references_[all_index++]);
        ++strong_index;
      } else {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        true,
                                        all_references_[all_index++]);
      }
      // Advance to the next root category once its range is exhausted.
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  // Called by the heap between root categories; records the category
  // boundary (only categories that actually added references are recorded).
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
};
1535
1536
// Main reference-extraction driver: walks every reachable heap object,
// extracts its references, then adds the GC-roots subgraph.  Returns false
// if the embedder's progress callback requested interruption.
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);

  filler_ = filler;
  bool interrupted = false;

  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  // Collect roots twice (strong-only, then all) so the extractor can tell
  // strong root references from weak ones.
  SetRootGcRootsReference();
  RootsReferencesExtractor extractor;
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);
  filler_ = NULL;
  return progress_->ProgressReport(true);
}
1567
1568
1569bool V8HeapExplorer::IsEssentialObject(Object* object) {
1570 return object->IsHeapObject()
1571 && !object->IsOddball()
1572 && object != heap_->empty_byte_array()
1573 && object != heap_->empty_fixed_array()
1574 && object != heap_->empty_descriptor_array()
1575 && object != heap_->fixed_array_map()
1576 && object != heap_->global_property_cell_map()
1577 && object != heap_->shared_function_info_map()
1578 && object != heap_->free_space_map()
1579 && object != heap_->one_pointer_filler_map()
1580 && object != heap_->two_pointer_filler_map();
1581}
1582
1583
svenpanne@chromium.org2bda5432013-03-15 12:39:50 +00001584void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
ulan@chromium.org2e04b582013-02-21 14:06:02 +00001585 int parent_entry,
1586 String* reference_name,
svenpanne@chromium.org2bda5432013-03-15 12:39:50 +00001587 Object* child_obj,
1588 int field_offset) {
ulan@chromium.org2e04b582013-02-21 14:06:02 +00001589 HeapEntry* child_entry = GetEntry(child_obj);
1590 if (child_entry != NULL) {
1591 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1592 parent_entry,
1593 collection_->names()->GetName(reference_name),
1594 child_entry);
svenpanne@chromium.org2bda5432013-03-15 12:39:50 +00001595 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
ulan@chromium.org2e04b582013-02-21 14:06:02 +00001596 }
1597}
1598
1599
1600void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1601 int parent_entry,
1602 const char* reference_name,
1603 Object* child_obj) {
1604 HeapEntry* child_entry = GetEntry(child_obj);
1605 if (child_entry != NULL) {
1606 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1607 parent_entry,
1608 reference_name,
1609 child_entry);
1610 }
1611}
1612
1613
1614void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1615 int parent_entry,
1616 int index,
1617 Object* child_obj) {
1618 HeapEntry* child_entry = GetEntry(child_obj);
1619 if (child_entry != NULL) {
1620 filler_->SetIndexedReference(HeapGraphEdge::kElement,
1621 parent_entry,
1622 index,
1623 child_entry);
1624 }
1625}
1626
1627
// Adds a named internal edge.  The field is always marked as visited (even
// for non-essential children), so the generic hidden-reference pass never
// reports it a second time.
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
1643
1644
// Overload that names the internal edge by a numeric index (rendered as a
// string).  Marks the field as visited regardless of whether an edge was
// emitted, mirroring the named overload above.
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          int index,
                                          Object* child_obj,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               collection_->names()->GetName(index),
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
1660
1661
1662void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1663 int parent_entry,
1664 int index,
1665 Object* child_obj) {
1666 HeapEntry* child_entry = GetEntry(child_obj);
1667 if (child_entry != NULL && IsEssentialObject(child_obj)) {
1668 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1669 parent_entry,
1670 index,
1671 child_entry);
1672 }
1673}
1674
1675
1676void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1677 int parent_entry,
1678 int index,
1679 Object* child_obj,
1680 int field_offset) {
1681 HeapEntry* child_entry = GetEntry(child_obj);
1682 if (child_entry != NULL) {
1683 filler_->SetIndexedReference(HeapGraphEdge::kWeak,
1684 parent_entry,
1685 index,
1686 child_entry);
1687 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1688 }
1689}
1690
1691
// Adds a named property edge.  Symbols and non-empty string names become
// kProperty edges; empty string names degrade to kInternal.  If
// |name_format_string| is given (e.g. "get-%s") and the name is a string,
// the edge name is formatted through it.  Marks the field as visited.
void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          Name* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    HeapGraphEdge::Type type =
        reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
            ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name = name_format_string != NULL && reference_name->IsString()
        ? collection_->names()->GetFormatted(
              name_format_string,
              *String::cast(reference_name)->ToCString(
                  DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)) :
        collection_->names()->GetName(reference_name);

    filler_->SetNamedReference(type,
                               parent_entry,
                               name,
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}
1717
1718
// Links the synthetic "(GC roots)" entry under the snapshot root.
void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}
1725
1726
// Links a user global object directly under the snapshot root via a
// shortcut edge.  The global must already have an entry.
void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  ASSERT(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}
1735
1736
// Links the per-category "(GC roots)" subentry for |tag| under the main
// GC-roots entry.
void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->gc_roots()->index(),
      snapshot_->gc_subroot(tag));
}
1743
1744
1745void V8HeapExplorer::SetGcSubrootReference(
1746 VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
1747 HeapEntry* child_entry = GetEntry(child_obj);
1748 if (child_entry != NULL) {
1749 const char* name = GetStrongGcSubrootName(child_obj);
1750 if (name != NULL) {
1751 filler_->SetNamedReference(
1752 HeapGraphEdge::kInternal,
1753 snapshot_->gc_subroot(tag)->index(),
1754 name,
1755 child_entry);
1756 } else {
1757 filler_->SetIndexedAutoIndexReference(
1758 is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
1759 snapshot_->gc_subroot(tag)->index(),
1760 child_entry);
1761 }
1762 }
1763}
1764
1765
// Returns the name of |object| if it is a well-known strong root (heap
// singleton, struct map, or internalized string), or NULL otherwise.  The
// name table is built lazily on first use from the root-list macros.
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define STRING_NAME(name, str) NAME_ENTRY(name)
    INTERNALIZED_STRING_LIST(STRING_NAME)
#undef STRING_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}
1783
1784
1785void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
1786 if (IsEssentialObject(obj)) {
1787 HeapEntry* entry = GetEntry(obj);
1788 if (entry->name()[0] == '\0') {
1789 entry->set_name(tag);
1790 }
1791 }
1792}
1793
1794
// Root visitor that collects handles to every JSGlobalObject reachable via
// a native context's global proxy.  Used by TagGlobalObjects below.
class GlobalObjectsEnumerator : public ObjectVisitor {
 public:
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsNativeContext()) {
        Context* context = Context::cast(*p);
        JSObject* proxy = context->global_proxy();
        if (proxy->IsJSGlobalProxy()) {
          // The actual global object is the proxy's map prototype.
          Object* global = proxy->map()->prototype();
          if (global->IsJSGlobalObject()) {
            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
          }
        }
      }
    }
  }
  int count() { return objects_.length(); }
  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }

 private:
  List<Handle<JSGlobalObject> > objects_;
};
1817
1818
// Modifies heap. Must not be run during heap traversal.
// Asks the embedder's name resolver for a label (e.g. a URL) for each
// global object and records it in objects_tags_.  Names are gathered first
// (resolver callbacks may allocate), then applied under AssertNoAllocation
// so the collected object pointers stay valid.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = Isolate::Current();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      HandleScope scope(isolate);
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }

  AssertNoAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}
1844
1845
// Visitor over global handles that forwards embedder references (handles
// carrying a wrapper class id) to the NativeObjectsExplorer.
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    // Only used with IterateAllRootsWithClassIds (see FillRetainedObjects),
    // which reports embedder references exclusively.
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;  // Not owned.
};
1860
1861
// Allocates snapshot entries for native objects described by a
// v8::RetainedObjectInfo; every entry produced by one instance shares the
// same HeapEntry type (kNative or kSynthetic, see NativeObjectsExplorer).
class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      entries_type_(entries_type) {
  }
  // |ptr| is actually a v8::RetainedObjectInfo*.
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  HeapEntry::Type entries_type_;
};
1877
1878
1879HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
1880 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
1881 intptr_t elements = info->GetElementCount();
1882 intptr_t size = info->GetSizeInBytes();
1883 const char* name = elements != -1
1884 ? collection_->names()->GetFormatted(
1885 "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
1886 : collection_->names()->GetCopy(info->GetLabel());
1887 return snapshot_->AddEntry(
1888 entries_type_,
1889 name,
1890 HeapObjectsMap::GenerateId(info),
1891 size != -1 ? static_cast<int>(size) : 0);
1892}
1893
1894
// Explores objects retained by the embedder (native groups, wrapper class
// ids) and adds them to |snapshot|.
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  // Group nodes and individual native objects get distinct entry types;
  // both allocators are owned here and deleted in the destructor.
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
1909
1910
// Releases everything acquired from the embedder: the RetainedObjectInfo
// keys of objects_by_info_ (ownership taken in FillRetainedObjects) and
// their object lists, the group infos in native_groups_, and the two
// allocators created in the constructor.
NativeObjectsExplorer::~NativeObjectsExplorer() {
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    info->Dispose();
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    delete objects;
  }
  for (HashMap::Entry* p = native_groups_.Start();
       p != NULL;
       p = native_groups_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
    info->Dispose();
  }
  delete synthetic_entries_allocator_;
  delete native_entries_allocator_;
}
1932
1933
// Estimates how many native entries will be added to the snapshot.
// Triggers the (one-time) embedder query as a side effect.
int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}
1938
1939
// Queries the embedder (at most once) for native objects retained via
// ObjectGroups and wrapper class ids, filling objects_by_info_ and
// in_groups_.
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = Isolate::Current();
  const GCType major_gc_type = kGCTypeMarkSweepCompact;
  // Record objects that are joined into ObjectGroups.
  // NOTE(review): the GC prologue/epilogue callbacks appear to be invoked so
  // embedders (re)build their object groups as for a full GC — confirm.
  isolate->heap()->CallGCPrologueCallbacks(major_gc_type);
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info_ == NULL) continue;  // Group carries no retained info.
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
    for (size_t j = 0; j < group->length_; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects_[j]);
      list->Add(obj);
      in_groups_.Insert(obj);
    }
    group->info_ = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGCEpilogueCallbacks(major_gc_type);
  // Record objects that are not in ObjectGroups, but have class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}
1965
// Adds "native" internal edges for embedder-declared implicit references:
// each group's parent object keeps all of the group's children alive.
void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = Isolate::Current();
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent_;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    ASSERT(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children_;
    for (size_t j = 0; j < group->length_; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          parent_entry,
          "native",
          child_entry);
    }
  }
  // The groups have been consumed; clear them on the global handles side.
  isolate->global_handles()->RemoveImplicitRefGroups();
}
1990
1991List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
1992 v8::RetainedObjectInfo* info) {
1993 HashMap::Entry* entry =
1994 objects_by_info_.Lookup(info, InfoHash(info), true);
1995 if (entry->value != NULL) {
1996 info->Dispose();
1997 } else {
1998 entry->value = new List<HeapObject*>(4);
1999 }
2000 return reinterpret_cast<List<HeapObject*>* >(entry->value);
2001}
2002
2003
// Adds all native objects and their references to the snapshot through
// |filler|. Always returns true (this explorer has no abort path).
bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  filler_ = filler;
  FillRetainedObjects();
  FillImplicitReferences();
  if (EstimateObjectsCount() > 0) {
    for (HashMap::Entry* p = objects_by_info_.Start();
         p != NULL;
         p = objects_by_info_.Next(p)) {
      v8::RetainedObjectInfo* info =
          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
      // Synthetic group node -> native info node edge.
      SetNativeRootReference(info);
      List<HeapObject*>* objects =
          reinterpret_cast<List<HeapObject*>* >(p->value);
      for (int i = 0; i < objects->length(); ++i) {
        // Bidirectional wrapper <-> native info edges.
        SetWrapperNativeReferences(objects->at(i), info);
      }
    }
    // Snapshot root -> group node edges.
    SetRootNativeRootsReference();
  }
  filler_ = NULL;  // The filler is only valid for the duration of this call.
  return true;
}
2027
2028
2029class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2030 public:
2031 explicit NativeGroupRetainedObjectInfo(const char* label)
2032 : disposed_(false),
2033 hash_(reinterpret_cast<intptr_t>(label)),
2034 label_(label) {
2035 }
2036
2037 virtual ~NativeGroupRetainedObjectInfo() {}
2038 virtual void Dispose() {
2039 CHECK(!disposed_);
2040 disposed_ = true;
2041 delete this;
2042 }
2043 virtual bool IsEquivalent(RetainedObjectInfo* other) {
2044 return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2045 }
2046 virtual intptr_t GetHash() { return hash_; }
2047 virtual const char* GetLabel() { return label_; }
2048
2049 private:
2050 bool disposed_;
2051 intptr_t hash_;
2052 const char* label_;
2053};
2054
2055
// Returns the synthetic group info for |label|, creating it on first use.
// Group infos are keyed by an interned copy of the label and disposed in
// the destructor.
NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = collection_->names()->GetCopy(label);
  uint32_t hash = StringHasher::HashSequentialString(
      label_copy,
      static_cast<int>(strlen(label_copy)),
      HEAP->HashSeed());
  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
                                                hash, true);
  if (entry->value == NULL) {
    entry->value = new NativeGroupRetainedObjectInfo(label);
  }
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}
2070
2071
// Creates (if needed) the entry for |info| and attaches it beneath the
// synthetic group node matching the info's group label.
void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  ASSERT(child_entry != NULL);
  NativeGroupRetainedObjectInfo* group_info =
      FindOrAddGroupInfo(info->GetGroupLabel());
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kInternal,
      group_entry->index(),
      child_entry);
}
2086
2087
// Links a JS wrapper object and its native counterpart in both directions:
// wrapper --"native"--> info (internal) and info --[n]--> wrapper (element).
void NativeObjectsExplorer::SetWrapperNativeReferences(
    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
  // The wrapper must already have an entry from the V8 heap pass.
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  ASSERT(wrapper_entry != NULL);
  HeapEntry* info_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  ASSERT(info_entry != NULL);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             wrapper_entry->index(),
                             "native",
                             info_entry);
  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
                                        info_entry->index(),
                                        wrapper_entry);
}
2103
2104
2105void NativeObjectsExplorer::SetRootNativeRootsReference() {
2106 for (HashMap::Entry* entry = native_groups_.Start();
2107 entry;
2108 entry = native_groups_.Next(entry)) {
2109 NativeGroupRetainedObjectInfo* group_info =
2110 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2111 HeapEntry* group_entry =
2112 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2113 ASSERT(group_entry != NULL);
2114 filler_->SetIndexedAutoIndexReference(
2115 HeapGraphEdge::kElement,
2116 snapshot_->root()->index(),
2117 group_entry);
2118 }
2119}
2120
2121
2122void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2123 if (in_groups_.Contains(*p)) return;
2124 Isolate* isolate = Isolate::Current();
2125 v8::RetainedObjectInfo* info =
2126 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2127 if (info == NULL) return;
2128 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2129}
2130
2131
2132class SnapshotFiller : public SnapshotFillerInterface {
2133 public:
2134 explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
2135 : snapshot_(snapshot),
2136 collection_(snapshot->collection()),
2137 entries_(entries) { }
2138 HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
2139 HeapEntry* entry = allocator->AllocateEntry(ptr);
2140 entries_->Pair(ptr, entry->index());
2141 return entry;
2142 }
2143 HeapEntry* FindEntry(HeapThing ptr) {
2144 int index = entries_->Map(ptr);
2145 return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
2146 }
2147 HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
2148 HeapEntry* entry = FindEntry(ptr);
2149 return entry != NULL ? entry : AddEntry(ptr, allocator);
2150 }
2151 void SetIndexedReference(HeapGraphEdge::Type type,
2152 int parent,
2153 int index,
2154 HeapEntry* child_entry) {
2155 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2156 parent_entry->SetIndexedReference(type, index, child_entry);
2157 }
2158 void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
2159 int parent,
2160 HeapEntry* child_entry) {
2161 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2162 int index = parent_entry->children_count() + 1;
2163 parent_entry->SetIndexedReference(type, index, child_entry);
2164 }
2165 void SetNamedReference(HeapGraphEdge::Type type,
2166 int parent,
2167 const char* reference_name,
2168 HeapEntry* child_entry) {
2169 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2170 parent_entry->SetNamedReference(type, reference_name, child_entry);
2171 }
2172 void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
2173 int parent,
2174 HeapEntry* child_entry) {
2175 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2176 int index = parent_entry->children_count() + 1;
2177 parent_entry->SetNamedReference(
2178 type,
2179 collection_->names()->GetName(index),
2180 child_entry);
2181 }
2182
2183 private:
2184 HeapSnapshot* snapshot_;
2185 HeapSnapshotsCollection* collection_;
2186 HeapEntriesMap* entries_;
2187};
2188
2189
// Drives snapshot generation over both the V8 heap and native/DOM objects.
// |control| may be NULL — ProgressReport then never calls out. |resolver|
// is forwarded to the V8 heap explorer for naming global objects
// (presumably may also be NULL — confirm with V8HeapExplorer).
HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}
2201
2202
// Top-level snapshot pipeline: tag global objects, force the heap into a
// stable iterable state, extract all references, then wire up child edges.
// Returns false if the embedder aborted via progress reporting.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = Isolate::Current()->heap();
  // Sanity check: the GCs above must have left every space precisely swept.
  CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
  CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
  CHECK(!debug_heap->code_space()->was_swept_conservatively());
  CHECK(!debug_heap->cell_space()->was_swept_conservatively());
  CHECK(!debug_heap->map_space()->was_swept_conservatively());
#endif

  // The following code uses heap iterators, so we want the heap to be
  // stable. It should follow TagGlobalObjects as that can allocate.
  AssertNoAllocation no_alloc;

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  SetProgressTotal(1);  // 1 pass.

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  // Report 100% completion; the embedder may still choose to abort here.
  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
2249
2250
2251void HeapSnapshotGenerator::ProgressStep() {
2252 ++progress_counter_;
2253}
2254
2255
2256bool HeapSnapshotGenerator::ProgressReport(bool force) {
2257 const int kProgressReportGranularity = 10000;
2258 if (control_ != NULL
2259 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2260 return
2261 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2262 v8::ActivityControl::kContinue;
2263 }
2264 return true;
2265}
2266
2267
// Computes the expected total number of progress steps. A no-op when no
// ActivityControl was supplied, since progress is never reported then.
void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
  if (control_ == NULL) return;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  progress_total_ = iterations_count * (
      v8_heap_explorer_.EstimateObjectsCount(&iterator) +
      dom_explorer_.EstimateObjectsCount());
  progress_counter_ = 0;
}
2276
2277
2278bool HeapSnapshotGenerator::FillReferences() {
2279 SnapshotFiller filler(snapshot_, &entries_);
2280 v8_heap_explorer_.AddRootEntries(&filler);
2281 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2282 && dom_explorer_.IterateAndExtractReferences(&filler);
2283}
2284
2285
// Maximum number of decimal characters (including sign) needed to print an
// integer of the given byte width; used to size formatting buffers below.
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;    // "-2147483648"
  static const int kUnsigned = 10;  // "4294967295"
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;    // "-9223372036854775808"
  static const int kUnsigned = 20;  // "18446744073709551615"
};
2295
2296
// Buffers output into fixed-size chunks and hands every full chunk to the
// embedder-provided v8::OutputStream. Once the stream requests an abort,
// all further output is silently dropped.
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    ASSERT(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    ASSERT(c != '\0');
    ASSERT(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    ASSERT(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    // Copy in pieces no larger than the remaining chunk capacity, flushing
    // whenever the chunk fills up.
    while (s < s_end) {
      int s_chunk_size = Min(
          chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      ASSERT(s_chunk_size > 0);
      memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  // Flushes any partial chunk and signals end-of-stream (unless aborted).
  void Finalize() {
    if (aborted_) return;
    ASSERT(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      // Fast path: the number definitely fits — print into the chunk.
      int result = OS::SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      ASSERT(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      // Slow path: format into a stack buffer, then copy via AddString,
      // which may span a chunk boundary.
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = OS::SNPrintF(buffer, format, n);
      USE(result);
      ASSERT(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    ASSERT(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;   // Not owned.
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;              // Next free position in chunk_.
  bool aborted_;
};
2380
2381
// Values serialized per edge: type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// Values serialized per node: type, name, id, self_size, edge_count
// (see SerializeNode and the "node_fields" meta description).
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
2386
// Serializes the snapshot as JSON into |stream|. Snapshots whose raw size
// exceeds the serializable limit are replaced by a small stub snapshot
// containing an explanatory message.
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  ASSERT(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);

  HeapSnapshot* original_snapshot = NULL;
  if (snapshot_->RawSnapshotSize() >=
      SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
    // The snapshot is too big. Serialize a fake snapshot.
    original_snapshot = snapshot_;
    snapshot_ = CreateFakeSnapshot();
  }

  SerializeImpl();

  delete writer_;
  writer_ = NULL;

  if (original_snapshot != NULL) {
    // Restore the real snapshot and drop the stub created above.
    delete snapshot_;
    snapshot_ = original_snapshot;
  }
}
2409
2410
// Builds a minimal stand-in snapshot with just a root node plus one string
// node explaining that the real snapshot exceeded the size limit.
HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
  HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
                                          HeapSnapshot::kFull,
                                          snapshot_->title(),
                                          snapshot_->uid());
  result->AddRootEntry();
  const char* text = snapshot_->collection()->names()->GetFormatted(
      "The snapshot is too big. "
      "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
      "Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
      SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
      (snapshot_->RawSnapshotSize() + MB - 1) / MB);  // Round up to whole MB.
  HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
  result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
  result->FillChildren();
  return result;
}
2428
2429
// Emits the complete JSON document:
// {"snapshot":{...},"nodes":[...],"edges":[...],"strings":[...]}.
// Returns early (truncating output) as soon as the writer reports an abort.
void HeapSnapshotJSONSerializer::SerializeImpl() {
  // The root entry must be first so consumers can start traversal at index 0.
  ASSERT(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}
2452
2453
2454int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2455 HashMap::Entry* cache_entry = strings_.Lookup(
2456 const_cast<char*>(s), ObjectHash(s), true);
2457 if (cache_entry->value == NULL) {
2458 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2459 }
2460 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2461}
2462
2463
2464static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
2465 int number_of_digits = 0;
2466 unsigned t = value;
2467 do {
2468 ++number_of_digits;
2469 } while (t /= 10);
2470
2471 buffer_pos += number_of_digits;
2472 int result = buffer_pos;
2473 do {
2474 int last_digit = value % 10;
2475 buffer[--buffer_pos] = '0' + last_digit;
2476 value /= 10;
2477 } while (value);
2478 return result;
2479}
2480
2481
// Writes one edge as "type,name_or_index,to_node\n". Element, hidden and
// weak edges carry a numeric index; all other edge types carry a string id.
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                               bool first_edge) {
  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
  static const int kBufferSize =
      MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
  EmbeddedVector<char, kBufferSize> buffer;
  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
      || edge->type() == HeapGraphEdge::kHidden
      || edge->type() == HeapGraphEdge::kWeak
      ? edge->index() : GetStringId(edge->name());
  int buffer_pos = 0;
  if (!first_edge) {
    // Separator from the previous edge record.
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}
2505
2506
2507void HeapSnapshotJSONSerializer::SerializeEdges() {
2508 List<HeapGraphEdge*>& edges = snapshot_->children();
2509 for (int i = 0; i < edges.length(); ++i) {
2510 ASSERT(i == 0 ||
2511 edges[i - 1]->from()->index() <= edges[i]->from()->index());
2512 SerializeEdge(edges[i], i == 0);
2513 if (writer_->aborted()) return;
2514 }
2515}
2516
2517
// Writes one node as "type,name_id,id,self_size,edge_count\n", matching
// the "node_fields" meta description in SerializeSnapshot.
void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
  // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
  static const int kBufferSize =
      5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 5 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  if (entry_index(entry) != 0) {
    // Separator from the previous node record.
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}
2541
2542
2543void HeapSnapshotJSONSerializer::SerializeNodes() {
2544 List<HeapEntry>& entries = snapshot_->entries();
2545 for (int i = 0; i < entries.length(); ++i) {
2546 SerializeNode(&entries[i]);
2547 if (writer_->aborted()) return;
2548 }
2549}
2550
2551
// Writes the "snapshot" header object: title, uid, a self-describing meta
// section (field names plus, positionally, the value type of each field),
// and the node/edge counts.
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"title\":\"");
  writer_->AddString(snapshot_->title());
  writer_->AddString("\"");
  writer_->AddString(",\"uid\":");
  writer_->AddNumber(snapshot_->uid());
  writer_->AddString(",\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
    JSON_S("node_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name") ","
        JSON_S("id") ","
        JSON_S("self_size") ","
        JSON_S("edge_count")) ","
    JSON_S("node_types") ":" JSON_A(
        JSON_A(
            JSON_S("hidden") ","
            JSON_S("array") ","
            JSON_S("string") ","
            JSON_S("object") ","
            JSON_S("code") ","
            JSON_S("closure") ","
            JSON_S("regexp") ","
            JSON_S("number") ","
            JSON_S("native") ","
            JSON_S("synthetic")) ","
        JSON_S("string") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number")) ","
    JSON_S("edge_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name_or_index") ","
        JSON_S("to_node")) ","
    JSON_S("edge_types") ":" JSON_A(
        JSON_A(
            JSON_S("context") ","
            JSON_S("element") ","
            JSON_S("property") ","
            JSON_S("internal") ","
            JSON_S("hidden") ","
            JSON_S("shortcut") ","
            JSON_S("weak")) ","
        JSON_S("string_or_number") ","
        JSON_S("node"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
}
2612
2613
2614static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2615 static const char hex_chars[] = "0123456789ABCDEF";
2616 w->AddString("\\u");
2617 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2618 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2619 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2620 w->AddCharacter(hex_chars[u & 0xf]);
2621}
2622
// Writes |s| (NUL-terminated, UTF-8) as a quoted JSON string literal,
// escaping control characters and converting non-ASCII sequences into
// \uXXXX escapes. Invalid UTF-8 bytes are replaced with '?'.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          // Printable ASCII passes through unchanged.
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          // A UTF-8 sequence is at most 4 bytes; stop early at the NUL.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            ASSERT(cursor != 0);
            // Skip the bytes the decoder consumed (minus the loop's ++s).
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
2671
2672
2673void HeapSnapshotJSONSerializer::SerializeStrings() {
2674 List<HashMap::Entry*> sorted_strings;
2675 SortHashMap(&strings_, &sorted_strings);
2676 writer_->AddString("\"<dummy>\"");
2677 for (int i = 0; i < sorted_strings.length(); ++i) {
2678 writer_->AddCharacter(',');
2679 SerializeString(
2680 reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
2681 if (writer_->aborted()) return;
2682 }
2683}
2684
2685
// Three-way comparison of two hash map entries by their value slot,
// interpreted as an unsigned integer. Returns -1, 0 or 1.
template<typename T>
inline static int SortUsingEntryValue(const T* x, const T* y) {
  uintptr_t x_value = reinterpret_cast<uintptr_t>((*x)->value);
  uintptr_t y_value = reinterpret_cast<uintptr_t>((*y)->value);
  if (x_value == y_value) return 0;
  return x_value < y_value ? -1 : 1;
}
2698
2699
2700void HeapSnapshotJSONSerializer::SortHashMap(
2701 HashMap* map, List<HashMap::Entry*>* sorted_entries) {
2702 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
2703 sorted_entries->Add(p);
2704 sorted_entries->Sort(SortUsingEntryValue);
2705}
2706
2707} } // namespace v8::internal