// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/profiler/heap-snapshot-generator.h"

#include "src/code-stubs.h"
#include "src/conversions.h"
#include "src/debug/debug.h"
#include "src/objects-body-descriptors.h"
#include "src/profiler/allocation-tracker.h"
#include "src/profiler/heap-profiler.h"
#include "src/profiler/heap-snapshot-generator-inl.h"
namespace v8 {
namespace internal {

// Named-edge constructor: the edge carries a string payload (|name_|).
// The origin entry index is packed into |bit_field_| together with the type;
// the destination is stored as an index until ReplaceToIndexWithEntry runs.
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
      to_index_(to),
      name_(name) {
  // Only these edge kinds are allowed to carry a name.
  DCHECK(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}
29
30
// Indexed-edge constructor: the edge carries a numeric payload (|index_|)
// instead of a name. Layout mirrors the named-edge constructor above.
HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
      to_index_(to),
      index_(index) {
  // Only element and hidden edges are identified by a number.
  DCHECK(type == kElement || type == kHidden);
}
37
38
// Resolve the stored destination index into a direct HeapEntry pointer.
// Called from HeapSnapshot::FillChildren once the entry list is complete
// (pointers into |entries()| are stable only at that point).
void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}
42
43
// Sentinel index meaning "no entry assigned yet" (see root_index_ et al.).
const int HeapEntry::kNoEntry = -1;
45
// A single node of the snapshot graph. Children are not attached here;
// children_index_ is assigned later via set_children_index() during
// HeapSnapshot::FillChildren.
HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      children_count_(0),
      children_index_(-1),  // Filled in by set_children_index().
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) { }
60
61
62void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
63 const char* name,
64 HeapEntry* entry) {
65 HeapGraphEdge edge(type, name, this->index(), entry->index());
66 snapshot_->edges().Add(edge);
67 ++children_count_;
68}
69
70
71void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
72 int index,
73 HeapEntry* entry) {
74 HeapGraphEdge edge(type, index, this->index(), entry->index());
75 snapshot_->edges().Add(edge);
76 ++children_count_;
77}
78
79
// Debug pretty-printer: dumps this entry followed (recursively) by its
// children, up to |max_depth| levels, indenting two extra columns per level.
// |prefix| and |edge_name| describe the edge through which this entry was
// reached.
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
  base::OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ", self_size(), id(), indent,
                  ' ', prefix, edge_name);
  if (type() != kString) {
    base::OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    // Strings are printed quoted and truncated, with newlines escaped so the
    // dump stays one line per entry.
    base::OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        base::OS::Print("%c", *c);
      else
        base::OS::Print("\\n");
      ++c;
    }
    base::OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    // Pick a one-character marker and a label for each edge kind; numbered
    // edge kinds render their index into |index| instead of using a name.
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}
139
140
141const char* HeapEntry::TypeAsString() {
142 switch (type()) {
143 case kHidden: return "/hidden/";
144 case kObject: return "/object/";
145 case kClosure: return "/closure/";
146 case kString: return "/string/";
147 case kCode: return "/code/";
148 case kArray: return "/array/";
149 case kRegExp: return "/regexp/";
150 case kHeapNumber: return "/number/";
151 case kNative: return "/native/";
152 case kSynthetic: return "/synthetic/";
153 case kConsString: return "/concatenated string/";
154 case kSlicedString: return "/sliced string/";
155 case kSymbol: return "/symbol/";
156 case kSimdValue: return "/simd/";
157 default: return "???";
158 }
159}
160
161
// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

// Expected sizes of HeapGraphEdge and HeapEntry per pointer width. These are
// verified by STATIC_ASSERTs in the HeapSnapshot constructor, so accidental
// growth of these very numerous objects fails the build.
template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 28;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 40;
};

}  // namespace
179
180
HeapSnapshot::HeapSnapshot(HeapProfiler* profiler)
    : profiler_(profiler),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  // Keep the per-edge/per-entry footprint pinned to the expected sizes for
  // the current pointer width (see SnapshotSizeConstants).
  STATIC_ASSERT(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_ASSERT(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  // Reference both specializations so the one not selected above is not
  // reported as unused.
  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
  // No GC subroot entries exist until AddSyntheticRootEntries runs.
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}
200
201
// Unregister this snapshot from its profiler and self-destruct. The object
// must be heap-allocated and must not be touched after this call.
void HeapSnapshot::Delete() {
  profiler_->RemoveSnapshot(this);
  delete this;
}
206
207
// Capture the highest object id assigned so far, so the snapshot can later
// distinguish objects that existed at capture time from newer ones.
void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}
211
212
213void HeapSnapshot::AddSyntheticRootEntries() {
214 AddRootEntry();
215 AddGcRootsEntry();
216 SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
217 for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
218 AddGcSubrootEntry(tag, id);
219 id += HeapObjectsMap::kObjectIdStep;
220 }
221 DCHECK(HeapObjectsMap::kFirstAvailableObjectId == id);
222}
223
224
225HeapEntry* HeapSnapshot::AddRootEntry() {
226 DCHECK(root_index_ == HeapEntry::kNoEntry);
227 DCHECK(entries_.is_empty()); // Root entry must be the first one.
228 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
229 "",
230 HeapObjectsMap::kInternalRootObjectId,
231 0,
232 0);
233 root_index_ = entry->index();
234 DCHECK(root_index_ == 0);
235 return entry;
236}
237
238
239HeapEntry* HeapSnapshot::AddGcRootsEntry() {
240 DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
241 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
242 "(GC roots)",
243 HeapObjectsMap::kGcRootsObjectId,
244 0,
245 0);
246 gc_roots_index_ = entry->index();
247 return entry;
248}
249
250
251HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
252 DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
253 DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
254 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
255 VisitorSynchronization::kTagNames[tag], id, 0, 0);
256 gc_subroot_indexes_[tag] = entry->index();
257 return entry;
258}
259
260
261HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
262 const char* name,
263 SnapshotObjectId id,
264 size_t size,
265 unsigned trace_node_id) {
266 HeapEntry entry(this, type, name, id, size, trace_node_id);
267 entries_.Add(entry);
268 return &entries_.last();
269}
270
271
272void HeapSnapshot::FillChildren() {
273 DCHECK(children().is_empty());
274 children().Allocate(edges().length());
275 int children_index = 0;
276 for (int i = 0; i < entries().length(); ++i) {
277 HeapEntry* entry = &entries()[i];
278 children_index = entry->set_children_index(children_index);
279 }
280 DCHECK(edges().length() == children_index);
281 for (int i = 0; i < edges().length(); ++i) {
282 HeapGraphEdge* edge = &edges()[i];
283 edge->ReplaceToIndexWithEntry(this);
284 edge->from()->add_child(edge);
285 }
286}
287
288
289class FindEntryById {
290 public:
291 explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
292 int operator()(HeapEntry* const* entry) {
293 if ((*entry)->id() == id_) return 0;
294 return (*entry)->id() < id_ ? -1 : 1;
295 }
296 private:
297 SnapshotObjectId id_;
298};
299
300
301HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
302 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
303 // Perform a binary search by id.
304 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
305 if (index == -1)
306 return NULL;
307 return entries_by_id->at(index);
308}
309
310
// Three-way comparison of two list slots (each holding a pointer-like T with
// an id() accessor) by ascending id; suitable for List<T>::Sort.
template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  const auto id1 = (*entry1_ptr)->id();
  const auto id2 = (*entry2_ptr)->id();
  if (id1 < id2) return -1;
  return id1 == id2 ? 0 : 1;
}
317
318
// Lazily build (and cache) the list of all entries sorted by ascending id.
// The cache is only valid while |entries_| is not modified afterwards.
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort<int (*)(HeapEntry* const*, HeapEntry* const*)>(
        SortByIds);
  }
  return &sorted_entries_;
}
330
331
// Debug helper: dump the whole snapshot tree starting at the root entry.
void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}
335
336
337size_t HeapSnapshot::RawSnapshotSize() const {
338 return
339 sizeof(*this) +
340 GetMemoryUsedByList(entries_) +
341 GetMemoryUsedByList(edges_) +
342 GetMemoryUsedByList(children_) +
343 GetMemoryUsedByList(sorted_entries_);
344}
345
346
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
// (Ids minted here start at 1 and advance by kObjectIdStep, while GenerateId
// shifts its result left by one bit, keeping the two ranges disjoint.)
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
// The first id handed out to real heap objects: one step past the block of
// ids reserved for the GC subroot entries.
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
357
358
// HashMap match predicate: two keys are equal iff they are the same address.
static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}
362
363
HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // The dummy element at index 0 works around an entries_map_ ambiguity:
  // a lookup cannot tell "entry present with NULL value" apart from "entry
  // just created with the default NULL value". Reserving slot 0 guarantees
  // that every real entries_map_ value (an index into entries_) is greater
  // than 0. MoveObject relies on this fact.
  entries_.Add(EntryInfo(0, NULL, 0));
}
377
378
// Called when the GC relocates an object from |from| to |to|. Keeps the
// address->EntryInfo mapping consistent and returns true iff |from| was a
// tracked object.
bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  DCHECK(to != NULL);
  DCHECK(from != NULL);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
  if (from_value == NULL) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      // A NULL addr marks the EntryInfo as dead; RemoveDeadEntries drops it.
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    HashMap::Entry* to_entry =
        entries_map_.LookupOrInsert(to, ComputePointerHash(to));
    if (to_entry->value != NULL) {
      // We found the existing entry with to address for an old object.
      // Without this operation we will have two EntryInfo's with the same
      // value in addr field. It is bad because later at RemoveDeadEntries
      // one of this entry will be removed with the corresponding entries_map_
      // entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    // Size of an object can change during its life, so to keep information
    // about the object in entries_ consistent, we have to adjust size when the
    // object is migrated.
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             from,
             to,
             entries_.at(from_entry_info_index).size,
             object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    // Re-point the |to| hash-map slot at the moved object's EntryInfo so its
    // snapshot id survives the move.
    to_entry->value = from_value;
  }
  return from_value != NULL;
}
425
426
// Refresh the recorded size for |addr| (creating an entry if none exists);
// the entry's "accessed" flag is cleared because of the |false| argument.
void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}
430
431
432SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
433 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr));
434 if (entry == NULL) return 0;
435 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
436 EntryInfo& entry_info = entries_.at(entry_index);
437 DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
438 return entry_info.id;
439}
440
441
// Return the id tracked for |addr|, minting a fresh one when the address is
// new. For an existing entry, the size is refreshed and the "accessed" flag
// set to |accessed|.
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry =
      entries_map_.LookupOrInsert(addr, ComputePointerHash(addr));
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             addr,
             entry_info.size,
             size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  // New address: record the index the EntryInfo is ABOUT to occupy (current
  // length), then append it. next_id_ advances by kObjectIdStep per object.
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
469
470
// Drop all recorded time intervals; object ids themselves are kept.
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}
474
475
// Resynchronize the map with the live heap: force a full GC so the heap is
// iterable, (re)register every live object, then drop entries whose objects
// were not seen (RemoveDeadEntries).
void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapObjectsMap::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    // Default |accessed| = true marks the entry live for RemoveDeadEntries.
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object      : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}
501
502
namespace {


// Pairs a heap object with the size the profiler map recorded for it;
// used only by FindUntrackedObjects' tracing output.
struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
    : obj(obj),
      expected_size(expected_size) {
  }

  HeapObject* obj;
  // Recorded size, or 0 for objects the map does not track.
  int expected_size;

  // True when the recorded size matches the object's actual size.
  bool IsValid() const { return expected_size == obj->Size(); }

  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object   : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
             expected_size,
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else {
      PrintF("Good object      : %p %6d. Next address is %p\n",
             obj->address(),
             expected_size,
             obj->address() + obj->Size());
    }
  }
};


// Orders HeapObjectInfo records by object address.
static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}


}  // namespace
547
548
// Consistency check: count heap objects the map does not know about (and,
// when tracing, objects whose recorded size is stale). With tracing off,
// a size mismatch is a hard CHECK failure instead.
// NOTE(review): in tracing mode a size-mismatched object bumps |untracked|
// both in the scan loop and again in the reporting loop below (via
// !IsValid()), so the tracing and non-tracing counts can differ — confirm
// whether that double count is intended.
int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects(1000);

  HeapIterator iterator(heap_);
  int untracked = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    HashMap::Entry* entry =
        entries_map_.Lookup(obj->address(), ComputePointerHash(obj->address()));
    if (entry == NULL) {
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                         static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size))
          ++untracked;
      } else {
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        ++untracked;
        // Print the object just before a bad one too, with a note about how
        // many in-between objects were skipped.
        if (last_printed_object != i - 1) {
          if (i > 0) {
            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
            heap_objects[i - 1].Print();
          }
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        // Also print the first good object after a bad one, for context.
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}
612
613
// Stream per-time-interval heap statistics updates to |stream|.
// Entries are ordered by ascending id, so one linear sweep of |entries_|
// partitions them into the intervals recorded in |time_intervals_|. Only
// intervals whose count or size changed are emitted, in chunks of the
// stream's preferred size. Returns the last assigned id (also on abort).
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
                                                      int64_t* timestamp_us) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  DCHECK(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    // Sum the sizes of all entries whose id falls inside this interval.
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      // Note: the assignments below update the cached interval stats while
      // building the update record.
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  DCHECK(entry_info == end_entry_info);
  // Flush whatever is left in the buffer.
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  if (timestamp_us) {
    *timestamp_us = (time_intervals_.last().timestamp -
                     time_intervals_[0].timestamp).InMicroseconds();
  }
  return last_assigned_id();
}
663
664
// Compact |entries_|, dropping every entry that was not marked "accessed"
// since the last sweep, and fix up the hash-map values (indices into
// entries_) for the entries that survive. Slot 0 is the reserved dummy.
void HeapObjectsMap::RemoveDeadEntries() {
  DCHECK(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      // Live entry: slide it down into the first free slot and clear the
      // mark for the next sweep.
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr));
      DCHECK(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      // Dead entry: drop its hash-map slot too (addr may already be NULL if
      // MoveObject marked it dead earlier).
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}
693
694
// Compute a stable id for an embedder-provided object from its hash, label,
// and (when available) element count. The final left shift clears the low
// bit, keeping these ids disjoint from the ones minted in FindOrAddEntry
// (see the comment above kInternalRootObjectId).
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           heap_->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
707
708
709size_t HeapObjectsMap::GetUsedMemorySize() const {
710 return
711 sizeof(*this) +
712 sizeof(HashMap::Entry) * entries_map_.capacity() +
713 GetMemoryUsedByList(entries_) +
714 GetMemoryUsedByList(time_intervals_);
715}
716
717
// Keys are raw HeapThing pointers compared by identity (PointersMatch).
HeapEntriesMap::HeapEntriesMap()
    : entries_(HashMap::PointersMatch) {
}
721
722
723int HeapEntriesMap::Map(HeapThing thing) {
724 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing));
725 if (cache_entry == NULL) return HeapEntry::kNoEntry;
726 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
727}
728
729
730void HeapEntriesMap::Pair(HeapThing thing, int entry) {
731 HashMap::Entry* cache_entry = entries_.LookupOrInsert(thing, Hash(thing));
732 DCHECK(cache_entry->value == NULL);
733 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
734}
735
736
// Set of heap objects (optionally tagged), keyed by object pointer identity.
HeapObjectsSet::HeapObjectsSet()
    : entries_(HashMap::PointersMatch) {
}
740
741
// Remove all objects (and their tags) from the set.
void HeapObjectsSet::Clear() {
  entries_.Clear();
}
745
746
747bool HeapObjectsSet::Contains(Object* obj) {
748 if (!obj->IsHeapObject()) return false;
749 HeapObject* object = HeapObject::cast(obj);
750 return entries_.Lookup(object, HeapEntriesMap::Hash(object)) != NULL;
751}
752
753
754void HeapObjectsSet::Insert(Object* obj) {
755 if (!obj->IsHeapObject()) return;
756 HeapObject* object = HeapObject::cast(obj);
757 entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
758}
759
760
761const char* HeapObjectsSet::GetTag(Object* obj) {
762 HeapObject* object = HeapObject::cast(obj);
763 HashMap::Entry* cache_entry =
764 entries_.Lookup(object, HeapEntriesMap::Hash(object));
765 return cache_entry != NULL
766 ? reinterpret_cast<const char*>(cache_entry->value)
767 : NULL;
768}
769
770
771void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
772 if (!obj->IsHeapObject()) return;
773 HeapObject* object = HeapObject::cast(obj);
774 HashMap::Entry* cache_entry =
775 entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
776 cache_entry->value = const_cast<char*>(tag);
777}
778
779
V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      filler_(NULL),  // Attached later; NULL until snapshot filling starts.
      global_object_name_resolver_(resolver) {
}
792
793
// Nothing to release explicitly; members are non-owning or self-cleaning.
V8HeapExplorer::~V8HeapExplorer() {
}
796
797
// HeapEntriesAllocator interface adapter: |ptr| is really a HeapObject*.
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}
801
802
// Create a snapshot entry for |object|, choosing entry type and display name
// from the object's runtime type. Branch order matters: more specific checks
// precede more general ones (e.g. NativeContext before Context, JSRegExp
// before the generic JSObject case).
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = names_->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSBoundFunction()) {
    return AddEntry(object, HeapEntry::kClosure, "native_bind");
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      // Global objects may have been tagged via the embedder API; append
      // the tag to the constructor name.
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = names_->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String* string = String::cast(object);
    // Cons and sliced strings get their own entry types; everything else is
    // shown with its contents as the name.
    if (string->IsConsString())
      return AddEntry(object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(object,
                    HeapEntry::kString,
                    names_->GetName(String::cast(object)));
  } else if (object->IsSymbol()) {
    if (Symbol::cast(object)->is_private())
      return AddEntry(object, HeapEntry::kHidden, "private symbol");
    else
      return AddEntry(object, HeapEntry::kSymbol, "symbol");
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() || object->IsFixedDoubleArray() ||
             object->IsByteArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  } else if (object->IsSimd128Value()) {
    return AddEntry(object, HeapEntry::kSimdValue, "simd");
  }
  // Everything else is internal machinery; name it via GetSystemEntryName.
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
872
873
// Convenience overload: derive address and size from the object itself.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  return AddEntry(object->address(), type, name, object->Size());
}
879
880
// Core entry factory: obtain (or mint) the snapshot object id for |address|
// and, when allocation tracking is active, attach the allocation trace node
// id for that address.
HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}
895
896
// Bridges the explorers and the snapshot: creates entries on demand via a
// HeapEntriesAllocator, remembers the HeapThing->entry-index pairing in
// |entries_|, and forwards reference registration to the parent HeapEntry.
class SnapshotFiller {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        names_(snapshot->profiler()->names()),
        entries_(entries) { }
  // Allocate a new entry for |ptr| and record the pairing.
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  // Return the entry already created for |ptr|, or NULL.
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  // Auto-indexed variant: indices are 1-based (children_count() + 1).
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  // Auto-named variant: the 1-based child position is used as the name.
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type,
        names_->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapEntriesMap* entries_;
};
953
954
// Fallback display name for internal objects that land in the generic
// "hidden" bucket: maps instance types to "system / ..." labels.
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      // Maps are named after the instance type of the objects they describe.
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case CELL_TYPE: return "system / Cell";
    case PROPERTY_CELL_TYPE: return "system / PropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}
976
977
978int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
979 int objects_count = 0;
980 for (HeapObject* obj = iterator->next();
981 obj != NULL;
982 obj = iterator->next()) {
983 objects_count++;
984 }
985 return objects_count;
986}
987
988
// ObjectVisitor that reports every pointer field of |parent_obj| not already
// claimed by a typed extractor as a hidden (indexed) reference.
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator, HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_start_(HeapObject::RawField(parent_obj_, 0)),
        parent_end_(HeapObject::RawField(parent_obj_, parent_obj_->Size())),
        parent_(parent),
        next_index_(0) {}
  void VisitCodeEntry(Address entry_address) override {
    Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
    generator_->SetInternalReference(parent_obj_, parent_, "code", code);
    generator_->TagCodeObject(code);
  }
  void VisitPointers(Object** start, Object** end) override {
    for (Object** p = start; p < end; p++) {
      intptr_t index =
          static_cast<intptr_t>(p - HeapObject::RawField(parent_obj_, 0));
      // next_index_ advances for every visited slot, so hidden-edge indices
      // stay aligned with field positions even for skipped fields.
      ++next_index_;
      // |p| could be outside of the object, e.g., while visiting RelocInfo of
      // code objects.
      if (p >= parent_start_ && p < parent_end_ && generator_->marks_[index]) {
        // NOTE(review): marks_ appears to flag fields already reported by a
        // typed extractor — skip once and clear the mark; confirm against
        // V8HeapExplorer::MarkVisitedField.
        generator_->marks_[index] = false;
        continue;
      }
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }

 private:
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  // Raw field bounds of the parent object, used to reject out-of-object slots.
  Object** parent_start_;
  Object** parent_end_;
  int parent_;
  int next_index_;
};
1027
1028
// First extraction pass: handles every object kind except FixedArrays,
// which are deferred to pass 2.  Returns false iff |obj| was skipped.
bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
  if (obj->IsFixedArray()) return false;  // FixedArrays are processed on pass 2

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSArrayBuffer()) {
    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
  } else if (obj->IsJSObject()) {
    // Collections have their backing table extracted first, then fall
    // through to the generic JSObject extraction below.
    if (obj->IsJSWeakSet()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
    } else if (obj->IsJSWeakMap()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
    } else if (obj->IsJSSet()) {
      ExtractJSCollectionReferences(entry, JSSet::cast(obj));
    } else if (obj->IsJSMap()) {
      ExtractJSCollectionReferences(entry, JSMap::cast(obj));
    }
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsSymbol()) {
    ExtractSymbolReferences(entry, Symbol::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorInfo()) {
    ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  return true;
}
1076
1077
1078bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
1079 if (!obj->IsFixedArray()) return false;
1080
1081 if (obj->IsContext()) {
1082 ExtractContextReferences(entry, Context::cast(obj));
1083 } else {
1084 ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
1085 }
1086 return true;
1087}
1088
1089
1090void V8HeapExplorer::ExtractJSGlobalProxyReferences(
1091 int entry, JSGlobalProxy* proxy) {
1092 SetInternalReference(proxy, entry,
1093 "native_context", proxy->native_context(),
1094 JSGlobalProxy::kNativeContextOffset);
1095}
1096
1097
// Extracts references common to all JSObjects (properties, elements,
// internal fields, __proto__), plus the type-specific fields of bound
// functions, functions, global objects and array buffer views, and finally
// the properties/elements backing stores themselves.
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  PrototypeIterator iter(heap_->isolate(), js_obj);
  SetPropertyReference(obj, entry, heap_->proto_string(), iter.GetCurrent());
  if (obj->IsJSBoundFunction()) {
    JSBoundFunction* js_fun = JSBoundFunction::cast(obj);
    TagObject(js_fun->bound_arguments(), "(bound arguments)");
    SetInternalReference(js_fun, entry, "bindings", js_fun->bound_arguments(),
                         JSBoundFunction::kBoundArgumentsOffset);
    SetNativeBindReference(js_obj, entry, "bound_this", js_fun->bound_this());
    SetNativeBindReference(js_obj, entry, "bound_function",
                           js_fun->bound_target_function());
    // Each bound argument gets its own named native-bind edge.
    FixedArray* bindings = js_fun->bound_arguments();
    for (int i = 0; i < bindings->length(); i++) {
      const char* reference_name = names_->GetFormatted("bound_argument_%d", i);
      SetNativeBindReference(js_obj, entry, reference_name, bindings->get(i));
    }
  } else if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    // prototype_or_initial_map holds either the function's prototype (a
    // plain object) or, once instances exist, the initial map.
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    TagObject(js_fun->literals(), "(function literals)");
    SetInternalReference(js_fun, entry, "literals", js_fun->literals(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
    // Ensure no new weak references appeared in JSFunction.
    STATIC_ASSERT(JSFunction::kCodeEntryOffset ==
                  JSFunction::kNonWeakFieldsEndOffset);
    STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                  JSFunction::kNextFunctionLinkOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
                  == JSFunction::kSize);
  } else if (obj->IsJSGlobalObject()) {
    JSGlobalObject* global_obj = JSGlobalObject::cast(obj);
    SetInternalReference(global_obj, entry, "native_context",
                         global_obj->native_context(),
                         JSGlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry, "global_proxy",
                         global_obj->global_proxy(),
                         JSGlobalObject::kGlobalProxyOffset);
    // Both pointer fields of JSGlobalObject are covered above.
    STATIC_ASSERT(JSGlobalObject::kSize - JSObject::kHeaderSize ==
                  2 * kPointerSize);
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer(),
                         JSArrayBufferView::kBufferOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}
1184
1185
1186void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1187 if (string->IsConsString()) {
1188 ConsString* cs = ConsString::cast(string);
1189 SetInternalReference(cs, entry, "first", cs->first(),
1190 ConsString::kFirstOffset);
1191 SetInternalReference(cs, entry, "second", cs->second(),
1192 ConsString::kSecondOffset);
1193 } else if (string->IsSlicedString()) {
1194 SlicedString* ss = SlicedString::cast(string);
1195 SetInternalReference(ss, entry, "parent", ss->parent(),
1196 SlicedString::kParentOffset);
1197 }
1198}
1199
1200
1201void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
1202 SetInternalReference(symbol, entry,
1203 "name", symbol->name(),
1204 Symbol::kNameOffset);
1205}
1206
1207
1208void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
1209 JSCollection* collection) {
1210 SetInternalReference(collection, entry, "table", collection->table(),
1211 JSCollection::kTableOffset);
1212}
1213
1214
1215void V8HeapExplorer::ExtractJSWeakCollectionReferences(
1216 int entry, JSWeakCollection* collection) {
1217 MarkAsWeakContainer(collection->table());
1218 SetInternalReference(collection, entry,
1219 "table", collection->table(),
1220 JSWeakCollection::kTableOffset);
1221}
1222
1223
// Extracts Context slot references: named context-allocated locals (for
// declaration contexts), the fixed header slots, and -- for native
// contexts -- the native-context fields.  Slots at or past FIRST_WEAK_SLOT
// (except the map cache) are reported as weak references.
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    // A named function expression also stores itself in a context slot.
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

// Emits an internal reference for strong slots (below FIRST_WEAK_SLOT, or
// the map cache) and a weak reference for everything else.
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT || \
      Context::index == Context::MAP_CACHE_INDEX) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
                         FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
                     FixedArray::OffsetOfElementAt(Context::index)); \
  }
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, HeapObject, extension);
  EXTRACT_CONTEXT_FIELD(NATIVE_CONTEXT_INDEX, Context, native_context);
  if (context->IsNativeContext()) {
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD)
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
    EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
    EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
#undef EXTRACT_CONTEXT_FIELD
    // Verify that exactly the four lists above make up the weak tail of a
    // native context.
    STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
                  Context::FIRST_WEAK_SLOT);
    STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
                  Context::NATIVE_CONTEXT_SLOTS);
    STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 4 ==
                  Context::NATIVE_CONTEXT_SLOTS);
  }
}
1277
1278
1279void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1280 Object* raw_transitions_or_prototype_info = map->raw_transitions();
1281 if (TransitionArray::IsFullTransitionArray(
1282 raw_transitions_or_prototype_info)) {
1283 TransitionArray* transitions =
1284 TransitionArray::cast(raw_transitions_or_prototype_info);
1285 int transitions_entry = GetEntry(transitions)->index();
1286
1287 if (map->CanTransition()) {
1288 if (transitions->HasPrototypeTransitions()) {
1289 FixedArray* prototype_transitions =
1290 transitions->GetPrototypeTransitions();
1291 MarkAsWeakContainer(prototype_transitions);
1292 TagObject(prototype_transitions, "(prototype transitions");
1293 SetInternalReference(transitions, transitions_entry,
1294 "prototype_transitions", prototype_transitions);
1295 }
1296 // TODO(alph): transitions keys are strong links.
1297 MarkAsWeakContainer(transitions);
1298 }
1299
1300 TagObject(transitions, "(transition array)");
1301 SetInternalReference(map, entry, "transitions", transitions,
1302 Map::kTransitionsOrPrototypeInfoOffset);
1303 } else if (TransitionArray::IsSimpleTransition(
1304 raw_transitions_or_prototype_info)) {
1305 TagObject(raw_transitions_or_prototype_info, "(transition)");
1306 SetInternalReference(map, entry, "transition",
1307 raw_transitions_or_prototype_info,
1308 Map::kTransitionsOrPrototypeInfoOffset);
1309 } else if (map->is_prototype_map()) {
1310 TagObject(raw_transitions_or_prototype_info, "prototype_info");
1311 SetInternalReference(map, entry, "prototype_info",
1312 raw_transitions_or_prototype_info,
1313 Map::kTransitionsOrPrototypeInfoOffset);
1314 }
1315 DescriptorArray* descriptors = map->instance_descriptors();
1316 TagObject(descriptors, "(map descriptors)");
1317 SetInternalReference(map, entry,
1318 "descriptors", descriptors,
1319 Map::kDescriptorsOffset);
1320
1321 MarkAsWeakContainer(map->code_cache());
1322 SetInternalReference(map, entry,
1323 "code_cache", map->code_cache(),
1324 Map::kCodeCacheOffset);
1325 SetInternalReference(map, entry,
1326 "prototype", map->prototype(), Map::kPrototypeOffset);
1327 Object* constructor_or_backpointer = map->constructor_or_backpointer();
1328 if (constructor_or_backpointer->IsMap()) {
1329 TagObject(constructor_or_backpointer, "(back pointer)");
1330 SetInternalReference(map, entry, "back_pointer", constructor_or_backpointer,
1331 Map::kConstructorOrBackPointerOffset);
1332 } else {
1333 SetInternalReference(map, entry, "constructor", constructor_or_backpointer,
1334 Map::kConstructorOrBackPointerOffset);
1335 }
1336 TagObject(map->dependent_code(), "(dependent code)");
1337 MarkAsWeakContainer(map->dependent_code());
1338 SetInternalReference(map, entry,
1339 "dependent_code", map->dependent_code(),
1340 Map::kDependentCodeOffset);
1341}
1342
1343
// Extracts references from a SharedFunctionInfo and tags its code objects
// with readable names derived from the function's debug name.
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  String* shared_name = shared->DebugName();
  const char* name = NULL;
  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
    name = names_->GetName(shared_name);
    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
  } else {
    // Anonymous function: fall back to the code kind for the tag.
    TagObject(shared->code(), names_->GetFormatted("(%s code)",
        Code::Kind2String(shared->code()->kind())));
  }

  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  const char* construct_stub_name = name ?
      names_->GetFormatted("(construct stub code for %s)", name) :
      "(construct stub code)";
  TagObject(shared->construct_stub(), construct_stub_name);
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "optimized_code_map", shared->optimized_code_map(),
                       SharedFunctionInfo::kOptimizedCodeMapOffset);
  SetInternalReference(obj, entry,
                       "feedback_vector", shared->feedback_vector(),
                       SharedFunctionInfo::kFeedbackVectorOffset);
}
1396
1397
1398void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1399 HeapObject* obj = script;
1400 SetInternalReference(obj, entry,
1401 "source", script->source(),
1402 Script::kSourceOffset);
1403 SetInternalReference(obj, entry,
1404 "name", script->name(),
1405 Script::kNameOffset);
1406 SetInternalReference(obj, entry,
1407 "context_data", script->context_data(),
1408 Script::kContextOffset);
1409 TagObject(script->line_ends(), "(script line ends)");
1410 SetInternalReference(obj, entry,
1411 "line_ends", script->line_ends(),
1412 Script::kLineEndsOffset);
1413}
1414
1415
// Extracts edges from an AccessorInfo: name, expected receiver type and the
// getter/setter/data fields.
void V8HeapExplorer::ExtractAccessorInfoReferences(
    int entry, AccessorInfo* accessor_info) {
  SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
                       AccessorInfo::kNameOffset);
  SetInternalReference(accessor_info, entry, "expected_receiver_type",
                       accessor_info->expected_receiver_type(),
                       AccessorInfo::kExpectedReceiverTypeOffset);
  // NOTE(review): |accessor_info| is already typed AccessorInfo*, so this
  // guard looks always-true -- presumably residue of the old
  // ExecutableAccessorInfo split; confirm before simplifying.
  if (accessor_info->IsAccessorInfo()) {
    AccessorInfo* executable_accessor_info = AccessorInfo::cast(accessor_info);
    SetInternalReference(executable_accessor_info, entry, "getter",
                         executable_accessor_info->getter(),
                         AccessorInfo::kGetterOffset);
    SetInternalReference(executable_accessor_info, entry, "setter",
                         executable_accessor_info->setter(),
                         AccessorInfo::kSetterOffset);
    SetInternalReference(executable_accessor_info, entry, "data",
                         executable_accessor_info->data(),
                         AccessorInfo::kDataOffset);
  }
}
1436
1437
1438void V8HeapExplorer::ExtractAccessorPairReferences(
1439 int entry, AccessorPair* accessors) {
1440 SetInternalReference(accessors, entry, "getter", accessors->getter(),
1441 AccessorPair::kGetterOffset);
1442 SetInternalReference(accessors, entry, "setter", accessors->setter(),
1443 AccessorPair::kSetterOffset);
1444}
1445
1446
1447void V8HeapExplorer::ExtractCodeCacheReferences(
1448 int entry, CodeCache* code_cache) {
1449 TagObject(code_cache->default_cache(), "(default code cache)");
1450 SetInternalReference(code_cache, entry,
1451 "default_cache", code_cache->default_cache(),
1452 CodeCache::kDefaultCacheOffset);
1453 TagObject(code_cache->normal_type_cache(), "(code type cache)");
1454 SetInternalReference(code_cache, entry,
1455 "type_cache", code_cache->normal_type_cache(),
1456 CodeCache::kNormalTypeCacheOffset);
1457}
1458
1459
1460void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
1461 TagObject(code, names_->GetFormatted("(%s builtin)", name));
1462}
1463
1464
1465void V8HeapExplorer::TagCodeObject(Code* code) {
1466 if (code->kind() == Code::STUB) {
1467 TagObject(code, names_->GetFormatted(
1468 "(%s code)",
1469 CodeStub::MajorName(CodeStub::GetMajorKey(code))));
1470 }
1471}
1472
1473
// Extracts edges from a Code object to its metadata arrays.  Kind-specific
// fields (type feedback info, next_code_link) are only emitted for the
// kinds that use them.
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagCodeObject(code);
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    // The code list link is weak: it must not keep optimized code alive.
    SetWeakReference(code, entry,
                     "next_code_link", code->next_code_link(),
                     Code::kNextCodeLinkOffset);
  }
}
1501
1502
1503void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
1504 SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
1505}
1506
1507
1508void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1509 SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
1510}
1511
1512
1513void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
1514 PropertyCell* cell) {
1515 SetInternalReference(cell, entry, "value", cell->value(),
1516 PropertyCell::kValueOffset);
1517 MarkAsWeakContainer(cell->dependent_code());
1518 SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
1519 PropertyCell::kDependentCodeOffset);
1520}
1521
1522
// Extracts AllocationSite edges; dependent_code is a weak container, so
// its elements are emitted as weak edges in pass 2.
void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
                                                     AllocationSite* site) {
  SetInternalReference(site, entry, "transition_info", site->transition_info(),
                       AllocationSite::kTransitionInfoOffset);
  SetInternalReference(site, entry, "nested_site", site->nested_site(),
                       AllocationSite::kNestedSiteOffset);
  MarkAsWeakContainer(site->dependent_code());
  SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
                       AllocationSite::kDependentCodeOffset);
  // Do not visit weak_next as it is not visited by the StaticVisitor,
  // and we're not very interested in weak_next field here.
  STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
                AllocationSite::kPointerFieldsEndOffset);
}
1537
1538
1539class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1540 public:
1541 JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1542 : size_(size)
1543 , explorer_(explorer) {
1544 }
1545 virtual HeapEntry* AllocateEntry(HeapThing ptr) {
1546 return explorer_->AddEntry(
1547 static_cast<Address>(ptr),
1548 HeapEntry::kNative, "system / JSArrayBufferData", size_);
1549 }
1550 private:
1551 size_t size_;
1552 V8HeapExplorer* explorer_;
1553};
1554
1555
1556void V8HeapExplorer::ExtractJSArrayBufferReferences(
1557 int entry, JSArrayBuffer* buffer) {
1558 // Setup a reference to a native memory backing_store object.
1559 if (!buffer->backing_store())
1560 return;
1561 size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
1562 JSArrayBufferDataEntryAllocator allocator(data_size, this);
1563 HeapEntry* data_entry =
1564 filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
1565 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1566 entry, "backing_store", data_entry);
1567}
1568
1569
1570void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
1571 bool is_weak = weak_containers_.Contains(array);
1572 for (int i = 0, l = array->length(); i < l; ++i) {
1573 if (is_weak) {
1574 SetWeakReference(array, entry,
1575 i, array->get(i), array->OffsetOfElementAt(i));
1576 } else {
1577 SetInternalReference(array, entry,
1578 i, array->get(i), array->OffsetOfElementAt(i));
1579 }
1580 }
1581}
1582
1583
// Extracts named own-property references.  Handles the three storage
// layouts: fast properties (descriptor array), global objects (global
// dictionary of property cells) and dictionary-mode objects.  The hidden
// properties backing store is reported as an internal reference instead of
// a property.
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      PropertyDetails details = descs->GetDetails(i);
      switch (details.location()) {
        case kField: {
          // In-place smi/double fields reference no heap object.
          Representation r = details.representation();
          if (r.IsSmi() || r.IsDouble()) break;

          Name* k = descs->GetKey(i);
          FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i);
          Object* value = js_obj->RawFastPropertyAt(field_index);
          // -1 signals an out-of-object (properties array) field.
          int field_offset =
              field_index.is_inobject() ? field_index.offset() : -1;

          if (k != heap_->hidden_properties_symbol()) {
            SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, k,
                                               value, NULL, field_offset);
          } else {
            TagObject(value, "(hidden properties)");
            SetInternalReference(js_obj, entry, "hidden_properties", value,
                                 field_offset);
          }
          break;
        }
        case kDescriptor:
          SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                             descs->GetKey(i),
                                             descs->GetValue(i));
          break;
      }
    }
  } else if (js_obj->IsJSGlobalObject()) {
    // We assume that global objects can only have slow properties.
    GlobalDictionary* dictionary = js_obj->global_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        DCHECK(dictionary->ValueAt(i)->IsPropertyCell());
        PropertyCell* cell = PropertyCell::cast(dictionary->ValueAt(i));
        Object* value = cell->value();
        if (k == heap_->hidden_properties_symbol()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
          continue;
        }
        PropertyDetails details = cell->property_details();
        SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                           Name::cast(k), value);
      }
    }
  } else {
    // Dictionary-mode (slow) properties.
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* value = dictionary->ValueAt(i);
        if (k == heap_->hidden_properties_symbol()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
          continue;
        }
        PropertyDetails details = dictionary->DetailsAt(i);
        SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                           Name::cast(k), value);
      }
    }
  }
}
1657
1658
1659void V8HeapExplorer::ExtractAccessorPairProperty(JSObject* js_obj, int entry,
1660 Name* key,
1661 Object* callback_obj,
1662 int field_offset) {
1663 if (!callback_obj->IsAccessorPair()) return;
1664 AccessorPair* accessors = AccessorPair::cast(callback_obj);
1665 SetPropertyReference(js_obj, entry, key, accessors, NULL, field_offset);
1666 Object* getter = accessors->getter();
1667 if (!getter->IsOddball()) {
1668 SetPropertyReference(js_obj, entry, key, getter, "get %s");
1669 }
1670 Object* setter = accessors->setter();
1671 if (!setter->IsOddball()) {
1672 SetPropertyReference(js_obj, entry, key, setter, "set %s");
1673 }
1674}
1675
1676
1677void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1678 if (js_obj->HasFastObjectElements()) {
1679 FixedArray* elements = FixedArray::cast(js_obj->elements());
1680 int length = js_obj->IsJSArray() ?
1681 Smi::cast(JSArray::cast(js_obj)->length())->value() :
1682 elements->length();
1683 for (int i = 0; i < length; ++i) {
1684 if (!elements->get(i)->IsTheHole()) {
1685 SetElementReference(js_obj, entry, i, elements->get(i));
1686 }
1687 }
1688 } else if (js_obj->HasDictionaryElements()) {
1689 SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1690 int length = dictionary->Capacity();
1691 for (int i = 0; i < length; ++i) {
1692 Object* k = dictionary->KeyAt(i);
1693 if (dictionary->IsKey(k)) {
1694 DCHECK(k->IsNumber());
1695 uint32_t index = static_cast<uint32_t>(k->Number());
1696 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1697 }
1698 }
1699 }
1700}
1701
1702
1703void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1704 int length = js_obj->GetInternalFieldCount();
1705 for (int i = 0; i < length; ++i) {
1706 Object* o = js_obj->GetInternalField(i);
1707 SetInternalReference(
1708 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1709 }
1710}
1711
1712
1713String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1714 Isolate* isolate = object->GetIsolate();
1715 if (object->IsJSFunction()) return isolate->heap()->closure_string();
1716 DisallowHeapAllocation no_gc;
1717 HandleScope scope(isolate);
1718 return *JSReceiver::GetConstructorName(handle(object, isolate));
1719}
1720
1721
1722HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1723 if (!obj->IsHeapObject()) return NULL;
1724 return filler_->FindOrAddEntry(obj, this);
1725}
1726
1727
// Root visitor that records the root set twice: strong roots only (first
// IterateRoots pass) and all roots (second pass).  The difference between
// the two lists determines which root references are weak.
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  // Marks the position in |all_references_| at which the root group for
  // |tag| ends.
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  explicit RootsReferencesExtractor(Heap* heap)
      : collecting_all_references_(false),
        previous_reference_count_(0),
        heap_(heap) {
  }

  void VisitPointers(Object** start, Object** end) override {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  // Switches from collecting strong-only roots to collecting all roots.
  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  // Emits a GC-subroot edge for every collected root; a root is weak iff
  // it did not also appear in the strong-only list.  Builtin code objects
  // are additionally tagged with their builtin name.
  void FillReferences(V8HeapExplorer* explorer) {
    DCHECK(strong_references_.length() <= all_references_.length());
    Builtins* builtins = heap_->isolate()->builtins();
    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
    while (all_index < all_references_.length()) {
      // Assumes the strong list is an in-order subsequence of the all
      // list, so a single in-step scan classifies each reference.
      bool is_strong = strong_index < strong_references_.length()
          && strong_references_[strong_index] == all_references_[all_index];
      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                      !is_strong,
                                      all_references_[all_index]);
      if (reference_tags_[tags_index].tag ==
          VisitorSynchronization::kBuiltins) {
        DCHECK(all_references_[all_index]->IsCode());
        explorer->TagBuiltinCodeObject(
            Code::cast(all_references_[all_index]),
            builtins->name(builtin_index++));
      }
      ++all_index;
      if (is_strong) ++strong_index;
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  // Records a tag boundary whenever new references arrived since the last
  // synchronization point (only meaningful during the collect-all pass).
  void Synchronize(VisitorSynchronization::SyncTag tag) override {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
  Heap* heap_;
};
1793
1794
1795bool V8HeapExplorer::IterateAndExtractReferences(
1796 SnapshotFiller* filler) {
1797 filler_ = filler;
1798
1799 // Create references to the synthetic roots.
1800 SetRootGcRootsReference();
1801 for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
1802 SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
1803 }
1804
1805 // Make sure builtin code objects get their builtin tags
1806 // first. Otherwise a particular JSFunction object could set
1807 // its custom name to a generic builtin.
1808 RootsReferencesExtractor extractor(heap_);
1809 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1810 extractor.SetCollectingAllReferences();
1811 heap_->IterateRoots(&extractor, VISIT_ALL);
1812 extractor.FillReferences(this);
1813
1814 // We have to do two passes as sometimes FixedArrays are used
1815 // to weakly hold their items, and it's impossible to distinguish
1816 // between these cases without processing the array owner first.
1817 bool interrupted =
1818 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
1819 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();
1820
1821 if (interrupted) {
1822 filler_ = NULL;
1823 return false;
1824 }
1825
1826 filler_ = NULL;
1827 return progress_->ProgressReport(true);
1828}
1829
1830
// Runs one full pass over the heap, applying |extractor| to every
// reachable object and then collecting leftover fields as hidden
// references. Returns true if the embedder aborted via a progress
// callback. Note that the loop still drains the iterator after an
// interrupt, because a filtering heap iteration must run to completion.
template<V8HeapExplorer::ExtractReferencesMethod extractor>
bool V8HeapExplorer::IterateAndExtractSinglePass() {
  // Now iterate the whole heap.
  bool interrupted = false;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (interrupted) continue;

    // marks_ is a bitmap of visited pointer-sized slots in the current
    // object; grow it if this object is larger than any seen so far.
    size_t max_pointer = obj->Size() / kPointerSize;
    if (max_pointer > marks_.size()) {
      // Clear the current bits.
      std::vector<bool>().swap(marks_);
      // Reallocate to right size.
      marks_.resize(max_pointer, false);
    }

    HeapEntry* heap_entry = GetEntry(obj);
    int entry = heap_entry->index();
    if ((this->*extractor)(entry, obj)) {
      SetInternalReference(obj, entry,
                           "map", obj->map(), HeapObject::kMapOffset);
      // Extract unvisited fields as hidden references and restore tags
      // of visited fields.
      IndexedReferencesExtractor refs_extractor(this, obj, entry);
      obj->Iterate(&refs_extractor);
    }

    if (!progress_->ProgressReport(false)) interrupted = true;
  }
  return interrupted;
}
1865
1866
1867bool V8HeapExplorer::IsEssentialObject(Object* object) {
1868 return object->IsHeapObject() && !object->IsOddball() &&
1869 object != heap_->empty_byte_array() &&
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001870 object != heap_->empty_fixed_array() &&
1871 object != heap_->empty_descriptor_array() &&
1872 object != heap_->fixed_array_map() && object != heap_->cell_map() &&
1873 object != heap_->global_property_cell_map() &&
1874 object != heap_->shared_function_info_map() &&
1875 object != heap_->free_space_map() &&
1876 object != heap_->one_pointer_filler_map() &&
1877 object != heap_->two_pointer_filler_map();
1878}
1879
1880
1881void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1882 int parent_entry,
1883 String* reference_name,
1884 Object* child_obj,
1885 int field_offset) {
1886 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1887 HeapEntry* child_entry = GetEntry(child_obj);
1888 if (child_entry != NULL) {
1889 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1890 parent_entry,
1891 names_->GetName(reference_name),
1892 child_entry);
1893 MarkVisitedField(parent_obj, field_offset);
1894 }
1895}
1896
1897
1898void V8HeapExplorer::MarkVisitedField(HeapObject* obj, int offset) {
1899 if (offset < 0) return;
1900 int index = offset / kPointerSize;
1901 DCHECK(!marks_[index]);
1902 marks_[index] = true;
1903}
1904
1905
1906void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1907 int parent_entry,
1908 const char* reference_name,
1909 Object* child_obj) {
1910 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1911 HeapEntry* child_entry = GetEntry(child_obj);
1912 if (child_entry != NULL) {
1913 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1914 parent_entry,
1915 reference_name,
1916 child_entry);
1917 }
1918}
1919
1920
1921void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1922 int parent_entry,
1923 int index,
1924 Object* child_obj) {
1925 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1926 HeapEntry* child_entry = GetEntry(child_obj);
1927 if (child_entry != NULL) {
1928 filler_->SetIndexedReference(HeapGraphEdge::kElement,
1929 parent_entry,
1930 index,
1931 child_entry);
1932 }
1933}
1934
1935
1936void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1937 int parent_entry,
1938 const char* reference_name,
1939 Object* child_obj,
1940 int field_offset) {
1941 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1942 HeapEntry* child_entry = GetEntry(child_obj);
1943 if (child_entry == NULL) return;
1944 if (IsEssentialObject(child_obj)) {
1945 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1946 parent_entry,
1947 reference_name,
1948 child_entry);
1949 }
1950 MarkVisitedField(parent_obj, field_offset);
1951}
1952
1953
1954void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1955 int parent_entry,
1956 int index,
1957 Object* child_obj,
1958 int field_offset) {
1959 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1960 HeapEntry* child_entry = GetEntry(child_obj);
1961 if (child_entry == NULL) return;
1962 if (IsEssentialObject(child_obj)) {
1963 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1964 parent_entry,
1965 names_->GetName(index),
1966 child_entry);
1967 }
1968 MarkVisitedField(parent_obj, field_offset);
1969}
1970
1971
1972void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1973 int parent_entry,
1974 int index,
1975 Object* child_obj) {
1976 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1977 HeapEntry* child_entry = GetEntry(child_obj);
1978 if (child_entry != NULL && IsEssentialObject(child_obj)) {
1979 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1980 parent_entry,
1981 index,
1982 child_entry);
1983 }
1984}
1985
1986
1987void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1988 int parent_entry,
1989 const char* reference_name,
1990 Object* child_obj,
1991 int field_offset) {
1992 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1993 HeapEntry* child_entry = GetEntry(child_obj);
1994 if (child_entry == NULL) return;
1995 if (IsEssentialObject(child_obj)) {
1996 filler_->SetNamedReference(HeapGraphEdge::kWeak,
1997 parent_entry,
1998 reference_name,
1999 child_entry);
2000 }
2001 MarkVisitedField(parent_obj, field_offset);
2002}
2003
2004
2005void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2006 int parent_entry,
2007 int index,
2008 Object* child_obj,
2009 int field_offset) {
2010 DCHECK(parent_entry == GetEntry(parent_obj)->index());
2011 HeapEntry* child_entry = GetEntry(child_obj);
2012 if (child_entry == NULL) return;
2013 if (IsEssentialObject(child_obj)) {
2014 filler_->SetNamedReference(HeapGraphEdge::kWeak,
2015 parent_entry,
2016 names_->GetFormatted("%d", index),
2017 child_entry);
2018 }
2019 MarkVisitedField(parent_obj, field_offset);
2020}
2021
2022
2023void V8HeapExplorer::SetDataOrAccessorPropertyReference(
2024 PropertyKind kind, JSObject* parent_obj, int parent_entry,
2025 Name* reference_name, Object* child_obj, const char* name_format_string,
2026 int field_offset) {
2027 if (kind == kAccessor) {
2028 ExtractAccessorPairProperty(parent_obj, parent_entry, reference_name,
2029 child_obj, field_offset);
2030 } else {
2031 SetPropertyReference(parent_obj, parent_entry, reference_name, child_obj,
2032 name_format_string, field_offset);
2033 }
2034}
2035
2036
2037void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2038 int parent_entry,
2039 Name* reference_name,
2040 Object* child_obj,
2041 const char* name_format_string,
2042 int field_offset) {
2043 DCHECK(parent_entry == GetEntry(parent_obj)->index());
2044 HeapEntry* child_entry = GetEntry(child_obj);
2045 if (child_entry != NULL) {
2046 HeapGraphEdge::Type type =
2047 reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
2048 ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2049 const char* name = name_format_string != NULL && reference_name->IsString()
2050 ? names_->GetFormatted(
2051 name_format_string,
2052 String::cast(reference_name)->ToCString(
2053 DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
2054 names_->GetName(reference_name);
2055
2056 filler_->SetNamedReference(type,
2057 parent_entry,
2058 name,
2059 child_entry);
2060 MarkVisitedField(parent_obj, field_offset);
2061 }
2062}
2063
2064
// Links the synthetic "(GC roots)" entry under the snapshot root.
void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}
2071
2072
// Adds a shortcut edge from the snapshot root to a JS global object.
// The child must already have a snapshot entry.
void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  DCHECK(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}
2081
2082
// Links the per-category "(GC subroot)" entry for |tag| under "(GC roots)".
void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->gc_roots()->index(),
      snapshot_->gc_subroot(tag));
}
2089
2090
2091void V8HeapExplorer::SetGcSubrootReference(
2092 VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2093 HeapEntry* child_entry = GetEntry(child_obj);
2094 if (child_entry != NULL) {
2095 const char* name = GetStrongGcSubrootName(child_obj);
2096 if (name != NULL) {
2097 filler_->SetNamedReference(
2098 HeapGraphEdge::kInternal,
2099 snapshot_->gc_subroot(tag)->index(),
2100 name,
2101 child_entry);
2102 } else {
2103 if (is_weak) {
2104 filler_->SetNamedAutoIndexReference(
2105 HeapGraphEdge::kWeak,
2106 snapshot_->gc_subroot(tag)->index(),
2107 child_entry);
2108 } else {
2109 filler_->SetIndexedAutoIndexReference(
2110 HeapGraphEdge::kElement,
2111 snapshot_->gc_subroot(tag)->index(),
2112 child_entry);
2113 }
2114 }
2115
2116 // Add a shortcut to JS global object reference at snapshot root.
2117 if (child_obj->IsNativeContext()) {
2118 Context* context = Context::cast(child_obj);
2119 JSGlobalObject* global = context->global_object();
2120 if (global->IsJSGlobalObject()) {
2121 bool is_debug_object = false;
2122 is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
2123 if (!is_debug_object && !user_roots_.Contains(global)) {
2124 user_roots_.Insert(global);
2125 SetUserGlobalReference(global);
2126 }
2127 }
2128 }
2129 }
2130}
2131
2132
// Returns the root-list name for |object| if it is one of the strong GC
// roots (root-list entries, struct maps, internalized strings, private and
// public symbols), or NULL otherwise. The name table is built lazily on
// first use from the corresponding root list macros.
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define STRING_NAME(name, str) NAME_ENTRY(name)
    INTERNALIZED_STRING_LIST(STRING_NAME)
#undef STRING_NAME
#define SYMBOL_NAME(name) NAME_ENTRY(name)
    PRIVATE_SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#define SYMBOL_NAME(name, description) NAME_ENTRY(name)
    PUBLIC_SYMBOL_LIST(SYMBOL_NAME)
    WELL_KNOWN_SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}
2157
2158
2159void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2160 if (IsEssentialObject(obj)) {
2161 HeapEntry* entry = GetEntry(obj);
2162 if (entry->name()[0] == '\0') {
2163 entry->set_name(tag);
2164 }
2165 }
2166}
2167
2168
2169void V8HeapExplorer::MarkAsWeakContainer(Object* object) {
2170 if (IsEssentialObject(object) && object->IsFixedArray()) {
2171 weak_containers_.Insert(object);
2172 }
2173}
2174
2175
2176class GlobalObjectsEnumerator : public ObjectVisitor {
2177 public:
2178 void VisitPointers(Object** start, Object** end) override {
2179 for (Object** p = start; p < end; p++) {
2180 if ((*p)->IsNativeContext()) {
2181 Context* context = Context::cast(*p);
2182 JSObject* proxy = context->global_proxy();
2183 if (proxy->IsJSGlobalProxy()) {
2184 Object* global = proxy->map()->prototype();
2185 if (global->IsJSGlobalObject()) {
2186 objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2187 }
2188 }
2189 }
2190 }
2191 }
2192 int count() { return objects_.length(); }
2193 Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2194
2195 private:
2196 List<Handle<JSGlobalObject> > objects_;
2197};
2198
2199
// Modifies heap. Must not be run during heap traversal.
// Tags every global object with the name supplied by the embedder's
// resolver. Names are resolved first (the resolver is embedder code and
// runs under a HandleScope); the tags themselves are then recorded under
// DisallowHeapAllocation using raw pointers.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = heap_->isolate();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      HandleScope scope(isolate);
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }

  DisallowHeapAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}
2225
2226
// Forwards global handles that carry an embedder class id to the
// NativeObjectsExplorer. Plain pointer visits are unexpected here since
// this visitor is only used with IterateAllRootsWithClassIds().
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  ~GlobalHandlesExtractor() override {}
  void VisitPointers(Object** start, Object** end) override { UNREACHABLE(); }
  void VisitEmbedderReference(Object** p, uint16_t class_id) override {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;
};
2239
2240
// Allocates snapshot entries for embedder-provided RetainedObjectInfo
// objects. All entries created by one instance share a fixed type
// (e.g. kNative or kSynthetic).
class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
      : snapshot_(snapshot),
        names_(snapshot_->profiler()->names()),
        heap_object_map_(snapshot_->profiler()->heap_object_map()),
        entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapObjectsMap* heap_object_map_;
  HeapEntry::Type entries_type_;
};
2258
2259
2260HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2261 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2262 intptr_t elements = info->GetElementCount();
2263 intptr_t size = info->GetSizeInBytes();
2264 const char* name = elements != -1
2265 ? names_->GetFormatted(
2266 "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2267 : names_->GetCopy(info->GetLabel());
2268 return snapshot_->AddEntry(
2269 entries_type_,
2270 name,
2271 heap_object_map_->GenerateId(info),
2272 size != -1 ? static_cast<int>(size) : 0,
2273 0);
2274}
2275
2276
// Sets up the explorer with two entry allocators: synthetic entries for
// native group nodes and native entries for embedder objects.
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
2292
2293
// Disposes all embedder RetainedObjectInfo instances whose ownership was
// acquired (group infos are stored as map keys, synthetic group infos as
// map values) and frees the per-info object lists and the allocators.
NativeObjectsExplorer::~NativeObjectsExplorer() {
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    info->Dispose();
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    delete objects;
  }
  for (HashMap::Entry* p = native_groups_.Start();
       p != NULL;
       p = native_groups_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
    info->Dispose();
  }
  delete synthetic_entries_allocator_;
  delete native_entries_allocator_;
}
2315
2316
// Returns the number of distinct RetainedObjectInfo records; triggers the
// one-time embedder query as a side effect.
int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}
2321
2322
// Queries the embedder (at most once) for retained native objects. GC
// prologue callbacks are fired with
// kGCCallbackFlagConstructRetainedObjectInfos so the embedder builds its
// ObjectGroups; the groups are recorded here and their info objects'
// ownership is taken over. Handles with wrapper class ids are collected
// separately via GlobalHandlesExtractor.
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = isolate_;
  const GCType major_gc_type = kGCTypeMarkSweepCompact;
  // Record objects that are joined into ObjectGroups.
  isolate->heap()->CallGCPrologueCallbacks(
      major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info == NULL) continue;
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
    for (size_t j = 0; j < group->length; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects[j]);
      list->Add(obj);
      in_groups_.Insert(obj);
    }
    group->info = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
  // Record objects that are not in ObjectGroups, but have class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}
2349
2350
// Converts the embedder's implicit reference groups into "native" internal
// edges from each group's parent object to all of its children, then
// releases the groups.
void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = isolate_;
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    DCHECK(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children;
    for (size_t j = 0; j < group->length; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          parent_entry,
          "native",
          child_entry);
    }
  }
  isolate->global_handles()->RemoveImplicitRefGroups();
}
2375
2376List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2377 v8::RetainedObjectInfo* info) {
2378 HashMap::Entry* entry = objects_by_info_.LookupOrInsert(info, InfoHash(info));
2379 if (entry->value != NULL) {
2380 info->Dispose();
2381 } else {
2382 entry->value = new List<HeapObject*>(4);
2383 }
2384 return reinterpret_cast<List<HeapObject*>* >(entry->value);
2385}
2386
2387
// Emits native-object edges into the snapshot: a synthetic group entry per
// group label, group->info edges, and two-way wrapper<->native edges.
// Always succeeds (no progress-based aborting in this explorer).
bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFiller* filler) {
  filler_ = filler;
  FillRetainedObjects();
  FillImplicitReferences();
  if (EstimateObjectsCount() > 0) {
    for (HashMap::Entry* p = objects_by_info_.Start();
         p != NULL;
         p = objects_by_info_.Next(p)) {
      v8::RetainedObjectInfo* info =
          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
      SetNativeRootReference(info);
      List<HeapObject*>* objects =
          reinterpret_cast<List<HeapObject*>* >(p->value);
      for (int i = 0; i < objects->length(); ++i) {
        SetWrapperNativeReferences(objects->at(i), info);
      }
    }
    SetRootNativeRootsReference();
  }
  filler_ = NULL;
  return true;
}
2411
2412
// Synthetic RetainedObjectInfo representing a whole native group; used as
// the parent entry for all infos sharing the same group label. The hash is
// the label pointer itself (labels are interned copies, see
// FindOrAddGroupInfo).
class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  // Self-deleting; disposing twice is a bug.
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;
};
2438
2439
// Returns the synthetic group info for |label|, creating it on first use.
// The map is keyed by an interned copy of the label stored in names_.
NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = names_->GetCopy(label);
  uint32_t hash = StringHasher::HashSequentialString(
      label_copy,
      static_cast<int>(strlen(label_copy)),
      isolate_->heap()->HashSeed());
  HashMap::Entry* entry =
      native_groups_.LookupOrInsert(const_cast<char*>(label_copy), hash);
  if (entry->value == NULL) {
    entry->value = new NativeGroupRetainedObjectInfo(label);
  }
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}
2454
2455
// Links the native entry for |info| under its synthetic group entry.
void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  DCHECK(child_entry != NULL);
  NativeGroupRetainedObjectInfo* group_info =
      FindOrAddGroupInfo(info->GetGroupLabel());
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
  // |FindOrAddEntry| can move and resize the entries backing store. Reload
  // potentially-stale pointer.
  child_entry = filler_->FindEntry(info);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kInternal,
      group_entry->index(),
      child_entry);
}
2473
2474
// Creates a bidirectional link between a JS wrapper object and its native
// counterpart: wrapper -> native (named internal edge) and native ->
// wrapper (auto-indexed element edge).
void NativeObjectsExplorer::SetWrapperNativeReferences(
    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  DCHECK(wrapper_entry != NULL);
  HeapEntry* info_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  DCHECK(info_entry != NULL);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             wrapper_entry->index(),
                             "native",
                             info_entry);
  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
                                        info_entry->index(),
                                        wrapper_entry);
}
2490
2491
// Attaches every native group entry directly under the snapshot root.
void NativeObjectsExplorer::SetRootNativeRootsReference() {
  for (HashMap::Entry* entry = native_groups_.Start();
       entry;
       entry = native_groups_.Next(entry)) {
    NativeGroupRetainedObjectInfo* group_info =
        static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
    HeapEntry* group_entry =
        filler_->FindOrAddEntry(group_info, native_entries_allocator_);
    DCHECK(group_entry != NULL);
    filler_->SetIndexedAutoIndexReference(
        HeapGraphEdge::kElement,
        snapshot_->root()->index(),
        group_entry);
  }
}
2507
2508
2509void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2510 if (in_groups_.Contains(*p)) return;
2511 Isolate* isolate = isolate_;
2512 v8::RetainedObjectInfo* info =
2513 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2514 if (info == NULL) return;
2515 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2516}
2517
2518
// Wires the generator to its snapshot, the optional progress/abort control
// and the two sub-explorers (V8 heap and embedder/DOM objects).
HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}
2530
2531
// Produces the snapshot: tags globals, forces the heap into an iterable
// state via GC, creates synthetic root entries, fills all references and
// finalizes the children lists. Returns false if the embedder aborted
// through the activity control.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  heap_->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  heap_->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = heap_;
  if (FLAG_verify_heap) {
    debug_heap->Verify();
  }
#endif

  SetProgressTotal(2);  // 2 passes.

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    debug_heap->Verify();
  }
#endif

  snapshot_->AddSyntheticRootEntries();

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  // Report completion so the embedder sees the final progress value.
  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
2572
2573
// Advances the progress counter by one processed heap object.
void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}
2577
2578
2579bool HeapSnapshotGenerator::ProgressReport(bool force) {
2580 const int kProgressReportGranularity = 10000;
2581 if (control_ != NULL
2582 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2583 return
2584 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2585 v8::ActivityControl::kContinue;
2586 }
2587 return true;
2588}
2589
2590
// Estimates the total number of progress steps (objects per pass times the
// number of passes) so ProgressReport() can report a meaningful ratio.
// Skipped entirely when no activity control is installed.
void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
  if (control_ == NULL) return;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  progress_total_ = iterations_count * (
      v8_heap_explorer_.EstimateObjectsCount(&iterator) +
      dom_explorer_.EstimateObjectsCount());
  progress_counter_ = 0;
}
2599
2600
// Extracts references from the V8 heap first, then from the embedder's
// native objects; short-circuits if the V8 pass was interrupted.
bool HeapSnapshotGenerator::FillReferences() {
  SnapshotFiller filler(snapshot_, &entries_);
  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
      && dom_explorer_.IterateAndExtractReferences(&filler);
}
2606
2607
// Maximum number of characters needed to print an integer of the given
// byte width in decimal (kSigned includes the '-' sign).
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;    // e.g. "-2147483648"
  static const int kUnsigned = 10;  // e.g. "4294967295"
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;    // e.g. "-9223372036854775808"
  static const int kUnsigned = 20;  // e.g. "18446744073709551615"
};
2617
2618
// Buffers serializer output into fixed-size chunks and forwards each full
// chunk to the embedder-supplied v8::OutputStream. Once the stream
// requests an abort, all further output is silently dropped.
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    DCHECK(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    DCHECK(c != '\0');
    DCHECK(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  // Appends the first |n| bytes of |s|, flushing whenever the chunk fills.
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    DCHECK(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      int s_chunk_size =
          Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      DCHECK(s_chunk_size > 0);
      MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  // Flushes any buffered bytes and signals end-of-stream (unless aborted).
  void Finalize() {
    if (aborted_) return;
    DCHECK(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      // Fast path: print directly into the remaining chunk space.
      int result = SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      DCHECK(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      // Slow path: print into a stack buffer; AddString may span chunks.
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = SNPrintF(buffer, format, n);
      USE(result);
      DCHECK(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    DCHECK(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};
2702
2703
// Per-edge serialized fields: type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// Per-node serialized fields: type, name, id, self_size, edge_count,
// trace_node_id.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2708
// Entry point: streams the whole snapshot as JSON to |stream|. Allocation
// tracker data (if present) is prepared first so trace serialization sees
// a consistent state.
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    allocation_tracker->PrepareForSerialization();
  }
  DCHECK(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);
  SerializeImpl();
  delete writer_;
  writer_ = NULL;
}
2720
2721
// Emits the top-level JSON object. Sections are written in a fixed order
// and serialization stops as soon as the output stream aborts.
void HeapSnapshotJSONSerializer::SerializeImpl() {
  DCHECK(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"trace_function_infos\":[");
  SerializeTraceNodeInfos();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"trace_tree\":[");
  SerializeTraceTree();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"samples\":[");
  SerializeSamples();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}
2759
2760
// Interns |s| in the string table: the first occurrence is assigned the
// next sequential id; later lookups return the cached id.
int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
  HashMap::Entry* cache_entry =
      strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
  if (cache_entry->value == NULL) {
    cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
  }
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}
2769
2770
namespace {

// Maps a byte width to the unsigned integer type of the same size; used by
// utoa() to reinterpret values before printing.
template<size_t size> struct ToUnsigned;

template<> struct ToUnsigned<4> {
  typedef uint32_t Type;
};

template<> struct ToUnsigned<8> {
  typedef uint64_t Type;
};

}  // namespace
2784
2785
2786template<typename T>
2787static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2788 STATIC_ASSERT(static_cast<T>(-1) > 0); // Check that T is unsigned
2789 int number_of_digits = 0;
2790 T t = value;
2791 do {
2792 ++number_of_digits;
2793 } while (t /= 10);
2794
2795 buffer_pos += number_of_digits;
2796 int result = buffer_pos;
2797 do {
2798 int last_digit = static_cast<int>(value % 10);
2799 buffer[--buffer_pos] = '0' + last_digit;
2800 value /= 10;
2801 } while (value);
2802 return result;
2803}
2804
2805
2806template<typename T>
2807static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2808 typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2809 STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2810 return utoa_impl(unsigned_value, buffer, buffer_pos);
2811}
2812
2813
2814void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2815 bool first_edge) {
2816 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2817 static const int kBufferSize =
2818 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2819 EmbeddedVector<char, kBufferSize> buffer;
2820 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2821 || edge->type() == HeapGraphEdge::kHidden
2822 ? edge->index() : GetStringId(edge->name());
2823 int buffer_pos = 0;
2824 if (!first_edge) {
2825 buffer[buffer_pos++] = ',';
2826 }
2827 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2828 buffer[buffer_pos++] = ',';
2829 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2830 buffer[buffer_pos++] = ',';
2831 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2832 buffer[buffer_pos++] = '\n';
2833 buffer[buffer_pos++] = '\0';
2834 writer_->AddString(buffer.start());
2835}
2836
2837
2838void HeapSnapshotJSONSerializer::SerializeEdges() {
2839 List<HeapGraphEdge*>& edges = snapshot_->children();
2840 for (int i = 0; i < edges.length(); ++i) {
2841 DCHECK(i == 0 ||
2842 edges[i - 1]->from()->index() <= edges[i]->from()->index());
2843 SerializeEdge(edges[i], i == 0);
2844 if (writer_->aborted()) return;
2845 }
2846}
2847
2848
2849void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2850 // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0
2851 static const int kBufferSize =
2852 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2853 + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned // NOLINT
2854 + 6 + 1 + 1;
2855 EmbeddedVector<char, kBufferSize> buffer;
2856 int buffer_pos = 0;
2857 if (entry_index(entry) != 0) {
2858 buffer[buffer_pos++] = ',';
2859 }
2860 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2861 buffer[buffer_pos++] = ',';
2862 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2863 buffer[buffer_pos++] = ',';
2864 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2865 buffer[buffer_pos++] = ',';
2866 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2867 buffer[buffer_pos++] = ',';
2868 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2869 buffer[buffer_pos++] = ',';
2870 buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2871 buffer[buffer_pos++] = '\n';
2872 buffer[buffer_pos++] = '\0';
2873 writer_->AddString(buffer.start());
2874}
2875
2876
2877void HeapSnapshotJSONSerializer::SerializeNodes() {
2878 List<HeapEntry>& entries = snapshot_->entries();
2879 for (int i = 0; i < entries.length(); ++i) {
2880 SerializeNode(&entries[i]);
2881 if (writer_->aborted()) return;
2882 }
2883}
2884
2885
// Writes the "meta" object describing the layout of the "nodes", "edges",
// "trace_function_infos", "trace_tree" and "samples" arrays, followed by
// node/edge/trace-function counts. The field lists below must stay in sync
// with SerializeNode(), SerializeEdge() and the trace serializers; readers
// (e.g. DevTools) interpret the flat number arrays through this schema.
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
    JSON_S("node_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name") ","
        JSON_S("id") ","
        JSON_S("self_size") ","
        JSON_S("edge_count") ","
        JSON_S("trace_node_id")) ","
    JSON_S("node_types") ":" JSON_A(
        JSON_A(
            JSON_S("hidden") ","
            JSON_S("array") ","
            JSON_S("string") ","
            JSON_S("object") ","
            JSON_S("code") ","
            JSON_S("closure") ","
            JSON_S("regexp") ","
            JSON_S("number") ","
            JSON_S("native") ","
            JSON_S("synthetic") ","
            JSON_S("concatenated string") ","
            JSON_S("sliced string")) ","
        JSON_S("string") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number")) ","
    JSON_S("edge_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name_or_index") ","
        JSON_S("to_node")) ","
    JSON_S("edge_types") ":" JSON_A(
        JSON_A(
            JSON_S("context") ","
            JSON_S("element") ","
            JSON_S("property") ","
            JSON_S("internal") ","
            JSON_S("hidden") ","
            JSON_S("shortcut") ","
            JSON_S("weak")) ","
        JSON_S("string_or_number") ","
        JSON_S("node")) ","
    JSON_S("trace_function_info_fields") ":" JSON_A(
        JSON_S("function_id") ","
        JSON_S("name") ","
        JSON_S("script_name") ","
        JSON_S("script_id") ","
        JSON_S("line") ","
        JSON_S("column")) ","
    JSON_S("trace_node_fields") ":" JSON_A(
        JSON_S("id") ","
        JSON_S("function_info_index") ","
        JSON_S("count") ","
        JSON_S("size") ","
        JSON_S("children")) ","
    JSON_S("sample_fields") ":" JSON_A(
        JSON_S("timestamp_us") ","
        JSON_S("last_assigned_id"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
  writer_->AddString(",\"trace_function_count\":");
  // Allocation tracking is optional: report zero trace functions when no
  // tracker was active for this snapshot.
  uint32_t count = 0;
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (tracker) {
    count = tracker->function_info_list().length();
  }
  writer_->AddNumber(count);
}
2967
2968
2969static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2970 static const char hex_chars[] = "0123456789ABCDEF";
2971 w->AddString("\\u");
2972 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2973 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2974 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2975 w->AddCharacter(hex_chars[u & 0xf]);
2976}
2977
2978
2979void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2980 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2981 if (!tracker) return;
2982 AllocationTraceTree* traces = tracker->trace_tree();
2983 SerializeTraceNode(traces->root());
2984}
2985
2986
2987void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2988 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2989 const int kBufferSize =
2990 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2991 + 4 + 1 + 1;
2992 EmbeddedVector<char, kBufferSize> buffer;
2993 int buffer_pos = 0;
2994 buffer_pos = utoa(node->id(), buffer, buffer_pos);
2995 buffer[buffer_pos++] = ',';
2996 buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
2997 buffer[buffer_pos++] = ',';
2998 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
2999 buffer[buffer_pos++] = ',';
3000 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
3001 buffer[buffer_pos++] = ',';
3002 buffer[buffer_pos++] = '[';
3003 buffer[buffer_pos++] = '\0';
3004 writer_->AddString(buffer.start());
3005
3006 Vector<AllocationTraceNode*> children = node->children();
3007 for (int i = 0; i < children.length(); i++) {
3008 if (i > 0) {
3009 writer_->AddCharacter(',');
3010 }
3011 SerializeTraceNode(children[i]);
3012 }
3013 writer_->AddCharacter(']');
3014}
3015
3016
3017// 0-based position is converted to 1-based during the serialization.
3018static int SerializePosition(int position, const Vector<char>& buffer,
3019 int buffer_pos) {
3020 if (position == -1) {
3021 buffer[buffer_pos++] = '0';
3022 } else {
3023 DCHECK(position >= 0);
3024 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
3025 }
3026 return buffer_pos;
3027}
3028
3029
// Writes one "function_id,name,script_name,script_id,line,column\n" record
// per tracked allocation function, matching "trace_function_info_fields"
// in the meta description. No-op when allocation tracking is off.
void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
  const int kBufferSize =
      6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  const List<AllocationTracker::FunctionInfo*>& list =
      tracker->function_info_list();
  for (int i = 0; i < list.length(); i++) {
    AllocationTracker::FunctionInfo* info = list[i];
    int buffer_pos = 0;
    if (i > 0) {
      // Separator between consecutive records.
      buffer[buffer_pos++] = ',';
    }
    buffer_pos = utoa(info->function_id, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    // Names are emitted as string-table ids, not inline strings.
    buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    // The cast is safe because script id is a non-negative Smi.
    buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
                      buffer_pos);
    buffer[buffer_pos++] = ',';
    // Positions are stored 0-based; SerializePosition emits them 1-based,
    // with -1 (unknown) serialized as 0.
    buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
    buffer[buffer_pos++] = '\n';
    buffer[buffer_pos++] = '\0';
    writer_->AddString(buffer.start());
  }
}
3064
3065
// Writes "timestamp_us,last_assigned_id\n" pairs, one per recorded heap
// sample ("sample_fields" in the meta description). Timestamps are
// relative to the first sample. No-op when no samples were recorded.
void HeapSnapshotJSONSerializer::SerializeSamples() {
  const List<HeapObjectsMap::TimeInterval>& samples =
      snapshot_->profiler()->heap_object_map()->samples();
  if (samples.is_empty()) return;
  base::TimeTicks start_time = samples[0].timestamp;
  // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0
  const int kBufferSize = MaxDecimalDigitsIn<sizeof(
                              base::TimeDelta().InMicroseconds())>::kUnsigned +
                          MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned +
                          2 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  for (int i = 0; i < samples.length(); i++) {
    HeapObjectsMap::TimeInterval& sample = samples[i];
    int buffer_pos = 0;
    if (i > 0) {
      // Separator between consecutive samples.
      buffer[buffer_pos++] = ',';
    }
    // Microseconds elapsed since the first sample's timestamp.
    base::TimeDelta time_delta = sample.timestamp - start_time;
    buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos);
    buffer[buffer_pos++] = '\n';
    buffer[buffer_pos++] = '\0';
    writer_->AddString(buffer.start());
  }
}
3092
3093
// Writes |s| (a NUL-terminated, UTF-8 encoded string) as a quoted JSON
// string: control characters are escaped, non-ASCII characters are decoded
// from UTF-8 and re-emitted as \uXXXX escapes. The leading newline before
// each string only makes the serialized snapshot easier to read.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        // Quote and backslash are escaped with a single backslash.
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          // Printable ASCII passes through verbatim.
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          // Take up to 4 bytes (the longest UTF-8 sequence), bounded by
          // the terminating NUL; |cursor| reports how many were consumed.
          size_t length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            DCHECK(cursor != 0);
            // Skip the rest of the decoded sequence; the loop's ++s
            // accounts for its final byte.
            s += cursor - 1;
          } else {
            // Invalid UTF-8: emit a placeholder and move on.
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
3142
3143
3144void HeapSnapshotJSONSerializer::SerializeStrings() {
3145 ScopedVector<const unsigned char*> sorted_strings(
3146 strings_.occupancy() + 1);
3147 for (HashMap::Entry* entry = strings_.Start();
3148 entry != NULL;
3149 entry = strings_.Next(entry)) {
3150 int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3151 sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3152 }
3153 writer_->AddString("\"<dummy>\"");
3154 for (int i = 1; i < sorted_strings.length(); ++i) {
3155 writer_->AddCharacter(',');
3156 SerializeString(sorted_strings[i]);
3157 if (writer_->aborted()) return;
3158 }
3159}
3160
3161
3162} // namespace internal
3163} // namespace v8