blob: 196eb130a5d81d24a4da525b77dbdd48362fb80e [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/v8.h"
6
7#include "src/heap-snapshot-generator-inl.h"
8
9#include "src/allocation-tracker.h"
10#include "src/code-stubs.h"
11#include "src/conversions.h"
12#include "src/debug.h"
13#include "src/heap-profiler.h"
14#include "src/types.h"
15
16namespace v8 {
17namespace internal {
18
19
20HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
Emily Bernierd0a1eb72015-03-24 16:35:39 -040021 : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
Ben Murdochb8a8cc12014-11-26 15:28:44 +000022 to_index_(to),
23 name_(name) {
24 DCHECK(type == kContextVariable
25 || type == kProperty
26 || type == kInternal
27 || type == kShortcut
28 || type == kWeak);
29}
30
31
32HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
Emily Bernierd0a1eb72015-03-24 16:35:39 -040033 : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
Ben Murdochb8a8cc12014-11-26 15:28:44 +000034 to_index_(to),
35 index_(index) {
36 DCHECK(type == kElement || type == kHidden);
37}
38
39
40void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
41 to_entry_ = &snapshot->entries()[to_index_];
42}
43
44
45const int HeapEntry::kNoEntry = -1;
46
47HeapEntry::HeapEntry(HeapSnapshot* snapshot,
48 Type type,
49 const char* name,
50 SnapshotObjectId id,
51 size_t self_size,
52 unsigned trace_node_id)
53 : type_(type),
54 children_count_(0),
55 children_index_(-1),
56 self_size_(self_size),
57 snapshot_(snapshot),
58 name_(name),
59 id_(id),
60 trace_node_id_(trace_node_id) { }
61
62
63void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
64 const char* name,
65 HeapEntry* entry) {
66 HeapGraphEdge edge(type, name, this->index(), entry->index());
67 snapshot_->edges().Add(edge);
68 ++children_count_;
69}
70
71
72void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
73 int index,
74 HeapEntry* entry) {
75 HeapGraphEdge edge(type, index, this->index(), entry->index());
76 snapshot_->edges().Add(edge);
77 ++children_count_;
78}
79
80
81void HeapEntry::Print(
82 const char* prefix, const char* edge_name, int max_depth, int indent) {
83 STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
84 base::OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ", self_size(), id(), indent,
85 ' ', prefix, edge_name);
86 if (type() != kString) {
87 base::OS::Print("%s %.40s\n", TypeAsString(), name_);
88 } else {
89 base::OS::Print("\"");
90 const char* c = name_;
91 while (*c && (c - name_) <= 40) {
92 if (*c != '\n')
93 base::OS::Print("%c", *c);
94 else
95 base::OS::Print("\\n");
96 ++c;
97 }
98 base::OS::Print("\"\n");
99 }
100 if (--max_depth == 0) return;
101 Vector<HeapGraphEdge*> ch = children();
102 for (int i = 0; i < ch.length(); ++i) {
103 HeapGraphEdge& edge = *ch[i];
104 const char* edge_prefix = "";
105 EmbeddedVector<char, 64> index;
106 const char* edge_name = index.start();
107 switch (edge.type()) {
108 case HeapGraphEdge::kContextVariable:
109 edge_prefix = "#";
110 edge_name = edge.name();
111 break;
112 case HeapGraphEdge::kElement:
113 SNPrintF(index, "%d", edge.index());
114 break;
115 case HeapGraphEdge::kInternal:
116 edge_prefix = "$";
117 edge_name = edge.name();
118 break;
119 case HeapGraphEdge::kProperty:
120 edge_name = edge.name();
121 break;
122 case HeapGraphEdge::kHidden:
123 edge_prefix = "$";
124 SNPrintF(index, "%d", edge.index());
125 break;
126 case HeapGraphEdge::kShortcut:
127 edge_prefix = "^";
128 edge_name = edge.name();
129 break;
130 case HeapGraphEdge::kWeak:
131 edge_prefix = "w";
132 edge_name = edge.name();
133 break;
134 default:
135 SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
136 }
137 edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
138 }
139}
140
141
142const char* HeapEntry::TypeAsString() {
143 switch (type()) {
144 case kHidden: return "/hidden/";
145 case kObject: return "/object/";
146 case kClosure: return "/closure/";
147 case kString: return "/string/";
148 case kCode: return "/code/";
149 case kArray: return "/array/";
150 case kRegExp: return "/regexp/";
151 case kHeapNumber: return "/number/";
152 case kNative: return "/native/";
153 case kSynthetic: return "/synthetic/";
154 case kConsString: return "/concatenated string/";
155 case kSlicedString: return "/sliced string/";
156 case kSymbol: return "/symbol/";
157 default: return "???";
158 }
159}
160
161
162// It is very important to keep objects that form a heap snapshot
163// as small as possible.
164namespace { // Avoid littering the global namespace.
165
166template <size_t ptr_size> struct SnapshotSizeConstants;
167
168template <> struct SnapshotSizeConstants<4> {
169 static const int kExpectedHeapGraphEdgeSize = 12;
170 static const int kExpectedHeapEntrySize = 28;
171};
172
173template <> struct SnapshotSizeConstants<8> {
174 static const int kExpectedHeapGraphEdgeSize = 24;
175 static const int kExpectedHeapEntrySize = 40;
176};
177
178} // namespace
179
180
181HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
182 const char* title,
183 unsigned uid)
184 : profiler_(profiler),
185 title_(title),
186 uid_(uid),
187 root_index_(HeapEntry::kNoEntry),
188 gc_roots_index_(HeapEntry::kNoEntry),
189 max_snapshot_js_object_id_(0) {
190 STATIC_ASSERT(
191 sizeof(HeapGraphEdge) ==
192 SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
193 STATIC_ASSERT(
194 sizeof(HeapEntry) ==
195 SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
196 USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
197 USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
198 USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
199 USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
200 for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
201 gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
202 }
203}
204
205
206void HeapSnapshot::Delete() {
207 profiler_->RemoveSnapshot(this);
208 delete this;
209}
210
211
212void HeapSnapshot::RememberLastJSObjectId() {
213 max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
214}
215
216
217void HeapSnapshot::AddSyntheticRootEntries() {
218 AddRootEntry();
219 AddGcRootsEntry();
220 SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
221 for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
222 AddGcSubrootEntry(tag, id);
223 id += HeapObjectsMap::kObjectIdStep;
224 }
225 DCHECK(HeapObjectsMap::kFirstAvailableObjectId == id);
226}
227
228
229HeapEntry* HeapSnapshot::AddRootEntry() {
230 DCHECK(root_index_ == HeapEntry::kNoEntry);
231 DCHECK(entries_.is_empty()); // Root entry must be the first one.
232 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
233 "",
234 HeapObjectsMap::kInternalRootObjectId,
235 0,
236 0);
237 root_index_ = entry->index();
238 DCHECK(root_index_ == 0);
239 return entry;
240}
241
242
243HeapEntry* HeapSnapshot::AddGcRootsEntry() {
244 DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
245 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
246 "(GC roots)",
247 HeapObjectsMap::kGcRootsObjectId,
248 0,
249 0);
250 gc_roots_index_ = entry->index();
251 return entry;
252}
253
254
255HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
256 DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
257 DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
258 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
259 VisitorSynchronization::kTagNames[tag], id, 0, 0);
260 gc_subroot_indexes_[tag] = entry->index();
261 return entry;
262}
263
264
265HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
266 const char* name,
267 SnapshotObjectId id,
268 size_t size,
269 unsigned trace_node_id) {
270 HeapEntry entry(this, type, name, id, size, trace_node_id);
271 entries_.Add(entry);
272 return &entries_.last();
273}
274
275
276void HeapSnapshot::FillChildren() {
277 DCHECK(children().is_empty());
278 children().Allocate(edges().length());
279 int children_index = 0;
280 for (int i = 0; i < entries().length(); ++i) {
281 HeapEntry* entry = &entries()[i];
282 children_index = entry->set_children_index(children_index);
283 }
284 DCHECK(edges().length() == children_index);
285 for (int i = 0; i < edges().length(); ++i) {
286 HeapGraphEdge* edge = &edges()[i];
287 edge->ReplaceToIndexWithEntry(this);
288 edge->from()->add_child(edge);
289 }
290}
291
292
293class FindEntryById {
294 public:
295 explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
296 int operator()(HeapEntry* const* entry) {
297 if ((*entry)->id() == id_) return 0;
298 return (*entry)->id() < id_ ? -1 : 1;
299 }
300 private:
301 SnapshotObjectId id_;
302};
303
304
305HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
306 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
307 // Perform a binary search by id.
308 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
309 if (index == -1)
310 return NULL;
311 return entries_by_id->at(index);
312}
313
314
315template<class T>
316static int SortByIds(const T* entry1_ptr,
317 const T* entry2_ptr) {
318 if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
319 return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
320}
321
322
323List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
324 if (sorted_entries_.is_empty()) {
325 sorted_entries_.Allocate(entries_.length());
326 for (int i = 0; i < entries_.length(); ++i) {
327 sorted_entries_[i] = &entries_[i];
328 }
329 sorted_entries_.Sort(SortByIds);
330 }
331 return &sorted_entries_;
332}
333
334
335void HeapSnapshot::Print(int max_depth) {
336 root()->Print("", "", max_depth, 0);
337}
338
339
340size_t HeapSnapshot::RawSnapshotSize() const {
341 return
342 sizeof(*this) +
343 GetMemoryUsedByList(entries_) +
344 GetMemoryUsedByList(edges_) +
345 GetMemoryUsedByList(children_) +
346 GetMemoryUsedByList(sorted_entries_);
347}
348
349
350// We split IDs on evens for embedder objects (see
351// HeapObjectsMap::GenerateId) and odds for native objects.
352const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
353const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
354 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
355const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
356 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
357const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
358 HeapObjectsMap::kGcRootsFirstSubrootId +
359 VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
360
361
// HashMap key-equality predicate: addresses match iff they are identical.
static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}
365
366
367HeapObjectsMap::HeapObjectsMap(Heap* heap)
368 : next_id_(kFirstAvailableObjectId),
369 entries_map_(AddressesMatch),
370 heap_(heap) {
371 // This dummy element solves a problem with entries_map_.
372 // When we do lookup in HashMap we see no difference between two cases:
373 // it has an entry with NULL as the value or it has created
374 // a new entry on the fly with NULL as the default value.
375 // With such dummy element we have a guaranty that all entries_map_ entries
376 // will have the value field grater than 0.
377 // This fact is using in MoveObject method.
378 entries_.Add(EntryInfo(0, NULL, 0));
379}
380
381
382bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
383 DCHECK(to != NULL);
384 DCHECK(from != NULL);
385 if (from == to) return false;
386 void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
387 if (from_value == NULL) {
388 // It may occur that some untracked object moves to an address X and there
389 // is a tracked object at that address. In this case we should remove the
390 // entry as we know that the object has died.
391 void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
392 if (to_value != NULL) {
393 int to_entry_info_index =
394 static_cast<int>(reinterpret_cast<intptr_t>(to_value));
395 entries_.at(to_entry_info_index).addr = NULL;
396 }
397 } else {
398 HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
399 true);
400 if (to_entry->value != NULL) {
401 // We found the existing entry with to address for an old object.
402 // Without this operation we will have two EntryInfo's with the same
403 // value in addr field. It is bad because later at RemoveDeadEntries
404 // one of this entry will be removed with the corresponding entries_map_
405 // entry.
406 int to_entry_info_index =
407 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
408 entries_.at(to_entry_info_index).addr = NULL;
409 }
410 int from_entry_info_index =
411 static_cast<int>(reinterpret_cast<intptr_t>(from_value));
412 entries_.at(from_entry_info_index).addr = to;
413 // Size of an object can change during its life, so to keep information
414 // about the object in entries_ consistent, we have to adjust size when the
415 // object is migrated.
416 if (FLAG_heap_profiler_trace_objects) {
417 PrintF("Move object from %p to %p old size %6d new size %6d\n",
418 from,
419 to,
420 entries_.at(from_entry_info_index).size,
421 object_size);
422 }
423 entries_.at(from_entry_info_index).size = object_size;
424 to_entry->value = from_value;
425 }
426 return from_value != NULL;
427}
428
429
430void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
431 FindOrAddEntry(addr, size, false);
432}
433
434
435SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
436 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
437 false);
438 if (entry == NULL) return 0;
439 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
440 EntryInfo& entry_info = entries_.at(entry_index);
441 DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
442 return entry_info.id;
443}
444
445
446SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
447 unsigned int size,
448 bool accessed) {
449 DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
450 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
451 true);
452 if (entry->value != NULL) {
453 int entry_index =
454 static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
455 EntryInfo& entry_info = entries_.at(entry_index);
456 entry_info.accessed = accessed;
457 if (FLAG_heap_profiler_trace_objects) {
458 PrintF("Update object size : %p with old size %d and new size %d\n",
459 addr,
460 entry_info.size,
461 size);
462 }
463 entry_info.size = size;
464 return entry_info.id;
465 }
466 entry->value = reinterpret_cast<void*>(entries_.length());
467 SnapshotObjectId id = next_id_;
468 next_id_ += kObjectIdStep;
469 entries_.Add(EntryInfo(id, addr, size, accessed));
470 DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
471 return id;
472}
473
474
475void HeapObjectsMap::StopHeapObjectsTracking() {
476 time_intervals_.Clear();
477}
478
479
480void HeapObjectsMap::UpdateHeapObjectsMap() {
481 if (FLAG_heap_profiler_trace_objects) {
482 PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
483 entries_map_.occupancy());
484 }
485 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
486 "HeapObjectsMap::UpdateHeapObjectsMap");
487 HeapIterator iterator(heap_);
488 for (HeapObject* obj = iterator.next();
489 obj != NULL;
490 obj = iterator.next()) {
491 FindOrAddEntry(obj->address(), obj->Size());
492 if (FLAG_heap_profiler_trace_objects) {
493 PrintF("Update object : %p %6d. Next address is %p\n",
494 obj->address(),
495 obj->Size(),
496 obj->address() + obj->Size());
497 }
498 }
499 RemoveDeadEntries();
500 if (FLAG_heap_profiler_trace_objects) {
501 PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
502 entries_map_.occupancy());
503 }
504}
505
506
507namespace {
508
509
510struct HeapObjectInfo {
511 HeapObjectInfo(HeapObject* obj, int expected_size)
512 : obj(obj),
513 expected_size(expected_size) {
514 }
515
516 HeapObject* obj;
517 int expected_size;
518
519 bool IsValid() const { return expected_size == obj->Size(); }
520
521 void Print() const {
522 if (expected_size == 0) {
523 PrintF("Untracked object : %p %6d. Next address is %p\n",
524 obj->address(),
525 obj->Size(),
526 obj->address() + obj->Size());
527 } else if (obj->Size() != expected_size) {
528 PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
529 expected_size,
530 obj->address(),
531 obj->Size(),
532 obj->address() + obj->Size());
533 } else {
534 PrintF("Good object : %p %6d. Next address is %p\n",
535 obj->address(),
536 expected_size,
537 obj->address() + obj->Size());
538 }
539 }
540};
541
542
543static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
544 if (a->obj < b->obj) return -1;
545 if (a->obj > b->obj) return 1;
546 return 0;
547}
548
549
550} // namespace
551
552
553int HeapObjectsMap::FindUntrackedObjects() {
554 List<HeapObjectInfo> heap_objects(1000);
555
556 HeapIterator iterator(heap_);
557 int untracked = 0;
558 for (HeapObject* obj = iterator.next();
559 obj != NULL;
560 obj = iterator.next()) {
561 HashMap::Entry* entry = entries_map_.Lookup(
562 obj->address(), ComputePointerHash(obj->address()), false);
563 if (entry == NULL) {
564 ++untracked;
565 if (FLAG_heap_profiler_trace_objects) {
566 heap_objects.Add(HeapObjectInfo(obj, 0));
567 }
568 } else {
569 int entry_index = static_cast<int>(
570 reinterpret_cast<intptr_t>(entry->value));
571 EntryInfo& entry_info = entries_.at(entry_index);
572 if (FLAG_heap_profiler_trace_objects) {
573 heap_objects.Add(HeapObjectInfo(obj,
574 static_cast<int>(entry_info.size)));
575 if (obj->Size() != static_cast<int>(entry_info.size))
576 ++untracked;
577 } else {
578 CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
579 }
580 }
581 }
582 if (FLAG_heap_profiler_trace_objects) {
583 PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
584 entries_map_.occupancy());
585 heap_objects.Sort(comparator);
586 int last_printed_object = -1;
587 bool print_next_object = false;
588 for (int i = 0; i < heap_objects.length(); ++i) {
589 const HeapObjectInfo& object_info = heap_objects[i];
590 if (!object_info.IsValid()) {
591 ++untracked;
592 if (last_printed_object != i - 1) {
593 if (i > 0) {
594 PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
595 heap_objects[i - 1].Print();
596 }
597 }
598 object_info.Print();
599 last_printed_object = i;
600 print_next_object = true;
601 } else if (print_next_object) {
602 object_info.Print();
603 print_next_object = false;
604 last_printed_object = i;
605 }
606 }
607 if (last_printed_object < heap_objects.length() - 1) {
608 PrintF("Last %d objects were skipped\n",
609 heap_objects.length() - 1 - last_printed_object);
610 }
611 PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
612 entries_map_.occupancy());
613 }
614 return untracked;
615}
616
617
618SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
619 UpdateHeapObjectsMap();
620 time_intervals_.Add(TimeInterval(next_id_));
621 int prefered_chunk_size = stream->GetChunkSize();
622 List<v8::HeapStatsUpdate> stats_buffer;
623 DCHECK(!entries_.is_empty());
624 EntryInfo* entry_info = &entries_.first();
625 EntryInfo* end_entry_info = &entries_.last() + 1;
626 for (int time_interval_index = 0;
627 time_interval_index < time_intervals_.length();
628 ++time_interval_index) {
629 TimeInterval& time_interval = time_intervals_[time_interval_index];
630 SnapshotObjectId time_interval_id = time_interval.id;
631 uint32_t entries_size = 0;
632 EntryInfo* start_entry_info = entry_info;
633 while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
634 entries_size += entry_info->size;
635 ++entry_info;
636 }
637 uint32_t entries_count =
638 static_cast<uint32_t>(entry_info - start_entry_info);
639 if (time_interval.count != entries_count ||
640 time_interval.size != entries_size) {
641 stats_buffer.Add(v8::HeapStatsUpdate(
642 time_interval_index,
643 time_interval.count = entries_count,
644 time_interval.size = entries_size));
645 if (stats_buffer.length() >= prefered_chunk_size) {
646 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
647 &stats_buffer.first(), stats_buffer.length());
648 if (result == OutputStream::kAbort) return last_assigned_id();
649 stats_buffer.Clear();
650 }
651 }
652 }
653 DCHECK(entry_info == end_entry_info);
654 if (!stats_buffer.is_empty()) {
655 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
656 &stats_buffer.first(), stats_buffer.length());
657 if (result == OutputStream::kAbort) return last_assigned_id();
658 }
659 stream->EndOfStream();
660 return last_assigned_id();
661}
662
663
664void HeapObjectsMap::RemoveDeadEntries() {
665 DCHECK(entries_.length() > 0 &&
666 entries_.at(0).id == 0 &&
667 entries_.at(0).addr == NULL);
668 int first_free_entry = 1;
669 for (int i = 1; i < entries_.length(); ++i) {
670 EntryInfo& entry_info = entries_.at(i);
671 if (entry_info.accessed) {
672 if (first_free_entry != i) {
673 entries_.at(first_free_entry) = entry_info;
674 }
675 entries_.at(first_free_entry).accessed = false;
676 HashMap::Entry* entry = entries_map_.Lookup(
677 entry_info.addr, ComputePointerHash(entry_info.addr), false);
678 DCHECK(entry);
679 entry->value = reinterpret_cast<void*>(first_free_entry);
680 ++first_free_entry;
681 } else {
682 if (entry_info.addr) {
683 entries_map_.Remove(entry_info.addr,
684 ComputePointerHash(entry_info.addr));
685 }
686 }
687 }
688 entries_.Rewind(first_free_entry);
689 DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
690 entries_map_.occupancy());
691}
692
693
694SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
695 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
696 const char* label = info->GetLabel();
697 id ^= StringHasher::HashSequentialString(label,
698 static_cast<int>(strlen(label)),
699 heap_->HashSeed());
700 intptr_t element_count = info->GetElementCount();
701 if (element_count != -1)
702 id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
703 v8::internal::kZeroHashSeed);
704 return id << 1;
705}
706
707
708size_t HeapObjectsMap::GetUsedMemorySize() const {
709 return
710 sizeof(*this) +
711 sizeof(HashMap::Entry) * entries_map_.capacity() +
712 GetMemoryUsedByList(entries_) +
713 GetMemoryUsedByList(time_intervals_);
714}
715
716
717HeapEntriesMap::HeapEntriesMap()
718 : entries_(HashMap::PointersMatch) {
719}
720
721
722int HeapEntriesMap::Map(HeapThing thing) {
723 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
724 if (cache_entry == NULL) return HeapEntry::kNoEntry;
725 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
726}
727
728
729void HeapEntriesMap::Pair(HeapThing thing, int entry) {
730 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
731 DCHECK(cache_entry->value == NULL);
732 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
733}
734
735
736HeapObjectsSet::HeapObjectsSet()
737 : entries_(HashMap::PointersMatch) {
738}
739
740
741void HeapObjectsSet::Clear() {
742 entries_.Clear();
743}
744
745
746bool HeapObjectsSet::Contains(Object* obj) {
747 if (!obj->IsHeapObject()) return false;
748 HeapObject* object = HeapObject::cast(obj);
749 return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
750}
751
752
753void HeapObjectsSet::Insert(Object* obj) {
754 if (!obj->IsHeapObject()) return;
755 HeapObject* object = HeapObject::cast(obj);
756 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
757}
758
759
760const char* HeapObjectsSet::GetTag(Object* obj) {
761 HeapObject* object = HeapObject::cast(obj);
762 HashMap::Entry* cache_entry =
763 entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
764 return cache_entry != NULL
765 ? reinterpret_cast<const char*>(cache_entry->value)
766 : NULL;
767}
768
769
770void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
771 if (!obj->IsHeapObject()) return;
772 HeapObject* object = HeapObject::cast(obj);
773 HashMap::Entry* cache_entry =
774 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
775 cache_entry->value = const_cast<char*>(tag);
776}
777
778
779V8HeapExplorer::V8HeapExplorer(
780 HeapSnapshot* snapshot,
781 SnapshottingProgressReportingInterface* progress,
782 v8::HeapProfiler::ObjectNameResolver* resolver)
783 : heap_(snapshot->profiler()->heap_object_map()->heap()),
784 snapshot_(snapshot),
785 names_(snapshot_->profiler()->names()),
786 heap_object_map_(snapshot_->profiler()->heap_object_map()),
787 progress_(progress),
788 filler_(NULL),
789 global_object_name_resolver_(resolver) {
790}
791
792
793V8HeapExplorer::~V8HeapExplorer() {
794}
795
796
797HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
798 return AddEntry(reinterpret_cast<HeapObject*>(ptr));
799}
800
801
802HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
803 if (object->IsJSFunction()) {
804 JSFunction* func = JSFunction::cast(object);
805 SharedFunctionInfo* shared = func->shared();
806 const char* name = shared->bound() ? "native_bind" :
807 names_->GetName(String::cast(shared->name()));
808 return AddEntry(object, HeapEntry::kClosure, name);
809 } else if (object->IsJSRegExp()) {
810 JSRegExp* re = JSRegExp::cast(object);
811 return AddEntry(object,
812 HeapEntry::kRegExp,
813 names_->GetName(re->Pattern()));
814 } else if (object->IsJSObject()) {
815 const char* name = names_->GetName(
816 GetConstructorName(JSObject::cast(object)));
817 if (object->IsJSGlobalObject()) {
818 const char* tag = objects_tags_.GetTag(object);
819 if (tag != NULL) {
820 name = names_->GetFormatted("%s / %s", name, tag);
821 }
822 }
823 return AddEntry(object, HeapEntry::kObject, name);
824 } else if (object->IsString()) {
825 String* string = String::cast(object);
826 if (string->IsConsString())
827 return AddEntry(object,
828 HeapEntry::kConsString,
829 "(concatenated string)");
830 if (string->IsSlicedString())
831 return AddEntry(object,
832 HeapEntry::kSlicedString,
833 "(sliced string)");
834 return AddEntry(object,
835 HeapEntry::kString,
836 names_->GetName(String::cast(object)));
837 } else if (object->IsSymbol()) {
838 return AddEntry(object, HeapEntry::kSymbol, "symbol");
839 } else if (object->IsCode()) {
840 return AddEntry(object, HeapEntry::kCode, "");
841 } else if (object->IsSharedFunctionInfo()) {
842 String* name = String::cast(SharedFunctionInfo::cast(object)->name());
843 return AddEntry(object,
844 HeapEntry::kCode,
845 names_->GetName(name));
846 } else if (object->IsScript()) {
847 Object* name = Script::cast(object)->name();
848 return AddEntry(object,
849 HeapEntry::kCode,
850 name->IsString()
851 ? names_->GetName(String::cast(name))
852 : "");
853 } else if (object->IsNativeContext()) {
854 return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
855 } else if (object->IsContext()) {
856 return AddEntry(object, HeapEntry::kObject, "system / Context");
857 } else if (object->IsFixedArray() ||
858 object->IsFixedDoubleArray() ||
859 object->IsByteArray() ||
860 object->IsExternalArray()) {
861 return AddEntry(object, HeapEntry::kArray, "");
862 } else if (object->IsHeapNumber()) {
863 return AddEntry(object, HeapEntry::kHeapNumber, "number");
864 }
865 return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
866}
867
868
869HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
870 HeapEntry::Type type,
871 const char* name) {
872 return AddEntry(object->address(), type, name, object->Size());
873}
874
875
876HeapEntry* V8HeapExplorer::AddEntry(Address address,
877 HeapEntry::Type type,
878 const char* name,
879 size_t size) {
880 SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
881 address, static_cast<unsigned int>(size));
882 unsigned trace_node_id = 0;
883 if (AllocationTracker* allocation_tracker =
884 snapshot_->profiler()->allocation_tracker()) {
885 trace_node_id =
886 allocation_tracker->address_to_trace()->GetTraceNodeId(address);
887 }
888 return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
889}
890
891
892class SnapshotFiller {
893 public:
894 explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
895 : snapshot_(snapshot),
896 names_(snapshot->profiler()->names()),
897 entries_(entries) { }
898 HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
899 HeapEntry* entry = allocator->AllocateEntry(ptr);
900 entries_->Pair(ptr, entry->index());
901 return entry;
902 }
903 HeapEntry* FindEntry(HeapThing ptr) {
904 int index = entries_->Map(ptr);
905 return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
906 }
907 HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
908 HeapEntry* entry = FindEntry(ptr);
909 return entry != NULL ? entry : AddEntry(ptr, allocator);
910 }
911 void SetIndexedReference(HeapGraphEdge::Type type,
912 int parent,
913 int index,
914 HeapEntry* child_entry) {
915 HeapEntry* parent_entry = &snapshot_->entries()[parent];
916 parent_entry->SetIndexedReference(type, index, child_entry);
917 }
918 void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
919 int parent,
920 HeapEntry* child_entry) {
921 HeapEntry* parent_entry = &snapshot_->entries()[parent];
922 int index = parent_entry->children_count() + 1;
923 parent_entry->SetIndexedReference(type, index, child_entry);
924 }
925 void SetNamedReference(HeapGraphEdge::Type type,
926 int parent,
927 const char* reference_name,
928 HeapEntry* child_entry) {
929 HeapEntry* parent_entry = &snapshot_->entries()[parent];
930 parent_entry->SetNamedReference(type, reference_name, child_entry);
931 }
932 void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
933 int parent,
934 HeapEntry* child_entry) {
935 HeapEntry* parent_entry = &snapshot_->entries()[parent];
936 int index = parent_entry->children_count() + 1;
937 parent_entry->SetNamedReference(
938 type,
939 names_->GetName(index),
940 child_entry);
941 }
942
943 private:
944 HeapSnapshot* snapshot_;
945 StringsStorage* names_;
946 HeapEntriesMap* entries_;
947};
948
949
950const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
951 switch (object->map()->instance_type()) {
952 case MAP_TYPE:
953 switch (Map::cast(object)->instance_type()) {
954#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
955 case instance_type: return "system / Map (" #Name ")";
956 STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
957#undef MAKE_STRING_MAP_CASE
958 default: return "system / Map";
959 }
960 case CELL_TYPE: return "system / Cell";
961 case PROPERTY_CELL_TYPE: return "system / PropertyCell";
962 case FOREIGN_TYPE: return "system / Foreign";
963 case ODDBALL_TYPE: return "system / Oddball";
964#define MAKE_STRUCT_CASE(NAME, Name, name) \
965 case NAME##_TYPE: return "system / "#Name;
966 STRUCT_LIST(MAKE_STRUCT_CASE)
967#undef MAKE_STRUCT_CASE
968 default: return "system";
969 }
970}
971
972
973int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
974 int objects_count = 0;
975 for (HeapObject* obj = iterator->next();
976 obj != NULL;
977 obj = iterator->next()) {
978 objects_count++;
979 }
980 return objects_count;
981}
982
983
// ObjectVisitor that reports every tagged pointer inside |parent_obj| as a
// hidden (indexed) reference from |parent|'s snapshot entry.  Fields already
// reported explicitly elsewhere are skipped via a pointer-tagging trick:
// MarkVisitedField() sets both low bits of the stored pointer before this
// visitor runs, and CheckVisitedAndUnmark() restores the original pointer
// when it encounters the mark.
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(0) {
  }
  // Code entries are stored as raw entry-point addresses; resolve them back
  // to the owning Code object and report a named "code" reference.
  void VisitCodeEntry(Address entry_address) {
     Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
     generator_->SetInternalReference(parent_obj_, parent_, "code", code);
     generator_->TagCodeObject(code);
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      // Indices advance even for skipped fields so hidden-edge indices stay
      // aligned with the object's field layout.
      ++next_index_;
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }
  // Tags the pointer stored at obj+offset so VisitPointers() will skip it.
  // A negative offset means "no such field" and is ignored.
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    DCHECK(Memory::Object_at(field)->IsHeapObject());
    intptr_t p = reinterpret_cast<intptr_t>(Memory::Object_at(field));
    DCHECK(!IsMarked(p));
    intptr_t p_tagged = p | kTag;
    Memory::Object_at(field) = reinterpret_cast<Object*>(p_tagged);
  }

 private:
  // Returns true — and restores the original heap-object pointer — if the
  // field was previously marked by MarkVisitedField().
  bool CheckVisitedAndUnmark(Object** field) {
    intptr_t p = reinterpret_cast<intptr_t>(*field);
    if (IsMarked(p)) {
      intptr_t p_untagged = (p & ~kTaggingMask) | kHeapObjectTag;
      *field = reinterpret_cast<Object*>(p_untagged);
      DCHECK((*field)->IsHeapObject());
      return true;
    }
    return false;
  }

  // Both low bits set distinguishes a marked field from an ordinary heap
  // pointer (which carries only kHeapObjectTag).
  static const intptr_t kTaggingMask = 3;
  static const intptr_t kTag = 3;

  static bool IsMarked(intptr_t p) { return (p & kTaggingMask) == kTag; }

  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};
1038
1039
// First extraction pass: dispatches on the object's type and records its
// type-specific references.  FixedArrays are deferred to pass 2 because
// their weak/strong treatment depends on who owns them (see
// IterateAndExtractReferences).  Returns true if the object was handled.
bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
  if (obj->IsFixedArray()) return false;  // FixedArrays are processed on pass 2

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSArrayBuffer()) {
    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
  } else if (obj->IsJSObject()) {
    // Collections get their table reference recorded first, then fall
    // through to the generic JSObject extraction.
    if (obj->IsJSWeakSet()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
    } else if (obj->IsJSWeakMap()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
    } else if (obj->IsJSSet()) {
      ExtractJSCollectionReferences(entry, JSSet::cast(obj));
    } else if (obj->IsJSMap()) {
      ExtractJSCollectionReferences(entry, JSMap::cast(obj));
    }
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsSymbol()) {
    ExtractSymbolReferences(entry, Symbol::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorInfo()) {
    ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  return true;
}
1087
1088
1089bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
1090 if (!obj->IsFixedArray()) return false;
1091
1092 if (obj->IsContext()) {
1093 ExtractContextReferences(entry, Context::cast(obj));
1094 } else {
1095 ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
1096 }
1097 return true;
1098}
1099
1100
1101void V8HeapExplorer::ExtractJSGlobalProxyReferences(
1102 int entry, JSGlobalProxy* proxy) {
1103 SetInternalReference(proxy, entry,
1104 "native_context", proxy->native_context(),
1105 JSGlobalProxy::kNativeContextOffset);
1106}
1107
1108
// Records the references held by a generic JSObject: closure bindings,
// named properties, elements, embedder internal fields, __proto__, and the
// extra fields of JSFunction / GlobalObject / JSArrayBufferView subtypes.
// Edge emission order matters for snapshot stability — keep it as is.
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  PrototypeIterator iter(heap_->isolate(), js_obj);
  SetPropertyReference(obj, entry, heap_->proto_string(), iter.GetCurrent());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    // The prototype_or_initial_map slot stores either the function's
    // "prototype" property or, once instances exist, the initial map.
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    // next_function_link is traversed weakly by the GC, so report it as a
    // weak edge; the asserts pin the field layout this code relies on.
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset
                  == JSFunction::kNonWeakFieldsEndOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
                  == JSFunction::kSize);
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_proxy", global_obj->global_proxy(),
                         GlobalObject::kGlobalProxyOffset);
    // Exactly the three fields above — fails to compile if one is added.
    STATIC_ASSERT(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
                  3 * kPointerSize);
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer(),
                         JSArrayBufferView::kBufferOffset);
    SetWeakReference(view, entry, "weak_next", view->weak_next(),
                     JSArrayBufferView::kWeakNextOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}
1190
1191
1192void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1193 if (string->IsConsString()) {
1194 ConsString* cs = ConsString::cast(string);
1195 SetInternalReference(cs, entry, "first", cs->first(),
1196 ConsString::kFirstOffset);
1197 SetInternalReference(cs, entry, "second", cs->second(),
1198 ConsString::kSecondOffset);
1199 } else if (string->IsSlicedString()) {
1200 SlicedString* ss = SlicedString::cast(string);
1201 SetInternalReference(ss, entry, "parent", ss->parent(),
1202 SlicedString::kParentOffset);
1203 }
1204}
1205
1206
1207void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
1208 SetInternalReference(symbol, entry,
1209 "name", symbol->name(),
1210 Symbol::kNameOffset);
1211}
1212
1213
1214void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
1215 JSCollection* collection) {
1216 SetInternalReference(collection, entry, "table", collection->table(),
1217 JSCollection::kTableOffset);
1218}
1219
1220
1221void V8HeapExplorer::ExtractJSWeakCollectionReferences(
1222 int entry, JSWeakCollection* collection) {
1223 MarkAsWeakContainer(collection->table());
1224 SetInternalReference(collection, entry,
1225 "table", collection->table(),
1226 JSWeakCollection::kTableOffset);
1227}
1228
1229
// Records the references of a Context: context-allocated locals (named via
// the scope info), the fixed header slots, and — for native contexts — the
// per-isolate field list.  Slots at or past FIRST_WEAK_SLOT are reported as
// weak edges, matching the GC's treatment of those slots.
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    // A named function expression stores itself in its own context slot.
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

// Emits a strong edge for slots before FIRST_WEAK_SLOT (plus MAP_CACHE_INDEX)
// and a weak edge for everything else.
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT || \
      Context::index == Context::MAP_CACHE_INDEX) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
                         FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
                     FixedArray::OffsetOfElementAt(Context::index)); \
  }
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
    EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
    EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
#undef EXTRACT_CONTEXT_FIELD
    // The four explicit fields above must be exactly the weak tail of the
    // native context; these asserts break the build if the layout drifts.
    STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
                  Context::FIRST_WEAK_SLOT);
    STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
                  Context::NATIVE_CONTEXT_SLOTS);
    STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 4 ==
                  Context::NATIVE_CONTEXT_SLOTS);
  }
}
1286
1287
1288void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1289 if (map->HasTransitionArray()) {
1290 TransitionArray* transitions = map->transitions();
1291 int transitions_entry = GetEntry(transitions)->index();
1292 Object* back_pointer = transitions->back_pointer_storage();
1293 TagObject(back_pointer, "(back pointer)");
1294 SetInternalReference(transitions, transitions_entry,
1295 "back_pointer", back_pointer);
1296
1297 if (FLAG_collect_maps && map->CanTransition()) {
1298 if (!transitions->IsSimpleTransition()) {
1299 if (transitions->HasPrototypeTransitions()) {
1300 FixedArray* prototype_transitions =
1301 transitions->GetPrototypeTransitions();
1302 MarkAsWeakContainer(prototype_transitions);
1303 TagObject(prototype_transitions, "(prototype transitions");
1304 SetInternalReference(transitions, transitions_entry,
1305 "prototype_transitions", prototype_transitions);
1306 }
1307 // TODO(alph): transitions keys are strong links.
1308 MarkAsWeakContainer(transitions);
1309 }
1310 }
1311
1312 TagObject(transitions, "(transition array)");
1313 SetInternalReference(map, entry,
1314 "transitions", transitions,
1315 Map::kTransitionsOrBackPointerOffset);
1316 } else {
1317 Object* back_pointer = map->GetBackPointer();
1318 TagObject(back_pointer, "(back pointer)");
1319 SetInternalReference(map, entry,
1320 "back_pointer", back_pointer,
1321 Map::kTransitionsOrBackPointerOffset);
1322 }
1323 DescriptorArray* descriptors = map->instance_descriptors();
1324 TagObject(descriptors, "(map descriptors)");
1325 SetInternalReference(map, entry,
1326 "descriptors", descriptors,
1327 Map::kDescriptorsOffset);
1328
1329 MarkAsWeakContainer(map->code_cache());
1330 SetInternalReference(map, entry,
1331 "code_cache", map->code_cache(),
1332 Map::kCodeCacheOffset);
1333 SetInternalReference(map, entry,
1334 "prototype", map->prototype(), Map::kPrototypeOffset);
1335 SetInternalReference(map, entry,
1336 "constructor", map->constructor(),
1337 Map::kConstructorOffset);
1338 TagObject(map->dependent_code(), "(dependent code)");
1339 MarkAsWeakContainer(map->dependent_code());
1340 SetInternalReference(map, entry,
1341 "dependent_code", map->dependent_code(),
1342 Map::kDependentCodeOffset);
1343}
1344
1345
// Records the references of a SharedFunctionInfo and tags its code objects
// with names derived from the function's debug name (or the code kind when
// the function is anonymous).
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  String* shared_name = shared->DebugName();
  const char* name = NULL;
  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
    name = names_->GetName(shared_name);
    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
  } else {
    // Anonymous function: fall back to naming the code by its kind.
    TagObject(shared->code(), names_->GetFormatted("(%s code)",
        Code::Kind2String(shared->code()->kind())));
  }

  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  // The construct stub gets its own descriptive tag, including the function
  // name when one is available.
  const char* construct_stub_name = name ?
      names_->GetFormatted("(construct stub code for %s)", name) :
      "(construct stub code)";
  TagObject(shared->construct_stub(), construct_stub_name);
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "optimized_code_map", shared->optimized_code_map(),
                       SharedFunctionInfo::kOptimizedCodeMapOffset);
  SetInternalReference(obj, entry,
                       "feedback_vector", shared->feedback_vector(),
                       SharedFunctionInfo::kFeedbackVectorOffset);
}
1398
1399
1400void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1401 HeapObject* obj = script;
1402 SetInternalReference(obj, entry,
1403 "source", script->source(),
1404 Script::kSourceOffset);
1405 SetInternalReference(obj, entry,
1406 "name", script->name(),
1407 Script::kNameOffset);
1408 SetInternalReference(obj, entry,
1409 "context_data", script->context_data(),
1410 Script::kContextOffset);
1411 TagObject(script->line_ends(), "(script line ends)");
1412 SetInternalReference(obj, entry,
1413 "line_ends", script->line_ends(),
1414 Script::kLineEndsOffset);
1415}
1416
1417
// Records references of an AccessorInfo plus the subtype-specific fields of
// its declared/executable variants.
void V8HeapExplorer::ExtractAccessorInfoReferences(
    int entry, AccessorInfo* accessor_info) {
  SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
                       AccessorInfo::kNameOffset);
  SetInternalReference(accessor_info, entry, "expected_receiver_type",
                       accessor_info->expected_receiver_type(),
                       AccessorInfo::kExpectedReceiverTypeOffset);
  if (accessor_info->IsDeclaredAccessorInfo()) {
    DeclaredAccessorInfo* declared_accessor_info =
        DeclaredAccessorInfo::cast(accessor_info);
    SetInternalReference(declared_accessor_info, entry, "descriptor",
                         declared_accessor_info->descriptor(),
                         DeclaredAccessorInfo::kDescriptorOffset);
  } else if (accessor_info->IsExecutableAccessorInfo()) {
    ExecutableAccessorInfo* executable_accessor_info =
        ExecutableAccessorInfo::cast(accessor_info);
    SetInternalReference(executable_accessor_info, entry, "getter",
                         executable_accessor_info->getter(),
                         ExecutableAccessorInfo::kGetterOffset);
    SetInternalReference(executable_accessor_info, entry, "setter",
                         executable_accessor_info->setter(),
                         ExecutableAccessorInfo::kSetterOffset);
    SetInternalReference(executable_accessor_info, entry, "data",
                         executable_accessor_info->data(),
                         ExecutableAccessorInfo::kDataOffset);
  }
}
1445
1446
1447void V8HeapExplorer::ExtractAccessorPairReferences(
1448 int entry, AccessorPair* accessors) {
1449 SetInternalReference(accessors, entry, "getter", accessors->getter(),
1450 AccessorPair::kGetterOffset);
1451 SetInternalReference(accessors, entry, "setter", accessors->setter(),
1452 AccessorPair::kSetterOffset);
1453}
1454
1455
1456void V8HeapExplorer::ExtractCodeCacheReferences(
1457 int entry, CodeCache* code_cache) {
1458 TagObject(code_cache->default_cache(), "(default code cache)");
1459 SetInternalReference(code_cache, entry,
1460 "default_cache", code_cache->default_cache(),
1461 CodeCache::kDefaultCacheOffset);
1462 TagObject(code_cache->normal_type_cache(), "(code type cache)");
1463 SetInternalReference(code_cache, entry,
1464 "type_cache", code_cache->normal_type_cache(),
1465 CodeCache::kNormalTypeCacheOffset);
1466}
1467
1468
1469void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
1470 TagObject(code, names_->GetFormatted("(%s builtin)", name));
1471}
1472
1473
1474void V8HeapExplorer::TagCodeObject(Code* code) {
1475 if (code->kind() == Code::STUB) {
1476 TagObject(code, names_->GetFormatted(
1477 "(%s code)", CodeStub::MajorName(
1478 CodeStub::GetMajorKey(code), true)));
1479 }
1480}
1481
1482
// Records the references held by a Code object: relocation info, handler
// table, deopt data, and kind-specific fields (type feedback for FUNCTION,
// weak next_code_link for OPTIMIZED_FUNCTION).
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagCodeObject(code);
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
  SetInternalReference(code, entry,
                       "constant_pool", code->constant_pool(),
                       Code::kConstantPoolOffset);
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    // Optimized code is chained on a list traversed weakly by the GC.
    SetWeakReference(code, entry,
                     "next_code_link", code->next_code_link(),
                     Code::kNextCodeLinkOffset);
  }
}
1513
1514
1515void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
1516 SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
1517}
1518
1519
1520void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1521 SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
1522}
1523
1524
// A PropertyCell is a Cell (value slot) plus type info and a weakly-held
// dependent-code list.
void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
                                                   PropertyCell* cell) {
  ExtractCellReferences(entry, cell);
  SetInternalReference(cell, entry, "type", cell->type(),
                       PropertyCell::kTypeOffset);
  MarkAsWeakContainer(cell->dependent_code());
  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
                       PropertyCell::kDependentCodeOffset);
}
1534
1535
// Records the references of an AllocationSite; dependent code is a weak
// container, and the weak_next chain is intentionally omitted.
void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
                                                     AllocationSite* site) {
  SetInternalReference(site, entry, "transition_info", site->transition_info(),
                       AllocationSite::kTransitionInfoOffset);
  SetInternalReference(site, entry, "nested_site", site->nested_site(),
                       AllocationSite::kNestedSiteOffset);
  MarkAsWeakContainer(site->dependent_code());
  SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
                       AllocationSite::kDependentCodeOffset);
  // Do not visit weak_next as it is not visited by the StaticVisitor,
  // and we're not very interested in weak_next field here.
  STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
                AllocationSite::BodyDescriptor::kEndOffset);
}
1550
1551
1552class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1553 public:
1554 JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1555 : size_(size)
1556 , explorer_(explorer) {
1557 }
1558 virtual HeapEntry* AllocateEntry(HeapThing ptr) {
1559 return explorer_->AddEntry(
1560 static_cast<Address>(ptr),
1561 HeapEntry::kNative, "system / JSArrayBufferData", size_);
1562 }
1563 private:
1564 size_t size_;
1565 V8HeapExplorer* explorer_;
1566};
1567
1568
// Records a JSArrayBuffer's weak chain links and, when present, a synthetic
// native entry representing the off-heap backing store.
void V8HeapExplorer::ExtractJSArrayBufferReferences(
    int entry, JSArrayBuffer* buffer) {
  SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
                   JSArrayBuffer::kWeakNextOffset);
  SetWeakReference(buffer, entry,
                   "weak_first_view", buffer->weak_first_view(),
                   JSArrayBuffer::kWeakFirstViewOffset);
  // Setup a reference to a native memory backing_store object.
  if (!buffer->backing_store())
    return;
  size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
  JSArrayBufferDataEntryAllocator allocator(data_size, this);
  HeapEntry* data_entry =
      filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             entry, "backing_store", data_entry);
}
1586
1587
1588void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
1589 bool is_weak = weak_containers_.Contains(array);
1590 for (int i = 0, l = array->length(); i < l; ++i) {
1591 if (is_weak) {
1592 SetWeakReference(array, entry,
1593 i, array->get(i), array->OffsetOfElementAt(i));
1594 } else {
1595 SetInternalReference(array, entry,
1596 i, array->get(i), array->OffsetOfElementAt(i));
1597 }
1598 }
1599}
1600
1601
// For a bound function, records edges to the bound receiver, the target
// function, and each bound argument (named "bound_argument_N").
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    // The bindings array layout: [bound this, bound function, args...].
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = names_->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  }
}
1622
1623
// Records an object's named properties.  Fast-mode objects are walked via
// their descriptor array (in-object/field values vs. descriptor-stored
// values); dictionary-mode objects via their NameDictionary.  Hidden
// properties are reported as an internal "hidden_properties" edge instead
// of a regular property edge.
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      PropertyDetails details = descs->GetDetails(i);
      switch (details.location()) {
        case IN_OBJECT: {
          Representation r = details.representation();
          // Smi/double fields store no heap pointers — nothing to record.
          if (r.IsSmi() || r.IsDouble()) break;

          Name* k = descs->GetKey(i);
          FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i);
          Object* value = js_obj->RawFastPropertyAt(field_index);
          // Out-of-object (properties-backing-store) fields report no offset.
          int field_offset =
              field_index.is_inobject() ? field_index.offset() : -1;

          if (k != heap_->hidden_string()) {
            SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, k,
                                               value, NULL, field_offset);
          } else {
            TagObject(value, "(hidden properties)");
            SetInternalReference(js_obj, entry, "hidden_properties", value,
                                 field_offset);
          }
          break;
        }
        case IN_DESCRIPTOR:
          // The value lives in the descriptor array itself (e.g. accessors).
          SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                             descs->GetKey(i),
                                             descs->GetValue(i));
          break;
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsPropertyCell()
            ? PropertyCell::cast(target)->value()
            : target;
        if (k == heap_->hidden_string()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
          continue;
        }
        PropertyDetails details = dictionary->DetailsAt(i);
        SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                           Name::cast(k), value);
      }
    }
  }
}
1681
1682
// If |callback_obj| is an AccessorPair, records it under the property name
// and additionally exposes non-oddball getter/setter objects as "get %s" /
// "set %s" property edges.  Non-pair callbacks are ignored.
void V8HeapExplorer::ExtractAccessorPairProperty(JSObject* js_obj, int entry,
                                                 Name* key,
                                                 Object* callback_obj,
                                                 int field_offset) {
  if (!callback_obj->IsAccessorPair()) return;
  AccessorPair* accessors = AccessorPair::cast(callback_obj);
  SetPropertyReference(js_obj, entry, key, accessors, NULL, field_offset);
  Object* getter = accessors->getter();
  if (!getter->IsOddball()) {
    SetPropertyReference(js_obj, entry, key, getter, "get %s");
  }
  Object* setter = accessors->setter();
  if (!setter->IsOddball()) {
    SetPropertyReference(js_obj, entry, key, setter, "set %s");
  }
}
1699
1700
1701void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1702 if (js_obj->HasFastObjectElements()) {
1703 FixedArray* elements = FixedArray::cast(js_obj->elements());
1704 int length = js_obj->IsJSArray() ?
1705 Smi::cast(JSArray::cast(js_obj)->length())->value() :
1706 elements->length();
1707 for (int i = 0; i < length; ++i) {
1708 if (!elements->get(i)->IsTheHole()) {
1709 SetElementReference(js_obj, entry, i, elements->get(i));
1710 }
1711 }
1712 } else if (js_obj->HasDictionaryElements()) {
1713 SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1714 int length = dictionary->Capacity();
1715 for (int i = 0; i < length; ++i) {
1716 Object* k = dictionary->KeyAt(i);
1717 if (dictionary->IsKey(k)) {
1718 DCHECK(k->IsNumber());
1719 uint32_t index = static_cast<uint32_t>(k->Number());
1720 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1721 }
1722 }
1723 }
1724}
1725
1726
1727void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1728 int length = js_obj->GetInternalFieldCount();
1729 for (int i = 0; i < length; ++i) {
1730 Object* o = js_obj->GetInternalField(i);
1731 SetInternalReference(
1732 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1733 }
1734}
1735
1736
// Returns the display name for an object's constructor.  Functions are
// always shown as closures; everything else uses the map's constructor name.
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_string();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_string()) {
    // TODO(verwaest): Try to get object.constructor.name in this case.
    // This requires handlification of the V8HeapExplorer.
  }
  return object->constructor_name();
}
1747
1748
// Returns the snapshot entry for |obj|, creating one on first sight.
// Smis and other non-heap values have no entry (NULL).
HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
  return filler_->FindOrAddEntry(obj, this);
}
1753
1754
// Collects the heap's root references in two passes — first only the strong
// roots, then all roots — and then merges the two ordered lists to decide
// which root edges are weak (present in "all" but not in "strong").
// Synchronize() records, per root category (SyncTag), where that category
// ends in the all-references list.
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  // Marks the end index of one root category in all_references_.
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  explicit RootsReferencesExtractor(Heap* heap)
      : collecting_all_references_(false),
        previous_reference_count_(0),
        heap_(heap) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  // Switches from the strong-only pass to the all-roots pass.
  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  // Merge step: walks all_references_ in order, marking a root weak unless
  // it is matched by the next pending strong reference.  Builtin code roots
  // are additionally tagged with their builtin names (relying on the
  // visitation order matching the builtins table).
  void FillReferences(V8HeapExplorer* explorer) {
    DCHECK(strong_references_.length() <= all_references_.length());
    Builtins* builtins = heap_->isolate()->builtins();
    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
    while (all_index < all_references_.length()) {
      bool is_strong = strong_index < strong_references_.length()
          && strong_references_[strong_index] == all_references_[all_index];
      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                      !is_strong,
                                      all_references_[all_index]);
      if (reference_tags_[tags_index].tag ==
          VisitorSynchronization::kBuiltins) {
        DCHECK(all_references_[all_index]->IsCode());
        explorer->TagBuiltinCodeObject(
            Code::cast(all_references_[all_index]),
            builtins->name(builtin_index++));
      }
      ++all_index;
      if (is_strong) ++strong_index;
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  // Called by the heap between root categories; records the category
  // boundary (only during the all-roots pass, and only if it is non-empty).
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
  Heap* heap_;
};
1820
1821
1822bool V8HeapExplorer::IterateAndExtractReferences(
1823 SnapshotFiller* filler) {
1824 filler_ = filler;
1825
1826 // Create references to the synthetic roots.
1827 SetRootGcRootsReference();
1828 for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
1829 SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
1830 }
1831
1832 // Make sure builtin code objects get their builtin tags
1833 // first. Otherwise a particular JSFunction object could set
1834 // its custom name to a generic builtin.
1835 RootsReferencesExtractor extractor(heap_);
1836 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1837 extractor.SetCollectingAllReferences();
1838 heap_->IterateRoots(&extractor, VISIT_ALL);
1839 extractor.FillReferences(this);
1840
1841 // We have to do two passes as sometimes FixedArrays are used
1842 // to weakly hold their items, and it's impossible to distinguish
1843 // between these cases without processing the array owner first.
1844 bool interrupted =
1845 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
1846 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();
1847
1848 if (interrupted) {
1849 filler_ = NULL;
1850 return false;
1851 }
1852
1853 filler_ = NULL;
1854 return progress_->ProgressReport(true);
1855}
1856
1857
// Runs one full heap pass with the given extraction method. Returns true
// if the embedder requested an interrupt via a progress report.
template<V8HeapExplorer::ExtractReferencesMethod extractor>
bool V8HeapExplorer::IterateAndExtractSinglePass() {
  // Now iterate the whole heap.
  bool interrupted = false;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    // After an interrupt, keep draining the iterator but skip the work.
    if (interrupted) continue;

    HeapEntry* heap_entry = GetEntry(obj);
    int entry = heap_entry->index();
    // The extractor returns whether generic field extraction should follow.
    if ((this->*extractor)(entry, obj)) {
      SetInternalReference(obj, entry,
                           "map", obj->map(), HeapObject::kMapOffset);
      // Extract unvisited fields as hidden references and restore tags
      // of visited fields.
      IndexedReferencesExtractor refs_extractor(this, obj, entry);
      obj->Iterate(&refs_extractor);
    }

    if (!progress_->ProgressReport(false)) interrupted = true;
  }
  return interrupted;
}
1884
1885
1886bool V8HeapExplorer::IsEssentialObject(Object* object) {
1887 return object->IsHeapObject()
1888 && !object->IsOddball()
1889 && object != heap_->empty_byte_array()
1890 && object != heap_->empty_fixed_array()
1891 && object != heap_->empty_descriptor_array()
1892 && object != heap_->fixed_array_map()
1893 && object != heap_->cell_map()
1894 && object != heap_->global_property_cell_map()
1895 && object != heap_->shared_function_info_map()
1896 && object != heap_->free_space_map()
1897 && object != heap_->one_pointer_filler_map()
1898 && object != heap_->two_pointer_filler_map();
1899}
1900
1901
1902void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1903 int parent_entry,
1904 String* reference_name,
1905 Object* child_obj,
1906 int field_offset) {
1907 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1908 HeapEntry* child_entry = GetEntry(child_obj);
1909 if (child_entry != NULL) {
1910 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1911 parent_entry,
1912 names_->GetName(reference_name),
1913 child_entry);
1914 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1915 }
1916}
1917
1918
1919void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1920 int parent_entry,
1921 const char* reference_name,
1922 Object* child_obj) {
1923 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1924 HeapEntry* child_entry = GetEntry(child_obj);
1925 if (child_entry != NULL) {
1926 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1927 parent_entry,
1928 reference_name,
1929 child_entry);
1930 }
1931}
1932
1933
1934void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1935 int parent_entry,
1936 int index,
1937 Object* child_obj) {
1938 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1939 HeapEntry* child_entry = GetEntry(child_obj);
1940 if (child_entry != NULL) {
1941 filler_->SetIndexedReference(HeapGraphEdge::kElement,
1942 parent_entry,
1943 index,
1944 child_entry);
1945 }
1946}
1947
1948
1949void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1950 int parent_entry,
1951 const char* reference_name,
1952 Object* child_obj,
1953 int field_offset) {
1954 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1955 HeapEntry* child_entry = GetEntry(child_obj);
1956 if (child_entry == NULL) return;
1957 if (IsEssentialObject(child_obj)) {
1958 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1959 parent_entry,
1960 reference_name,
1961 child_entry);
1962 }
1963 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1964}
1965
1966
1967void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1968 int parent_entry,
1969 int index,
1970 Object* child_obj,
1971 int field_offset) {
1972 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1973 HeapEntry* child_entry = GetEntry(child_obj);
1974 if (child_entry == NULL) return;
1975 if (IsEssentialObject(child_obj)) {
1976 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1977 parent_entry,
1978 names_->GetName(index),
1979 child_entry);
1980 }
1981 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1982}
1983
1984
1985void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1986 int parent_entry,
1987 int index,
1988 Object* child_obj) {
1989 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1990 HeapEntry* child_entry = GetEntry(child_obj);
1991 if (child_entry != NULL && IsEssentialObject(child_obj)) {
1992 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1993 parent_entry,
1994 index,
1995 child_entry);
1996 }
1997}
1998
1999
2000void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2001 int parent_entry,
2002 const char* reference_name,
2003 Object* child_obj,
2004 int field_offset) {
2005 DCHECK(parent_entry == GetEntry(parent_obj)->index());
2006 HeapEntry* child_entry = GetEntry(child_obj);
2007 if (child_entry == NULL) return;
2008 if (IsEssentialObject(child_obj)) {
2009 filler_->SetNamedReference(HeapGraphEdge::kWeak,
2010 parent_entry,
2011 reference_name,
2012 child_entry);
2013 }
2014 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2015}
2016
2017
2018void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2019 int parent_entry,
2020 int index,
2021 Object* child_obj,
2022 int field_offset) {
2023 DCHECK(parent_entry == GetEntry(parent_obj)->index());
2024 HeapEntry* child_entry = GetEntry(child_obj);
2025 if (child_entry == NULL) return;
2026 if (IsEssentialObject(child_obj)) {
2027 filler_->SetNamedReference(HeapGraphEdge::kWeak,
2028 parent_entry,
2029 names_->GetFormatted("%d", index),
2030 child_entry);
2031 }
2032 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2033}
2034
2035
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002036void V8HeapExplorer::SetDataOrAccessorPropertyReference(
2037 PropertyKind kind, JSObject* parent_obj, int parent_entry,
2038 Name* reference_name, Object* child_obj, const char* name_format_string,
2039 int field_offset) {
2040 if (kind == ACCESSOR) {
2041 ExtractAccessorPairProperty(parent_obj, parent_entry, reference_name,
2042 child_obj, field_offset);
2043 } else {
2044 SetPropertyReference(parent_obj, parent_entry, reference_name, child_obj,
2045 name_format_string, field_offset);
2046 }
2047}
2048
2049
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002050void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2051 int parent_entry,
2052 Name* reference_name,
2053 Object* child_obj,
2054 const char* name_format_string,
2055 int field_offset) {
2056 DCHECK(parent_entry == GetEntry(parent_obj)->index());
2057 HeapEntry* child_entry = GetEntry(child_obj);
2058 if (child_entry != NULL) {
2059 HeapGraphEdge::Type type =
2060 reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
2061 ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2062 const char* name = name_format_string != NULL && reference_name->IsString()
2063 ? names_->GetFormatted(
2064 name_format_string,
2065 String::cast(reference_name)->ToCString(
2066 DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
2067 names_->GetName(reference_name);
2068
2069 filler_->SetNamedReference(type,
2070 parent_entry,
2071 name,
2072 child_entry);
2073 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2074 }
2075}
2076
2077
2078void V8HeapExplorer::SetRootGcRootsReference() {
2079 filler_->SetIndexedAutoIndexReference(
2080 HeapGraphEdge::kElement,
2081 snapshot_->root()->index(),
2082 snapshot_->gc_roots());
2083}
2084
2085
2086void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2087 HeapEntry* child_entry = GetEntry(child_obj);
2088 DCHECK(child_entry != NULL);
2089 filler_->SetNamedAutoIndexReference(
2090 HeapGraphEdge::kShortcut,
2091 snapshot_->root()->index(),
2092 child_entry);
2093}
2094
2095
2096void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2097 filler_->SetIndexedAutoIndexReference(
2098 HeapGraphEdge::kElement,
2099 snapshot_->gc_roots()->index(),
2100 snapshot_->gc_subroot(tag));
2101}
2102
2103
// Adds an edge from the per-tag GC subroot node to child_obj. Known strong
// roots get a named internal edge; anything else gets an auto-indexed weak
// or element edge. Native contexts additionally get a user-root shortcut
// from the snapshot root to their global object.
void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
    if (name != NULL) {
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          snapshot_->gc_subroot(tag)->index(),
          name,
          child_entry);
    } else {
      if (is_weak) {
        filler_->SetNamedAutoIndexReference(
            HeapGraphEdge::kWeak,
            snapshot_->gc_subroot(tag)->index(),
            child_entry);
      } else {
        filler_->SetIndexedAutoIndexReference(
            HeapGraphEdge::kElement,
            snapshot_->gc_subroot(tag)->index(),
            child_entry);
      }
    }

    // Add a shortcut to JS global object reference at snapshot root.
    if (child_obj->IsNativeContext()) {
      Context* context = Context::cast(child_obj);
      GlobalObject* global = context->global_object();
      if (global->IsJSGlobalObject()) {
        bool is_debug_object = false;
        is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
        // user_roots_ ensures each global is linked from the root only once.
        if (!is_debug_object && !user_roots_.Contains(global)) {
          user_roots_.Insert(global);
          SetUserGlobalReference(global);
        }
      }
    }
  }
}
2144
2145
// Returns the root-list name for a strong GC subroot object, or NULL if the
// object is not one of the known strong roots. The name table is populated
// lazily on first use from the heap's root/struct/string/symbol lists.
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define STRING_NAME(name, str) NAME_ENTRY(name)
    INTERNALIZED_STRING_LIST(STRING_NAME)
#undef STRING_NAME
#define SYMBOL_NAME(name) NAME_ENTRY(name)
    PRIVATE_SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#define SYMBOL_NAME(name, varname, description) NAME_ENTRY(name)
    PUBLIC_SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}
2169
2170
2171void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2172 if (IsEssentialObject(obj)) {
2173 HeapEntry* entry = GetEntry(obj);
2174 if (entry->name()[0] == '\0') {
2175 entry->set_name(tag);
2176 }
2177 }
2178}
2179
2180
2181void V8HeapExplorer::MarkAsWeakContainer(Object* object) {
2182 if (IsEssentialObject(object) && object->IsFixedArray()) {
2183 weak_containers_.Insert(object);
2184 }
2185}
2186
2187
2188class GlobalObjectsEnumerator : public ObjectVisitor {
2189 public:
2190 virtual void VisitPointers(Object** start, Object** end) {
2191 for (Object** p = start; p < end; p++) {
2192 if ((*p)->IsNativeContext()) {
2193 Context* context = Context::cast(*p);
2194 JSObject* proxy = context->global_proxy();
2195 if (proxy->IsJSGlobalProxy()) {
2196 Object* global = proxy->map()->prototype();
2197 if (global->IsJSGlobalObject()) {
2198 objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2199 }
2200 }
2201 }
2202 }
2203 }
2204 int count() { return objects_.length(); }
2205 Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2206
2207 private:
2208 List<Handle<JSGlobalObject> > objects_;
2209};
2210
2211
// Modifies heap. Must not be run during heap traversal.
// Resolves a name (URL) for every global object via the embedder-supplied
// resolver, then records the tags under a no-allocation scope.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = heap_->isolate();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      // Inner scope keeps the handles created per iteration short-lived.
      HandleScope scope(isolate);
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }

  // The resolver above may allocate, so tagging of the raw objects happens
  // only afterwards, with allocation disallowed.
  DisallowHeapAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}
2237
2238
2239class GlobalHandlesExtractor : public ObjectVisitor {
2240 public:
2241 explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2242 : explorer_(explorer) {}
2243 virtual ~GlobalHandlesExtractor() {}
2244 virtual void VisitPointers(Object** start, Object** end) {
2245 UNREACHABLE();
2246 }
2247 virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2248 explorer_->VisitSubtreeWrapper(p, class_id);
2249 }
2250 private:
2251 NativeObjectsExplorer* explorer_;
2252};
2253
2254
2255class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2256 public:
2257 BasicHeapEntriesAllocator(
2258 HeapSnapshot* snapshot,
2259 HeapEntry::Type entries_type)
2260 : snapshot_(snapshot),
2261 names_(snapshot_->profiler()->names()),
2262 heap_object_map_(snapshot_->profiler()->heap_object_map()),
2263 entries_type_(entries_type) {
2264 }
2265 virtual HeapEntry* AllocateEntry(HeapThing ptr);
2266 private:
2267 HeapSnapshot* snapshot_;
2268 StringsStorage* names_;
2269 HeapObjectsMap* heap_object_map_;
2270 HeapEntry::Type entries_type_;
2271};
2272
2273
2274HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2275 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2276 intptr_t elements = info->GetElementCount();
2277 intptr_t size = info->GetSizeInBytes();
2278 const char* name = elements != -1
2279 ? names_->GetFormatted(
2280 "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2281 : names_->GetCopy(info->GetLabel());
2282 return snapshot_->AddEntry(
2283 entries_type_,
2284 name,
2285 heap_object_map_->GenerateId(info),
2286 size != -1 ? static_cast<int>(size) : 0,
2287 0);
2288}
2289
2290
// Explores objects retained by the embedder. Native objects and their
// synthetic group nodes get entries from two dedicated allocators, both
// owned (and deleted) by this explorer.
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
2306
2307
2308NativeObjectsExplorer::~NativeObjectsExplorer() {
2309 for (HashMap::Entry* p = objects_by_info_.Start();
2310 p != NULL;
2311 p = objects_by_info_.Next(p)) {
2312 v8::RetainedObjectInfo* info =
2313 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2314 info->Dispose();
2315 List<HeapObject*>* objects =
2316 reinterpret_cast<List<HeapObject*>* >(p->value);
2317 delete objects;
2318 }
2319 for (HashMap::Entry* p = native_groups_.Start();
2320 p != NULL;
2321 p = native_groups_.Next(p)) {
2322 v8::RetainedObjectInfo* info =
2323 reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2324 info->Dispose();
2325 }
2326 delete synthetic_entries_allocator_;
2327 delete native_entries_allocator_;
2328}
2329
2330
// Returns the number of distinct RetainedObjectInfos, triggering the
// one-time embedder query if it has not run yet.
int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}
2335
2336
// Queries the embedder (at most once) for retained object groups and for
// wrappers with class ids, recording which heap objects belong to which
// RetainedObjectInfo. The GC prologue/epilogue callbacks bracket the query
// so embedders construct their RetainedObjectInfos as for a full GC.
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = isolate_;
  const GCType major_gc_type = kGCTypeMarkSweepCompact;
  // Record objects that are joined into ObjectGroups.
  isolate->heap()->CallGCPrologueCallbacks(
      major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info == NULL) continue;
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
    for (size_t j = 0; j < group->length; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects[j]);
      list->Add(obj);
      // in_groups_ lets VisitSubtreeWrapper skip already-grouped objects.
      in_groups_.Insert(obj);
    }
    group->info = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
  // Record objects that are not in ObjectGroups, but have class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}
2363
2364
2365void NativeObjectsExplorer::FillImplicitReferences() {
2366 Isolate* isolate = isolate_;
2367 List<ImplicitRefGroup*>* groups =
2368 isolate->global_handles()->implicit_ref_groups();
2369 for (int i = 0; i < groups->length(); ++i) {
2370 ImplicitRefGroup* group = groups->at(i);
2371 HeapObject* parent = *group->parent;
2372 int parent_entry =
2373 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2374 DCHECK(parent_entry != HeapEntry::kNoEntry);
2375 Object*** children = group->children;
2376 for (size_t j = 0; j < group->length; ++j) {
2377 Object* child = *children[j];
2378 HeapEntry* child_entry =
2379 filler_->FindOrAddEntry(child, native_entries_allocator_);
2380 filler_->SetNamedReference(
2381 HeapGraphEdge::kInternal,
2382 parent_entry,
2383 "native",
2384 child_entry);
2385 }
2386 }
2387 isolate->global_handles()->RemoveImplicitRefGroups();
2388}
2389
2390List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2391 v8::RetainedObjectInfo* info) {
2392 HashMap::Entry* entry =
2393 objects_by_info_.Lookup(info, InfoHash(info), true);
2394 if (entry->value != NULL) {
2395 info->Dispose();
2396 } else {
2397 entry->value = new List<HeapObject*>(4);
2398 }
2399 return reinterpret_cast<List<HeapObject*>* >(entry->value);
2400}
2401
2402
2403bool NativeObjectsExplorer::IterateAndExtractReferences(
2404 SnapshotFiller* filler) {
2405 filler_ = filler;
2406 FillRetainedObjects();
2407 FillImplicitReferences();
2408 if (EstimateObjectsCount() > 0) {
2409 for (HashMap::Entry* p = objects_by_info_.Start();
2410 p != NULL;
2411 p = objects_by_info_.Next(p)) {
2412 v8::RetainedObjectInfo* info =
2413 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2414 SetNativeRootReference(info);
2415 List<HeapObject*>* objects =
2416 reinterpret_cast<List<HeapObject*>* >(p->value);
2417 for (int i = 0; i < objects->length(); ++i) {
2418 SetWrapperNativeReferences(objects->at(i), info);
2419 }
2420 }
2421 SetRootNativeRootsReference();
2422 }
2423 filler_ = NULL;
2424 return true;
2425}
2426
2427
2428class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2429 public:
2430 explicit NativeGroupRetainedObjectInfo(const char* label)
2431 : disposed_(false),
2432 hash_(reinterpret_cast<intptr_t>(label)),
2433 label_(label) {
2434 }
2435
2436 virtual ~NativeGroupRetainedObjectInfo() {}
2437 virtual void Dispose() {
2438 CHECK(!disposed_);
2439 disposed_ = true;
2440 delete this;
2441 }
2442 virtual bool IsEquivalent(RetainedObjectInfo* other) {
2443 return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2444 }
2445 virtual intptr_t GetHash() { return hash_; }
2446 virtual const char* GetLabel() { return label_; }
2447
2448 private:
2449 bool disposed_;
2450 intptr_t hash_;
2451 const char* label_;
2452};
2453
2454
2455NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2456 const char* label) {
2457 const char* label_copy = names_->GetCopy(label);
2458 uint32_t hash = StringHasher::HashSequentialString(
2459 label_copy,
2460 static_cast<int>(strlen(label_copy)),
2461 isolate_->heap()->HashSeed());
2462 HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2463 hash, true);
2464 if (entry->value == NULL) {
2465 entry->value = new NativeGroupRetainedObjectInfo(label);
2466 }
2467 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2468}
2469
2470
2471void NativeObjectsExplorer::SetNativeRootReference(
2472 v8::RetainedObjectInfo* info) {
2473 HeapEntry* child_entry =
2474 filler_->FindOrAddEntry(info, native_entries_allocator_);
2475 DCHECK(child_entry != NULL);
2476 NativeGroupRetainedObjectInfo* group_info =
2477 FindOrAddGroupInfo(info->GetGroupLabel());
2478 HeapEntry* group_entry =
2479 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2480 filler_->SetNamedAutoIndexReference(
2481 HeapGraphEdge::kInternal,
2482 group_entry->index(),
2483 child_entry);
2484}
2485
2486
2487void NativeObjectsExplorer::SetWrapperNativeReferences(
2488 HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2489 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2490 DCHECK(wrapper_entry != NULL);
2491 HeapEntry* info_entry =
2492 filler_->FindOrAddEntry(info, native_entries_allocator_);
2493 DCHECK(info_entry != NULL);
2494 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2495 wrapper_entry->index(),
2496 "native",
2497 info_entry);
2498 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2499 info_entry->index(),
2500 wrapper_entry);
2501}
2502
2503
2504void NativeObjectsExplorer::SetRootNativeRootsReference() {
2505 for (HashMap::Entry* entry = native_groups_.Start();
2506 entry;
2507 entry = native_groups_.Next(entry)) {
2508 NativeGroupRetainedObjectInfo* group_info =
2509 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2510 HeapEntry* group_entry =
2511 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2512 DCHECK(group_entry != NULL);
2513 filler_->SetIndexedAutoIndexReference(
2514 HeapGraphEdge::kElement,
2515 snapshot_->root()->index(),
2516 group_entry);
2517 }
2518}
2519
2520
2521void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2522 if (in_groups_.Contains(*p)) return;
2523 Isolate* isolate = isolate_;
2524 v8::RetainedObjectInfo* info =
2525 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2526 if (info == NULL) return;
2527 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2528}
2529
2530
// Couples the V8 heap explorer and the embedder (DOM) explorer that jointly
// fill one snapshot; control may be NULL when no progress reporting or
// cancellation is wanted.
HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}
2542
2543
// Drives the whole snapshot: tags globals, runs GC to drop unreachable
// objects, creates synthetic roots, extracts references in two passes and
// finalizes children. Returns false if the embedder aborted.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  heap_->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  heap_->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = heap_;
  debug_heap->Verify();
#endif

  SetProgressTotal(2);  // 2 passes.

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  snapshot_->AddSyntheticRootEntries();

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  // Force a final 100% progress report.
  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
2580
2581
// Counts one unit of work; the value is surfaced through ProgressReport().
void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}
2585
2586
2587bool HeapSnapshotGenerator::ProgressReport(bool force) {
2588 const int kProgressReportGranularity = 10000;
2589 if (control_ != NULL
2590 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2591 return
2592 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2593 v8::ActivityControl::kContinue;
2594 }
2595 return true;
2596}
2597
2598
2599void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2600 if (control_ == NULL) return;
2601 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2602 progress_total_ = iterations_count * (
2603 v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2604 dom_explorer_.EstimateObjectsCount());
2605 progress_counter_ = 0;
2606}
2607
2608
2609bool HeapSnapshotGenerator::FillReferences() {
2610 SnapshotFiller filler(snapshot_, &entries_);
2611 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2612 && dom_explorer_.IterateAndExtractReferences(&filler);
2613}
2614
2615
// Maximum number of decimal digits (including a sign for the signed case)
// needed to print an integer of the given byte width; used to size
// formatting buffers below.
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;
  static const int kUnsigned = 10;
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;
  static const int kUnsigned = 20;
};
2625
2626
// Buffers serializer output and hands it to a v8::OutputStream in chunks
// of the stream's preferred size. Once the stream signals an abort, all
// further output is silently dropped.
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    DCHECK(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    DCHECK(c != '\0');
    DCHECK(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  // Copies n bytes of s into the chunk, flushing full chunks along the way.
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    DCHECK(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      int s_chunk_size =
          Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      DCHECK(s_chunk_size > 0);
      MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  // Flushes any partial final chunk and signals end-of-stream.
  void Finalize() {
    if (aborted_) return;
    DCHECK(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      // Fast path: the number fits in the current chunk, print in place.
      int result = SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      DCHECK(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      // Slow path: print to a stack buffer and let AddString split it
      // across chunk boundaries.
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = SNPrintF(buffer, format, n);
      USE(result);
      DCHECK(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    DCHECK(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};
2710
2711
// Number of fields in a serialized edge: type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// Number of fields in a serialized node:
// type, name, id, self_size, edge_count, trace_node_id.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2716
2717void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2718 if (AllocationTracker* allocation_tracker =
2719 snapshot_->profiler()->allocation_tracker()) {
2720 allocation_tracker->PrepareForSerialization();
2721 }
2722 DCHECK(writer_ == NULL);
2723 writer_ = new OutputStreamWriter(stream);
2724 SerializeImpl();
2725 delete writer_;
2726 writer_ = NULL;
2727}
2728
2729
// Emits the snapshot JSON object section by section, in a fixed order the
// consumer relies on; bails out as soon as the writer reports an abort.
void HeapSnapshotJSONSerializer::SerializeImpl() {
  DCHECK(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"trace_function_infos\":[");
  SerializeTraceNodeInfos();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"trace_tree\":[");
  SerializeTraceTree();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  // Strings go last since ids are handed out while serializing the rest.
  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}
2762
2763
2764int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2765 HashMap::Entry* cache_entry = strings_.Lookup(
2766 const_cast<char*>(s), StringHash(s), true);
2767 if (cache_entry->value == NULL) {
2768 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2769 }
2770 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2771}
2772
2773
namespace {

// Maps a byte width to the unsigned integer type of that width; used by
// utoa() to reinterpret values before formatting.
template<size_t size> struct ToUnsigned;

template<> struct ToUnsigned<4> {
  typedef uint32_t Type;
};

template<> struct ToUnsigned<8> {
  typedef uint64_t Type;
};

}  // namespace
2787
2788
2789template<typename T>
2790static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2791 STATIC_ASSERT(static_cast<T>(-1) > 0); // Check that T is unsigned
2792 int number_of_digits = 0;
2793 T t = value;
2794 do {
2795 ++number_of_digits;
2796 } while (t /= 10);
2797
2798 buffer_pos += number_of_digits;
2799 int result = buffer_pos;
2800 do {
2801 int last_digit = static_cast<int>(value % 10);
2802 buffer[--buffer_pos] = '0' + last_digit;
2803 value /= 10;
2804 } while (value);
2805 return result;
2806}
2807
2808
2809template<typename T>
2810static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2811 typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2812 STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2813 return utoa_impl(unsigned_value, buffer, buffer_pos);
2814}
2815
2816
2817void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2818 bool first_edge) {
2819 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2820 static const int kBufferSize =
2821 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2822 EmbeddedVector<char, kBufferSize> buffer;
2823 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2824 || edge->type() == HeapGraphEdge::kHidden
2825 ? edge->index() : GetStringId(edge->name());
2826 int buffer_pos = 0;
2827 if (!first_edge) {
2828 buffer[buffer_pos++] = ',';
2829 }
2830 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2831 buffer[buffer_pos++] = ',';
2832 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2833 buffer[buffer_pos++] = ',';
2834 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2835 buffer[buffer_pos++] = '\n';
2836 buffer[buffer_pos++] = '\0';
2837 writer_->AddString(buffer.start());
2838}
2839
2840
2841void HeapSnapshotJSONSerializer::SerializeEdges() {
2842 List<HeapGraphEdge*>& edges = snapshot_->children();
2843 for (int i = 0; i < edges.length(); ++i) {
2844 DCHECK(i == 0 ||
2845 edges[i - 1]->from()->index() <= edges[i]->from()->index());
2846 SerializeEdge(edges[i], i == 0);
2847 if (writer_->aborted()) return;
2848 }
2849}
2850
2851
2852void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2853 // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0
2854 static const int kBufferSize =
2855 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2856 + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned // NOLINT
2857 + 6 + 1 + 1;
2858 EmbeddedVector<char, kBufferSize> buffer;
2859 int buffer_pos = 0;
2860 if (entry_index(entry) != 0) {
2861 buffer[buffer_pos++] = ',';
2862 }
2863 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2864 buffer[buffer_pos++] = ',';
2865 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2866 buffer[buffer_pos++] = ',';
2867 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2868 buffer[buffer_pos++] = ',';
2869 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2870 buffer[buffer_pos++] = ',';
2871 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2872 buffer[buffer_pos++] = ',';
2873 buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2874 buffer[buffer_pos++] = '\n';
2875 buffer[buffer_pos++] = '\0';
2876 writer_->AddString(buffer.start());
2877}
2878
2879
2880void HeapSnapshotJSONSerializer::SerializeNodes() {
2881 List<HeapEntry>& entries = snapshot_->entries();
2882 for (int i = 0; i < entries.length(); ++i) {
2883 SerializeNode(&entries[i]);
2884 if (writer_->aborted()) return;
2885 }
2886}
2887
2888
// Emits the contents of the top-level "snapshot" object: the title, uid,
// the "meta" schema describing how node/edge/trace records are laid out
// in the flat arrays, and element counts a parser can use to preallocate.
// NOTE(review): the title is written without JSON escaping — presumably
// titles never contain quotes/backslashes; confirm against callers.
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"title\":\"");
  writer_->AddString(snapshot_->title());
  writer_->AddString("\"");
  writer_->AddString(",\"uid\":");
  writer_->AddNumber(snapshot_->uid());
  writer_->AddString(",\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
    JSON_S("node_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name") ","
        JSON_S("id") ","
        JSON_S("self_size") ","
        JSON_S("edge_count") ","
        JSON_S("trace_node_id")) ","
    JSON_S("node_types") ":" JSON_A(
        JSON_A(
            JSON_S("hidden") ","
            JSON_S("array") ","
            JSON_S("string") ","
            JSON_S("object") ","
            JSON_S("code") ","
            JSON_S("closure") ","
            JSON_S("regexp") ","
            JSON_S("number") ","
            JSON_S("native") ","
            JSON_S("synthetic") ","
            JSON_S("concatenated string") ","
            JSON_S("sliced string")) ","
        JSON_S("string") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number")) ","
    JSON_S("edge_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name_or_index") ","
        JSON_S("to_node")) ","
    JSON_S("edge_types") ":" JSON_A(
        JSON_A(
            JSON_S("context") ","
            JSON_S("element") ","
            JSON_S("property") ","
            JSON_S("internal") ","
            JSON_S("hidden") ","
            JSON_S("shortcut") ","
            JSON_S("weak")) ","
        JSON_S("string_or_number") ","
        JSON_S("node")) ","
    JSON_S("trace_function_info_fields") ":" JSON_A(
        JSON_S("function_id") ","
        JSON_S("name") ","
        JSON_S("script_name") ","
        JSON_S("script_id") ","
        JSON_S("line") ","
        JSON_S("column")) ","
    JSON_S("trace_node_fields") ":" JSON_A(
        JSON_S("id") ","
        JSON_S("function_info_index") ","
        JSON_S("count") ","
        JSON_S("size") ","
        JSON_S("children"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
  writer_->AddString(",\"trace_function_count\":");
  // The allocation tracker is optional; without it no trace function
  // records were collected and the count is reported as zero.
  uint32_t count = 0;
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (tracker) {
    count = tracker->function_info_list().length();
  }
  writer_->AddNumber(count);
}
2972
2973
2974static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2975 static const char hex_chars[] = "0123456789ABCDEF";
2976 w->AddString("\\u");
2977 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2978 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2979 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2980 w->AddCharacter(hex_chars[u & 0xf]);
2981}
2982
2983
2984void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2985 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2986 if (!tracker) return;
2987 AllocationTraceTree* traces = tracker->trace_tree();
2988 SerializeTraceNode(traces->root());
2989}
2990
2991
2992void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2993 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2994 const int kBufferSize =
2995 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2996 + 4 + 1 + 1;
2997 EmbeddedVector<char, kBufferSize> buffer;
2998 int buffer_pos = 0;
2999 buffer_pos = utoa(node->id(), buffer, buffer_pos);
3000 buffer[buffer_pos++] = ',';
3001 buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
3002 buffer[buffer_pos++] = ',';
3003 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
3004 buffer[buffer_pos++] = ',';
3005 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
3006 buffer[buffer_pos++] = ',';
3007 buffer[buffer_pos++] = '[';
3008 buffer[buffer_pos++] = '\0';
3009 writer_->AddString(buffer.start());
3010
3011 Vector<AllocationTraceNode*> children = node->children();
3012 for (int i = 0; i < children.length(); i++) {
3013 if (i > 0) {
3014 writer_->AddCharacter(',');
3015 }
3016 SerializeTraceNode(children[i]);
3017 }
3018 writer_->AddCharacter(']');
3019}
3020
3021
3022// 0-based position is converted to 1-based during the serialization.
3023static int SerializePosition(int position, const Vector<char>& buffer,
3024 int buffer_pos) {
3025 if (position == -1) {
3026 buffer[buffer_pos++] = '0';
3027 } else {
3028 DCHECK(position >= 0);
3029 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
3030 }
3031 return buffer_pos;
3032}
3033
3034
3035void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
3036 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
3037 if (!tracker) return;
3038 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
3039 const int kBufferSize =
3040 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
3041 + 6 + 1 + 1;
3042 EmbeddedVector<char, kBufferSize> buffer;
3043 const List<AllocationTracker::FunctionInfo*>& list =
3044 tracker->function_info_list();
3045 bool first_entry = true;
3046 for (int i = 0; i < list.length(); i++) {
3047 AllocationTracker::FunctionInfo* info = list[i];
3048 int buffer_pos = 0;
3049 if (first_entry) {
3050 first_entry = false;
3051 } else {
3052 buffer[buffer_pos++] = ',';
3053 }
3054 buffer_pos = utoa(info->function_id, buffer, buffer_pos);
3055 buffer[buffer_pos++] = ',';
3056 buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
3057 buffer[buffer_pos++] = ',';
3058 buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3059 buffer[buffer_pos++] = ',';
3060 // The cast is safe because script id is a non-negative Smi.
3061 buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3062 buffer_pos);
3063 buffer[buffer_pos++] = ',';
3064 buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3065 buffer[buffer_pos++] = ',';
3066 buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3067 buffer[buffer_pos++] = '\n';
3068 buffer[buffer_pos++] = '\0';
3069 writer_->AddString(buffer.start());
3070 }
3071}
3072
3073
// Writes |s| (NUL-terminated, UTF-8) as a quoted JSON string literal,
// preceded by a newline for readability of the serialized snapshot.
// Control characters and non-ASCII sequences become \uXXXX escapes;
// invalid UTF-8 bytes degrade to '?'.
// NOTE(review): WriteUChar emits exactly four hex digits, so code points
// above U+FFFF would be truncated rather than paired surrogates — confirm
// such input cannot occur here.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        // Quote and backslash must be backslash-escaped in JSON.
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          // Printable ASCII passes through unchanged.
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          // A UTF-8 sequence is at most 4 bytes; stop early at NUL.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            // Skip the continuation bytes consumed by the decoder
            // (the loop's ++s accounts for the first byte).
            DCHECK(cursor != 0);
            s += cursor - 1;
          } else {
            // Invalid sequence: emit a placeholder and move on.
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
3122
3123
3124void HeapSnapshotJSONSerializer::SerializeStrings() {
3125 ScopedVector<const unsigned char*> sorted_strings(
3126 strings_.occupancy() + 1);
3127 for (HashMap::Entry* entry = strings_.Start();
3128 entry != NULL;
3129 entry = strings_.Next(entry)) {
3130 int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3131 sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3132 }
3133 writer_->AddString("\"<dummy>\"");
3134 for (int i = 1; i < sorted_strings.length(); ++i) {
3135 writer_->AddCharacter(',');
3136 SerializeString(sorted_strings[i]);
3137 if (writer_->aborted()) return;
3138 }
3139}
3140
3141
3142} } // namespace v8::internal