// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/heap-snapshot-generator-inl.h"

#include "src/allocation-tracker.h"
#include "src/code-stubs.h"
#include "src/conversions.h"
#include "src/debug.h"
#include "src/heap-profiler.h"
#include "src/types.h"

namespace v8 {
namespace internal {


HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  DCHECK(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}


HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  DCHECK(type == kElement || type == kHidden);
}


void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}


const int HeapEntry::kNoEntry = -1;

HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) { }


void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}
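
// Edges from all entries are accumulated in one flat list on the snapshot;
// each entry only bumps children_count_ here, and HeapSnapshot::FillChildren()
// later slices the shared children() vector into per-entry ranges using
// those counts.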


void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
  base::OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ", self_size(), id(), indent,
                  ' ', prefix, edge_name);
  if (type() != kString) {
    base::OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    base::OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        base::OS::Print("%c", *c);
      else
        base::OS::Print("\\n");
      ++c;
    }
    base::OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}


const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    case kConsString: return "/concatenated string/";
    case kSlicedString: return "/sliced string/";
    case kSymbol: return "/symbol/";
    default: return "???";
  }
}


// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 28;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 40;
};

}  // namespace
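
// The STATIC_ASSERTs in the HeapSnapshot constructor below compare
// sizeof(HeapGraphEdge) and sizeof(HeapEntry) against these budgets, so a
// field that grows either type past its expected size fails to compile
// instead of silently bloating every snapshot.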


HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
                           const char* title,
                           unsigned uid)
    : profiler_(profiler),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_ASSERT(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_ASSERT(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}


void HeapSnapshot::Delete() {
  profiler_->RemoveSnapshot(this);
  delete this;
}


void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}


void HeapSnapshot::AddSyntheticRootEntries() {
  AddRootEntry();
  AddGcRootsEntry();
  SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
  for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
    AddGcSubrootEntry(tag, id);
    id += HeapObjectsMap::kObjectIdStep;
  }
  DCHECK(HeapObjectsMap::kFirstAvailableObjectId == id);
}


HeapEntry* HeapSnapshot::AddRootEntry() {
  DCHECK(root_index_ == HeapEntry::kNoEntry);
  DCHECK(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0,
                              0);
  root_index_ = entry->index();
  DCHECK(root_index_ == 0);
  return entry;
}


HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
  DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              VisitorSynchronization::kTagNames[tag], id, 0, 0);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  size_t size,
                                  unsigned trace_node_id) {
  HeapEntry entry(this, type, name, id, size, trace_node_id);
  entries_.Add(entry);
  return &entries_.last();
}


void HeapSnapshot::FillChildren() {
  DCHECK(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  DCHECK(edges().length() == children_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}


class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};


HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // Perform a binary search by id.
  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
  if (index == -1)
    return NULL;
  return entries_by_id->at(index);
}


template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}


List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}


void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}


size_t HeapSnapshot::RawSnapshotSize() const {
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}


// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
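
// For illustration, assuming kObjectIdStep == 2 the synthetic IDs line up
// as: root = 1, (GC roots) = 3, first GC subroot = 5, and so on, with
// kFirstAvailableObjectId following the last subroot. Stepping by two keeps
// every native object ID odd, while GenerateId() below shifts embedder IDs
// left by one bit to keep them even.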


static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}


HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in the HashMap we see no difference between two
  // cases: it has an entry with NULL as the value, or it has just created
  // a new entry on the fly with NULL as the default value.
  // With such a dummy element we have a guarantee that all entries_map_
  // entries will have the value field greater than 0.
  // This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}
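
// Example of the ambiguity the dummy element removes: after
//   entries_map_.Lookup(addr, hash, true)
// a NULL value could mean either "entry was just created" or "entry stores
// index 0". Reserving slot 0 of entries_ makes every real index non-zero,
// so NULL always means "not seen before".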


bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  DCHECK(to != NULL);
  DCHECK(from != NULL);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
  if (from_value == NULL) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
                                                   true);
    if (to_entry->value != NULL) {
      // We found an existing entry with the to address for an old object.
      // Without this operation we would have two EntryInfos with the same
      // value in the addr field. That is bad because later, in
      // RemoveDeadEntries, one of these entries would be removed together
      // with the corresponding entries_map_ entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    // The size of an object can change during its life, so to keep information
    // about the object in entries_ consistent, we have to adjust the size when
    // the object is migrated.
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             from,
             to,
             entries_.at(from_entry_info_index).size,
             object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    to_entry->value = from_value;
  }
  return from_value != NULL;
}


void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}


SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}


SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             addr,
             entry_info.size,
             size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
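
// Note that the map stores an index into entries_ rather than the ID
// itself: MoveObject() and RemoveDeadEntries() can then rewire or compact
// the map in place while each EntryInfo keeps its stable SnapshotObjectId.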


void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}


void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapObjectsMap::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}


namespace {


struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
      : obj(obj),
        expected_size(expected_size) {
  }

  HeapObject* obj;
  int expected_size;

  bool IsValid() const { return expected_size == obj->Size(); }

  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
             expected_size,
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else {
      PrintF("Good object : %p %6d. Next address is %p\n",
             obj->address(),
             expected_size,
             obj->address() + obj->Size());
    }
  }
};


static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}


}  // namespace


int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects(1000);

  HeapIterator iterator(heap_);
  int untracked = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    HashMap::Entry* entry = entries_map_.Lookup(
        obj->address(), ComputePointerHash(obj->address()), false);
    if (entry == NULL) {
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                         static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size))
          ++untracked;
      } else {
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        ++untracked;
        if (last_printed_object != i - 1) {
          if (i > 0) {
            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
            heap_objects[i - 1].Print();
          }
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}


SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  DCHECK(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  DCHECK(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}
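
// The pushed stream is a sequence of v8::HeapStatsUpdate triples
// (time_interval_index, count, size), emitted only for intervals whose
// aggregate live-object statistics changed since the last push and flushed
// in chunks of the stream's preferred size.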


void HeapObjectsMap::RemoveDeadEntries() {
  DCHECK(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr), false);
      DCHECK(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}


SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           heap_->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
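
// The left shift makes every generated embedder ID even, keeping it
// disjoint from the IDs that FindOrAddEntry() hands out to heap objects
// (starting at kFirstAvailableObjectId and advancing by kObjectIdStep),
// which stay odd as described above kInternalRootObjectId.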


size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}


HeapEntriesMap::HeapEntriesMap()
    : entries_(HashMap::PointersMatch) {
}


int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  DCHECK(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}


HeapObjectsSet::HeapObjectsSet()
    : entries_(HashMap::PointersMatch) {
}


void HeapObjectsSet::Clear() {
  entries_.Clear();
}


bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}


void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}


const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}


void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}


V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}


V8HeapExplorer::~V8HeapExplorer() {
}


HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        names_->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = names_->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String* string = String::cast(object);
    if (string->IsConsString())
      return AddEntry(object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(object,
                    HeapEntry::kString,
                    names_->GetName(String::cast(object)));
  } else if (object->IsSymbol()) {
    return AddEntry(object, HeapEntry::kSymbol, "symbol");
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  return AddEntry(object->address(), type, name, object->Size());
}


HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}


class SnapshotFiller {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        names_(snapshot->profiler()->names()),
        entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type,
        names_->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapEntriesMap* entries_;
};
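
// SnapshotFiller is the single funnel through which explorers create
// entries and edges: the HeapThing -> entry-index map in HeapEntriesMap
// guarantees that an object reached from several parents gets exactly one
// HeapEntry (see FindOrAddEntry), while the Set*Reference helpers translate
// parent indices back into HeapEntry pointers.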


const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case CELL_TYPE: return "system / Cell";
    case PROPERTY_CELL_TYPE: return "system / PropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}


int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}


class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(0) {
  }
  void VisitCodeEntry(Address entry_address) {
    Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
    generator_->SetInternalReference(parent_obj_, parent_, "code", code);
    generator_->TagCodeObject(code);
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      ++next_index_;
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    DCHECK(Memory::Object_at(field)->IsHeapObject());
    intptr_t p = reinterpret_cast<intptr_t>(Memory::Object_at(field));
    DCHECK(!IsMarked(p));
    intptr_t p_tagged = p | kTag;
    Memory::Object_at(field) = reinterpret_cast<Object*>(p_tagged);
  }

 private:
  bool CheckVisitedAndUnmark(Object** field) {
    intptr_t p = reinterpret_cast<intptr_t>(*field);
    if (IsMarked(p)) {
      intptr_t p_untagged = (p & ~kTaggingMask) | kHeapObjectTag;
      *field = reinterpret_cast<Object*>(p_untagged);
      DCHECK((*field)->IsHeapObject());
      return true;
    }
    return false;
  }

  static const intptr_t kTaggingMask = 3;
  static const intptr_t kTag = 3;

  static bool IsMarked(intptr_t p) { return (p & kTaggingMask) == kTag; }

  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};
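
// The marking scheme above relies on heap pointers being tagged: a real
// HeapObject pointer always carries kHeapObjectTag in its low bits, so
// temporarily OR-ing in kTag (low two bits 11) is reversible and
// CheckVisitedAndUnmark() can restore the original pointer by clearing the
// mark and re-applying kHeapObjectTag.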


bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
  if (obj->IsFixedArray()) return false;  // FixedArrays are processed on pass 2

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSArrayBuffer()) {
    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
  } else if (obj->IsJSObject()) {
    if (obj->IsJSWeakSet()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
    } else if (obj->IsJSWeakMap()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
    } else if (obj->IsJSSet()) {
      ExtractJSCollectionReferences(entry, JSSet::cast(obj));
    } else if (obj->IsJSMap()) {
      ExtractJSCollectionReferences(entry, JSMap::cast(obj));
    }
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsSymbol()) {
    ExtractSymbolReferences(entry, Symbol::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorInfo()) {
    ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  return true;
}


bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
  if (!obj->IsFixedArray()) return false;

  if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else {
    ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
  }
  return true;
}
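
// References are extracted in two passes: pass 1 skips FixedArrays but may
// call MarkAsWeakContainer() on some of them, so by the time pass 2 reaches
// a FixedArray, ExtractFixedArrayReferences() already knows whether its
// elements should be recorded as weak or internal references.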


void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context(),
                       JSGlobalProxy::kNativeContextOffset);
}


void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  PrototypeIterator iter(heap_->isolate(), js_obj);
  SetPropertyReference(obj, entry, heap_->proto_string(), iter.GetCurrent());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset
                  == JSFunction::kNonWeakFieldsEndOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
                  == JSFunction::kSize);
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_context", global_obj->global_context(),
                         GlobalObject::kGlobalContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_proxy", global_obj->global_proxy(),
                         GlobalObject::kGlobalProxyOffset);
    STATIC_ASSERT(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
                  4 * kPointerSize);
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer(),
                         JSArrayBufferView::kBufferOffset);
    SetWeakReference(view, entry, "weak_next", view->weak_next(),
                     JSArrayBufferView::kWeakNextOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}


void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first(),
                         ConsString::kFirstOffset);
    SetInternalReference(cs, entry, "second", cs->second(),
                         ConsString::kSecondOffset);
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent(),
                         SlicedString::kParentOffset);
  }
}


void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
  SetInternalReference(symbol, entry,
                       "name", symbol->name(),
                       Symbol::kNameOffset);
}


void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
                                                   JSCollection* collection) {
  SetInternalReference(collection, entry, "table", collection->table(),
                       JSCollection::kTableOffset);
}


void V8HeapExplorer::ExtractJSWeakCollectionReferences(
    int entry, JSWeakCollection* collection) {
  MarkAsWeakContainer(collection->table());
  SetInternalReference(collection, entry,
                       "table", collection->table(),
                       JSWeakCollection::kTableOffset);
}


void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT || \
      Context::index == Context::MAP_CACHE_INDEX) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
                         FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
                     FixedArray::OffsetOfElementAt(Context::index)); \
  }
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
    EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
    EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
#undef EXTRACT_CONTEXT_FIELD
    STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
                  Context::FIRST_WEAK_SLOT);
    STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
                  Context::NATIVE_CONTEXT_SLOTS);
    STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 5 ==
                  Context::NATIVE_CONTEXT_SLOTS);
  }
}


void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    int transitions_entry = GetEntry(transitions)->index();
    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(transitions, transitions_entry,
                         "back_pointer", back_pointer);

    if (FLAG_collect_maps && map->CanTransition()) {
      if (!transitions->IsSimpleTransition()) {
        if (transitions->HasPrototypeTransitions()) {
          FixedArray* prototype_transitions =
              transitions->GetPrototypeTransitions();
          MarkAsWeakContainer(prototype_transitions);
          TagObject(prototype_transitions, "(prototype transitions)");
          SetInternalReference(transitions, transitions_entry,
                               "prototype_transitions", prototype_transitions);
        }
        // TODO(alph): transitions keys are strong links.
        MarkAsWeakContainer(transitions);
      }
    }

    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions,
                         Map::kTransitionsOrBackPointerOffset);
  } else {
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "back_pointer", back_pointer,
                         Map::kTransitionsOrBackPointerOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);

  MarkAsWeakContainer(map->code_cache());
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  TagObject(map->dependent_code(), "(dependent code)");
  MarkAsWeakContainer(map->dependent_code());
  SetInternalReference(map, entry,
                       "dependent_code", map->dependent_code(),
                       Map::kDependentCodeOffset);
}


void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  String* shared_name = shared->DebugName();
  const char* name = NULL;
  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
    name = names_->GetName(shared_name);
    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
  } else {
    TagObject(shared->code(), names_->GetFormatted("(%s code)",
        Code::Kind2String(shared->code()->kind())));
  }

  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  const char* construct_stub_name = name ?
      names_->GetFormatted("(construct stub code for %s)", name) :
      "(construct stub code)";
  TagObject(shared->construct_stub(), construct_stub_name);
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "optimized_code_map", shared->optimized_code_map(),
                       SharedFunctionInfo::kOptimizedCodeMapOffset);
  SetInternalReference(obj, entry,
                       "feedback_vector", shared->feedback_vector(),
                       SharedFunctionInfo::kFeedbackVectorOffset);
}


void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}


void V8HeapExplorer::ExtractAccessorInfoReferences(
    int entry, AccessorInfo* accessor_info) {
  SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
                       AccessorInfo::kNameOffset);
  SetInternalReference(accessor_info, entry, "expected_receiver_type",
                       accessor_info->expected_receiver_type(),
                       AccessorInfo::kExpectedReceiverTypeOffset);
  if (accessor_info->IsDeclaredAccessorInfo()) {
    DeclaredAccessorInfo* declared_accessor_info =
        DeclaredAccessorInfo::cast(accessor_info);
    SetInternalReference(declared_accessor_info, entry, "descriptor",
                         declared_accessor_info->descriptor(),
                         DeclaredAccessorInfo::kDescriptorOffset);
  } else if (accessor_info->IsExecutableAccessorInfo()) {
    ExecutableAccessorInfo* executable_accessor_info =
        ExecutableAccessorInfo::cast(accessor_info);
    SetInternalReference(executable_accessor_info, entry, "getter",
                         executable_accessor_info->getter(),
                         ExecutableAccessorInfo::kGetterOffset);
    SetInternalReference(executable_accessor_info, entry, "setter",
                         executable_accessor_info->setter(),
                         ExecutableAccessorInfo::kSetterOffset);
    SetInternalReference(executable_accessor_info, entry, "data",
                         executable_accessor_info->data(),
                         ExecutableAccessorInfo::kDataOffset);
  }
}


void V8HeapExplorer::ExtractAccessorPairReferences(
    int entry, AccessorPair* accessors) {
  SetInternalReference(accessors, entry, "getter", accessors->getter(),
                       AccessorPair::kGetterOffset);
  SetInternalReference(accessors, entry, "setter", accessors->setter(),
                       AccessorPair::kSetterOffset);
}


void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}


void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
  TagObject(code, names_->GetFormatted("(%s builtin)", name));
}


void V8HeapExplorer::TagCodeObject(Code* code) {
  if (code->kind() == Code::STUB) {
    TagObject(code, names_->GetFormatted(
        "(%s code)", CodeStub::MajorName(
            CodeStub::GetMajorKey(code), true)));
  }
}


void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagCodeObject(code);
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
  SetInternalReference(code, entry,
                       "constant_pool", code->constant_pool(),
                       Code::kConstantPoolOffset);
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    SetWeakReference(code, entry,
                     "next_code_link", code->next_code_link(),
                     Code::kNextCodeLinkOffset);
  }
}


void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
  SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
}


void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
  SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
}


void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
                                                   PropertyCell* cell) {
  ExtractCellReferences(entry, cell);
  SetInternalReference(cell, entry, "type", cell->type(),
                       PropertyCell::kTypeOffset);
  MarkAsWeakContainer(cell->dependent_code());
  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
                       PropertyCell::kDependentCodeOffset);
}


void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
                                                     AllocationSite* site) {
  SetInternalReference(site, entry, "transition_info", site->transition_info(),
                       AllocationSite::kTransitionInfoOffset);
  SetInternalReference(site, entry, "nested_site", site->nested_site(),
                       AllocationSite::kNestedSiteOffset);
  MarkAsWeakContainer(site->dependent_code());
  SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
                       AllocationSite::kDependentCodeOffset);
  // Do not visit weak_next, as it is not visited by the StaticVisitor and
  // we are not very interested in the weak_next field here.
  STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
                AllocationSite::BodyDescriptor::kEndOffset);
}


class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
 public:
  JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
      : size_(size)
      , explorer_(explorer) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr) {
    return explorer_->AddEntry(
        static_cast<Address>(ptr),
        HeapEntry::kNative, "system / JSArrayBufferData", size_);
  }
 private:
  size_t size_;
  V8HeapExplorer* explorer_;
};


void V8HeapExplorer::ExtractJSArrayBufferReferences(
    int entry, JSArrayBuffer* buffer) {
  SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
                   JSArrayBuffer::kWeakNextOffset);
  SetWeakReference(buffer, entry,
                   "weak_first_view", buffer->weak_first_view(),
                   JSArrayBuffer::kWeakFirstViewOffset);
  // Set up a reference to a native memory backing_store object.
  if (!buffer->backing_store())
    return;
  size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
  JSArrayBufferDataEntryAllocator allocator(data_size, this);
  HeapEntry* data_entry =
      filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             entry, "backing_store", data_entry);
}
1591
1592
1593void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
1594 bool is_weak = weak_containers_.Contains(array);
1595 for (int i = 0, l = array->length(); i < l; ++i) {
1596 if (is_weak) {
1597 SetWeakReference(array, entry,
1598 i, array->get(i), array->OffsetOfElementAt(i));
1599 } else {
1600 SetInternalReference(array, entry,
1601 i, array->get(i), array->OffsetOfElementAt(i));
1602 }
1603 }
1604}
1605
1606
1607void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
1608 if (!js_obj->IsJSFunction()) return;
1609
1610 JSFunction* func = JSFunction::cast(js_obj);
1611 if (func->shared()->bound()) {
1612 FixedArray* bindings = func->function_bindings();
1613 SetNativeBindReference(js_obj, entry, "bound_this",
1614 bindings->get(JSFunction::kBoundThisIndex));
1615 SetNativeBindReference(js_obj, entry, "bound_function",
1616 bindings->get(JSFunction::kBoundFunctionIndex));
1617 for (int i = JSFunction::kBoundArgumentsStartIndex;
1618 i < bindings->length(); i++) {
1619 const char* reference_name = names_->GetFormatted(
1620 "bound_argument_%d",
1621 i - JSFunction::kBoundArgumentsStartIndex);
1622 SetNativeBindReference(js_obj, entry, reference_name,
1623 bindings->get(i));
1624 }
1625 }
1626}


void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          Representation r = descs->GetDetails(i).representation();
          if (r.IsSmi() || r.IsDouble()) break;
          int index = descs->GetFieldIndex(i);

          Name* k = descs->GetKey(i);
          if (index < js_obj->map()->inobject_properties()) {
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(
                  js_obj, entry,
                  k, value,
                  NULL,
                  js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(
                  js_obj, entry,
                  "hidden_properties", value,
                  js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            FieldIndex field_index =
                FieldIndex::ForDescriptor(js_obj->map(), i);
            Object* value = js_obj->RawFastPropertyAt(field_index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        case CONSTANT:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstant(i));
          break;
        case CALLBACKS:
          ExtractAccessorPairProperty(
              js_obj, entry,
              descs->GetKey(i), descs->GetValue(i));
          break;
        case NORMAL:  // only in slow mode
          UNREACHABLE();
          break;
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsPropertyCell()
            ? PropertyCell::cast(target)->value()
            : target;
        if (k == heap_->hidden_string()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
          continue;
        }
        if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
        SetPropertyReference(js_obj, entry, String::cast(k), value);
      }
    }
  }
}
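
// Illustrative sketch (added; the JS snippet is an assumption): an object
// in fast mode such as
//   var o = {x: {}, y: {}};
// is walked via its map's descriptor array and yields property edges "x"
// and "y". If the object later falls into dictionary mode (e.g. after
//   delete o.x;
// ), the same kind of edges come from the NameDictionary branch instead.
// Smi and double fields are skipped because they hold no heap pointers.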


bool V8HeapExplorer::ExtractAccessorPairProperty(
    JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
  if (!callback_obj->IsAccessorPair()) return false;
  AccessorPair* accessors = AccessorPair::cast(callback_obj);
  Object* getter = accessors->getter();
  if (!getter->IsOddball()) {
    SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
  }
  Object* setter = accessors->setter();
  if (!setter->IsOddball()) {
    SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
  }
  return true;
}
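
// Illustrative sketch (added; the JS snippet is an assumption): for
//   Object.defineProperty(o, "prop", { get: g, set: s });
// the pair above yields two property edges on o, named "get prop" and
// "set prop", pointing at the getter and setter closures.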


void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastObjectElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        DCHECK(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}
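
// Illustrative sketch (added; the JS snippets are assumptions): a dense
// array such as
//   var a = [{}, , {}];
// takes the fast path and emits element edges 0 and 2 (the hole at index 1
// is skipped), while a sparse array such as
//   var b = []; b[1000000] = {};
// uses dictionary elements and emits a single element edge 1000000.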


void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
  int length = js_obj->GetInternalFieldCount();
  for (int i = 0; i < length; ++i) {
    Object* o = js_obj->GetInternalField(i);
    SetInternalReference(
        js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
  }
}


String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_string();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_string()) {
    // TODO(verwaest): Try to get object.constructor.name in this case.
    // This requires handlification of the V8HeapExplorer.
  }
  return constructor_name;
}


HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
  return filler_->FindOrAddEntry(obj, this);
}


class RootsReferencesExtractor : public ObjectVisitor {
 private:
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  explicit RootsReferencesExtractor(Heap* heap)
      : collecting_all_references_(false),
        previous_reference_count_(0),
        heap_(heap) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  void FillReferences(V8HeapExplorer* explorer) {
    DCHECK(strong_references_.length() <= all_references_.length());
    Builtins* builtins = heap_->isolate()->builtins();
    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
    while (all_index < all_references_.length()) {
      bool is_strong = strong_index < strong_references_.length()
          && strong_references_[strong_index] == all_references_[all_index];
      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                      !is_strong,
                                      all_references_[all_index]);
      if (reference_tags_[tags_index].tag ==
          VisitorSynchronization::kBuiltins) {
        DCHECK(all_references_[all_index]->IsCode());
        explorer->TagBuiltinCodeObject(
            Code::cast(all_references_[all_index]),
            builtins->name(builtin_index++));
      }
      ++all_index;
      if (is_strong) ++strong_index;
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
  Heap* heap_;
};


bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFiller* filler) {
  filler_ = filler;

  // Create references to the synthetic roots.
  SetRootGcRootsReference();
  for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
    SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
  }

  // Make sure builtin code objects get their builtin tags
  // first. Otherwise a particular JSFunction object could set
  // its custom name to a generic builtin.
  RootsReferencesExtractor extractor(heap_);
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);

  // We have to do two passes as sometimes FixedArrays are used
  // to weakly hold their items, and it's impossible to distinguish
  // between these cases without processing the array owner first.
  bool interrupted =
      IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
      IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();

  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  filler_ = NULL;
  return progress_->ProgressReport(true);
}


template<V8HeapExplorer::ExtractReferencesMethod extractor>
bool V8HeapExplorer::IterateAndExtractSinglePass() {
  // Now iterate the whole heap.
  bool interrupted = false;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  // Heap iteration with filtering must run to completion in any case,
  // so on interruption we keep iterating and only skip the extraction.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (interrupted) continue;

    HeapEntry* heap_entry = GetEntry(obj);
    int entry = heap_entry->index();
    if ((this->*extractor)(entry, obj)) {
      SetInternalReference(obj, entry,
                           "map", obj->map(), HeapObject::kMapOffset);
      // Extract unvisited fields as hidden references and restore tags
      // of visited fields.
      IndexedReferencesExtractor refs_extractor(this, obj, entry);
      obj->Iterate(&refs_extractor);
    }

    if (!progress_->ProgressReport(false)) interrupted = true;
  }
  return interrupted;
}


bool V8HeapExplorer::IsEssentialObject(Object* object) {
  return object->IsHeapObject()
      && !object->IsOddball()
      && object != heap_->empty_byte_array()
      && object != heap_->empty_fixed_array()
      && object != heap_->empty_descriptor_array()
      && object != heap_->fixed_array_map()
      && object != heap_->cell_map()
      && object != heap_->global_property_cell_map()
      && object != heap_->shared_function_info_map()
      && object != heap_->free_space_map()
      && object != heap_->one_pointer_filler_map()
      && object != heap_->two_pointer_filler_map();
}
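
// Note (added; a reading of the intent): the singletons filtered out above
// are referenced from almost every heap object. For example,
// GetEntry(heap_->empty_fixed_array()) still yields an entry, but
// SetInternalReference() below suppresses the edge, keeping the snapshot
// from being dominated by links to shared immutable objects.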


void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         String* reference_name,
                                         Object* child_obj,
                                         int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
                               parent_entry,
                               names_->GetName(reference_name),
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}


void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
                                            int parent_entry,
                                            const char* reference_name,
                                            Object* child_obj) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kShortcut,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
}


void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         int index,
                                         Object* child_obj) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kElement,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}


void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          int index,
                                          Object* child_obj,
                                          int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               names_->GetName(index),
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
                                        int parent_entry,
                                        int index,
                                        Object* child_obj) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL && IsEssentialObject(child_obj)) {
    filler_->SetIndexedReference(HeapGraphEdge::kHidden,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}


void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
                                      int parent_entry,
                                      const char* reference_name,
                                      Object* child_obj,
                                      int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kWeak,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
                                      int parent_entry,
                                      int index,
                                      Object* child_obj,
                                      int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kWeak,
                               parent_entry,
                               names_->GetFormatted("%d", index),
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          Name* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    HeapGraphEdge::Type type =
        reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
        ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name = name_format_string != NULL && reference_name->IsString()
        ? names_->GetFormatted(
              name_format_string,
              String::cast(reference_name)->ToCString(
                  DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get())
        : names_->GetName(reference_name);

    filler_->SetNamedReference(type,
                               parent_entry,
                               name,
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}


void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}


void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  DCHECK(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}


void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->gc_roots()->index(),
      snapshot_->gc_subroot(tag));
}


void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
    if (name != NULL) {
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          snapshot_->gc_subroot(tag)->index(),
          name,
          child_entry);
    } else {
      if (is_weak) {
        filler_->SetNamedAutoIndexReference(
            HeapGraphEdge::kWeak,
            snapshot_->gc_subroot(tag)->index(),
            child_entry);
      } else {
        filler_->SetIndexedAutoIndexReference(
            HeapGraphEdge::kElement,
            snapshot_->gc_subroot(tag)->index(),
            child_entry);
      }
    }

    // Add a shortcut to the JS global object reference at the snapshot root.
    if (child_obj->IsNativeContext()) {
      Context* context = Context::cast(child_obj);
      GlobalObject* global = context->global_object();
      if (global->IsJSGlobalObject()) {
        bool is_debug_object =
            heap_->isolate()->debug()->IsDebugGlobal(global);
        if (!is_debug_object && !user_roots_.Contains(global)) {
          user_roots_.Insert(global);
          SetUserGlobalReference(global);
        }
      }
    }
  }
}


const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define STRING_NAME(name, str) NAME_ENTRY(name)
    INTERNALIZED_STRING_LIST(STRING_NAME)
#undef STRING_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}
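
// Illustrative expansion (added; the list entry is an assumption): for a
// STRONG_ROOT_LIST entry like V(Map, fixed_array_map, FixedArrayMap),
// ROOT_NAME expands to
//   strong_gc_subroot_names_.SetTag(heap_->fixed_array_map(),
//                                   "fixed_array_map");
// so every strong root gets a human-readable subroot edge name.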


void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
  if (IsEssentialObject(obj)) {
    HeapEntry* entry = GetEntry(obj);
    if (entry->name()[0] == '\0') {
      entry->set_name(tag);
    }
  }
}


void V8HeapExplorer::MarkAsWeakContainer(Object* object) {
  if (IsEssentialObject(object) && object->IsFixedArray()) {
    weak_containers_.Insert(object);
  }
}


class GlobalObjectsEnumerator : public ObjectVisitor {
 public:
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsNativeContext()) {
        Context* context = Context::cast(*p);
        JSObject* proxy = context->global_proxy();
        if (proxy->IsJSGlobalProxy()) {
          Object* global = proxy->map()->prototype();
          if (global->IsJSGlobalObject()) {
            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
          }
        }
      }
    }
  }
  int count() { return objects_.length(); }
  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }

 private:
  List<Handle<JSGlobalObject> > objects_;
};


// Modifies heap. Must not be run during heap traversal.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = heap_->isolate();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      HandleScope scope(isolate);
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }

  DisallowHeapAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}


class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;
};


class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
      : snapshot_(snapshot),
        names_(snapshot_->profiler()->names()),
        heap_object_map_(snapshot_->profiler()->heap_object_map()),
        entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapObjectsMap* heap_object_map_;
  HeapEntry::Type entries_type_;
};


HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
  intptr_t elements = info->GetElementCount();
  intptr_t size = info->GetSizeInBytes();
  const char* name = elements != -1
      ? names_->GetFormatted(
            "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
      : names_->GetCopy(info->GetLabel());
  return snapshot_->AddEntry(
      entries_type_,
      name,
      heap_object_map_->GenerateId(info),
      size != -1 ? static_cast<int>(size) : 0,
      0);
}
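
// Illustrative sketch (added; the label is an assumption): an embedder
// RetainedObjectInfo with GetLabel() == "Window" and GetElementCount() == 3
// becomes a native entry named "Window / 3 entries"; with an element count
// of -1 it would be named just "Window".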


NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}


NativeObjectsExplorer::~NativeObjectsExplorer() {
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    info->Dispose();
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    delete objects;
  }
  for (HashMap::Entry* p = native_groups_.Start();
       p != NULL;
       p = native_groups_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
    info->Dispose();
  }
  delete synthetic_entries_allocator_;
  delete native_entries_allocator_;
}


int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}


void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = isolate_;
  const GCType major_gc_type = kGCTypeMarkSweepCompact;
  // Record objects that are joined into ObjectGroups.
  isolate->heap()->CallGCPrologueCallbacks(
      major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info == NULL) continue;
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
    for (size_t j = 0; j < group->length; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects[j]);
      list->Add(obj);
      in_groups_.Insert(obj);
    }
    group->info = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
  // Record objects that are not in ObjectGroups, but have a class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}


void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = isolate_;
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    DCHECK(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children;
    for (size_t j = 0; j < group->length; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          parent_entry,
          "native",
          child_entry);
    }
  }
  isolate->global_handles()->RemoveImplicitRefGroups();
}


List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
    v8::RetainedObjectInfo* info) {
  HashMap::Entry* entry =
      objects_by_info_.Lookup(info, InfoHash(info), true);
  if (entry->value != NULL) {
    info->Dispose();
  } else {
    entry->value = new List<HeapObject*>(4);
  }
  return reinterpret_cast<List<HeapObject*>* >(entry->value);
}


bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFiller* filler) {
  filler_ = filler;
  FillRetainedObjects();
  FillImplicitReferences();
  if (EstimateObjectsCount() > 0) {
    for (HashMap::Entry* p = objects_by_info_.Start();
         p != NULL;
         p = objects_by_info_.Next(p)) {
      v8::RetainedObjectInfo* info =
          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
      SetNativeRootReference(info);
      List<HeapObject*>* objects =
          reinterpret_cast<List<HeapObject*>* >(p->value);
      for (int i = 0; i < objects->length(); ++i) {
        SetWrapperNativeReferences(objects->at(i), info);
      }
    }
    SetRootNativeRootsReference();
  }
  filler_ = NULL;
  return true;
}


class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;
};


NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = names_->GetCopy(label);
  uint32_t hash = StringHasher::HashSequentialString(
      label_copy,
      static_cast<int>(strlen(label_copy)),
      isolate_->heap()->HashSeed());
  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
                                                hash, true);
  if (entry->value == NULL) {
    entry->value = new NativeGroupRetainedObjectInfo(label);
  }
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}


void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  DCHECK(child_entry != NULL);
  NativeGroupRetainedObjectInfo* group_info =
      FindOrAddGroupInfo(info->GetGroupLabel());
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kInternal,
      group_entry->index(),
      child_entry);
}


void NativeObjectsExplorer::SetWrapperNativeReferences(
    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  DCHECK(wrapper_entry != NULL);
  HeapEntry* info_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  DCHECK(info_entry != NULL);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             wrapper_entry->index(),
                             "native",
                             info_entry);
  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
                                        info_entry->index(),
                                        wrapper_entry);
}


void NativeObjectsExplorer::SetRootNativeRootsReference() {
  for (HashMap::Entry* entry = native_groups_.Start();
       entry;
       entry = native_groups_.Next(entry)) {
    NativeGroupRetainedObjectInfo* group_info =
        static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
    HeapEntry* group_entry =
        filler_->FindOrAddEntry(group_info, native_entries_allocator_);
    DCHECK(group_entry != NULL);
    filler_->SetIndexedAutoIndexReference(
        HeapGraphEdge::kElement,
        snapshot_->root()->index(),
        group_entry);
  }
}


void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
  if (in_groups_.Contains(*p)) return;
  Isolate* isolate = isolate_;
  v8::RetainedObjectInfo* info =
      isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
  if (info == NULL) return;
  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
}


HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}


bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  heap_->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  heap_->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = heap_;
  debug_heap->Verify();
#endif

  SetProgressTotal(2);  // 2 passes.

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  snapshot_->AddSyntheticRootEntries();

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}


void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}


bool HeapSnapshotGenerator::ProgressReport(bool force) {
  const int kProgressReportGranularity = 10000;
  if (control_ != NULL
      && (force || progress_counter_ % kProgressReportGranularity == 0)) {
    return
        control_->ReportProgressValue(progress_counter_, progress_total_) ==
        v8::ActivityControl::kContinue;
  }
  return true;
}


void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
  if (control_ == NULL) return;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  progress_total_ = iterations_count * (
      v8_heap_explorer_.EstimateObjectsCount(&iterator) +
      dom_explorer_.EstimateObjectsCount());
  progress_counter_ = 0;
}


bool HeapSnapshotGenerator::FillReferences() {
  SnapshotFiller filler(snapshot_, &entries_);
  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
      && dom_explorer_.IterateAndExtractReferences(&filler);
}


template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;
  static const int kUnsigned = 10;
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;
  static const int kUnsigned = 20;
};


class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    DCHECK(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    DCHECK(c != '\0');
    DCHECK(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    DCHECK(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      int s_chunk_size =
          Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      DCHECK(s_chunk_size > 0);
      MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  void Finalize() {
    if (aborted_) return;
    DCHECK(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      int result = SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      DCHECK(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = SNPrintF(buffer, format, n);
      USE(result);
      DCHECK(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    DCHECK(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};
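
// Illustrative usage (added; not part of the original source):
//   OutputStreamWriter writer(stream);
//   writer.AddString("\"edges\":[");
//   writer.AddNumber(42u);
//   writer.Finalize();  // flushes the partial chunk, then EndOfStream().
// Output reaches stream->WriteAsciiChunk() only when the buffer fills,
// so many small appends stay cheap.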


// type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// type, name, id, self_size, edge_count, trace_node_id.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;

void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    allocation_tracker->PrepareForSerialization();
  }
  DCHECK(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);
  SerializeImpl();
  delete writer_;
  writer_ = NULL;
}


void HeapSnapshotJSONSerializer::SerializeImpl() {
  DCHECK(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"trace_function_infos\":[");
  SerializeTraceNodeInfos();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"trace_tree\":[");
  SerializeTraceTree();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}


int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
  HashMap::Entry* cache_entry = strings_.Lookup(
      const_cast<char*>(s), StringHash(s), true);
  if (cache_entry->value == NULL) {
    cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
  }
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


namespace {

template<size_t size> struct ToUnsigned;

template<> struct ToUnsigned<4> {
  typedef uint32_t Type;
};

template<> struct ToUnsigned<8> {
  typedef uint64_t Type;
};

}  // namespace


template<typename T>
static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
  STATIC_ASSERT(static_cast<T>(-1) > 0);  // Check that T is unsigned
  int number_of_digits = 0;
  T t = value;
  do {
    ++number_of_digits;
  } while (t /= 10);

  buffer_pos += number_of_digits;
  int result = buffer_pos;
  do {
    int last_digit = static_cast<int>(value % 10);
    buffer[--buffer_pos] = '0' + last_digit;
    value /= 10;
  } while (value);
  return result;
}
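
// Worked example (added for clarity): utoa_impl(305u, buffer, 2) first
// counts 3 digits, then writes buffer[4] = '5', buffer[3] = '0',
// buffer[2] = '3' from right to left and returns 5, the position just
// past the written digits.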


template<typename T>
static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
  typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
  STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
  return utoa_impl(unsigned_value, buffer, buffer_pos);
}


void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                               bool first_edge) {
  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
  static const int kBufferSize =
      MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
  EmbeddedVector<char, kBufferSize> buffer;
  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
      || edge->type() == HeapGraphEdge::kHidden
      ? edge->index() : GetStringId(edge->name());
  int buffer_pos = 0;
  if (!first_edge) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}
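
// Illustrative sketch (added; the concrete numbers are assumptions): a
// property edge whose name has string id 7 and whose target node record
// starts at offset 18 in the flat "nodes" array is emitted as the row
//   2,7,18
// where 2 is the position of "property" in the "edge_types" meta array
// written by SerializeSnapshot() below.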


void HeapSnapshotJSONSerializer::SerializeEdges() {
  List<HeapGraphEdge*>& edges = snapshot_->children();
  for (int i = 0; i < edges.length(); ++i) {
    DCHECK(i == 0 ||
           edges[i - 1]->from()->index() <= edges[i]->from()->index());
    SerializeEdge(edges[i], i == 0);
    if (writer_->aborted()) return;
  }
}


void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
  // The buffer needs space for 5 unsigned ints, 1 size_t, 6 commas, \n and \0
  static const int kBufferSize =
      5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned  // NOLINT
      + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  if (entry_index(entry) != 0) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}
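
// Illustrative sketch (added; the concrete numbers are assumptions): an
// entry of type "object" (position 3 in "node_types"), name string id 12,
// snapshot object id 101, self size 56 and 4 outgoing edges, with no
// allocation trace, is emitted as the row
//   3,12,101,56,4,0
// matching the "node_fields" order declared in SerializeSnapshot() below.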


void HeapSnapshotJSONSerializer::SerializeNodes() {
  List<HeapEntry>& entries = snapshot_->entries();
  for (int i = 0; i < entries.length(); ++i) {
    SerializeNode(&entries[i]);
    if (writer_->aborted()) return;
  }
}


void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"title\":\"");
  writer_->AddString(snapshot_->title());
  writer_->AddString("\"");
  writer_->AddString(",\"uid\":");
  writer_->AddNumber(snapshot_->uid());
  writer_->AddString(",\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
      JSON_S("node_fields") ":" JSON_A(
          JSON_S("type") ","
          JSON_S("name") ","
          JSON_S("id") ","
          JSON_S("self_size") ","
          JSON_S("edge_count") ","
          JSON_S("trace_node_id")) ","
      JSON_S("node_types") ":" JSON_A(
          JSON_A(
              JSON_S("hidden") ","
              JSON_S("array") ","
              JSON_S("string") ","
              JSON_S("object") ","
              JSON_S("code") ","
              JSON_S("closure") ","
              JSON_S("regexp") ","
              JSON_S("number") ","
              JSON_S("native") ","
              JSON_S("synthetic") ","
              JSON_S("concatenated string") ","
              JSON_S("sliced string")) ","
          JSON_S("string") ","
          JSON_S("number") ","
          JSON_S("number") ","
          JSON_S("number") ","
          JSON_S("number") ","
          JSON_S("number")) ","
      JSON_S("edge_fields") ":" JSON_A(
          JSON_S("type") ","
          JSON_S("name_or_index") ","
          JSON_S("to_node")) ","
      JSON_S("edge_types") ":" JSON_A(
          JSON_A(
              JSON_S("context") ","
              JSON_S("element") ","
              JSON_S("property") ","
              JSON_S("internal") ","
              JSON_S("hidden") ","
              JSON_S("shortcut") ","
              JSON_S("weak")) ","
          JSON_S("string_or_number") ","
          JSON_S("node")) ","
      JSON_S("trace_function_info_fields") ":" JSON_A(
          JSON_S("function_id") ","
          JSON_S("name") ","
          JSON_S("script_name") ","
          JSON_S("script_id") ","
          JSON_S("line") ","
          JSON_S("column")) ","
      JSON_S("trace_node_fields") ":" JSON_A(
          JSON_S("id") ","
          JSON_S("function_info_index") ","
          JSON_S("count") ","
          JSON_S("size") ","
          JSON_S("children"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
  writer_->AddString(",\"trace_function_count\":");
  uint32_t count = 0;
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (tracker) {
    count = tracker->function_info_list().length();
  }
  writer_->AddNumber(count);
}
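
// Illustrative output (added; an abbreviated sketch, not verbatim): the
// macros above fold into one string literal, so the emitted "snapshot"
// section begins roughly as
//   "snapshot":{"title":"...","uid":1,"meta":{"node_fields":
//     ["type","name","id","self_size","edge_count","trace_node_id"],...},
//   "node_count":12345,"edge_count":67890,"trace_function_count":0}
// Consumers use "meta" to decode the flat "nodes" and "edges" arrays.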


static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
  static const char hex_chars[] = "0123456789ABCDEF";
  w->AddString("\\u");
  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
  w->AddCharacter(hex_chars[u & 0xf]);
}


void HeapSnapshotJSONSerializer::SerializeTraceTree() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  AllocationTraceTree* traces = tracker->trace_tree();
  SerializeTraceNode(traces->root());
}


void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
  // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
  const int kBufferSize =
      4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 4 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  buffer_pos = utoa(node->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer[buffer_pos++] = '[';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());

  Vector<AllocationTraceNode*> children = node->children();
  for (int i = 0; i < children.length(); i++) {
    if (i > 0) {
      writer_->AddCharacter(',');
    }
    SerializeTraceNode(children[i]);
  }
  writer_->AddCharacter(']');
}


// 0-based position is converted to 1-based during the serialization.
static int SerializePosition(int position, const Vector<char>& buffer,
                             int buffer_pos) {
  if (position == -1) {
    buffer[buffer_pos++] = '0';
  } else {
    DCHECK(position >= 0);
    buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
  }
  return buffer_pos;
}
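
// Worked example (added for clarity): a position recorded 0-based as
// line 0, column 15 is serialized as 1 and 16, while a missing position
// (-1) is serialized as 0, keeping 0 free as the "unknown" marker in the
// 1-based output.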


void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
  const int kBufferSize =
      6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  const List<AllocationTracker::FunctionInfo*>& list =
      tracker->function_info_list();
  bool first_entry = true;
  for (int i = 0; i < list.length(); i++) {
    AllocationTracker::FunctionInfo* info = list[i];
    int buffer_pos = 0;
    if (first_entry) {
      first_entry = false;
    } else {
      buffer[buffer_pos++] = ',';
    }
    buffer_pos = utoa(info->function_id, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    // The cast is safe because the script id is a non-negative Smi.
    buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
                      buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
    buffer[buffer_pos++] = '\n';
    buffer[buffer_pos++] = '\0';
    writer_->AddString(buffer.start());
  }
}


void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            DCHECK(cursor != 0);
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
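
// Illustrative sketch (added; the sample string is an assumption): the
// string
//   say "hi"<newline><U+2192>
// is serialized roughly as
//   "say \"hi\"\n\u2192"
// with printable ASCII copied through, control characters escaped with
// their dedicated literals, and non-ASCII UTF-8 sequences decoded and
// re-emitted as \uXXXX escapes.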


void HeapSnapshotJSONSerializer::SerializeStrings() {
  ScopedVector<const unsigned char*> sorted_strings(
      strings_.occupancy() + 1);
  for (HashMap::Entry* entry = strings_.Start();
       entry != NULL;
       entry = strings_.Next(entry)) {
    int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
    sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
  }
  writer_->AddString("\"<dummy>\"");
  for (int i = 1; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(sorted_strings[i]);
    if (writer_->aborted()) return;
  }
}


} }  // namespace v8::internal