blob: c80877f62392923612654013cbd2e526472d3d14 [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/profiler/heap-snapshot-generator.h"
6
7#include "src/code-stubs.h"
8#include "src/conversions.h"
9#include "src/debug/debug.h"
10#include "src/objects-body-descriptors.h"
11#include "src/profiler/allocation-tracker.h"
12#include "src/profiler/heap-profiler.h"
13#include "src/profiler/heap-snapshot-generator-inl.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000014
15namespace v8 {
16namespace internal {
17
18
// Named edge: |name| labels the link (property/context-variable/etc.);
// |from| and |to| are entry indices within the owning snapshot.
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
      to_index_(to),
      name_(name) {
  // Only these edge kinds carry a string name; the rest use the indexed ctor.
  DCHECK(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}
29
30
// Indexed edge: the link is identified by a numeric index (array element
// or hidden link) instead of a name.
HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
      to_index_(to),
      index_(index) {
  DCHECK(type == kElement || type == kHidden);
}
37
38
39void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
40 to_entry_ = &snapshot->entries()[to_index_];
41}
42
43
// Sentinel index meaning "no entry assigned yet".
const int HeapEntry::kNoEntry = -1;

// Plain data node of the snapshot graph. children_index_ starts at -1 and
// is assigned later by HeapSnapshot::FillChildren().
HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) { }
60
61
62void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
63 const char* name,
64 HeapEntry* entry) {
65 HeapGraphEdge edge(type, name, this->index(), entry->index());
66 snapshot_->edges().Add(edge);
67 ++children_count_;
68}
69
70
71void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
72 int index,
73 HeapEntry* entry) {
74 HeapGraphEdge edge(type, index, this->index(), entry->index());
75 snapshot_->edges().Add(edge);
76 ++children_count_;
77}
78
79
// Debug-prints this entry and, recursively, its children up to |max_depth|
// levels, indenting each level by two columns. |prefix|/|edge_name|
// describe the edge through which this entry was reached.
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
  base::OS::Print("%6" PRIuS " @%6u %*c %s%s: ", self_size(), id(), indent, ' ',
                  prefix, edge_name);
  if (type() != kString) {
    base::OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    // Strings are printed quoted, truncated to ~40 chars, with embedded
    // newlines escaped so the dump stays one line per entry.
    base::OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        base::OS::Print("%c", *c);
      else
        base::OS::Print("\\n");
      ++c;
    }
    base::OS::Print("\"\n");
  }
  // NOTE(review): an initial max_depth <= 0 never hits 0 here, so recursion
  // is then bounded only by graph depth — callers pass positive depths.
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    // For indexed edges the name is rendered into this local buffer.
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}
139
140
141const char* HeapEntry::TypeAsString() {
142 switch (type()) {
143 case kHidden: return "/hidden/";
144 case kObject: return "/object/";
145 case kClosure: return "/closure/";
146 case kString: return "/string/";
147 case kCode: return "/code/";
148 case kArray: return "/array/";
149 case kRegExp: return "/regexp/";
150 case kHeapNumber: return "/number/";
151 case kNative: return "/native/";
152 case kSynthetic: return "/synthetic/";
153 case kConsString: return "/concatenated string/";
154 case kSlicedString: return "/sliced string/";
155 case kSymbol: return "/symbol/";
156 case kSimdValue: return "/simd/";
157 default: return "???";
158 }
159}
160
161
// It is very important to keep objects that form a heap snapshot
// as small as possible. The expected sizes below are enforced by
// STATIC_ASSERTs in the HeapSnapshot constructor.
namespace {  // Avoid littering the global namespace.

template <size_t ptr_size> struct SnapshotSizeConstants;

// 32-bit builds.
template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 28;
};

// 64-bit builds.
template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 40;
};

}  // namespace
179
180
// A snapshot stores huge numbers of entries and edges, so their sizes are
// pinned by the STATIC_ASSERTs below (see SnapshotSizeConstants above).
HeapSnapshot::HeapSnapshot(HeapProfiler* profiler)
    : profiler_(profiler),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_ASSERT(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_ASSERT(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  // Reference both specializations so the one unused on this word size
  // does not trigger unused warnings.
  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}
200
201
// Deregisters this snapshot from its profiler and frees it. The object
// must be heap-allocated; no member may be touched after this returns.
void HeapSnapshot::Delete() {
  profiler_->RemoveSnapshot(this);
  delete this;
}


// Records the highest object id assigned so far, so later queries can tell
// pre-snapshot objects from ones allocated afterwards.
void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}
211
212
213void HeapSnapshot::AddSyntheticRootEntries() {
214 AddRootEntry();
215 AddGcRootsEntry();
216 SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
217 for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
218 AddGcSubrootEntry(tag, id);
219 id += HeapObjectsMap::kObjectIdStep;
220 }
221 DCHECK(HeapObjectsMap::kFirstAvailableObjectId == id);
222}
223
224
225HeapEntry* HeapSnapshot::AddRootEntry() {
226 DCHECK(root_index_ == HeapEntry::kNoEntry);
227 DCHECK(entries_.is_empty()); // Root entry must be the first one.
228 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
229 "",
230 HeapObjectsMap::kInternalRootObjectId,
231 0,
232 0);
233 root_index_ = entry->index();
234 DCHECK(root_index_ == 0);
235 return entry;
236}
237
238
239HeapEntry* HeapSnapshot::AddGcRootsEntry() {
240 DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
241 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
242 "(GC roots)",
243 HeapObjectsMap::kGcRootsObjectId,
244 0,
245 0);
246 gc_roots_index_ = entry->index();
247 return entry;
248}
249
250
251HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
252 DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
253 DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
254 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
255 VisitorSynchronization::kTagNames[tag], id, 0, 0);
256 gc_subroot_indexes_[tag] = entry->index();
257 return entry;
258}
259
260
261HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
262 const char* name,
263 SnapshotObjectId id,
264 size_t size,
265 unsigned trace_node_id) {
266 HeapEntry entry(this, type, name, id, size, trace_node_id);
267 entries_.Add(entry);
268 return &entries_.last();
269}
270
271
// Converts the flat edge list into per-entry child vectors: first each
// entry claims a contiguous slice of children_ (via set_children_index),
// then every edge resolves its to-index into a pointer and registers
// itself with its source entry.
void HeapSnapshot::FillChildren() {
  DCHECK(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  // Every child slot must have been claimed by exactly one entry.
  DCHECK(edges().length() == children_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}
287
288
289class FindEntryById {
290 public:
291 explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
292 int operator()(HeapEntry* const* entry) {
293 if ((*entry)->id() == id_) return 0;
294 return (*entry)->id() < id_ ? -1 : 1;
295 }
296 private:
297 SnapshotObjectId id_;
298};
299
300
301HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
302 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
303 // Perform a binary search by id.
304 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
305 if (index == -1)
306 return NULL;
307 return entries_by_id->at(index);
308}
309
310
311template<class T>
312static int SortByIds(const T* entry1_ptr,
313 const T* entry2_ptr) {
314 if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
315 return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
316}
317
318
319List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
320 if (sorted_entries_.is_empty()) {
321 sorted_entries_.Allocate(entries_.length());
322 for (int i = 0; i < entries_.length(); ++i) {
323 sorted_entries_[i] = &entries_[i];
324 }
325 sorted_entries_.Sort<int (*)(HeapEntry* const*, HeapEntry* const*)>(
326 SortByIds);
327 }
328 return &sorted_entries_;
329}
330
331
// Debug helper: dumps the whole graph starting at the root entry.
void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}


// Approximate memory retained by the snapshot itself (not by the heap it
// describes): the object plus its four backing lists.
size_t HeapSnapshot::RawSnapshotSize() const {
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}
345
346
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
// First id handed to a real heap object; everything below is reserved for
// the synthetic root, the GC-roots entry and the per-tag subroots.
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;


// Key equality for entries_map_: keys are raw heap addresses.
static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}
362
363
// Maps live heap addresses to stable snapshot object ids; entries_map_
// values are indices into entries_.
HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do lookup in HashMap we see no difference between two cases:
  // it has an entry with NULL as the value or it has created
  // a new entry on the fly with NULL as the default value.
  // With such a dummy element we have a guarantee that all entries_map_
  // entries will have a value field greater than 0.
  // This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}
377
378
// Updates the address->entry mapping when the GC moves an object from
// |from| to |to|, carrying the object's id along. Returns true iff |from|
// was tracked.
bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  DCHECK(to != NULL);
  DCHECK(from != NULL);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
  if (from_value == NULL) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    base::HashMap::Entry* to_entry =
        entries_map_.LookupOrInsert(to, ComputePointerHash(to));
    if (to_entry->value != NULL) {
      // We found the existing entry with to address for an old object.
      // Without this operation we will have two EntryInfo's with the same
      // value in addr field. It is bad because later at RemoveDeadEntries
      // one of this entry will be removed with the corresponding entries_map_
      // entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    // Size of an object can change during its life, so to keep information
    // about the object in entries_ consistent, we have to adjust size when the
    // object is migrated.
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             static_cast<void*>(from), static_cast<void*>(to),
             entries_.at(from_entry_info_index).size, object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    // Re-point the |to| hash slot at the moved object's EntryInfo, keeping
    // its original id.
    to_entry->value = from_value;
  }
  return from_value != NULL;
}
423
424
// Refreshes the recorded size for (and, if needed, starts tracking) the
// object at |addr| without marking it accessed.
void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}


// Returns the id assigned to the object at |addr|, or 0 when untracked.
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  base::HashMap::Entry* entry =
      entries_map_.Lookup(addr, ComputePointerHash(addr));
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  // entries_ always holds one extra dummy element at index 0.
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}
439
440
// Returns the id for the object at |addr|, assigning a fresh one (stepping
// next_id_) if the address is not yet tracked. Updates the recorded size
// and accessed flag either way.
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  base::HashMap::Entry* entry =
      entries_map_.LookupOrInsert(addr, ComputePointerHash(addr));
  if (entry->value != NULL) {
    // Already tracked: refresh size/accessed and return the existing id.
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             static_cast<void*>(addr), entry_info.size, size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  // New object: hash-map value is the index of the EntryInfo about to be
  // appended.
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
466
467
// Drops the recorded time intervals; the address->id map itself stays.
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}


// Re-synchronizes the map with the live heap: forces a full GC to make the
// heap iterable, walks every object to refresh/assign entries, then drops
// entries whose objects did not show up (i.e. died).
void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapObjectsMap::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    // Marks the entry accessed, so RemoveDeadEntries keeps it.
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object : %p %6d. Next address is %p\n",
             static_cast<void*>(obj->address()), obj->Size(),
             static_cast<void*>(obj->address() + obj->Size()));
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}
497
498
namespace {


// Pairs a heap object with the size recorded for it in the map. Used only
// for FLAG_heap_profiler_trace_objects diagnostics in
// HeapObjectsMap::FindUntrackedObjects.
struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
    : obj(obj),
      expected_size(expected_size) {
  }

  HeapObject* obj;
  // Size recorded in the map; 0 means the object was untracked.
  int expected_size;

  // A record is valid when the object's current size matches the one on
  // file.
  bool IsValid() const { return expected_size == obj->Size(); }

  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object : %p %6d. Next address is %p\n",
             static_cast<void*>(obj->address()), obj->Size(),
             static_cast<void*>(obj->address() + obj->Size()));
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n", expected_size,
             static_cast<void*>(obj->address()), obj->Size(),
             static_cast<void*>(obj->address() + obj->Size()));
    } else {
      PrintF("Good object : %p %6d. Next address is %p\n",
             static_cast<void*>(obj->address()), expected_size,
             static_cast<void*>(obj->address() + obj->Size()));
    }
  }
};


// Orders HeapObjectInfo records by object address.
static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}


}  // namespace
539
540
// Consistency check: walks the heap and counts objects that are either not
// in the map or whose recorded size disagrees with their current size.
// With FLAG_heap_profiler_trace_objects, additionally prints a report of
// the mismatching objects (plus the object following each, for context).
int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects(1000);

  HeapIterator iterator(heap_);
  int untracked = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    base::HashMap::Entry* entry =
        entries_map_.Lookup(obj->address(), ComputePointerHash(obj->address()));
    if (entry == NULL) {
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                         static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size))
          ++untracked;
      } else {
        // Without tracing, a size mismatch is a hard failure.
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        ++untracked;
        // Summarize the skipped run and show the object just before the
        // bad one for context.
        if (last_printed_object != i - 1) {
          if (i > 0) {
            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
            heap_objects[i - 1].Print();
          }
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}
604
605
// Refreshes the object map, opens a new time interval, then streams to
// |stream| a v8::HeapStatsUpdate for every interval whose object count or
// total size changed since it was last reported. Relies on entries_ being
// ordered by ascending id so each interval owns a contiguous slice.
// Returns the highest assigned id; optionally reports the total tracked
// time span via |timestamp_us|.
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
                                                      int64_t* timestamp_us) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  DCHECK(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    // Sum up all surviving entries whose id falls into this interval.
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      // Cache the new totals in the interval while emitting the update.
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  DCHECK(entry_info == end_entry_info);
  // Flush whatever is left in the buffer.
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  if (timestamp_us) {
    *timestamp_us = (time_intervals_.last().timestamp -
                     time_intervals_[0].timestamp).InMicroseconds();
  }
  return last_assigned_id();
}
655
656
// Compacts entries_ in place, keeping only entries marked accessed by the
// preceding heap walk (UpdateHeapObjectsMap). Surviving entries keep their
// ids but shift to new indices, so the hash-map values are re-pointed;
// dead entries are removed from the hash map as well.
void HeapObjectsMap::RemoveDeadEntries() {
  DCHECK(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  // Slot 0 is the permanent dummy entry; compaction starts at 1.
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      // Reset the flag for the next marking round.
      entries_.at(first_free_entry).accessed = false;
      base::HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr));
      DCHECK(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      // addr == NULL marks entries already detached by MoveObject.
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}
685
686
// Derives a stable id for an embedder-retained object by mixing the
// embedder-provided hash with the label hash and (when available) the
// element count. The final left shift keeps these ids even — see the
// comment above kInternalRootObjectId.
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           heap_->HashSeed());
  intptr_t element_count = info->GetElementCount();
  // -1 means the embedder did not report a count.
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
699
700
// Bytes used by the map itself: the hash table's backing store plus the
// entry and time-interval lists.
size_t HeapObjectsMap::GetUsedMemorySize() const {
  return sizeof(*this) +
         sizeof(base::HashMap::Entry) * entries_map_.capacity() +
         GetMemoryUsedByList(entries_) + GetMemoryUsedByList(time_intervals_);
}

// Maps HeapThing pointers to snapshot entry indices (pointer identity).
HeapEntriesMap::HeapEntriesMap() : entries_(base::HashMap::PointersMatch) {}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000708
709int HeapEntriesMap::Map(HeapThing thing) {
Ben Murdoch61f157c2016-09-16 13:49:30 +0100710 base::HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000711 if (cache_entry == NULL) return HeapEntry::kNoEntry;
712 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
713}
714
715
// Associates |thing| with snapshot entry index |entry|; each thing may be
// paired at most once.
void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  base::HashMap::Entry* cache_entry =
      entries_.LookupOrInsert(thing, Hash(thing));
  DCHECK(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}

// Identity set of heap objects; each member may carry a const char* tag.
HeapObjectsSet::HeapObjectsSet() : entries_(base::HashMap::PointersMatch) {}

// Empties the set (including any tags stored in it).
void HeapObjectsSet::Clear() {
  entries_.Clear();
}
728
729
730bool HeapObjectsSet::Contains(Object* obj) {
731 if (!obj->IsHeapObject()) return false;
732 HeapObject* object = HeapObject::cast(obj);
733 return entries_.Lookup(object, HeapEntriesMap::Hash(object)) != NULL;
734}
735
736
737void HeapObjectsSet::Insert(Object* obj) {
738 if (!obj->IsHeapObject()) return;
739 HeapObject* object = HeapObject::cast(obj);
740 entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
741}
742
743
744const char* HeapObjectsSet::GetTag(Object* obj) {
745 HeapObject* object = HeapObject::cast(obj);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100746 base::HashMap::Entry* cache_entry =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000747 entries_.Lookup(object, HeapEntriesMap::Hash(object));
748 return cache_entry != NULL
749 ? reinterpret_cast<const char*>(cache_entry->value)
750 : NULL;
751}
752
753
754void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
755 if (!obj->IsHeapObject()) return;
756 HeapObject* object = HeapObject::cast(obj);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100757 base::HashMap::Entry* cache_entry =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000758 entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
759 cache_entry->value = const_cast<char*>(tag);
760}
761
762
// Walks the V8 heap and populates |snapshot| with entries and edges.
// |progress| receives progress callbacks; |resolver| supplies display
// names for global objects.
V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}


V8HeapExplorer::~V8HeapExplorer() {
}


// HeapEntriesAllocator implementation: a HeapThing here is a HeapObject*.
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}
784
785
// Creates a snapshot entry for |object|, choosing entry type and display
// name from the object's runtime type. Order of the checks matters: more
// specific types (e.g. JSFunction, JSRegExp) must be tested before the
// generic JSObject/Context/FixedArray cases they would also match.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = names_->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSBoundFunction()) {
    return AddEntry(object, HeapEntry::kClosure, "native_bind");
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      // Globals tagged earlier (e.g. via the name resolver) get the tag
      // appended to the constructor name.
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = names_->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String* string = String::cast(object);
    // Cons/sliced strings get their own entry types so retained size of
    // the backing strings is attributed correctly.
    if (string->IsConsString())
      return AddEntry(object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(object,
                    HeapEntry::kString,
                    names_->GetName(String::cast(object)));
  } else if (object->IsSymbol()) {
    if (Symbol::cast(object)->is_private())
      return AddEntry(object, HeapEntry::kHidden, "private symbol");
    else
      return AddEntry(object, HeapEntry::kSymbol, "symbol");
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() || object->IsFixedDoubleArray() ||
             object->IsByteArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  } else if (object->IsSimd128Value()) {
    return AddEntry(object, HeapEntry::kSimdValue, "simd");
  }
  // Everything else is an internal VM structure.
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
855
856
// Convenience overload: sizes the entry from the object itself.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  return AddEntry(object->address(), type, name, object->Size());
}


// Adds a snapshot entry for the object at |address|: assigns (or reuses)
// its object id and, when allocation tracking is on, links it to the
// allocation trace node for that address.
HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}
878
879
// Glue between the explorers and the snapshot: tracks which HeapThings
// already have entries (via HeapEntriesMap) and forwards edge creation to
// the parent HeapEntry. Auto-index variants number children 1..N in
// insertion order.
class SnapshotFiller {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        names_(snapshot->profiler()->names()),
        entries_(entries) { }
  // Allocates an entry for |ptr| and records the thing->index pairing.
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  // Returns the existing entry for |ptr|, or NULL if none was added yet.
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    // Children are numbered starting at 1.
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    // The numeric index is interned as the edge name.
    parent_entry->SetNamedReference(
        type,
        names_->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapEntriesMap* entries_;
};
936
937
// Produces a human-readable "system / ..." label for heap objects that have
// no JS-level identity (maps, cells, structs, oddballs, ...). The label is
// used as the node name in the heap snapshot.
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      // For maps of strings, include the concrete string representation in
      // the label, e.g. "system / Map (OneByteInternalizedString)".
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case CELL_TYPE: return "system / Cell";
    case PROPERTY_CELL_TYPE: return "system / PropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
// Every struct type from STRUCT_LIST gets a "system / <Name>" label.
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}
959
960
961int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
962 int objects_count = 0;
963 for (HeapObject* obj = iterator->next();
964 obj != NULL;
965 obj = iterator->next()) {
966 objects_count++;
967 }
968 return objects_count;
969}
970
971
// ObjectVisitor that reports every tagged pointer inside |parent_obj| as a
// hidden reference, except fields that V8HeapExplorer already reported as
// named/indexed references (tracked via generator_->marks_, which this
// visitor clears as it consumes them).
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator, HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_start_(HeapObject::RawField(parent_obj_, 0)),
        parent_end_(HeapObject::RawField(parent_obj_, parent_obj_->Size())),
        parent_(parent),
        next_index_(0) {}
  void VisitCodeEntry(Address entry_address) override {
    Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
    generator_->SetInternalReference(parent_obj_, parent_, "code", code);
    generator_->TagCodeObject(code);
  }
  void VisitPointers(Object** start, Object** end) override {
    for (Object** p = start; p < end; p++) {
      intptr_t index =
          static_cast<intptr_t>(p - HeapObject::RawField(parent_obj_, 0));
      // next_index_ is incremented for every visited slot, including the
      // skipped ones, so hidden-edge indices stay stable field positions.
      ++next_index_;
      // |p| could be outside of the object, e.g., while visiting RelocInfo of
      // code objects.
      if (p >= parent_start_ && p < parent_end_ && generator_->marks_[index]) {
        // Field already reported as a named reference: clear the mark and
        // do not emit a duplicate hidden edge.
        generator_->marks_[index] = false;
        continue;
      }
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }

 private:
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  // Bounds of the parent object's field area; pointers outside these
  // bounds (from relocation info) are always reported.
  Object** parent_start_;
  Object** parent_end_;
  int parent_;
  // 1-based running index of the visited pointer slot.
  int next_index_;
};
1010
1011
// First extraction pass: dispatches on the object's concrete type and
// records its type-specific references. FixedArrays are skipped here
// (returning false) and handled in pass 2, after weak-container marks set
// during this pass are in place. The order of the checks matters: more
// specific types (e.g. JSWeakSet) must be tested before their supertypes
// (JSObject). Returns true if the object was processed.
bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
  if (obj->IsFixedArray()) return false;  // FixedArrays are processed on pass 2

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSArrayBuffer()) {
    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
  } else if (obj->IsJSObject()) {
    // Collections get their table reference first, then fall through to
    // the generic JSObject extraction below.
    if (obj->IsJSWeakSet()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
    } else if (obj->IsJSWeakMap()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
    } else if (obj->IsJSSet()) {
      ExtractJSCollectionReferences(entry, JSSet::cast(obj));
    } else if (obj->IsJSMap()) {
      ExtractJSCollectionReferences(entry, JSMap::cast(obj));
    }
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsSymbol()) {
    ExtractSymbolReferences(entry, Symbol::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorInfo()) {
    ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  return true;
}
1057
1058
1059bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
1060 if (!obj->IsFixedArray()) return false;
1061
1062 if (obj->IsContext()) {
1063 ExtractContextReferences(entry, Context::cast(obj));
1064 } else {
1065 ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
1066 }
1067 return true;
1068}
1069
1070
// A global proxy's only extracted reference is its native context.
void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context(),
                       JSGlobalProxy::kNativeContextOffset);
}
1077
1078
// Records the references of a generic JSObject: own properties, elements,
// embedder internal fields, the __proto__ link, and then type-specific
// extras for bound functions, functions, global objects and array buffer
// views. Finally links the properties and elements backing stores.
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  PrototypeIterator iter(heap_->isolate(), js_obj);
  SetPropertyReference(obj, entry, heap_->proto_string(), iter.GetCurrent());
  if (obj->IsJSBoundFunction()) {
    // Bound function: expose the bound receiver, target and each bound
    // argument as native-bind references.
    JSBoundFunction* js_fun = JSBoundFunction::cast(obj);
    TagObject(js_fun->bound_arguments(), "(bound arguments)");
    SetInternalReference(js_fun, entry, "bindings", js_fun->bound_arguments(),
                         JSBoundFunction::kBoundArgumentsOffset);
    SetNativeBindReference(js_obj, entry, "bound_this", js_fun->bound_this());
    SetNativeBindReference(js_obj, entry, "bound_function",
                           js_fun->bound_target_function());
    FixedArray* bindings = js_fun->bound_arguments();
    for (int i = 0; i < bindings->length(); i++) {
      const char* reference_name = names_->GetFormatted("bound_argument_%d", i);
      SetNativeBindReference(js_obj, entry, reference_name, bindings->get(i));
    }
  } else if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    // The prototype-or-initial-map slot holds either the function's
    // "prototype" property or, once an initial map exists, that map
    // (whose prototype() then is the property value).
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole(heap_->isolate())) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    TagObject(js_fun->literals(), "(function literals)");
    SetInternalReference(js_fun, entry, "literals", js_fun->literals(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    // The function list link must not keep functions alive.
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
    // Ensure no new weak references appeared in JSFunction.
    STATIC_ASSERT(JSFunction::kCodeEntryOffset ==
                  JSFunction::kNonWeakFieldsEndOffset);
    STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                  JSFunction::kNextFunctionLinkOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
                 == JSFunction::kSize);
  } else if (obj->IsJSGlobalObject()) {
    JSGlobalObject* global_obj = JSGlobalObject::cast(obj);
    SetInternalReference(global_obj, entry, "native_context",
                         global_obj->native_context(),
                         JSGlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry, "global_proxy",
                         global_obj->global_proxy(),
                         JSGlobalObject::kGlobalProxyOffset);
    // Both JSGlobalObject pointer fields are covered above.
    STATIC_ASSERT(JSGlobalObject::kSize - JSObject::kHeaderSize ==
                  2 * kPointerSize);
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer(),
                         JSArrayBufferView::kBufferOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}
1165
1166
1167void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1168 if (string->IsConsString()) {
1169 ConsString* cs = ConsString::cast(string);
1170 SetInternalReference(cs, entry, "first", cs->first(),
1171 ConsString::kFirstOffset);
1172 SetInternalReference(cs, entry, "second", cs->second(),
1173 ConsString::kSecondOffset);
1174 } else if (string->IsSlicedString()) {
1175 SlicedString* ss = SlicedString::cast(string);
1176 SetInternalReference(ss, entry, "parent", ss->parent(),
1177 SlicedString::kParentOffset);
1178 }
1179}
1180
1181
// A Symbol's only extracted reference is its name slot.
void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
  SetInternalReference(symbol, entry,
                       "name", symbol->name(),
                       Symbol::kNameOffset);
}
1187
1188
// JSSet/JSMap keep their entries in a backing table; link it here.
void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
                                                   JSCollection* collection) {
  SetInternalReference(collection, entry, "table", collection->table(),
                       JSCollection::kTableOffset);
}
1194
1195
// JSWeakSet/JSWeakMap: the backing table is marked as a weak container,
// which makes ExtractFixedArrayReferences emit its elements as weak edges
// on pass 2, then linked via an internal "table" edge.
void V8HeapExplorer::ExtractJSWeakCollectionReferences(
    int entry, JSWeakCollection* collection) {
  MarkAsWeakContainer(collection->table());
  SetInternalReference(collection, entry,
                       "table", collection->table(),
                       JSWeakCollection::kTableOffset);
}
1203
1204
// Records the references held by a Context: context-allocated locals (for
// declaration contexts), the fixed header slots, and — for native contexts
// only — the native-context fields plus the optimized/deoptimized code
// lists. Slots at or beyond FIRST_WEAK_SLOT are emitted as weak
// references, with the map cache as the one strong exception.
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    // A named function expression also stores its own name in the context.
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

// Emits one edge per context slot; weak slots (except the map cache) get
// weak edges so the snapshot does not suggest they retain their targets.
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT || \
      Context::index == Context::MAP_CACHE_INDEX) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
                         FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
                     FixedArray::OffsetOfElementAt(Context::index)); \
  }
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, HeapObject, extension);
  EXTRACT_CONTEXT_FIELD(NATIVE_CONTEXT_INDEX, Context, native_context);
  if (context->IsNativeContext()) {
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD)
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
    EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
    EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
#undef EXTRACT_CONTEXT_FIELD
    // Guard against new weak slots being added without updating the
    // explicit extraction list above.
    STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
                  Context::FIRST_WEAK_SLOT);
    STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
                  Context::NATIVE_CONTEXT_SLOTS);
    STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 4 ==
                  Context::NATIVE_CONTEXT_SLOTS);
  }
}
1258
1259
1260void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1261 Object* raw_transitions_or_prototype_info = map->raw_transitions();
1262 if (TransitionArray::IsFullTransitionArray(
1263 raw_transitions_or_prototype_info)) {
1264 TransitionArray* transitions =
1265 TransitionArray::cast(raw_transitions_or_prototype_info);
1266 int transitions_entry = GetEntry(transitions)->index();
1267
1268 if (map->CanTransition()) {
1269 if (transitions->HasPrototypeTransitions()) {
1270 FixedArray* prototype_transitions =
1271 transitions->GetPrototypeTransitions();
1272 MarkAsWeakContainer(prototype_transitions);
1273 TagObject(prototype_transitions, "(prototype transitions");
1274 SetInternalReference(transitions, transitions_entry,
1275 "prototype_transitions", prototype_transitions);
1276 }
1277 // TODO(alph): transitions keys are strong links.
1278 MarkAsWeakContainer(transitions);
1279 }
1280
1281 TagObject(transitions, "(transition array)");
1282 SetInternalReference(map, entry, "transitions", transitions,
1283 Map::kTransitionsOrPrototypeInfoOffset);
1284 } else if (TransitionArray::IsSimpleTransition(
1285 raw_transitions_or_prototype_info)) {
1286 TagObject(raw_transitions_or_prototype_info, "(transition)");
1287 SetInternalReference(map, entry, "transition",
1288 raw_transitions_or_prototype_info,
1289 Map::kTransitionsOrPrototypeInfoOffset);
1290 } else if (map->is_prototype_map()) {
1291 TagObject(raw_transitions_or_prototype_info, "prototype_info");
1292 SetInternalReference(map, entry, "prototype_info",
1293 raw_transitions_or_prototype_info,
1294 Map::kTransitionsOrPrototypeInfoOffset);
1295 }
1296 DescriptorArray* descriptors = map->instance_descriptors();
1297 TagObject(descriptors, "(map descriptors)");
1298 SetInternalReference(map, entry,
1299 "descriptors", descriptors,
1300 Map::kDescriptorsOffset);
1301
1302 MarkAsWeakContainer(map->code_cache());
1303 SetInternalReference(map, entry,
1304 "code_cache", map->code_cache(),
1305 Map::kCodeCacheOffset);
1306 SetInternalReference(map, entry,
1307 "prototype", map->prototype(), Map::kPrototypeOffset);
1308 Object* constructor_or_backpointer = map->constructor_or_backpointer();
1309 if (constructor_or_backpointer->IsMap()) {
1310 TagObject(constructor_or_backpointer, "(back pointer)");
1311 SetInternalReference(map, entry, "back_pointer", constructor_or_backpointer,
1312 Map::kConstructorOrBackPointerOffset);
1313 } else {
1314 SetInternalReference(map, entry, "constructor", constructor_or_backpointer,
1315 Map::kConstructorOrBackPointerOffset);
1316 }
1317 TagObject(map->dependent_code(), "(dependent code)");
1318 MarkAsWeakContainer(map->dependent_code());
1319 SetInternalReference(map, entry,
1320 "dependent_code", map->dependent_code(),
1321 Map::kDependentCodeOffset);
1322}
1323
1324
// Records the outgoing references of a SharedFunctionInfo and tags the
// associated code objects with the function's debug name so they are
// recognizable in the snapshot.
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  String* shared_name = shared->DebugName();
  const char* name = NULL;
  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
    name = names_->GetName(shared_name);
    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
  } else {
    // Anonymous function: fall back to the code kind for the tag.
    TagObject(shared->code(), names_->GetFormatted("(%s code)",
        Code::Kind2String(shared->code()->kind())));
  }

  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  // Reuse the debug name in the construct stub tag when available.
  const char* construct_stub_name = name ?
      names_->GetFormatted("(construct stub code for %s)", name) :
      "(construct stub code)";
  TagObject(shared->construct_stub(), construct_stub_name);
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry, "function_identifier",
                       shared->function_identifier(),
                       SharedFunctionInfo::kFunctionIdentifierOffset);
  SetInternalReference(obj, entry,
                       "optimized_code_map", shared->optimized_code_map(),
                       SharedFunctionInfo::kOptimizedCodeMapOffset);
  SetInternalReference(obj, entry, "feedback_metadata",
                       shared->feedback_metadata(),
                       SharedFunctionInfo::kFeedbackMetadataOffset);
}
1377
1378
1379void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1380 HeapObject* obj = script;
1381 SetInternalReference(obj, entry,
1382 "source", script->source(),
1383 Script::kSourceOffset);
1384 SetInternalReference(obj, entry,
1385 "name", script->name(),
1386 Script::kNameOffset);
1387 SetInternalReference(obj, entry,
1388 "context_data", script->context_data(),
1389 Script::kContextOffset);
1390 TagObject(script->line_ends(), "(script line ends)");
1391 SetInternalReference(obj, entry,
1392 "line_ends", script->line_ends(),
1393 Script::kLineEndsOffset);
1394}
1395
1396
// Records the references of an AccessorInfo: its name, the expected
// receiver type, and the getter/setter/data callback fields.
void V8HeapExplorer::ExtractAccessorInfoReferences(
    int entry, AccessorInfo* accessor_info) {
  SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
                       AccessorInfo::kNameOffset);
  SetInternalReference(accessor_info, entry, "expected_receiver_type",
                       accessor_info->expected_receiver_type(),
                       AccessorInfo::kExpectedReceiverTypeOffset);
  // NOTE(review): this check looks tautological — the parameter is already
  // an AccessorInfo*. Presumably a leftover from when ExecutableAccessorInfo
  // was a distinct subtype; confirm before simplifying.
  if (accessor_info->IsAccessorInfo()) {
    AccessorInfo* executable_accessor_info = AccessorInfo::cast(accessor_info);
    SetInternalReference(executable_accessor_info, entry, "getter",
                         executable_accessor_info->getter(),
                         AccessorInfo::kGetterOffset);
    SetInternalReference(executable_accessor_info, entry, "setter",
                         executable_accessor_info->setter(),
                         AccessorInfo::kSetterOffset);
    SetInternalReference(executable_accessor_info, entry, "data",
                         executable_accessor_info->data(),
                         AccessorInfo::kDataOffset);
  }
}
1417
1418
// An AccessorPair holds the getter and setter of an accessor property.
void V8HeapExplorer::ExtractAccessorPairReferences(
    int entry, AccessorPair* accessors) {
  SetInternalReference(accessors, entry, "getter", accessors->getter(),
                       AccessorPair::kGetterOffset);
  SetInternalReference(accessors, entry, "setter", accessors->setter(),
                       AccessorPair::kSetterOffset);
}
1426
1427
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001428void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
1429 TagObject(code, names_->GetFormatted("(%s builtin)", name));
1430}
1431
1432
1433void V8HeapExplorer::TagCodeObject(Code* code) {
1434 if (code->kind() == Code::STUB) {
1435 TagObject(code, names_->GetFormatted(
1436 "(%s code)",
1437 CodeStub::MajorName(CodeStub::GetMajorKey(code))));
1438 }
1439}
1440
1441
// Records the metadata references of a Code object: relocation info,
// handler table, deoptimization data, GC metadata, plus kind-specific
// fields (type feedback for FUNCTION code, the weak code-list link for
// OPTIMIZED_FUNCTION code).
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagCodeObject(code);
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    // The code list link is weak: it must not retain optimized code.
    SetWeakReference(code, entry,
                     "next_code_link", code->next_code_link(),
                     Code::kNextCodeLinkOffset);
  }
}
1469
1470
// A Box wraps a single value; expose it as an internal edge.
void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
  SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
}
1474
1475
// A Cell holds a single value; expose it as an internal edge.
void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
  SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
}
1479
1480
// Records a PropertyCell's value and its dependent code. The dependent
// code array is marked as a weak container so that its elements become
// weak edges when the array is processed on pass 2.
void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
                                                   PropertyCell* cell) {
  SetInternalReference(cell, entry, "value", cell->value(),
                       PropertyCell::kValueOffset);
  MarkAsWeakContainer(cell->dependent_code());
  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
                       PropertyCell::kDependentCodeOffset);
}
1489
1490
// Records an AllocationSite's transition info, nested site, and (weakly
// held) dependent code. The weak_next link is deliberately not visited.
void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
                                                     AllocationSite* site) {
  SetInternalReference(site, entry, "transition_info", site->transition_info(),
                       AllocationSite::kTransitionInfoOffset);
  SetInternalReference(site, entry, "nested_site", site->nested_site(),
                       AllocationSite::kNestedSiteOffset);
  MarkAsWeakContainer(site->dependent_code());
  SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
                       AllocationSite::kDependentCodeOffset);
  // Do not visit weak_next as it is not visited by the StaticVisitor,
  // and we're not very interested in weak_next field here.
  STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
                AllocationSite::kPointerFieldsEndOffset);
}
1505
1506
1507class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1508 public:
1509 JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1510 : size_(size)
1511 , explorer_(explorer) {
1512 }
1513 virtual HeapEntry* AllocateEntry(HeapThing ptr) {
1514 return explorer_->AddEntry(
1515 static_cast<Address>(ptr),
1516 HeapEntry::kNative, "system / JSArrayBufferData", size_);
1517 }
1518 private:
1519 size_t size_;
1520 V8HeapExplorer* explorer_;
1521};
1522
1523
1524void V8HeapExplorer::ExtractJSArrayBufferReferences(
1525 int entry, JSArrayBuffer* buffer) {
1526 // Setup a reference to a native memory backing_store object.
1527 if (!buffer->backing_store())
1528 return;
1529 size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
1530 JSArrayBufferDataEntryAllocator allocator(data_size, this);
1531 HeapEntry* data_entry =
1532 filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
1533 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1534 entry, "backing_store", data_entry);
1535}
1536
1537
1538void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
1539 bool is_weak = weak_containers_.Contains(array);
1540 for (int i = 0, l = array->length(); i < l; ++i) {
1541 if (is_weak) {
1542 SetWeakReference(array, entry,
1543 i, array->get(i), array->OffsetOfElementAt(i));
1544 } else {
1545 SetInternalReference(array, entry,
1546 i, array->get(i), array->OffsetOfElementAt(i));
1547 }
1548 }
1549}
1550
1551
// Emits property edges for a JSObject's own properties, handling the three
// property storage layouts: fast (descriptor array + fields), global
// object (GlobalDictionary of PropertyCells), and slow (NameDictionary).
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  Isolate* isolate = js_obj->GetIsolate();
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      PropertyDetails details = descs->GetDetails(i);
      switch (details.location()) {
        case kField: {
          // Smi and unboxed-double fields hold no heap pointers; skip.
          Representation r = details.representation();
          if (r.IsSmi() || r.IsDouble()) break;

          Name* k = descs->GetKey(i);
          FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i);
          Object* value = js_obj->RawFastPropertyAt(field_index);
          // Out-of-object properties have no in-object offset to report.
          int field_offset =
              field_index.is_inobject() ? field_index.offset() : -1;

          SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, k,
                                             value, NULL, field_offset);
          break;
        }
        case kDescriptor:
          SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                             descs->GetKey(i),
                                             descs->GetValue(i));
          break;
      }
    }
  } else if (js_obj->IsJSGlobalObject()) {
    // We assume that global objects can only have slow properties.
    GlobalDictionary* dictionary = js_obj->global_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(isolate, k)) {
        // Global dictionary values are PropertyCells; the property value
        // and details live inside the cell.
        DCHECK(dictionary->ValueAt(i)->IsPropertyCell());
        PropertyCell* cell = PropertyCell::cast(dictionary->ValueAt(i));
        Object* value = cell->value();
        PropertyDetails details = cell->property_details();
        SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                           Name::cast(k), value);
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(isolate, k)) {
        Object* value = dictionary->ValueAt(i);
        PropertyDetails details = dictionary->DetailsAt(i);
        SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                           Name::cast(k), value);
      }
    }
  }
}
1610
1611
1612void V8HeapExplorer::ExtractAccessorPairProperty(JSObject* js_obj, int entry,
1613 Name* key,
1614 Object* callback_obj,
1615 int field_offset) {
1616 if (!callback_obj->IsAccessorPair()) return;
1617 AccessorPair* accessors = AccessorPair::cast(callback_obj);
1618 SetPropertyReference(js_obj, entry, key, accessors, NULL, field_offset);
1619 Object* getter = accessors->getter();
1620 if (!getter->IsOddball()) {
1621 SetPropertyReference(js_obj, entry, key, getter, "get %s");
1622 }
1623 Object* setter = accessors->setter();
1624 if (!setter->IsOddball()) {
1625 SetPropertyReference(js_obj, entry, key, setter, "set %s");
1626 }
1627}
1628
1629
1630void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01001631 Isolate* isolate = js_obj->GetIsolate();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001632 if (js_obj->HasFastObjectElements()) {
1633 FixedArray* elements = FixedArray::cast(js_obj->elements());
1634 int length = js_obj->IsJSArray() ?
1635 Smi::cast(JSArray::cast(js_obj)->length())->value() :
1636 elements->length();
1637 for (int i = 0; i < length; ++i) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01001638 if (!elements->get(i)->IsTheHole(isolate)) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001639 SetElementReference(js_obj, entry, i, elements->get(i));
1640 }
1641 }
1642 } else if (js_obj->HasDictionaryElements()) {
1643 SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1644 int length = dictionary->Capacity();
1645 for (int i = 0; i < length; ++i) {
1646 Object* k = dictionary->KeyAt(i);
Ben Murdoch61f157c2016-09-16 13:49:30 +01001647 if (dictionary->IsKey(isolate, k)) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001648 DCHECK(k->IsNumber());
1649 uint32_t index = static_cast<uint32_t>(k->Number());
1650 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1651 }
1652 }
1653 }
1654}
1655
1656
1657void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1658 int length = js_obj->GetInternalFieldCount();
1659 for (int i = 0; i < length; ++i) {
1660 Object* o = js_obj->GetInternalField(i);
1661 SetInternalReference(
1662 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1663 }
1664}
1665
1666
// Returns a display name for the object's constructor. Functions are
// uniformly labeled with the heap's closure string; other receivers ask
// JSReceiver::GetConstructorName. The handle scope is local: the result is
// dereferenced before returning, under DisallowHeapAllocation.
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Isolate* isolate = object->GetIsolate();
  if (object->IsJSFunction()) return isolate->heap()->closure_string();
  DisallowHeapAllocation no_gc;
  HandleScope scope(isolate);
  return *JSReceiver::GetConstructorName(handle(object, isolate));
}
1674
1675
1676HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1677 if (!obj->IsHeapObject()) return NULL;
1678 return filler_->FindOrAddEntry(obj, this);
1679}
1680
1681
// Collects heap roots in two passes — strong-only first, then all roots —
// so that a root appearing only in the second pass can be classified as
// weak. Synchronization tags are recorded alongside so each root can be
// attributed to its GC subroot group; builtin code objects are additionally
// tagged with their builtin names.
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  // Marks where in all_references_ the roots belonging to a given
  // VisitorSynchronization tag end (exclusive index).
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  explicit RootsReferencesExtractor(Heap* heap)
      : collecting_all_references_(false),
        previous_reference_count_(0),
        heap_(heap) {
  }

  // Appends visited roots to the list selected by the current mode.
  void VisitPointers(Object** start, Object** end) override {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  // Switches from the strong-only pass to the all-roots pass.
  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  // Merges the two passes: walks all_references_ in order, advancing a
  // cursor into strong_references_ in lock-step; a root missing from the
  // strong list is reported as weak. Builtin roots are tagged by name in
  // visitation order.
  void FillReferences(V8HeapExplorer* explorer) {
    DCHECK(strong_references_.length() <= all_references_.length());
    Builtins* builtins = heap_->isolate()->builtins();
    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
    while (all_index < all_references_.length()) {
      bool is_strong = strong_index < strong_references_.length()
          && strong_references_[strong_index] == all_references_[all_index];
      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                      !is_strong,
                                      all_references_[all_index]);
      if (reference_tags_[tags_index].tag ==
          VisitorSynchronization::kBuiltins) {
        DCHECK(all_references_[all_index]->IsCode());
        explorer->TagBuiltinCodeObject(
            Code::cast(all_references_[all_index]),
            builtins->name(builtin_index++));
      }
      ++all_index;
      if (is_strong) ++strong_index;
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  // Records a tag boundary whenever the all-roots pass has accumulated new
  // references since the last synchronization point.
  void Synchronize(VisitorSynchronization::SyncTag tag) override {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
  Heap* heap_;
};
1747
1748
1749bool V8HeapExplorer::IterateAndExtractReferences(
1750 SnapshotFiller* filler) {
1751 filler_ = filler;
1752
1753 // Create references to the synthetic roots.
1754 SetRootGcRootsReference();
1755 for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
1756 SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
1757 }
1758
1759 // Make sure builtin code objects get their builtin tags
1760 // first. Otherwise a particular JSFunction object could set
1761 // its custom name to a generic builtin.
1762 RootsReferencesExtractor extractor(heap_);
1763 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1764 extractor.SetCollectingAllReferences();
1765 heap_->IterateRoots(&extractor, VISIT_ALL);
1766 extractor.FillReferences(this);
1767
1768 // We have to do two passes as sometimes FixedArrays are used
1769 // to weakly hold their items, and it's impossible to distinguish
1770 // between these cases without processing the array owner first.
1771 bool interrupted =
1772 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
1773 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();
1774
1775 if (interrupted) {
1776 filler_ = NULL;
1777 return false;
1778 }
1779
1780 filler_ = NULL;
1781 return progress_->ProgressReport(true);
1782}
1783
1784
// Runs one full pass over the heap, applying |extractor| to every reachable
// object. Returns true if the embedder requested an abort via ProgressReport;
// even then the iterator is driven to completion, because filtering heap
// iteration must not be abandoned midway.
template<V8HeapExplorer::ExtractReferencesMethod extractor>
bool V8HeapExplorer::IterateAndExtractSinglePass() {
  // Now iterate the whole heap.
  bool interrupted = false;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (interrupted) continue;

    // marks_ is a bitmap of tagged fields already reported for the current
    // object, indexed by offset / kPointerSize; grow it if this object is
    // larger than anything seen so far.
    size_t max_pointer = obj->Size() / kPointerSize;
    if (max_pointer > marks_.size()) {
      // Clear the current bits.
      std::vector<bool>().swap(marks_);
      // Reallocate to right size.
      marks_.resize(max_pointer, false);
    }

    HeapEntry* heap_entry = GetEntry(obj);
    int entry = heap_entry->index();
    if ((this->*extractor)(entry, obj)) {
      SetInternalReference(obj, entry,
                           "map", obj->map(), HeapObject::kMapOffset);
      // Extract unvisited fields as hidden references and restore tags
      // of visited fields.
      IndexedReferencesExtractor refs_extractor(this, obj, entry);
      obj->Iterate(&refs_extractor);
    }

    if (!progress_->ProgressReport(false)) interrupted = true;
  }
  return interrupted;
}
1819
1820
1821bool V8HeapExplorer::IsEssentialObject(Object* object) {
1822 return object->IsHeapObject() && !object->IsOddball() &&
1823 object != heap_->empty_byte_array() &&
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001824 object != heap_->empty_fixed_array() &&
1825 object != heap_->empty_descriptor_array() &&
1826 object != heap_->fixed_array_map() && object != heap_->cell_map() &&
1827 object != heap_->global_property_cell_map() &&
1828 object != heap_->shared_function_info_map() &&
1829 object != heap_->free_space_map() &&
1830 object != heap_->one_pointer_filler_map() &&
1831 object != heap_->two_pointer_filler_map();
1832}
1833
1834
1835void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1836 int parent_entry,
1837 String* reference_name,
1838 Object* child_obj,
1839 int field_offset) {
1840 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1841 HeapEntry* child_entry = GetEntry(child_obj);
1842 if (child_entry != NULL) {
1843 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1844 parent_entry,
1845 names_->GetName(reference_name),
1846 child_entry);
1847 MarkVisitedField(parent_obj, field_offset);
1848 }
1849}
1850
1851
1852void V8HeapExplorer::MarkVisitedField(HeapObject* obj, int offset) {
1853 if (offset < 0) return;
1854 int index = offset / kPointerSize;
1855 DCHECK(!marks_[index]);
1856 marks_[index] = true;
1857}
1858
1859
1860void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1861 int parent_entry,
1862 const char* reference_name,
1863 Object* child_obj) {
1864 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1865 HeapEntry* child_entry = GetEntry(child_obj);
1866 if (child_entry != NULL) {
1867 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1868 parent_entry,
1869 reference_name,
1870 child_entry);
1871 }
1872}
1873
1874
1875void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1876 int parent_entry,
1877 int index,
1878 Object* child_obj) {
1879 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1880 HeapEntry* child_entry = GetEntry(child_obj);
1881 if (child_entry != NULL) {
1882 filler_->SetIndexedReference(HeapGraphEdge::kElement,
1883 parent_entry,
1884 index,
1885 child_entry);
1886 }
1887}
1888
1889
1890void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1891 int parent_entry,
1892 const char* reference_name,
1893 Object* child_obj,
1894 int field_offset) {
1895 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1896 HeapEntry* child_entry = GetEntry(child_obj);
1897 if (child_entry == NULL) return;
1898 if (IsEssentialObject(child_obj)) {
1899 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1900 parent_entry,
1901 reference_name,
1902 child_entry);
1903 }
1904 MarkVisitedField(parent_obj, field_offset);
1905}
1906
1907
1908void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1909 int parent_entry,
1910 int index,
1911 Object* child_obj,
1912 int field_offset) {
1913 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1914 HeapEntry* child_entry = GetEntry(child_obj);
1915 if (child_entry == NULL) return;
1916 if (IsEssentialObject(child_obj)) {
1917 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1918 parent_entry,
1919 names_->GetName(index),
1920 child_entry);
1921 }
1922 MarkVisitedField(parent_obj, field_offset);
1923}
1924
1925
1926void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1927 int parent_entry,
1928 int index,
1929 Object* child_obj) {
1930 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1931 HeapEntry* child_entry = GetEntry(child_obj);
1932 if (child_entry != NULL && IsEssentialObject(child_obj)) {
1933 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1934 parent_entry,
1935 index,
1936 child_entry);
1937 }
1938}
1939
1940
1941void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1942 int parent_entry,
1943 const char* reference_name,
1944 Object* child_obj,
1945 int field_offset) {
1946 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1947 HeapEntry* child_entry = GetEntry(child_obj);
1948 if (child_entry == NULL) return;
1949 if (IsEssentialObject(child_obj)) {
1950 filler_->SetNamedReference(HeapGraphEdge::kWeak,
1951 parent_entry,
1952 reference_name,
1953 child_entry);
1954 }
1955 MarkVisitedField(parent_obj, field_offset);
1956}
1957
1958
1959void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1960 int parent_entry,
1961 int index,
1962 Object* child_obj,
1963 int field_offset) {
1964 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1965 HeapEntry* child_entry = GetEntry(child_obj);
1966 if (child_entry == NULL) return;
1967 if (IsEssentialObject(child_obj)) {
1968 filler_->SetNamedReference(HeapGraphEdge::kWeak,
1969 parent_entry,
1970 names_->GetFormatted("%d", index),
1971 child_entry);
1972 }
1973 MarkVisitedField(parent_obj, field_offset);
1974}
1975
1976
1977void V8HeapExplorer::SetDataOrAccessorPropertyReference(
1978 PropertyKind kind, JSObject* parent_obj, int parent_entry,
1979 Name* reference_name, Object* child_obj, const char* name_format_string,
1980 int field_offset) {
1981 if (kind == kAccessor) {
1982 ExtractAccessorPairProperty(parent_obj, parent_entry, reference_name,
1983 child_obj, field_offset);
1984 } else {
1985 SetPropertyReference(parent_obj, parent_entry, reference_name, child_obj,
1986 name_format_string, field_offset);
1987 }
1988}
1989
1990
// Emits a named property edge from |parent_obj| to |child_obj|.
// |name_format_string| (e.g. "get %s"), when given and the key is a string,
// decorates the displayed name.
void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          Name* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    // Symbols and non-empty string keys become user-visible "property"
    // edges; an empty string name would render as nothing, so downgrade it
    // to an internal edge instead.
    HeapGraphEdge::Type type =
        reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
            ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name = name_format_string != NULL && reference_name->IsString()
        ? names_->GetFormatted(
              name_format_string,
              String::cast(reference_name)->ToCString(
                  DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
          names_->GetName(reference_name);

    filler_->SetNamedReference(type,
                               parent_entry,
                               name,
                               child_entry);
    MarkVisitedField(parent_obj, field_offset);
  }
}
2017
2018
2019void V8HeapExplorer::SetRootGcRootsReference() {
2020 filler_->SetIndexedAutoIndexReference(
2021 HeapGraphEdge::kElement,
2022 snapshot_->root()->index(),
2023 snapshot_->gc_roots());
2024}
2025
2026
2027void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2028 HeapEntry* child_entry = GetEntry(child_obj);
2029 DCHECK(child_entry != NULL);
2030 filler_->SetNamedAutoIndexReference(
2031 HeapGraphEdge::kShortcut,
2032 snapshot_->root()->index(),
2033 child_entry);
2034}
2035
2036
2037void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2038 filler_->SetIndexedAutoIndexReference(
2039 HeapGraphEdge::kElement,
2040 snapshot_->gc_roots()->index(),
2041 snapshot_->gc_subroot(tag));
2042}
2043
2044
// Attaches a single GC root |child_obj| to the subroot node of section
// |tag|. Known strong roots get a named internal edge; unnamed roots get an
// auto-indexed edge that is weak or element-typed depending on |is_weak|.
// As a side effect, the first sighting of each native context's global
// object installs a shortcut from the snapshot root (skipping the debug
// global).
void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
    if (name != NULL) {
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          snapshot_->gc_subroot(tag)->index(),
          name,
          child_entry);
    } else {
      if (is_weak) {
        filler_->SetNamedAutoIndexReference(
            HeapGraphEdge::kWeak,
            snapshot_->gc_subroot(tag)->index(),
            child_entry);
      } else {
        filler_->SetIndexedAutoIndexReference(
            HeapGraphEdge::kElement,
            snapshot_->gc_subroot(tag)->index(),
            child_entry);
      }
    }

    // Add a shortcut to JS global object reference at snapshot root.
    if (child_obj->IsNativeContext()) {
      Context* context = Context::cast(child_obj);
      JSGlobalObject* global = context->global_object();
      if (global->IsJSGlobalObject()) {
        bool is_debug_object = false;
        is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
        // user_roots_ deduplicates: each global gets at most one shortcut.
        if (!is_debug_object && !user_roots_.Contains(global)) {
          user_roots_.Insert(global);
          SetUserGlobalReference(global);
        }
      }
    }
  }
}
2085
2086
// Returns the symbolic name of |object| if it is one of the heap's strong
// roots (root-list entries, struct maps, internalized strings, symbols),
// or NULL otherwise. The object->name map is built lazily on first use by
// expanding the heap's root-list macros.
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define STRING_NAME(name, str) NAME_ENTRY(name)
    INTERNALIZED_STRING_LIST(STRING_NAME)
#undef STRING_NAME
#define SYMBOL_NAME(name) NAME_ENTRY(name)
    PRIVATE_SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#define SYMBOL_NAME(name, description) NAME_ENTRY(name)
    PUBLIC_SYMBOL_LIST(SYMBOL_NAME)
    WELL_KNOWN_SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}
2111
2112
2113void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2114 if (IsEssentialObject(obj)) {
2115 HeapEntry* entry = GetEntry(obj);
2116 if (entry->name()[0] == '\0') {
2117 entry->set_name(tag);
2118 }
2119 }
2120}
2121
2122
2123void V8HeapExplorer::MarkAsWeakContainer(Object* object) {
2124 if (IsEssentialObject(object) && object->IsFixedArray()) {
2125 weak_containers_.Insert(object);
2126 }
2127}
2128
2129
2130class GlobalObjectsEnumerator : public ObjectVisitor {
2131 public:
2132 void VisitPointers(Object** start, Object** end) override {
2133 for (Object** p = start; p < end; p++) {
2134 if ((*p)->IsNativeContext()) {
2135 Context* context = Context::cast(*p);
2136 JSObject* proxy = context->global_proxy();
2137 if (proxy->IsJSGlobalProxy()) {
2138 Object* global = proxy->map()->prototype();
2139 if (global->IsJSGlobalObject()) {
2140 objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2141 }
2142 }
2143 }
2144 }
2145 }
2146 int count() { return objects_.length(); }
2147 Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2148
2149 private:
2150 List<Handle<JSGlobalObject> > objects_;
2151};
2152
2153
// Modifies heap. Must not be run during heap traversal.
// Resolves a display URL for every JS global object via the embedder's name
// resolver and records it in objects_tags_. Name resolution may allocate
// (it calls back into the embedder), so it is done first; the tag map is
// then filled under DisallowHeapAllocation so the raw object pointers used
// as keys cannot move.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = heap_->isolate();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      HandleScope scope(isolate);
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }

  DisallowHeapAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}
2179
2180
// Visitor over global handles with embedder class ids: forwards each such
// handle to NativeObjectsExplorer::VisitSubtreeWrapper. Plain pointer
// visitation is unexpected here (only IterateAllRootsWithClassIds drives
// this visitor), hence UNREACHABLE.
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  ~GlobalHandlesExtractor() override {}
  void VisitPointers(Object** start, Object** end) override { UNREACHABLE(); }
  void VisitEmbedderReference(Object** p, uint16_t class_id) override {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;
};
2193
2194
// HeapEntriesAllocator that creates snapshot entries of a fixed type
// (kNative or kSynthetic) from embedder-provided RetainedObjectInfo
// pointers.
class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
      : snapshot_(snapshot),
        names_(snapshot_->profiler()->names()),
        heap_object_map_(snapshot_->profiler()->heap_object_map()),
        entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapObjectsMap* heap_object_map_;
  HeapEntry::Type entries_type_;  // Type stamped on every entry we allocate.
};
2212
2213
// Creates a snapshot entry for an embedder RetainedObjectInfo. The label is
// suffixed with the element count when the embedder reports one; -1 means
// "unknown" for both the element count and the size (unknown size becomes 0).
HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
  intptr_t elements = info->GetElementCount();
  intptr_t size = info->GetSizeInBytes();
  const char* name = elements != -1
      ? names_->GetFormatted("%s / %" V8PRIdPTR " entries",
                             info->GetLabel(), elements)
      : names_->GetCopy(info->GetLabel());
  return snapshot_->AddEntry(
      entries_type_,
      name,
      heap_object_map_->GenerateId(info),
      size != -1 ? static_cast<int>(size) : 0,
      0);
}
2229
2230
// Builds the explorer for embedder (native) objects. Two allocators are
// created: one for synthetic group nodes and one for native object nodes;
// both are owned by this object and released in the destructor.
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
2246
2247
2248NativeObjectsExplorer::~NativeObjectsExplorer() {
Ben Murdoch61f157c2016-09-16 13:49:30 +01002249 for (base::HashMap::Entry* p = objects_by_info_.Start(); p != NULL;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002250 p = objects_by_info_.Next(p)) {
2251 v8::RetainedObjectInfo* info =
2252 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2253 info->Dispose();
2254 List<HeapObject*>* objects =
2255 reinterpret_cast<List<HeapObject*>* >(p->value);
2256 delete objects;
2257 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01002258 for (base::HashMap::Entry* p = native_groups_.Start(); p != NULL;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002259 p = native_groups_.Next(p)) {
2260 v8::RetainedObjectInfo* info =
2261 reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2262 info->Dispose();
2263 }
2264 delete synthetic_entries_allocator_;
2265 delete native_entries_allocator_;
2266}
2267
2268
2269int NativeObjectsExplorer::EstimateObjectsCount() {
2270 FillRetainedObjects();
2271 return objects_by_info_.occupancy();
2272}
2273
2274
// Collects the embedder's retained-object information, once per snapshot
// (guarded by embedder_queried_). Firing the mark-sweep GC prologue
// callbacks with kGCCallbackFlagConstructRetainedObjectInfos prompts the
// embedder to populate its ObjectGroups with RetainedObjectInfo, which we
// then harvest; objects without a group but with a class id are collected
// via GlobalHandlesExtractor.
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = isolate_;
  const GCType major_gc_type = kGCTypeMarkSweepCompact;
  // Record objects that are joined into ObjectGroups.
  isolate->heap()->CallGCPrologueCallbacks(
      major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info == NULL) continue;
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
    for (size_t j = 0; j < group->length; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects[j]);
      list->Add(obj);
      in_groups_.Insert(obj);
    }
    group->info = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
  // Record objects that are not in ObjectGroups, but have class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}
2301
2302
// Converts the embedder's implicit reference groups into "native" internal
// edges: for each group, every child object gets an edge from the group's
// parent. Entries are created on demand through the native allocator. The
// groups are consumed (removed) afterwards.
void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = isolate_;
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    DCHECK(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children;
    for (size_t j = 0; j < group->length; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          parent_entry,
          "native",
          child_entry);
    }
  }
  isolate->global_handles()->RemoveImplicitRefGroups();
}
2327
2328List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2329 v8::RetainedObjectInfo* info) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01002330 base::HashMap::Entry* entry =
2331 objects_by_info_.LookupOrInsert(info, InfoHash(info));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002332 if (entry->value != NULL) {
2333 info->Dispose();
2334 } else {
2335 entry->value = new List<HeapObject*>(4);
2336 }
2337 return reinterpret_cast<List<HeapObject*>* >(entry->value);
2338}
2339
2340
2341bool NativeObjectsExplorer::IterateAndExtractReferences(
2342 SnapshotFiller* filler) {
2343 filler_ = filler;
2344 FillRetainedObjects();
2345 FillImplicitReferences();
2346 if (EstimateObjectsCount() > 0) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01002347 for (base::HashMap::Entry* p = objects_by_info_.Start(); p != NULL;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002348 p = objects_by_info_.Next(p)) {
2349 v8::RetainedObjectInfo* info =
2350 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2351 SetNativeRootReference(info);
2352 List<HeapObject*>* objects =
2353 reinterpret_cast<List<HeapObject*>* >(p->value);
2354 for (int i = 0; i < objects->length(); ++i) {
2355 SetWrapperNativeReferences(objects->at(i), info);
2356 }
2357 }
2358 SetRootNativeRootsReference();
2359 }
2360 filler_ = NULL;
2361 return true;
2362}
2363
2364
// Synthetic RetainedObjectInfo representing a named native group node.
// Note: Dispose() deletes the object itself, so instances must be
// heap-allocated and never disposed twice (guarded by |disposed_|).
class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  // Equivalence is by label content; the hash (label pointer) is only a
  // fast pre-filter.
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;
};
2390
2391
2392NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2393 const char* label) {
2394 const char* label_copy = names_->GetCopy(label);
2395 uint32_t hash = StringHasher::HashSequentialString(
2396 label_copy,
2397 static_cast<int>(strlen(label_copy)),
2398 isolate_->heap()->HashSeed());
Ben Murdoch61f157c2016-09-16 13:49:30 +01002399 base::HashMap::Entry* entry =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002400 native_groups_.LookupOrInsert(const_cast<char*>(label_copy), hash);
2401 if (entry->value == NULL) {
2402 entry->value = new NativeGroupRetainedObjectInfo(label);
2403 }
2404 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2405}
2406
2407
// Links a native object entry under its synthetic group node with an
// auto-indexed internal edge.
void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  DCHECK(child_entry != NULL);
  NativeGroupRetainedObjectInfo* group_info =
      FindOrAddGroupInfo(info->GetGroupLabel());
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
  // |FindOrAddEntry| can move and resize the entries backing store. Reload
  // potentially-stale pointer.
  child_entry = filler_->FindEntry(info);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kInternal,
      group_entry->index(),
      child_entry);
}
2425
2426
2427void NativeObjectsExplorer::SetWrapperNativeReferences(
2428 HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2429 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2430 DCHECK(wrapper_entry != NULL);
2431 HeapEntry* info_entry =
2432 filler_->FindOrAddEntry(info, native_entries_allocator_);
2433 DCHECK(info_entry != NULL);
2434 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2435 wrapper_entry->index(),
2436 "native",
2437 info_entry);
2438 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2439 info_entry->index(),
2440 wrapper_entry);
2441}
2442
2443
2444void NativeObjectsExplorer::SetRootNativeRootsReference() {
Ben Murdoch61f157c2016-09-16 13:49:30 +01002445 for (base::HashMap::Entry* entry = native_groups_.Start(); entry;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002446 entry = native_groups_.Next(entry)) {
2447 NativeGroupRetainedObjectInfo* group_info =
2448 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2449 HeapEntry* group_entry =
2450 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2451 DCHECK(group_entry != NULL);
2452 filler_->SetIndexedAutoIndexReference(
2453 HeapGraphEdge::kElement,
2454 snapshot_->root()->index(),
2455 group_entry);
2456 }
2457}
2458
2459
2460void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2461 if (in_groups_.Contains(*p)) return;
2462 Isolate* isolate = isolate_;
2463 v8::RetainedObjectInfo* info =
2464 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2465 if (info == NULL) return;
2466 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2467}
2468
2469
// Wires the generator to its two explorers (V8 heap and embedder/DOM),
// passing itself as the progress-reporting interface for both.
HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}
2481
2482
// Drives the whole snapshot: tags globals (may allocate), compacts the heap,
// then fills references in two passes. Returns false if the embedder aborted
// through the ActivityControl progress callbacks.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  heap_->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  heap_->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = heap_;
  if (FLAG_verify_heap) {
    debug_heap->Verify();
  }
#endif

  // One iteration for progress estimation per extraction pass.
  SetProgressTotal(2);  // 2 passes.

  // Re-verify: SetProgressTotal iterates the heap itself.
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    debug_heap->Verify();
  }
#endif

  snapshot_->AddSyntheticRootEntries();

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  // Force a final 100% progress report.
  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
2523
2524
// Advances the progress counter by one processed heap object.
void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}
2528
2529
2530bool HeapSnapshotGenerator::ProgressReport(bool force) {
2531 const int kProgressReportGranularity = 10000;
2532 if (control_ != NULL
2533 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2534 return
2535 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2536 v8::ActivityControl::kContinue;
2537 }
2538 return true;
2539}
2540
2541
2542void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2543 if (control_ == NULL) return;
2544 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2545 progress_total_ = iterations_count * (
2546 v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2547 dom_explorer_.EstimateObjectsCount());
2548 progress_counter_ = 0;
2549}
2550
2551
2552bool HeapSnapshotGenerator::FillReferences() {
2553 SnapshotFiller filler(snapshot_, &entries_);
2554 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2555 && dom_explorer_.IterateAndExtractReferences(&filler);
2556}
2557
2558
// Maximum number of decimal characters (including a leading '-' for the
// signed case) needed to print an integer of the given byte width.
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;    // strlen("-2147483648")
  static const int kUnsigned = 10;  // strlen("4294967295")
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;    // strlen("-9223372036854775808")
  static const int kUnsigned = 20;  // strlen("18446744073709551615")
};
2568
2569
// Buffers serializer output into fixed-size chunks and forwards full chunks
// to the embedder's v8::OutputStream. Once the stream reports kAbort, all
// further writes are silently dropped (check aborted()).
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    DCHECK(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    DCHECK(c != '\0');
    DCHECK(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  // Copies the first |n| bytes of |s|, splitting across chunk boundaries
  // as needed.
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    DCHECK(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      int s_chunk_size =
          Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      DCHECK(s_chunk_size > 0);
      MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  // Flushes any partial chunk and signals end-of-stream to the embedder.
  void Finalize() {
    if (aborted_) return;
    DCHECK(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  // Prints |n| directly into the chunk when the worst-case digit count
  // fits; otherwise formats into a stack buffer and appends that.
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      int result = SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      DCHECK(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = SNPrintF(buffer, format, n);
      USE(result);
      DCHECK(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    DCHECK(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};
2653
2654
// Number of fields serialized per edge: type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// Number of fields serialized per node:
// type, name, id, self_size, edge_count, trace_node_id.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2659
// Serializes the snapshot as JSON into |stream|. The allocation tracker,
// when active, is given a chance to flatten its data first. The writer is
// owned only for the duration of this call.
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    allocation_tracker->PrepareForSerialization();
  }
  DCHECK(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);
  SerializeImpl();
  delete writer_;
  writer_ = NULL;
}
2671
2672
2673void HeapSnapshotJSONSerializer::SerializeImpl() {
2674 DCHECK(0 == snapshot_->root()->index());
2675 writer_->AddCharacter('{');
2676 writer_->AddString("\"snapshot\":{");
2677 SerializeSnapshot();
2678 if (writer_->aborted()) return;
2679 writer_->AddString("},\n");
2680 writer_->AddString("\"nodes\":[");
2681 SerializeNodes();
2682 if (writer_->aborted()) return;
2683 writer_->AddString("],\n");
2684 writer_->AddString("\"edges\":[");
2685 SerializeEdges();
2686 if (writer_->aborted()) return;
2687 writer_->AddString("],\n");
2688
2689 writer_->AddString("\"trace_function_infos\":[");
2690 SerializeTraceNodeInfos();
2691 if (writer_->aborted()) return;
2692 writer_->AddString("],\n");
2693 writer_->AddString("\"trace_tree\":[");
2694 SerializeTraceTree();
2695 if (writer_->aborted()) return;
2696 writer_->AddString("],\n");
2697
2698 writer_->AddString("\"samples\":[");
2699 SerializeSamples();
2700 if (writer_->aborted()) return;
2701 writer_->AddString("],\n");
2702
2703 writer_->AddString("\"strings\":[");
2704 SerializeStrings();
2705 if (writer_->aborted()) return;
2706 writer_->AddCharacter(']');
2707 writer_->AddCharacter('}');
2708 writer_->Finalize();
2709}
2710
2711
2712int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01002713 base::HashMap::Entry* cache_entry =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002714 strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
2715 if (cache_entry->value == NULL) {
2716 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2717 }
2718 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2719}
2720
2721
namespace {

// Maps a byte width to the unsigned integer type of that width.  Used by
// utoa() below to reinterpret a (possibly signed) value as unsigned.
template<size_t size> struct ToUnsigned;

template<> struct ToUnsigned<4> {
  typedef uint32_t Type;
};

template<> struct ToUnsigned<8> {
  typedef uint64_t Type;
};

}  // namespace
2735
2736
2737template<typename T>
2738static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2739 STATIC_ASSERT(static_cast<T>(-1) > 0); // Check that T is unsigned
2740 int number_of_digits = 0;
2741 T t = value;
2742 do {
2743 ++number_of_digits;
2744 } while (t /= 10);
2745
2746 buffer_pos += number_of_digits;
2747 int result = buffer_pos;
2748 do {
2749 int last_digit = static_cast<int>(value % 10);
2750 buffer[--buffer_pos] = '0' + last_digit;
2751 value /= 10;
2752 } while (value);
2753 return result;
2754}
2755
2756
// Converts |value| to the unsigned type of the same width and renders it
// in decimal into |buffer| at |buffer_pos|; returns the new position
// past the last digit (no '\0' is appended).
template<typename T>
static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
  typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
  STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
  return utoa_impl(unsigned_value, buffer, buffer_pos);
}
2763
2764
2765void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2766 bool first_edge) {
2767 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2768 static const int kBufferSize =
2769 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2770 EmbeddedVector<char, kBufferSize> buffer;
2771 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2772 || edge->type() == HeapGraphEdge::kHidden
2773 ? edge->index() : GetStringId(edge->name());
2774 int buffer_pos = 0;
2775 if (!first_edge) {
2776 buffer[buffer_pos++] = ',';
2777 }
2778 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2779 buffer[buffer_pos++] = ',';
2780 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2781 buffer[buffer_pos++] = ',';
2782 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2783 buffer[buffer_pos++] = '\n';
2784 buffer[buffer_pos++] = '\0';
2785 writer_->AddString(buffer.start());
2786}
2787
2788
2789void HeapSnapshotJSONSerializer::SerializeEdges() {
2790 List<HeapGraphEdge*>& edges = snapshot_->children();
2791 for (int i = 0; i < edges.length(); ++i) {
2792 DCHECK(i == 0 ||
2793 edges[i - 1]->from()->index() <= edges[i]->from()->index());
2794 SerializeEdge(edges[i], i == 0);
2795 if (writer_->aborted()) return;
2796 }
2797}
2798
2799
2800void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2801 // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0
2802 static const int kBufferSize =
2803 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2804 + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned // NOLINT
2805 + 6 + 1 + 1;
2806 EmbeddedVector<char, kBufferSize> buffer;
2807 int buffer_pos = 0;
2808 if (entry_index(entry) != 0) {
2809 buffer[buffer_pos++] = ',';
2810 }
2811 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2812 buffer[buffer_pos++] = ',';
2813 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2814 buffer[buffer_pos++] = ',';
2815 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2816 buffer[buffer_pos++] = ',';
2817 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2818 buffer[buffer_pos++] = ',';
2819 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2820 buffer[buffer_pos++] = ',';
2821 buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2822 buffer[buffer_pos++] = '\n';
2823 buffer[buffer_pos++] = '\0';
2824 writer_->AddString(buffer.start());
2825}
2826
2827
2828void HeapSnapshotJSONSerializer::SerializeNodes() {
2829 List<HeapEntry>& entries = snapshot_->entries();
2830 for (int i = 0; i < entries.length(); ++i) {
2831 SerializeNode(&entries[i]);
2832 if (writer_->aborted()) return;
2833 }
2834}
2835
2836
// Writes the "snapshot" section: the "meta" object describing the record
// layouts used by the other sections, followed by the node/edge/trace
// counts.  The field lists here must stay in sync with SerializeNode(),
// SerializeEdge(), SerializeTraceNodeInfos(), SerializeTraceNode() and
// SerializeSamples() above (see kNodeFieldsCount / kEdgeFieldsCount).
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
    JSON_S("node_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name") ","
        JSON_S("id") ","
        JSON_S("self_size") ","
        JSON_S("edge_count") ","
        JSON_S("trace_node_id")) ","
    JSON_S("node_types") ":" JSON_A(
        JSON_A(
            JSON_S("hidden") ","
            JSON_S("array") ","
            JSON_S("string") ","
            JSON_S("object") ","
            JSON_S("code") ","
            JSON_S("closure") ","
            JSON_S("regexp") ","
            JSON_S("number") ","
            JSON_S("native") ","
            JSON_S("synthetic") ","
            JSON_S("concatenated string") ","
            JSON_S("sliced string")) ","
        JSON_S("string") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number")) ","
    JSON_S("edge_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name_or_index") ","
        JSON_S("to_node")) ","
    JSON_S("edge_types") ":" JSON_A(
        JSON_A(
            JSON_S("context") ","
            JSON_S("element") ","
            JSON_S("property") ","
            JSON_S("internal") ","
            JSON_S("hidden") ","
            JSON_S("shortcut") ","
            JSON_S("weak")) ","
        JSON_S("string_or_number") ","
        JSON_S("node")) ","
    JSON_S("trace_function_info_fields") ":" JSON_A(
        JSON_S("function_id") ","
        JSON_S("name") ","
        JSON_S("script_name") ","
        JSON_S("script_id") ","
        JSON_S("line") ","
        JSON_S("column")) ","
    JSON_S("trace_node_fields") ":" JSON_A(
        JSON_S("id") ","
        JSON_S("function_info_index") ","
        JSON_S("count") ","
        JSON_S("size") ","
        JSON_S("children")) ","
    JSON_S("sample_fields") ":" JSON_A(
        JSON_S("timestamp_us") ","
        JSON_S("last_assigned_id"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
  writer_->AddString(",\"trace_function_count\":");
  // Zero when allocation tracking was never enabled for this snapshot.
  uint32_t count = 0;
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (tracker) {
    count = tracker->function_info_list().length();
  }
  writer_->AddNumber(count);
}
2918
2919
2920static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2921 static const char hex_chars[] = "0123456789ABCDEF";
2922 w->AddString("\\u");
2923 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2924 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2925 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2926 w->AddCharacter(hex_chars[u & 0xf]);
2927}
2928
2929
2930void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2931 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2932 if (!tracker) return;
2933 AllocationTraceTree* traces = tracker->trace_tree();
2934 SerializeTraceNode(traces->root());
2935}
2936
2937
2938void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2939 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2940 const int kBufferSize =
2941 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2942 + 4 + 1 + 1;
2943 EmbeddedVector<char, kBufferSize> buffer;
2944 int buffer_pos = 0;
2945 buffer_pos = utoa(node->id(), buffer, buffer_pos);
2946 buffer[buffer_pos++] = ',';
2947 buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
2948 buffer[buffer_pos++] = ',';
2949 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
2950 buffer[buffer_pos++] = ',';
2951 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
2952 buffer[buffer_pos++] = ',';
2953 buffer[buffer_pos++] = '[';
2954 buffer[buffer_pos++] = '\0';
2955 writer_->AddString(buffer.start());
2956
2957 Vector<AllocationTraceNode*> children = node->children();
2958 for (int i = 0; i < children.length(); i++) {
2959 if (i > 0) {
2960 writer_->AddCharacter(',');
2961 }
2962 SerializeTraceNode(children[i]);
2963 }
2964 writer_->AddCharacter(']');
2965}
2966
2967
2968// 0-based position is converted to 1-based during the serialization.
2969static int SerializePosition(int position, const Vector<char>& buffer,
2970 int buffer_pos) {
2971 if (position == -1) {
2972 buffer[buffer_pos++] = '0';
2973 } else {
2974 DCHECK(position >= 0);
2975 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
2976 }
2977 return buffer_pos;
2978}
2979
2980
2981void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
2982 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2983 if (!tracker) return;
2984 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
2985 const int kBufferSize =
2986 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2987 + 6 + 1 + 1;
2988 EmbeddedVector<char, kBufferSize> buffer;
2989 const List<AllocationTracker::FunctionInfo*>& list =
2990 tracker->function_info_list();
2991 for (int i = 0; i < list.length(); i++) {
2992 AllocationTracker::FunctionInfo* info = list[i];
2993 int buffer_pos = 0;
2994 if (i > 0) {
2995 buffer[buffer_pos++] = ',';
2996 }
2997 buffer_pos = utoa(info->function_id, buffer, buffer_pos);
2998 buffer[buffer_pos++] = ',';
2999 buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
3000 buffer[buffer_pos++] = ',';
3001 buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3002 buffer[buffer_pos++] = ',';
3003 // The cast is safe because script id is a non-negative Smi.
3004 buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3005 buffer_pos);
3006 buffer[buffer_pos++] = ',';
3007 buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3008 buffer[buffer_pos++] = ',';
3009 buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3010 buffer[buffer_pos++] = '\n';
3011 buffer[buffer_pos++] = '\0';
3012 writer_->AddString(buffer.start());
3013 }
3014}
3015
3016
3017void HeapSnapshotJSONSerializer::SerializeSamples() {
3018 const List<HeapObjectsMap::TimeInterval>& samples =
3019 snapshot_->profiler()->heap_object_map()->samples();
3020 if (samples.is_empty()) return;
3021 base::TimeTicks start_time = samples[0].timestamp;
3022 // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0
3023 const int kBufferSize = MaxDecimalDigitsIn<sizeof(
3024 base::TimeDelta().InMicroseconds())>::kUnsigned +
3025 MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned +
3026 2 + 1 + 1;
3027 EmbeddedVector<char, kBufferSize> buffer;
3028 for (int i = 0; i < samples.length(); i++) {
3029 HeapObjectsMap::TimeInterval& sample = samples[i];
3030 int buffer_pos = 0;
3031 if (i > 0) {
3032 buffer[buffer_pos++] = ',';
3033 }
3034 base::TimeDelta time_delta = sample.timestamp - start_time;
3035 buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos);
3036 buffer[buffer_pos++] = ',';
3037 buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos);
3038 buffer[buffer_pos++] = '\n';
3039 buffer[buffer_pos++] = '\0';
3040 writer_->AddString(buffer.start());
3041 }
3042}
3043
3044
// Writes |s| as a quoted JSON string literal: standard escapes for
// control characters, '"' and '\\'; other non-printable bytes and
// multi-byte UTF-8 sequences become "\uXXXX" escapes.  A '\n' is emitted
// before the opening quote so each string starts on its own line.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        // Printable ASCII is emitted verbatim.
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          // Take up to 4 bytes (the longest UTF-8 sequence) bounded by
          // the terminator, decode, and skip the consumed bytes.
          size_t length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            DCHECK(cursor != 0);
            s += cursor - 1;
          } else {
            // Undecodable sequence: emit a placeholder and move on.
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
3093
3094
3095void HeapSnapshotJSONSerializer::SerializeStrings() {
3096 ScopedVector<const unsigned char*> sorted_strings(
3097 strings_.occupancy() + 1);
Ben Murdoch61f157c2016-09-16 13:49:30 +01003098 for (base::HashMap::Entry* entry = strings_.Start(); entry != NULL;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003099 entry = strings_.Next(entry)) {
3100 int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3101 sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3102 }
3103 writer_->AddString("\"<dummy>\"");
3104 for (int i = 1; i < sorted_strings.length(); ++i) {
3105 writer_->AddCharacter(',');
3106 SerializeString(sorted_strings[i]);
3107 if (writer_->aborted()) return;
3108 }
3109}
3110
3111
3112} // namespace internal
3113} // namespace v8