// Copyright 2009-2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28#ifndef V8_HEAP_PROFILER_H_
29#define V8_HEAP_PROFILER_H_
30
Steve Block44f0eee2011-05-26 01:26:41 +010031#include "isolate.h"
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010032#include "zone-inl.h"
Steve Block6ded16b2010-05-10 14:33:55 +010033
Steve Blocka7e24c12009-10-30 11:49:00 +000034namespace v8 {
35namespace internal {
36
#ifdef ENABLE_LOGGING_AND_PROFILING

class HeapSnapshot;
class HeapSnapshotsCollection;

// Forwards a profiler event, e.g. HEAP_PROFILE(heap, ObjectMoveEvent(a, b)),
// to the isolate's HeapProfiler — but only if a profiler exists and is
// currently tracking objects.  The do/while(false) wrapper makes the macro
// behave as a single statement (safe in unbraced if/else).
#define HEAP_PROFILE(heap, call)                                             \
  do {                                                                       \
    v8::internal::HeapProfiler* profiler = heap->isolate()->heap_profiler(); \
    if (profiler != NULL && profiler->is_profiling()) {                      \
      profiler->call;                                                        \
    }                                                                        \
  } while (false)
#else
// Profiling compiled out: the macro expands to a no-op expression.
#define HEAP_PROFILE(heap, call) ((void) 0)
#endif  // ENABLE_LOGGING_AND_PROFILING
// The HeapProfiler writes data to the log files, which can be postprocessed
// to generate .hp files for use by the GHC/Valgrind tool hp2ps.
class HeapProfiler {
 public:
  // Static lifecycle hooks; presumably create/destroy the per-isolate
  // profiler instance — TODO confirm against heap-profiler.cc.
  static void Setup();
  static void TearDown();

#ifdef ENABLE_LOGGING_AND_PROFILING
  // Takes a named heap snapshot of the given type.  'control' lets the
  // embedder observe/abort long-running snapshot generation.
  static HeapSnapshot* TakeSnapshot(const char* name,
                                    int type,
                                    v8::ActivityControl* control);
  static HeapSnapshot* TakeSnapshot(String* name,
                                    int type,
                                    v8::ActivityControl* control);
  static int GetSnapshotsCount();
  static HeapSnapshot* GetSnapshot(int index);
  static HeapSnapshot* FindSnapshot(unsigned uid);
  static void DeleteAllSnapshots();

  // Notifies the profiler that the GC moved an object, so recorded
  // addresses can be kept up to date.
  void ObjectMoveEvent(Address from, Address to);

  // Registers a callback that provides retained-object info for wrapper
  // objects of the given embedder-defined class id.
  void DefineWrapperClass(
      uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback);

  v8::RetainedObjectInfo* ExecuteWrapperClassCallback(uint16_t class_id,
                                                      Object** wrapper);
  // True while the snapshots collection is tracking object moves;
  // gates the HEAP_PROFILE macro above.
  INLINE(bool is_profiling()) {
    return snapshots_->is_tracking_objects();
  }

  // Obsolete interface.
  // Write a single heap sample to the log file.
  static void WriteSample();

 private:
  HeapProfiler();
  ~HeapProfiler();
  HeapSnapshot* TakeSnapshotImpl(const char* name,
                                 int type,
                                 v8::ActivityControl* control);
  HeapSnapshot* TakeSnapshotImpl(String* name,
                                 int type,
                                 v8::ActivityControl* control);
  void ResetSnapshots();

  HeapSnapshotsCollection* snapshots_;
  unsigned next_snapshot_uid_;  // uid handed to the next snapshot taken
  List<v8::HeapProfiler::WrapperInfoCallback> wrapper_callbacks_;

#endif  // ENABLE_LOGGING_AND_PROFILING
};
104
105
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100106#ifdef ENABLE_LOGGING_AND_PROFILING
107
Steve Blocka7e24c12009-10-30 11:49:00 +0000108// JSObjectsCluster describes a group of JS objects that are
109// considered equivalent in terms of a particular profile.
110class JSObjectsCluster BASE_EMBEDDED {
111 public:
112 // These special cases are used in retainer profile.
113 enum SpecialCase {
114 ROOTS = 1,
115 GLOBAL_PROPERTY = 2,
Steve Blockd0582a62009-12-15 09:54:21 +0000116 CODE = 3,
117 SELF = 100 // This case is used in ClustersCoarser only.
Steve Blocka7e24c12009-10-30 11:49:00 +0000118 };
119
120 JSObjectsCluster() : constructor_(NULL), instance_(NULL) {}
121 explicit JSObjectsCluster(String* constructor)
122 : constructor_(constructor), instance_(NULL) {}
123 explicit JSObjectsCluster(SpecialCase special)
124 : constructor_(FromSpecialCase(special)), instance_(NULL) {}
125 JSObjectsCluster(String* constructor, Object* instance)
126 : constructor_(constructor), instance_(instance) {}
127
128 static int CompareConstructors(const JSObjectsCluster& a,
129 const JSObjectsCluster& b) {
130 // Strings are unique, so it is sufficient to compare their pointers.
131 return a.constructor_ == b.constructor_ ? 0
132 : (a.constructor_ < b.constructor_ ? -1 : 1);
133 }
134 static int Compare(const JSObjectsCluster& a, const JSObjectsCluster& b) {
135 // Strings are unique, so it is sufficient to compare their pointers.
136 const int cons_cmp = CompareConstructors(a, b);
137 return cons_cmp == 0 ?
138 (a.instance_ == b.instance_ ? 0 : (a.instance_ < b.instance_ ? -1 : 1))
139 : cons_cmp;
140 }
141 static int Compare(const JSObjectsCluster* a, const JSObjectsCluster* b) {
142 return Compare(*a, *b);
143 }
144
145 bool is_null() const { return constructor_ == NULL; }
146 bool can_be_coarsed() const { return instance_ != NULL; }
147 String* constructor() const { return constructor_; }
Steve Block791712a2010-08-27 10:21:07 +0100148 Object* instance() const { return instance_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000149
Steve Block791712a2010-08-27 10:21:07 +0100150 const char* GetSpecialCaseName() const;
Steve Blocka7e24c12009-10-30 11:49:00 +0000151 void Print(StringStream* accumulator) const;
152 // Allows null clusters to be printed.
153 void DebugPrint(StringStream* accumulator) const;
154
155 private:
156 static String* FromSpecialCase(SpecialCase special) {
157 // We use symbols that are illegal JS identifiers to identify special cases.
158 // Their actual value is irrelevant for us.
159 switch (special) {
Steve Block44f0eee2011-05-26 01:26:41 +0100160 case ROOTS: return HEAP->result_symbol();
161 case GLOBAL_PROPERTY: return HEAP->code_symbol();
162 case CODE: return HEAP->arguments_shadow_symbol();
163 case SELF: return HEAP->catch_var_symbol();
Steve Blocka7e24c12009-10-30 11:49:00 +0000164 default:
165 UNREACHABLE();
166 return NULL;
167 }
168 }
169
170 String* constructor_;
171 Object* instance_;
172};
173
174
// Splay-tree configuration mapping a cluster to its aggregate
// count/size statistics.
struct JSObjectsClusterTreeConfig {
  typedef JSObjectsCluster Key;
  typedef NumberAndSizeInfo Value;
  static const Key kNoKey;
  static const Value kNoValue;
  static int Compare(const Key& a, const Key& b) {
    return Key::Compare(a, b);
  }
};
typedef ZoneSplayTree<JSObjectsClusterTreeConfig> JSObjectsClusterTree;
185
186
// ConstructorHeapProfile is responsible for gathering and logging
// "constructor profile" of JS objects allocated on heap.
// It is run during garbage collection cycle, thus it doesn't need
// to use handles.
class ConstructorHeapProfile BASE_EMBEDDED {
 public:
  ConstructorHeapProfile();
  virtual ~ConstructorHeapProfile() {}
  // Folds obj into the per-constructor statistics tree.
  void CollectStats(HeapObject* obj);
  void PrintStats();

  // Applies callback to every (cluster, stats) pair gathered so far.
  template<class Callback>
  void ForEach(Callback* callback) { js_objects_info_tree_.ForEach(callback); }
  // Used by ZoneSplayTree::ForEach. Made virtual to allow overriding in tests.
  virtual void Call(const JSObjectsCluster& cluster,
                    const NumberAndSizeInfo& number_and_size);

 private:
  ZoneScope zscope_;  // keeps the zone-allocated tree alive for our lifetime
  JSObjectsClusterTree js_objects_info_tree_;
};
208
209
// JSObjectsRetainerTree is used to represent retainer graphs using
// adjacency list form:
//
//   Cluster -> (Cluster -> NumberAndSizeInfo)
//
// Subordinate splay trees are stored by pointer. They are zone-allocated,
// so it isn't needed to manage their lifetime.
//
struct JSObjectsRetainerTreeConfig {
  typedef JSObjectsCluster Key;
  typedef JSObjectsClusterTree* Value;
  static const Key kNoKey;
  static const Value kNoValue;
  static int Compare(const Key& a, const Key& b) {
    return Key::Compare(a, b);
  }
};
typedef ZoneSplayTree<JSObjectsRetainerTreeConfig> JSObjectsRetainerTree;
228
229
// ClustersCoarser groups clusters that have equivalent retainer references
// into equivalence classes, so the retainer profile can treat them as one.
class ClustersCoarser BASE_EMBEDDED {
 public:
  ClustersCoarser();

  // Processes a given retainer graph.
  void Process(JSObjectsRetainerTree* tree);

  // Returns an equivalent cluster (can be the cluster itself).
  // If the given cluster doesn't have an equivalent, returns null cluster.
  JSObjectsCluster GetCoarseEquivalent(const JSObjectsCluster& cluster);
  // Returns whether a cluster can be substituted with an equivalent and thus,
  // skipped in some cases.
  bool HasAnEquivalent(const JSObjectsCluster& cluster);

  // Used by JSObjectsRetainerTree::ForEach.
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size);

 private:
  // Stores a list of back references for a cluster.
  struct ClusterBackRefs {
    explicit ClusterBackRefs(const JSObjectsCluster& cluster_);
    ClusterBackRefs(const ClusterBackRefs& src);
    ClusterBackRefs& operator=(const ClusterBackRefs& src);

    static int Compare(const ClusterBackRefs& a, const ClusterBackRefs& b);
    void SortRefs() { refs.Sort(JSObjectsCluster::Compare); }
    static void SortRefsIterator(ClusterBackRefs* ref) { ref->SortRefs(); }

    JSObjectsCluster cluster;
    ZoneList<JSObjectsCluster> refs;
  };
  typedef ZoneList<ClusterBackRefs> SimilarityList;

  // A tree for storing a list of equivalents for a cluster.
  struct ClusterEqualityConfig {
    typedef JSObjectsCluster Key;
    typedef JSObjectsCluster Value;
    static const Key kNoKey;
    static const Value kNoValue;
    static int Compare(const Key& a, const Key& b) {
      return Key::Compare(a, b);
    }
  };
  typedef ZoneSplayTree<ClusterEqualityConfig> EqualityTree;

  static int ClusterBackRefsCmp(const ClusterBackRefs* a,
                                const ClusterBackRefs* b) {
    return ClusterBackRefs::Compare(*a, *b);
  }
  int DoProcess(JSObjectsRetainerTree* tree);
  int FillEqualityTree();

  static const int kInitialBackrefsListCapacity = 2;
  static const int kInitialSimilarityListCapacity = 2000;
  // Number of passes for finding equivalents. Limits the length of paths
  // that can be considered equivalent.
  static const int kMaxPassesCount = 10;

  ZoneScope zscope_;           // backs all zone allocations below
  SimilarityList sim_list_;
  EqualityTree eq_tree_;
  ClusterBackRefs* current_pair_;      // scratch state used during Call()
  JSObjectsRetainerTree* current_set_;
  const JSObjectsCluster* self_;
};
297
298
// RetainerHeapProfile is responsible for gathering and logging
// "retainer profile" of JS objects allocated on heap.
// It is run during garbage collection cycle, thus it doesn't need
// to use handles.
class RetainerTreeAggregator;

class RetainerHeapProfile BASE_EMBEDDED {
 public:
  // Output interface for DebugPrintStats; overridden in tests.
  class Printer {
   public:
    virtual ~Printer() {}
    virtual void PrintRetainers(const JSObjectsCluster& cluster,
                                const StringStream& retainers) = 0;
  };

  RetainerHeapProfile();
  ~RetainerHeapProfile();

  RetainerTreeAggregator* aggregator() { return aggregator_; }
  ClustersCoarser* coarser() { return &coarser_; }
  JSObjectsRetainerTree* retainers_tree() { return &retainers_tree_; }

  // Records obj's retainer edges into the retainers tree.
  void CollectStats(HeapObject* obj);
  void CoarseAndAggregate();
  void PrintStats();
  void DebugPrintStats(Printer* printer);
  void StoreReference(const JSObjectsCluster& cluster, HeapObject* ref);

 private:
  ZoneScope zscope_;
  JSObjectsRetainerTree retainers_tree_;
  ClustersCoarser coarser_;
  // NOTE(review): presumably owned (freed in the destructor) — confirm
  // against heap-profiler.cc.
  RetainerTreeAggregator* aggregator_;
};
333
334
// Bundles the per-type histogram, constructor profile and retainer profile
// that together form one aggregated snapshot of the heap.
class AggregatedHeapSnapshot {
 public:
  AggregatedHeapSnapshot();
  ~AggregatedHeapSnapshot();

  HistogramInfo* info() { return info_; }
  ConstructorHeapProfile* js_cons_profile() { return &js_cons_profile_; }
  RetainerHeapProfile* js_retainer_profile() { return &js_retainer_profile_; }

 private:
  HistogramInfo* info_;  // allocated/released by ctor/dtor — TODO confirm
  ConstructorHeapProfile js_cons_profile_;
  RetainerHeapProfile js_retainer_profile_;
};
349
350
class HeapEntriesMap;
class HeapEntriesAllocator;

// Fills an AggregatedHeapSnapshot by walking the heap, and can export the
// result into a regular HeapSnapshot.
class AggregatedHeapSnapshotGenerator {
 public:
  explicit AggregatedHeapSnapshotGenerator(AggregatedHeapSnapshot* snapshot);
  void GenerateSnapshot();
  void FillHeapSnapshot(HeapSnapshot* snapshot);

  // Pseudo instance type used to report all strings as one histogram entry;
  // placed just past the last real instance type.
  static const int kAllStringsType = LAST_TYPE + 1;

 private:
  void CalculateStringsStats();
  void CollectStats(HeapObject* obj);
  template<class Iterator>
  void IterateRetainers(
      HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map);

  AggregatedHeapSnapshot* agg_snapshot_;  // not owned
};
371
372
// Logs allocation sites ("producers") of JS objects when --log-producers
// is enabled.
class ProducerHeapProfile {
 public:
  void Setup();
  // Cheap inline fast path: only forwards to the logging slow path when
  // the flag is set.
  void RecordJSObjectAllocation(Object* obj) {
    if (FLAG_log_producers) DoRecordJSObjectAllocation(obj);
  }

 private:
  // Constructed only by Isolate (see friend declaration below).
  ProducerHeapProfile() : can_log_(false) { }

  void DoRecordJSObjectAllocation(Object* obj);
  Isolate* isolate_;  // presumably set during Setup() — TODO confirm
  bool can_log_;

  friend class Isolate;

  DISALLOW_COPY_AND_ASSIGN(ProducerHeapProfile);
};
391
Steve Blocka7e24c12009-10-30 11:49:00 +0000392#endif // ENABLE_LOGGING_AND_PROFILING
393
394} } // namespace v8::internal
395
396#endif // V8_HEAP_PROFILER_H_