// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_GC_TRACER_H_
#define V8_HEAP_GC_TRACER_H_

#include "src/base/platform/platform.h"
#include "src/globals.h"

namespace v8 {
namespace internal {

// A simple ring buffer class with maximum size known at compile time.
// The class only implements the functionality required in GCTracer.
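// Illustrative usage (not part of the original header): keep only the most
// recent samples and silently evict the oldest one on overflow.
//   RingBuffer<double, 10> recent_durations;
//   recent_durations.push_back(2.5);
//   if (!recent_durations.empty()) double last = *recent_durations.back();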
template <typename T, size_t MAX_SIZE>
class RingBuffer {
 public:
  class const_iterator {
   public:
    const_iterator() : index_(0), elements_(NULL) {}

    const_iterator(size_t index, const T* elements)
        : index_(index), elements_(elements) {}

    bool operator==(const const_iterator& rhs) const {
      return elements_ == rhs.elements_ && index_ == rhs.index_;
    }

    bool operator!=(const const_iterator& rhs) const {
      return elements_ != rhs.elements_ || index_ != rhs.index_;
    }

    operator const T*() const { return elements_ + index_; }

    const T* operator->() const { return elements_ + index_; }

    const T& operator*() const { return elements_[index_]; }

    const_iterator& operator++() {
      index_ = (index_ + 1) % (MAX_SIZE + 1);
      return *this;
    }

    const_iterator& operator--() {
      index_ = (index_ + MAX_SIZE) % (MAX_SIZE + 1);
      return *this;
    }

   private:
    size_t index_;
    const T* elements_;
  };

  RingBuffer() : begin_(0), end_(0) {}

  bool empty() const { return begin_ == end_; }
  size_t size() const {
    return (end_ - begin_ + MAX_SIZE + 1) % (MAX_SIZE + 1);
  }
  const_iterator begin() const { return const_iterator(begin_, elements_); }
  const_iterator end() const { return const_iterator(end_, elements_); }
  const_iterator back() const { return --end(); }
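  // Appends an element at the back; if the buffer is already full, the
  // oldest element (at the front) is overwritten.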
  void push_back(const T& element) {
    elements_[end_] = element;
    end_ = (end_ + 1) % (MAX_SIZE + 1);
    if (end_ == begin_) begin_ = (begin_ + 1) % (MAX_SIZE + 1);
  }
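  // Prepends an element at the front; if the buffer is already full, the
  // element at the back is dropped.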
  void push_front(const T& element) {
    begin_ = (begin_ + MAX_SIZE) % (MAX_SIZE + 1);
    if (begin_ == end_) end_ = (end_ + MAX_SIZE) % (MAX_SIZE + 1);
    elements_[begin_] = element;
  }

  void reset() {
    begin_ = 0;
    end_ = 0;
  }

 private:
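  // One extra slot so that a full buffer (size() == MAX_SIZE) can be
  // distinguished from an empty one (begin_ == end_).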
  T elements_[MAX_SIZE + 1];
  size_t begin_;
  size_t end_;

  DISALLOW_COPY_AND_ASSIGN(RingBuffer);
};


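// Selects whether scavenge speed is computed over all scavenged objects or
// only over the objects that survived the scavenge.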
enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };


// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.
// TODO(ernstm): Unit tests.
class GCTracer {
 public:
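  // Scoped (RAII) timer for a single GC phase: measures the time between
  // construction and destruction and attributes it to the given ScopeId of
  // the current event.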
  class Scope {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_CLEAR,
      MC_CLEAR_CODE_FLUSH,
      MC_CLEAR_DEPENDENT_CODE,
      MC_CLEAR_GLOBAL_HANDLES,
      MC_CLEAR_MAPS,
      MC_CLEAR_SLOTS_BUFFER,
      MC_CLEAR_STORE_BUFFER,
      MC_CLEAR_STRING_TABLE,
      MC_CLEAR_WEAK_CELLS,
      MC_CLEAR_WEAK_COLLECTIONS,
      MC_CLEAR_WEAK_LISTS,
      MC_EVACUATE,
      MC_EVACUATE_CANDIDATES,
      MC_EVACUATE_CLEAN_UP,
      MC_EVACUATE_NEW_SPACE,
      MC_EVACUATE_UPDATE_POINTERS,
      MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED,
      MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED,
      MC_EVACUATE_UPDATE_POINTERS_TO_NEW,
      MC_EVACUATE_UPDATE_POINTERS_WEAK,
      MC_FINISH,
      MC_INCREMENTAL_FINALIZE,
      MC_MARK,
      MC_MARK_FINISH_INCREMENTAL,
      MC_MARK_PREPARE_CODE_FLUSH,
      MC_MARK_ROOTS,
      MC_MARK_WEAK_CLOSURE,
      MC_SWEEP,
      MC_SWEEP_CODE,
      MC_SWEEP_MAP,
      MC_SWEEP_OLD,
      SCAVENGER_CODE_FLUSH_CANDIDATES,
      SCAVENGER_OBJECT_GROUPS,
      SCAVENGER_OLD_TO_NEW_POINTERS,
      SCAVENGER_ROOTS,
      SCAVENGER_SCAVENGE,
      SCAVENGER_SEMISPACE,
      SCAVENGER_WEAK,
      NUMBER_OF_SCOPES
    };

    Scope(GCTracer* tracer, ScopeId scope);
    ~Scope();

   private:
    GCTracer* tracer_;
    ScopeId scope_;
    double start_time_;

    DISALLOW_COPY_AND_ASSIGN(Scope);
  };


  class AllocationEvent {
   public:
    // Default constructor leaves the event uninitialized.
    AllocationEvent() {}

    AllocationEvent(double duration, size_t allocation_in_bytes);

    // Time spent in the mutator from the end of the last sample to the
    // beginning of the next sample.
    double duration_;

    // Memory allocated in the new space from the end of the last sample to
    // the beginning of the next sample.
    size_t allocation_in_bytes_;
  };


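  // Duration of a compaction pass and the number of live bytes compacted
  // during it.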
  class CompactionEvent {
   public:
    CompactionEvent() : duration(0), live_bytes_compacted(0) {}

    CompactionEvent(double duration, intptr_t live_bytes_compacted)
        : duration(duration), live_bytes_compacted(live_bytes_compacted) {}

    double duration;
    intptr_t live_bytes_compacted;
  };


  class ContextDisposalEvent {
   public:
    // Default constructor leaves the event uninitialized.
    ContextDisposalEvent() {}

    explicit ContextDisposalEvent(double time);

    // Time when context disposal event happened.
    double time_;
  };


  class SurvivalEvent {
   public:
    // Default constructor leaves the event uninitialized.
    SurvivalEvent() {}

    explicit SurvivalEvent(double survival_ratio);

    double promotion_ratio_;
  };


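  // Statistics collected for a single garbage collection; populated during
  // the Start()/Stop() cycle.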
  class Event {
   public:
    enum Type {
      SCAVENGER = 0,
      MARK_COMPACTOR = 1,
      INCREMENTAL_MARK_COMPACTOR = 2,
      START = 3
    };

    // Default constructor leaves the event uninitialized.
    Event() {}

    Event(Type type, const char* gc_reason, const char* collector_reason);

    // Returns a string describing the event type.
    const char* TypeName(bool short_name) const;

    // Type of event
    Type type;

    const char* gc_reason;
    const char* collector_reason;

    // Timestamp set in the constructor.
    double start_time;

    // Timestamp set in the destructor.
    double end_time;

    // Memory reduction flag set.
    bool reduce_memory;

    // Size of objects in heap set in constructor.
    intptr_t start_object_size;

    // Size of objects in heap set in destructor.
    intptr_t end_object_size;

    // Size of memory allocated from OS set in constructor.
    intptr_t start_memory_size;

    // Size of memory allocated from OS set in destructor.
    intptr_t end_memory_size;

    // Total amount of space either wasted or contained in one of free lists
    // before the current GC.
    intptr_t start_holes_size;

    // Total amount of space either wasted or contained in one of free lists
    // after the current GC.
    intptr_t end_holes_size;

    // Size of new space objects in constructor.
    intptr_t new_space_object_size;
    // Size of survived new space objects in destructor.
    intptr_t survived_new_space_object_size;

    // Number of incremental marking steps since creation of tracer.
    // (value at start of event)
    int cumulative_incremental_marking_steps;

    // Incremental marking steps since
    // - last event for SCAVENGER events
    // - last INCREMENTAL_MARK_COMPACTOR event for INCREMENTAL_MARK_COMPACTOR
    //   events
    int incremental_marking_steps;

    // Bytes marked since creation of tracer (value at start of event).
    intptr_t cumulative_incremental_marking_bytes;

    // Bytes marked since
    // - last event for SCAVENGER events
    // - last INCREMENTAL_MARK_COMPACTOR event for INCREMENTAL_MARK_COMPACTOR
    //   events
    intptr_t incremental_marking_bytes;

    // Cumulative duration of incremental marking steps since creation of
    // tracer. (value at start of event)
    double cumulative_incremental_marking_duration;

    // Duration of incremental marking steps since
    // - last event for SCAVENGER events
    // - last INCREMENTAL_MARK_COMPACTOR event for INCREMENTAL_MARK_COMPACTOR
    //   events
    double incremental_marking_duration;

    // Cumulative pure duration of incremental marking steps since creation of
    // tracer. (value at start of event)
    double cumulative_pure_incremental_marking_duration;

    // Duration of pure incremental marking steps since
    // - last event for SCAVENGER events
    // - last INCREMENTAL_MARK_COMPACTOR event for INCREMENTAL_MARK_COMPACTOR
    //   events
    double pure_incremental_marking_duration;

    // Longest incremental marking step since start of marking.
    // (value at start of event)
    double longest_incremental_marking_step;

    // Amounts of time spent in different scopes during GC.
    double scopes[Scope::NUMBER_OF_SCOPES];
  };

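  // Maximum number of events kept in each of the ring buffers below.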
  static const size_t kRingBufferMaxSize = 10;

  typedef RingBuffer<Event, kRingBufferMaxSize> EventBuffer;

  typedef RingBuffer<AllocationEvent, kRingBufferMaxSize> AllocationEventBuffer;

  typedef RingBuffer<ContextDisposalEvent, kRingBufferMaxSize>
      ContextDisposalEventBuffer;

  typedef RingBuffer<CompactionEvent, kRingBufferMaxSize> CompactionEventBuffer;

  typedef RingBuffer<SurvivalEvent, kRingBufferMaxSize> SurvivalEventBuffer;

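  // Time window, in milliseconds, used by the Current*AllocationThroughput*
  // accessors below.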
  static const int kThroughputTimeFrameMs = 5000;

  explicit GCTracer(Heap* heap);

  // Start collecting data.
  void Start(GarbageCollector collector, const char* gc_reason,
             const char* collector_reason);

  // Stop collecting data and print results.
  void Stop(GarbageCollector collector);

  // Sample and accumulate bytes allocated since the last GC.
  void SampleAllocation(double current_ms, size_t new_space_counter_bytes,
                        size_t old_generation_counter_bytes);

  // Log the accumulated new space allocation bytes.
  void AddAllocation(double current_ms);

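  // Log the time at which a context was disposed.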
  void AddContextDisposalTime(double time);

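  // Log the duration and live bytes compacted of a compaction event.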
  void AddCompactionEvent(double duration, intptr_t live_bytes_compacted);

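  // Log a survival ratio event (see AverageSurvivalRatio()).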
  void AddSurvivalRatio(double survival_ratio);

  // Log an incremental marking step.
  void AddIncrementalMarkingStep(double duration, intptr_t bytes);

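  // Log an incremental marking finalization step.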
  void AddIncrementalMarkingFinalizationStep(double duration);

  // Log time spent in marking.
  void AddMarkingTime(double duration) {
    cumulative_marking_duration_ += duration;
  }

  // Time spent in marking.
  double cumulative_marking_duration() const {
    return cumulative_marking_duration_;
  }

  // Log time spent in sweeping on main thread.
  void AddSweepingTime(double duration) {
    cumulative_sweeping_duration_ += duration;
  }

  // Time spent in sweeping on main thread.
  double cumulative_sweeping_duration() const {
    return cumulative_sweeping_duration_;
  }

  // Compute the mean duration of the last scavenger events. Returns 0 if no
  // events have been recorded.
  double MeanScavengerDuration() const {
    return MeanDuration(scavenger_events_);
  }

  // Compute the max duration of the last scavenger events. Returns 0 if no
  // events have been recorded.
  double MaxScavengerDuration() const { return MaxDuration(scavenger_events_); }

  // Compute the mean duration of the last mark compactor events. Returns 0 if
  // no events have been recorded.
  double MeanMarkCompactorDuration() const {
    return MeanDuration(mark_compactor_events_);
  }

  // Compute the max duration of the last mark compactor events. Returns 0 if
  // no events have been recorded.
  double MaxMarkCompactorDuration() const {
    return MaxDuration(mark_compactor_events_);
  }

  // Compute the mean duration of the last incremental mark compactor
  // events. Returns 0 if no events have been recorded.
  double MeanIncrementalMarkCompactorDuration() const {
    return MeanDuration(incremental_mark_compactor_events_);
  }

  // Compute the mean step duration of the last incremental marking round.
  // Returns 0 if no incremental marking round has been completed.
  double MeanIncrementalMarkingDuration() const;

  // Compute the max step duration of the last incremental marking round.
  // Returns 0 if no incremental marking round has been completed.
  double MaxIncrementalMarkingDuration() const;

  // Compute the average incremental marking speed in bytes/millisecond.
  // Returns 0 if no events have been recorded.
  intptr_t IncrementalMarkingSpeedInBytesPerMillisecond() const;

  // Compute the average scavenge speed in bytes/millisecond.
  // Returns 0 if no events have been recorded.
  intptr_t ScavengeSpeedInBytesPerMillisecond(
      ScavengeSpeedMode mode = kForAllObjects) const;

  // Compute the average compaction speed in bytes/millisecond.
  // Returns 0 if not enough events have been recorded.
  intptr_t CompactionSpeedInBytesPerMillisecond() const;

  // Compute the average mark-sweep speed in bytes/millisecond.
  // Returns 0 if no events have been recorded.
  intptr_t MarkCompactSpeedInBytesPerMillisecond() const;

  // Compute the average incremental mark-sweep finalize speed in
  // bytes/millisecond.
  // Returns 0 if no events have been recorded.
  intptr_t FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() const;

  // Compute the overall mark compact speed including incremental steps
  // and the final mark-compact step.
  double CombinedMarkCompactSpeedInBytesPerMillisecond();

  // Allocation throughput in the new space in bytes/millisecond.
  // Returns 0 if no allocation events have been recorded.
  size_t NewSpaceAllocationThroughputInBytesPerMillisecond(
      double time_ms = 0) const;

  // Allocation throughput in the old generation in bytes/millisecond in the
  // last time_ms milliseconds.
  // Returns 0 if no allocation events have been recorded.
  size_t OldGenerationAllocationThroughputInBytesPerMillisecond(
      double time_ms = 0) const;

  // Allocation throughput in heap in bytes/millisecond in the last time_ms
  // milliseconds.
  // Returns 0 if no allocation events have been recorded.
  size_t AllocationThroughputInBytesPerMillisecond(double time_ms) const;

  // Allocation throughput in heap in bytes/millisecond in the last
  // kThroughputTimeFrameMs milliseconds.
  // Returns 0 if no allocation events have been recorded.
  size_t CurrentAllocationThroughputInBytesPerMillisecond() const;

  // Allocation throughput in old generation in bytes/millisecond in the last
  // kThroughputTimeFrameMs milliseconds.
  // Returns 0 if no allocation events have been recorded.
  size_t CurrentOldGenerationAllocationThroughputInBytesPerMillisecond() const;

  // Computes the context disposal rate in milliseconds. It takes the time
  // frame of the first recorded context disposal to the current time and
  // divides it by the number of recorded events.
  // Returns 0 if no events have been recorded.
  double ContextDisposalRateInMilliseconds() const;

  // Computes the average survival ratio based on the last recorded survival
  // events.
  // Returns 0 if no events have been recorded.
  double AverageSurvivalRatio() const;

  // Returns true if at least one survival event was recorded.
  bool SurvivalEventsRecorded() const;

  // Discard all recorded survival events.
  void ResetSurvivalEvents();

 private:
  // Print one detailed trace line in name=value format.
  // TODO(ernstm): Move to Heap.
  void PrintNVP() const;

  // Print one trace line.
  // TODO(ernstm): Move to Heap.
  void Print() const;

  // Prints a line and also adds it to the heap's ring buffer so that
  // it can be included in later crash dumps.
  void Output(const char* format, ...) const;

  // Compute the mean duration of the events in the given ring buffer.
  double MeanDuration(const EventBuffer& events) const;

  // Compute the max duration of the events in the given ring buffer.
  double MaxDuration(const EventBuffer& events) const;

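  // Reset all cumulative incremental marking, finalization, marking and
  // sweeping statistics.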
  void ClearMarkCompactStatistics() {
    cumulative_incremental_marking_steps_ = 0;
    cumulative_incremental_marking_bytes_ = 0;
    cumulative_incremental_marking_duration_ = 0;
    cumulative_pure_incremental_marking_duration_ = 0;
    longest_incremental_marking_step_ = 0;
    cumulative_incremental_marking_finalization_steps_ = 0;
    cumulative_incremental_marking_finalization_duration_ = 0;
    longest_incremental_marking_finalization_step_ = 0;
    cumulative_marking_duration_ = 0;
    cumulative_sweeping_duration_ = 0;
  }

  // Pointer to the heap that owns this tracer.
  Heap* heap_;

  // Current tracer event. Populated during Start/Stop cycle. Valid after Stop()
  // has returned.
  Event current_;

  // Previous tracer event.
  Event previous_;

  // Previous INCREMENTAL_MARK_COMPACTOR event.
  Event previous_incremental_mark_compactor_event_;

  // RingBuffers for SCAVENGER events.
  EventBuffer scavenger_events_;

  // RingBuffers for MARK_COMPACTOR events.
  EventBuffer mark_compactor_events_;

  // RingBuffers for INCREMENTAL_MARK_COMPACTOR events.
  EventBuffer incremental_mark_compactor_events_;

  // RingBuffer for allocation events.
  AllocationEventBuffer new_space_allocation_events_;
  AllocationEventBuffer old_generation_allocation_events_;

  // RingBuffer for context disposal events.
  ContextDisposalEventBuffer context_disposal_events_;

  // RingBuffer for compaction events.
  CompactionEventBuffer compaction_events_;

  // RingBuffer for survival events.
  SurvivalEventBuffer survival_events_;

  // Cumulative number of incremental marking steps since creation of tracer.
  int cumulative_incremental_marking_steps_;

  // Cumulative size of incremental marking steps (in bytes) since creation of
  // tracer.
  intptr_t cumulative_incremental_marking_bytes_;

  // Cumulative duration of incremental marking steps since creation of tracer.
  double cumulative_incremental_marking_duration_;

  // Cumulative duration of pure incremental marking steps since creation of
  // tracer.
  double cumulative_pure_incremental_marking_duration_;

  // Longest incremental marking step since start of marking.
  double longest_incremental_marking_step_;

  // Cumulative number of incremental marking finalization steps since creation
  // of tracer.
  int cumulative_incremental_marking_finalization_steps_;

  // Cumulative duration of incremental marking finalization steps since
  // creation of tracer.
  double cumulative_incremental_marking_finalization_duration_;

  // Longest incremental marking finalization step since start of marking.
  double longest_incremental_marking_finalization_step_;

  // Total marking time.
  // This timer is precise when run with --print-cumulative-gc-stat
  double cumulative_marking_duration_;

  // Total sweeping time on the main thread.
  // This timer is precise when run with --print-cumulative-gc-stat
  // TODO(hpayer): Account for sweeping time on sweeper threads. Add a
  // different field for that.
  // TODO(hpayer): This timer right now just holds the sweeping time
  // of the initial atomic sweeping pause. Make sure that it accumulates
  // all sweeping operations performed on the main thread.
  double cumulative_sweeping_duration_;

  // Timestamp and allocation counter at the last sampled allocation event.
  double allocation_time_ms_;
  size_t new_space_allocation_counter_bytes_;
  size_t old_generation_allocation_counter_bytes_;

  // Accumulated duration and allocated bytes since the last GC.
  double allocation_duration_since_gc_;
  size_t new_space_allocation_in_bytes_since_gc_;
  size_t old_generation_allocation_in_bytes_since_gc_;

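  // Cache for CombinedMarkCompactSpeedInBytesPerMillisecond().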
  double combined_mark_compact_speed_cache_;

  // Counts how many tracers were started without stopping.
  int start_counter_;

  DISALLOW_COPY_AND_ASSIGN(GCTracer);
};
}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_GC_TRACER_H_