// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COUNTERS_H_
#define V8_COUNTERS_H_

#include "include/v8.h"
#include "src/allocation.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/globals.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

// StatsCounters is an interface for plugging into external
// counters for monitoring. Counters can be looked up and
// manipulated by name.

class StatsTable {
 public:
  // Register an application-defined function where
  // counters can be looked up.
  void SetCounterFunction(CounterLookupCallback f) {
    lookup_function_ = f;
  }

  // Register an application-defined function to create
  // a histogram for passing to the AddHistogramSample function.
  void SetCreateHistogramFunction(CreateHistogramCallback f) {
    create_histogram_function_ = f;
  }

  // Register an application-defined function to add a sample
  // to a histogram created with the CreateHistogram function.
  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
    add_histogram_sample_function_ = f;
  }

  bool HasCounterFunction() const {
    return lookup_function_ != NULL;
  }

  // Look up the location of a counter by name. If the lookup
  // is successful, returns a non-NULL pointer for writing the
  // value of the counter. Each thread calling this function
  // may receive a different location to store its counter.
  // The return value must not be cached and re-used across
  // threads, although a single thread is free to cache it.
  int* FindLocation(const char* name) {
    if (!lookup_function_) return NULL;
    return lookup_function_(name);
  }

  // Create a histogram by name. If the creation is successful,
  // returns a non-NULL pointer for use with the AddHistogramSample
  // function. min and max define the expected minimum and maximum
  // sample values. buckets is the maximum number of buckets
  // that the samples will be grouped into.
  void* CreateHistogram(const char* name,
                        int min,
                        int max,
                        size_t buckets) {
    if (!create_histogram_function_) return NULL;
    return create_histogram_function_(name, min, max, buckets);
  }

  // Add a sample to a histogram created with the CreateHistogram
  // function.
  void AddHistogramSample(void* histogram, int sample) {
    if (!add_histogram_sample_function_) return;
    return add_histogram_sample_function_(histogram, sample);
  }

 private:
  StatsTable();

  CounterLookupCallback lookup_function_;
  CreateHistogramCallback create_histogram_function_;
  AddHistogramSampleCallback add_histogram_sample_function_;

  friend class Isolate;

  DISALLOW_COPY_AND_ASSIGN(StatsTable);
};
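
// Example (illustrative sketch, not part of the original header): the
// embedder supplies the callbacks that StatsTable forwards to. The storage
// below is hypothetical; only the callback signatures come from
// include/v8.h.
//
//   static std::map<std::string, int> counter_storage;  // hypothetical
//
//   static int* LookupCounter(const char* name) {
//     return &counter_storage[name];  // one persistent slot per counter name
//   }
//
// A function of this shape is what SetCounterFunction() above expects; the
// Isolate wires the registered callbacks into its StatsTable.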

// StatsCounters are dynamically created values which can be tracked in
// the StatsTable. They are designed to be lightweight to create and
// easy to use.
//
// Internally, a counter represents a value in a row of a StatsTable.
// The row has a 32-bit value for each process/thread in the table and also
// a name (stored in the table metadata). Since the storage location can be
// thread-specific, this class cannot be shared across threads.
class StatsCounter {
 public:
  StatsCounter() { }
  explicit StatsCounter(Isolate* isolate, const char* name)
      : isolate_(isolate), name_(name), ptr_(NULL), lookup_done_(false) { }

  // Sets the counter to a specific value.
  void Set(int value) {
    int* loc = GetPtr();
    if (loc) *loc = value;
  }

  // Increments the counter.
  void Increment() {
    int* loc = GetPtr();
    if (loc) (*loc)++;
  }

  void Increment(int value) {
    int* loc = GetPtr();
    if (loc)
      (*loc) += value;
  }

  // Decrements the counter.
  void Decrement() {
    int* loc = GetPtr();
    if (loc) (*loc)--;
  }

  void Decrement(int value) {
    int* loc = GetPtr();
    if (loc) (*loc) -= value;
  }

  // Is this counter enabled?
  // Returns false if the table is full.
  bool Enabled() {
    return GetPtr() != NULL;
  }

  // Get the internal pointer to the counter. This is used
  // by the code generator to emit code that manipulates a
  // given counter without calling the runtime system.
  int* GetInternalPointer() {
    int* loc = GetPtr();
    DCHECK(loc != NULL);
    return loc;
  }

  // Reset the cached internal pointer.
  void Reset() { lookup_done_ = false; }

 protected:
  // Returns the cached address of this counter location.
  int* GetPtr() {
    if (lookup_done_) return ptr_;
    lookup_done_ = true;
    ptr_ = FindLocationInStatsTable();
    return ptr_;
  }

 private:
  int* FindLocationInStatsTable() const;

  Isolate* isolate_;
  const char* name_;
  int* ptr_;
  bool lookup_done_;
};
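
// Illustrative usage (not part of the original header): individual counters
// are normally reached through the Counters accessors declared later in this
// file, assuming the usual Isolate::counters() accessor. |source_length| is
// made up for the example.
//
//   StatsCounter* counter = isolate->counters()->total_parse_size();
//   if (counter->Enabled()) counter->Increment(source_length);
//
// The Enabled() check is optional; Increment() is already a no-op while no
// counter function has been registered with the StatsTable.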

// A Histogram represents a dynamically created histogram in the StatsTable.
// It will be registered with the histogram system on first use.
class Histogram {
 public:
  Histogram() { }
  Histogram(const char* name,
            int min,
            int max,
            int num_buckets,
            Isolate* isolate)
      : name_(name),
        min_(min),
        max_(max),
        num_buckets_(num_buckets),
        histogram_(NULL),
        lookup_done_(false),
        isolate_(isolate) { }

  // Add a single sample to this histogram.
  void AddSample(int sample);

  // Returns true if this histogram is enabled.
  bool Enabled() {
    return GetHistogram() != NULL;
  }

  // Reset the cached internal pointer.
  void Reset() {
    lookup_done_ = false;
  }

 protected:
  // Returns the handle to the histogram.
  void* GetHistogram() {
    if (!lookup_done_) {
      lookup_done_ = true;
      histogram_ = CreateHistogram();
    }
    return histogram_;
  }

  const char* name() { return name_; }
  Isolate* isolate() const { return isolate_; }

 private:
  void* CreateHistogram() const;

  const char* name_;
  int min_;
  int max_;
  int num_buckets_;
  void* histogram_;
  bool lookup_done_;
  Isolate* isolate_;
};
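
// Illustrative usage (not part of the original header): histograms are also
// reached through the Counters accessors, e.g. one of the range histograms
// from HISTOGRAM_RANGE_LIST below. |idle_time_in_ms| is made up for the
// example.
//
//   isolate->counters()->gc_idle_time_allotted_in_ms()->AddSample(
//       static_cast<int>(idle_time_in_ms));
//
// Until a CreateHistogramCallback has been registered, GetHistogram()
// returns NULL and the sample is effectively dropped.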

// A HistogramTimer allows distributions of results to be created.
class HistogramTimer : public Histogram {
 public:
  HistogramTimer() { }
  HistogramTimer(const char* name,
                 int min,
                 int max,
                 int num_buckets,
                 Isolate* isolate)
      : Histogram(name, min, max, num_buckets, isolate) {}

  // Start the timer.
  void Start();

  // Stop the timer and record the results.
  void Stop();

  // Returns true if the timer is running.
  bool Running() {
    return Enabled() && timer_.IsStarted();
  }

  // TODO(bmeurer): Remove this when HistogramTimerScope is fixed.
#ifdef DEBUG
  base::ElapsedTimer* timer() { return &timer_; }
#endif

 private:
  base::ElapsedTimer timer_;
};

// Helper class for scoping a HistogramTimer.
// TODO(bmeurer): The ifdeffery is an ugly hack around the fact that the
// Parser is currently reentrant (when it throws an error, we call back
// into JavaScript and all bets are off), but ElapsedTimer is not
// reentry-safe. Fix this properly and remove |allow_nesting|.
class HistogramTimerScope BASE_EMBEDDED {
 public:
  explicit HistogramTimerScope(HistogramTimer* timer,
                               bool allow_nesting = false)
#ifdef DEBUG
      : timer_(timer),
        skipped_timer_start_(false) {
    if (timer_->timer()->IsStarted() && allow_nesting) {
      skipped_timer_start_ = true;
    } else {
      timer_->Start();
    }
  }
#else
      : timer_(timer) {
    timer_->Start();
  }
#endif
  ~HistogramTimerScope() {
#ifdef DEBUG
    if (!skipped_timer_start_) {
      timer_->Stop();
    }
#else
    timer_->Stop();
#endif
  }

 private:
  HistogramTimer* timer_;
#ifdef DEBUG
  bool skipped_timer_start_;
#endif
};
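
// Illustrative usage (not part of the original header): scoping one of the
// timers from HISTOGRAM_TIMER_LIST below around the work being measured.
//
//   {
//     HistogramTimerScope timer_scope(isolate->counters()->parse());
//     // ... do the work being timed ...
//   }  // the destructor stops the timer and records the elapsed time
//
// Pass allow_nesting = true only for the reentrant-parser situation described
// in the TODO above.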

#define HISTOGRAM_RANGE_LIST(HR) \
  /* Generic range histograms */ \
  HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000, 101) \
  HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000, 101) \
  HR(gc_idle_time_limit_undershot, V8.GCIdleTimeLimit.Undershot, 0, 10000, 101)

#define HISTOGRAM_TIMER_LIST(HT) \
  /* Garbage collection timers. */ \
  HT(gc_compactor, V8.GCCompactor) \
  HT(gc_scavenger, V8.GCScavenger) \
  HT(gc_context, V8.GCContext) /* GC context cleanup time */ \
  HT(gc_idle_notification, V8.GCIdleNotification) \
  HT(gc_incremental_marking, V8.GCIncrementalMarking) \
  HT(gc_low_memory_notification, V8.GCLowMemoryNotification) \
  /* Parsing timers. */ \
  HT(parse, V8.Parse) \
  HT(parse_lazy, V8.ParseLazy) \
  HT(pre_parse, V8.PreParse) \
  /* Total compilation times. */ \
  HT(compile, V8.Compile) \
  HT(compile_eval, V8.CompileEval) \
  /* Serialization as part of compilation (code caching) */ \
  HT(compile_serialize, V8.CompileSerialize) \
  HT(compile_deserialize, V8.CompileDeserialize)


#define HISTOGRAM_PERCENTAGE_LIST(HP) \
  /* Heap fragmentation. */ \
  HP(external_fragmentation_total, \
     V8.MemoryExternalFragmentationTotal) \
  HP(external_fragmentation_old_pointer_space, \
     V8.MemoryExternalFragmentationOldPointerSpace) \
  HP(external_fragmentation_old_data_space, \
     V8.MemoryExternalFragmentationOldDataSpace) \
  HP(external_fragmentation_code_space, \
     V8.MemoryExternalFragmentationCodeSpace) \
  HP(external_fragmentation_map_space, \
     V8.MemoryExternalFragmentationMapSpace) \
  HP(external_fragmentation_cell_space, \
     V8.MemoryExternalFragmentationCellSpace) \
  HP(external_fragmentation_property_cell_space, \
     V8.MemoryExternalFragmentationPropertyCellSpace) \
  HP(external_fragmentation_lo_space, \
     V8.MemoryExternalFragmentationLoSpace) \
  /* Percentages of heap committed to each space. */ \
  HP(heap_fraction_new_space, \
     V8.MemoryHeapFractionNewSpace) \
  HP(heap_fraction_old_pointer_space, \
     V8.MemoryHeapFractionOldPointerSpace) \
  HP(heap_fraction_old_data_space, \
     V8.MemoryHeapFractionOldDataSpace) \
  HP(heap_fraction_code_space, \
     V8.MemoryHeapFractionCodeSpace) \
  HP(heap_fraction_map_space, \
     V8.MemoryHeapFractionMapSpace) \
  HP(heap_fraction_cell_space, \
     V8.MemoryHeapFractionCellSpace) \
  HP(heap_fraction_property_cell_space, \
     V8.MemoryHeapFractionPropertyCellSpace) \
  HP(heap_fraction_lo_space, \
     V8.MemoryHeapFractionLoSpace) \
  /* Percentage of crankshafted codegen. */ \
  HP(codegen_fraction_crankshaft, \
     V8.CodegenFractionCrankshaft) \


#define HISTOGRAM_MEMORY_LIST(HM) \
  HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted) \
  HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \
  HM(heap_sample_map_space_committed, \
     V8.MemoryHeapSampleMapSpaceCommitted) \
  HM(heap_sample_cell_space_committed, \
     V8.MemoryHeapSampleCellSpaceCommitted) \
  HM(heap_sample_property_cell_space_committed, \
     V8.MemoryHeapSamplePropertyCellSpaceCommitted) \
  HM(heap_sample_code_space_committed, \
     V8.MemoryHeapSampleCodeSpaceCommitted) \
  HM(heap_sample_maximum_committed, \
     V8.MemoryHeapSampleMaximumCommitted) \


// WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
// Intellisense to crash. It was broken into two macros (each of length 40
// lines) rather than one macro (of length about 80 lines) to work around
// this problem. Please avoid using recursive macros of this length when
// possible.
#define STATS_COUNTER_LIST_1(SC) \
  /* Global Handle Count */ \
  SC(global_handles, V8.GlobalHandles) \
  /* OS Memory allocated */ \
  SC(memory_allocated, V8.OsMemoryAllocated) \
  SC(normalized_maps, V8.NormalizedMaps) \
  SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \
  SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \
  SC(alive_after_last_gc, V8.AliveAfterLastGC) \
  SC(objs_since_last_young, V8.ObjsSinceLastYoung) \
  SC(objs_since_last_full, V8.ObjsSinceLastFull) \
  SC(string_table_capacity, V8.StringTableCapacity) \
  SC(number_of_symbols, V8.NumberOfSymbols) \
  SC(script_wrappers, V8.ScriptWrappers) \
  SC(call_initialize_stubs, V8.CallInitializeStubs) \
  SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs) \
  SC(call_normal_stubs, V8.CallNormalStubs) \
  SC(call_megamorphic_stubs, V8.CallMegamorphicStubs) \
  SC(inlined_copied_elements, V8.InlinedCopiedElements) \
  SC(arguments_adaptors, V8.ArgumentsAdaptors) \
  SC(compilation_cache_hits, V8.CompilationCacheHits) \
  SC(compilation_cache_misses, V8.CompilationCacheMisses) \
  SC(string_ctor_calls, V8.StringConstructorCalls) \
  SC(string_ctor_conversions, V8.StringConstructorConversions) \
  SC(string_ctor_cached_number, V8.StringConstructorCachedNumber) \
  SC(string_ctor_string_value, V8.StringConstructorStringValue) \
  SC(string_ctor_gc_required, V8.StringConstructorGCRequired) \
  /* Amount of evaled source code. */ \
  SC(total_eval_size, V8.TotalEvalSize) \
  /* Amount of loaded source code. */ \
  SC(total_load_size, V8.TotalLoadSize) \
  /* Amount of parsed source code. */ \
  SC(total_parse_size, V8.TotalParseSize) \
  /* Amount of source code skipped over using preparsing. */ \
  SC(total_preparse_skipped, V8.TotalPreparseSkipped) \
  /* Number of symbol lookups skipped using preparsing */ \
  SC(total_preparse_symbols_skipped, V8.TotalPreparseSymbolSkipped) \
  /* Amount of compiled source code. */ \
  SC(total_compile_size, V8.TotalCompileSize) \
  /* Amount of source code compiled with the full codegen. */ \
  SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize) \
  /* Number of contexts created from scratch. */ \
  SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch) \
  /* Number of contexts created by partial snapshot. */ \
  SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot) \
  /* Number of code objects found from pc. */ \
  SC(pc_to_code, V8.PcToCode) \
  SC(pc_to_code_cached, V8.PcToCodeCached) \
  /* The store-buffer implementation of the write barrier. */ \
  SC(store_buffer_compactions, V8.StoreBufferCompactions) \
  SC(store_buffer_overflows, V8.StoreBufferOverflows)


#define STATS_COUNTER_LIST_2(SC) \
  /* Number of code stubs. */ \
  SC(code_stubs, V8.CodeStubs) \
  /* Amount of stub code. */ \
  SC(total_stubs_code_size, V8.TotalStubsCodeSize) \
  /* Amount of (JS) compiled code. */ \
  SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \
  SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \
  SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
  SC(gc_compactor_caused_by_oldspace_exhaustion, \
     V8.GCCompactorCausedByOldspaceExhaustion) \
  SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \
  SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \
  /* How is the generic keyed-load stub used? */ \
  SC(keyed_load_generic_smi, V8.KeyedLoadGenericSmi) \
  SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol) \
  SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache) \
  SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow) \
  SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs) \
  SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow) \
  /* How is the generic keyed-call stub used? */ \
  SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast) \
  SC(keyed_call_generic_smi_dict, V8.KeyedCallGenericSmiDict) \
  SC(keyed_call_generic_lookup_cache, V8.KeyedCallGenericLookupCache) \
  SC(keyed_call_generic_lookup_dict, V8.KeyedCallGenericLookupDict) \
  SC(keyed_call_generic_slow, V8.KeyedCallGenericSlow) \
  SC(keyed_call_generic_slow_load, V8.KeyedCallGenericSlowLoad) \
  SC(named_load_global_stub, V8.NamedLoadGlobalStub) \
  SC(named_store_global_inline, V8.NamedStoreGlobalInline) \
  SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss) \
  SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs) \
  SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow) \
  SC(store_normal_miss, V8.StoreNormalMiss) \
  SC(store_normal_hit, V8.StoreNormalHit) \
  SC(cow_arrays_created_stub, V8.COWArraysCreatedStub) \
  SC(cow_arrays_created_runtime, V8.COWArraysCreatedRuntime) \
  SC(cow_arrays_converted, V8.COWArraysConverted) \
  SC(call_miss, V8.CallMiss) \
  SC(keyed_call_miss, V8.KeyedCallMiss) \
  SC(load_miss, V8.LoadMiss) \
  SC(keyed_load_miss, V8.KeyedLoadMiss) \
  SC(call_const, V8.CallConst) \
  SC(call_const_fast_api, V8.CallConstFastApi) \
  SC(call_const_interceptor, V8.CallConstInterceptor) \
  SC(call_const_interceptor_fast_api, V8.CallConstInterceptorFastApi) \
  SC(call_global_inline, V8.CallGlobalInline) \
  SC(call_global_inline_miss, V8.CallGlobalInlineMiss) \
  SC(constructed_objects, V8.ConstructedObjects) \
  SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \
  SC(negative_lookups, V8.NegativeLookups) \
  SC(negative_lookups_miss, V8.NegativeLookupsMiss) \
  SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
  SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \
  SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
  SC(array_function_runtime, V8.ArrayFunctionRuntime) \
  SC(array_function_native, V8.ArrayFunctionNative) \
  SC(for_in, V8.ForIn) \
  SC(enum_cache_hits, V8.EnumCacheHits) \
  SC(enum_cache_misses, V8.EnumCacheMisses) \
  SC(zone_segment_bytes, V8.ZoneSegmentBytes) \
  SC(fast_new_closure_total, V8.FastNewClosureTotal) \
  SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized) \
  SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized) \
  SC(string_add_runtime, V8.StringAddRuntime) \
  SC(string_add_native, V8.StringAddNative) \
  SC(string_add_runtime_ext_to_one_byte, V8.StringAddRuntimeExtToOneByte) \
  SC(sub_string_runtime, V8.SubStringRuntime) \
  SC(sub_string_native, V8.SubStringNative) \
  SC(string_add_make_two_char, V8.StringAddMakeTwoChar) \
  SC(string_compare_native, V8.StringCompareNative) \
  SC(string_compare_runtime, V8.StringCompareRuntime) \
  SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \
  SC(regexp_entry_native, V8.RegExpEntryNative) \
  SC(number_to_string_native, V8.NumberToStringNative) \
  SC(number_to_string_runtime, V8.NumberToStringRuntime) \
  SC(math_acos, V8.MathAcos) \
  SC(math_asin, V8.MathAsin) \
  SC(math_atan, V8.MathAtan) \
  SC(math_atan2, V8.MathAtan2) \
  SC(math_exp, V8.MathExp) \
  SC(math_floor, V8.MathFloor) \
  SC(math_log, V8.MathLog) \
  SC(math_pow, V8.MathPow) \
  SC(math_round, V8.MathRound) \
  SC(math_sqrt, V8.MathSqrt) \
  SC(stack_interrupts, V8.StackInterrupts) \
  SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \
  SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \
  SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \
  SC(soft_deopts_requested, V8.SoftDeoptsRequested) \
  SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \
  SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \
  /* Number of write barriers in generated code. */ \
  SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \
  SC(write_barriers_static, V8.WriteBarriersStatic) \
  SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
  SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
  SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
  SC(old_pointer_space_bytes_available, \
     V8.MemoryOldPointerSpaceBytesAvailable) \
  SC(old_pointer_space_bytes_committed, \
     V8.MemoryOldPointerSpaceBytesCommitted) \
  SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed) \
  SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable) \
  SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted) \
  SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed) \
  SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
  SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
  SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
  SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \
  SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \
  SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \
  SC(cell_space_bytes_available, V8.MemoryCellSpaceBytesAvailable) \
  SC(cell_space_bytes_committed, V8.MemoryCellSpaceBytesCommitted) \
  SC(cell_space_bytes_used, V8.MemoryCellSpaceBytesUsed) \
  SC(property_cell_space_bytes_available, \
     V8.MemoryPropertyCellSpaceBytesAvailable) \
  SC(property_cell_space_bytes_committed, \
     V8.MemoryPropertyCellSpaceBytesCommitted) \
  SC(property_cell_space_bytes_used, V8.MemoryPropertyCellSpaceBytesUsed) \
  SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \
  SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \
  SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed)


// This file contains all the v8 counters that are in use.
class Counters {
 public:
#define HR(name, caption, min, max, num_buckets) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_RANGE_LIST(HR)
#undef HR

#define HT(name, caption) \
  HistogramTimer* name() { return &name##_; }
  HISTOGRAM_TIMER_LIST(HT)
#undef HT

#define HP(name, caption) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define SC(name, caption) \
  StatsCounter* name() { return &name##_; }
  STATS_COUNTER_LIST_1(SC)
  STATS_COUNTER_LIST_2(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_##name() { return &count_of_##name##_; } \
  StatsCounter* size_of_##name() { return &size_of_##name##_; }
  INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_CODE_TYPE_##name() \
      { return &count_of_CODE_TYPE_##name##_; } \
  StatsCounter* size_of_CODE_TYPE_##name() \
      { return &size_of_CODE_TYPE_##name##_; }
  CODE_KIND_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_FIXED_ARRAY_##name() \
      { return &count_of_FIXED_ARRAY_##name##_; } \
  StatsCounter* size_of_FIXED_ARRAY_##name() \
      { return &size_of_FIXED_ARRAY_##name##_; }
  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_CODE_AGE_##name() \
      { return &count_of_CODE_AGE_##name##_; } \
  StatsCounter* size_of_CODE_AGE_##name() \
      { return &size_of_CODE_AGE_##name##_; }
  CODE_AGE_LIST_COMPLETE(SC)
#undef SC

  enum Id {
#define RATE_ID(name, caption) k_##name,
    HISTOGRAM_TIMER_LIST(RATE_ID)
#undef RATE_ID
#define PERCENTAGE_ID(name, caption) k_##name,
    HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
#undef PERCENTAGE_ID
#define MEMORY_ID(name, caption) k_##name,
    HISTOGRAM_MEMORY_LIST(MEMORY_ID)
#undef MEMORY_ID
#define COUNTER_ID(name, caption) k_##name,
    STATS_COUNTER_LIST_1(COUNTER_ID)
    STATS_COUNTER_LIST_2(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
    INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
    kSizeOfCODE_TYPE_##name,
    CODE_KIND_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
    kSizeOfFIXED_ARRAY__##name,
    FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfCODE_AGE__##name, \
    kSizeOfCODE_AGE__##name,
    CODE_AGE_LIST_COMPLETE(COUNTER_ID)
#undef COUNTER_ID
    stats_counter_count
  };

  void ResetCounters();
  void ResetHistograms();

 private:
#define HR(name, caption, min, max, num_buckets) Histogram name##_;
  HISTOGRAM_RANGE_LIST(HR)
#undef HR

#define HT(name, caption) \
  HistogramTimer name##_;
  HISTOGRAM_TIMER_LIST(HT)
#undef HT

#define HP(name, caption) \
  Histogram name##_;
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption) \
  Histogram name##_;
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define SC(name, caption) \
  StatsCounter name##_;
  STATS_COUNTER_LIST_1(SC)
  STATS_COUNTER_LIST_2(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_##name##_; \
  StatsCounter count_of_##name##_;
  INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_CODE_TYPE_##name##_; \
  StatsCounter count_of_CODE_TYPE_##name##_;
  CODE_KIND_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_FIXED_ARRAY_##name##_; \
  StatsCounter count_of_FIXED_ARRAY_##name##_;
  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_CODE_AGE_##name##_; \
  StatsCounter count_of_CODE_AGE_##name##_;
  CODE_AGE_LIST_COMPLETE(SC)
#undef SC

  friend class Isolate;

  explicit Counters(Isolate* isolate);

  DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
};
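
// Illustrative expansion (not part of the original header): for a single
// entry such as SC(global_handles, V8.GlobalHandles), the macros above
// generate roughly
//
//   StatsCounter* global_handles() { return &global_handles_; }  // public
//   StatsCounter global_handles_;                                // private
//
// and the HR/HT/HP/HM entries likewise produce a Histogram or HistogramTimer
// accessor plus its backing member. The second macro argument (the caption,
// e.g. V8.GlobalHandles) is the name under which the value is reported to
// the embedder's counter callbacks.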

} }  // namespace v8::internal

#endif  // V8_COUNTERS_H_