V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
counters.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_COUNTERS_H_
6 #define V8_COUNTERS_H_
7 
8 #include "include/v8.h"
9 #include "src/allocation.h"
10 #include "src/base/atomic-utils.h"
11 #include "src/base/platform/elapsed-timer.h"
12 #include "src/base/platform/time.h"
13 #include "src/globals.h"
14 #include "src/heap-symbols.h"
15 #include "src/isolate.h"
16 #include "src/objects.h"
17 #include "src/runtime/runtime.h"
18 #include "src/tracing/trace-event.h"
19 #include "src/tracing/traced-value.h"
20 #include "src/tracing/tracing-category-observer.h"
21 
22 namespace v8 {
23 namespace internal {
24 
25 // StatsCounters is an interface for plugging into external
26 // counters for monitoring. Counters can be looked up and
27 // manipulated by name.
28 
29 class Counters;
30 
31 class StatsTable {
32  public:
33  // Register an application-defined function for recording
34  // subsequent counter statistics.
35  void SetCounterFunction(CounterLookupCallback f);
36 
37  // Register an application-defined function to create histograms for
38  // recording subsequent histogram samples.
39  void SetCreateHistogramFunction(CreateHistogramCallback f) {
40  create_histogram_function_ = f;
41  }
42 
43  // Register an application-defined function to add a sample
44  // to a histogram created with CreateHistogram function.
45  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
46  add_histogram_sample_function_ = f;
47  }
48 
49  bool HasCounterFunction() const { return lookup_function_ != nullptr; }
50 
51  // Lookup the location of a counter by name. If the lookup
52  // is successful, returns a non-nullptr pointer for writing the
53  // value of the counter. Each thread calling this function
54 // may receive a different location to store its counter.
55  // The return value must not be cached and re-used across
56  // threads, although a single thread is free to cache it.
57  int* FindLocation(const char* name) {
58  if (!lookup_function_) return nullptr;
59  return lookup_function_(name);
60  }
61 
62  // Create a histogram by name. If the create is successful,
63  // returns a non-nullptr pointer for use with AddHistogramSample
64  // function. min and max define the expected minimum and maximum
65  // sample values. buckets is the maximum number of buckets
66  // that the samples will be grouped into.
67  void* CreateHistogram(const char* name,
68  int min,
69  int max,
70  size_t buckets) {
71  if (!create_histogram_function_) return nullptr;
72  return create_histogram_function_(name, min, max, buckets);
73  }
74 
75  // Add a sample to a histogram created with the CreateHistogram
76  // function.
77  void AddHistogramSample(void* histogram, int sample) {
78  if (!add_histogram_sample_function_) return;
79  return add_histogram_sample_function_(histogram, sample);
80  }
81 
82  private:
83  friend class Counters;
84 
85  explicit StatsTable(Counters* counters);
86 
87  CounterLookupCallback lookup_function_;
88  CreateHistogramCallback create_histogram_function_;
89  AddHistogramSampleCallback add_histogram_sample_function_;
90 
91  DISALLOW_COPY_AND_ASSIGN(StatsTable);
92 };
93 
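The three callbacks stored by StatsTable are supplied by the embedder through the public v8::Isolate API (SetCounterFunction, SetCreateHistogramFunction, SetAddHistogramSampleFunction in include/v8.h). A minimal sketch of a counter lookup callback, assuming a process-global map (the map and the function names are illustrative, not part of V8):

#include <map>
#include <string>
#include "include/v8.h"

// std::map never invalidates element addresses, so the returned int* stays
// valid for the life of the process, as FindLocation() requires.
static std::map<std::string, int> g_counters;

static int* LookupCounter(const char* name) {
  return &g_counters[name];
}

void InstallCounters(v8::Isolate* isolate) {
  isolate->SetCounterFunction(LookupCounter);
}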
94 // Base class for stats counters.
95 class StatsCounterBase {
96  protected:
97  Counters* counters_;
98  const char* name_;
99  int* ptr_;
100 
101  StatsCounterBase() = default;
102  StatsCounterBase(Counters* counters, const char* name)
103  : counters_(counters), name_(name), ptr_(nullptr) {}
104 
105  void SetLoc(int* loc, int value) { *loc = value; }
106  void IncrementLoc(int* loc) { (*loc)++; }
107  void IncrementLoc(int* loc, int value) { (*loc) += value; }
108  void DecrementLoc(int* loc) { (*loc)--; }
109  void DecrementLoc(int* loc, int value) { (*loc) -= value; }
110 
111  int* FindLocationInStatsTable() const;
112 };
113 
114 // StatsCounters are dynamically created values which can be tracked in
115 // the StatsTable. They are designed to be lightweight to create and
116 // easy to use.
117 //
118 // Internally, a counter represents a value in a row of a StatsTable.
119 // The row has a 32bit value for each process/thread in the table and also
120 // a name (stored in the table metadata). Since the storage location can be
121 // thread-specific, this class cannot be shared across threads. Note: This
122 // class is not thread safe.
123 class StatsCounter : public StatsCounterBase {
124  public:
125  // Sets the counter to a specific value.
126  void Set(int value) {
127  if (int* loc = GetPtr()) SetLoc(loc, value);
128  }
129 
130  // Increments the counter.
131  void Increment() {
132  if (int* loc = GetPtr()) IncrementLoc(loc);
133  }
134 
135  void Increment(int value) {
136  if (int* loc = GetPtr()) IncrementLoc(loc, value);
137  }
138 
139  // Decrements the counter.
140  void Decrement() {
141  if (int* loc = GetPtr()) DecrementLoc(loc);
142  }
143 
144  void Decrement(int value) {
145  if (int* loc = GetPtr()) DecrementLoc(loc, value);
146  }
147 
148  // Is this counter enabled?
149  // Returns false if table is full.
150  bool Enabled() { return GetPtr() != nullptr; }
151 
152  // Get the internal pointer to the counter. This is used
153  // by the code generator to emit code that manipulates a
154  // given counter without calling the runtime system.
155  int* GetInternalPointer() {
156  int* loc = GetPtr();
157  DCHECK_NOT_NULL(loc);
158  return loc;
159  }
160 
161  private:
162  friend class Counters;
163 
164  StatsCounter() = default;
165  StatsCounter(Counters* counters, const char* name)
166  : StatsCounterBase(counters, name), lookup_done_(false) {}
167 
168  // Reset the cached internal pointer.
169  void Reset() { lookup_done_ = false; }
170 
171  // Returns the cached address of this counter location.
172  int* GetPtr() {
173  if (lookup_done_) return ptr_;
174  lookup_done_ = true;
175  ptr_ = FindLocationInStatsTable();
176  return ptr_;
177  }
178 
179  bool lookup_done_;
180 };
181 
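Usage sketch (not part of the header): inside v8::internal code a StatsCounter is reached through the accessor that the Counters class below generates for each STATS_COUNTER_LIST_* entry, for example:

// Increment the V8.MapsCreated counter; if the embedder registered no
// counter function, GetPtr() yields nullptr and the call is a no-op.
isolate->counters()->maps_created()->Increment();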
182 // Thread safe version of StatsCounter.
183 class StatsCounterThreadSafe : public StatsCounterBase {
184  public:
185  void Set(int Value);
186  void Increment();
187  void Increment(int value);
188  void Decrement();
189  void Decrement(int value);
190  bool Enabled() { return ptr_ != nullptr; }
191  int* GetInternalPointer() {
192  DCHECK_NOT_NULL(ptr_);
193  return ptr_;
194  }
195 
196  private:
197  friend class Counters;
198 
199  StatsCounterThreadSafe(Counters* counters, const char* name);
200  void Reset() { ptr_ = FindLocationInStatsTable(); }
201 
202  base::Mutex mutex_;
203 
204  DISALLOW_IMPLICIT_CONSTRUCTORS(StatsCounterThreadSafe);
205 };
206 
207 // A Histogram represents a dynamically created histogram in the
208 // StatsTable. Note: This class is thread safe.
209 class Histogram {
210  public:
211  // Add a single sample to this histogram.
212  void AddSample(int sample);
213 
214  // Returns true if this histogram is enabled.
215  bool Enabled() { return histogram_ != nullptr; }
216 
217  const char* name() { return name_; }
218 
219  int min() const { return min_; }
220  int max() const { return max_; }
221  int num_buckets() const { return num_buckets_; }
222 
223  // Asserts that |expected_counters| are the same as the Counters this
224  // Histogram reports to.
225  void AssertReportsToCounters(Counters* expected_counters) {
226  DCHECK_EQ(counters_, expected_counters);
227  }
228 
229  protected:
230  Histogram() = default;
231  Histogram(const char* name, int min, int max, int num_buckets,
232  Counters* counters)
233  : name_(name),
234  min_(min),
235  max_(max),
236  num_buckets_(num_buckets),
237  histogram_(nullptr),
238  counters_(counters) {
239  DCHECK(counters_);
240  }
241 
242  Counters* counters() const { return counters_; }
243 
244  // Reset the cached internal pointer.
245  void Reset() { histogram_ = CreateHistogram(); }
246 
247  private:
248  friend class Counters;
249 
250  void* CreateHistogram() const;
251 
252  const char* name_;
253  int min_;
254  int max_;
255  int num_buckets_;
256  void* histogram_;
257  Counters* counters_;
258 };
259 
260 enum class HistogramTimerResolution { MILLISECOND, MICROSECOND };
261 
262 // A thread safe histogram timer. It also allows distributions of
263 // nested timed results.
264 class TimedHistogram : public Histogram {
265  public:
266  // Start the timer. Log if isolate non-null.
267  void Start(base::ElapsedTimer* timer, Isolate* isolate);
268 
269  // Stop the timer and record the results. Log if isolate non-null.
270  void Stop(base::ElapsedTimer* timer, Isolate* isolate);
271 
272  // Records a TimeDelta::Max() result. Useful to record percentage of tasks
273  // that never got to run in a given scenario. Log if isolate non-null.
274  void RecordAbandon(base::ElapsedTimer* timer, Isolate* isolate);
275 
276  protected:
277  friend class Counters;
278  HistogramTimerResolution resolution_;
279 
280  TimedHistogram() = default;
281  TimedHistogram(const char* name, int min, int max,
282  HistogramTimerResolution resolution, int num_buckets,
283  Counters* counters)
284  : Histogram(name, min, max, num_buckets, counters),
285  resolution_(resolution) {}
286  void AddTimeSample();
287 };
288 
289 // Helper class for scoping a TimedHistogram.
290 class TimedHistogramScope {
291  public:
292  explicit TimedHistogramScope(TimedHistogram* histogram,
293  Isolate* isolate = nullptr)
294  : histogram_(histogram), isolate_(isolate) {
295  histogram_->Start(&timer_, isolate);
296  }
297 
298  ~TimedHistogramScope() { histogram_->Stop(&timer_, isolate_); }
299 
300  private:
301  base::ElapsedTimer timer_;
302  TimedHistogram* histogram_;
303  Isolate* isolate_;
304 
305  DISALLOW_IMPLICIT_CONSTRUCTORS(TimedHistogramScope);
306 };
307 
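Usage sketch (illustrative, not part of the header): the scope starts the timer in its constructor and records the elapsed time into the histogram when it is destroyed. gc_scavenger() is one of the accessors generated from TIMED_HISTOGRAM_LIST further below.

{
  TimedHistogramScope scope(isolate->counters()->gc_scavenger(), isolate);
  // ... the work being timed ...
}  // destructor calls Stop() and adds one sample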
308 enum class OptionalTimedHistogramScopeMode { TAKE_TIME, DONT_TAKE_TIME };
309 
310 // Helper class for scoping a TimedHistogram.
311 // It will not take time for mode = DONT_TAKE_TIME.
312 class OptionalTimedHistogramScope {
313  public:
314  OptionalTimedHistogramScope(TimedHistogram* histogram, Isolate* isolate,
315  OptionalTimedHistogramScopeMode mode)
316  : histogram_(histogram), isolate_(isolate), mode_(mode) {
317  if (mode == OptionalTimedHistogramScopeMode::TAKE_TIME) {
318  histogram_->Start(&timer_, isolate);
319  }
320  }
321 
322  ~OptionalTimedHistogramScope() {
323  if (mode_ == OptionalTimedHistogramScopeMode::TAKE_TIME) {
324  histogram_->Stop(&timer_, isolate_);
325  }
326  }
327 
328  private:
329  base::ElapsedTimer timer_;
330  TimedHistogram* const histogram_;
331  Isolate* const isolate_;
332  const OptionalTimedHistogramScopeMode mode_;
333  DISALLOW_IMPLICIT_CONSTRUCTORS(OptionalTimedHistogramScope);
334 };
335 
336 // Helper class for recording a TimedHistogram asynchronously with manual
337 // controls (it will not generate a report if destroyed without explicitly
338 // triggering a report). |async_counters| should be a shared_ptr to
339 // |histogram->counters()|, making it safe to report to an
340 // AsyncTimedHistogram after the associated isolate has been destroyed.
341 // AsyncTimedHistogram can be moved/copied to avoid computing Now() multiple
342 // times when the times of multiple tasks are identical; each copy will generate
343 // its own report.
344 class AsyncTimedHistogram {
345  public:
346  explicit AsyncTimedHistogram(TimedHistogram* histogram,
347  std::shared_ptr<Counters> async_counters)
348  : histogram_(histogram), async_counters_(std::move(async_counters)) {
349  histogram_->AssertReportsToCounters(async_counters_.get());
350  histogram_->Start(&timer_, nullptr);
351  }
352 
353  ~AsyncTimedHistogram() = default;
354 
355  AsyncTimedHistogram(const AsyncTimedHistogram& other) = default;
356  AsyncTimedHistogram& operator=(const AsyncTimedHistogram& other) = default;
357  AsyncTimedHistogram(AsyncTimedHistogram&& other) = default;
358  AsyncTimedHistogram& operator=(AsyncTimedHistogram&& other) = default;
359 
360  // Records the time elapsed to |histogram_| and stops |timer_|.
361  void RecordDone() { histogram_->Stop(&timer_, nullptr); }
362 
363  // Records TimeDelta::Max() to |histogram_| and stops |timer_|.
364  void RecordAbandon() { histogram_->RecordAbandon(&timer_, nullptr); }
365 
366  private:
367  base::ElapsedTimer timer_;
368  TimedHistogram* histogram_;
369  std::shared_ptr<Counters> async_counters_;
370 };
371 
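Usage sketch for asynchronous reporting, assuming the caller holds a std::shared_ptr<Counters> (here async_counters) and some task runner (PostTask is a stand-in, not a V8 API):

AsyncTimedHistogram async(async_counters->gc_parallel_task_latency(),
                          async_counters);
PostTask([async]() mutable {
  // ... task body ...
  async.RecordDone();  // or async.RecordAbandon() if the task never ran
});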
372 // Helper class for scoping a TimedHistogram, where the histogram is selected at
373 // stop time rather than start time.
374 // TODO(leszeks): This is heavily reliant on TimedHistogram::Start() doing
375 // nothing but starting the timer, and TimedHistogram::Stop() logging the sample
376 // correctly even if Start() was not called. This happens to be true iff Stop()
377 // is passed a null isolate, but that's an implementation detail of
378 // TimedHistogram, and we shouldn't rely on it.
379 class LazyTimedHistogramScope {
380  public:
381  LazyTimedHistogramScope() : histogram_(nullptr) { timer_.Start(); }
382  ~LazyTimedHistogramScope() {
383  // We should set the histogram before this scope exits.
384  DCHECK_NOT_NULL(histogram_);
385  histogram_->Stop(&timer_, nullptr);
386  }
387 
388  void set_histogram(TimedHistogram* histogram) { histogram_ = histogram; }
389 
390  private:
391  base::ElapsedTimer timer_;
392  TimedHistogram* histogram_;
393 };
394 
395 // A HistogramTimer allows distributions of non-nested timed results
396 // to be created. WARNING: This class is not thread safe and can only
397 // be run on the foreground thread.
398 class HistogramTimer : public TimedHistogram {
399  public:
400  // Note: public for testing purposes only.
401  HistogramTimer(const char* name, int min, int max,
402  HistogramTimerResolution resolution, int num_buckets,
403  Counters* counters)
404  : TimedHistogram(name, min, max, resolution, num_buckets, counters) {}
405 
406  inline void Start();
407  inline void Stop();
408 
409  // Returns true if the timer is running.
410  bool Running() {
411  return Enabled() && timer_.IsStarted();
412  }
413 
414  // TODO(bmeurer): Remove this when HistogramTimerScope is fixed.
415 #ifdef DEBUG
416  base::ElapsedTimer* timer() { return &timer_; }
417 #endif
418 
419  private:
420  friend class Counters;
421 
422  base::ElapsedTimer timer_;
423 
424  HistogramTimer() = default;
425 };
426 
427 // Helper class for scoping a HistogramTimer.
428 // TODO(bmeurer): The ifdeffery is an ugly hack around the fact that the
429 // Parser is currently reentrant (when it throws an error, we call back
430 // into JavaScript and all bets are off), but ElapsedTimer is not
431 // reentry-safe. Fix this properly and remove |allow_nesting|.
432 class HistogramTimerScope {
433  public:
434  explicit HistogramTimerScope(HistogramTimer* timer,
435  bool allow_nesting = false)
436 #ifdef DEBUG
437  : timer_(timer), skipped_timer_start_(false) {
438  if (timer_->timer()->IsStarted() && allow_nesting) {
439  skipped_timer_start_ = true;
440  } else {
441  timer_->Start();
442  }
443  }
444 #else
445  : timer_(timer) {
446  timer_->Start();
447  }
448 #endif
449  ~HistogramTimerScope() {
450 #ifdef DEBUG
451  if (!skipped_timer_start_) {
452  timer_->Stop();
453  }
454 #else
455  timer_->Stop();
456 #endif
457  }
458 
459  private:
460  HistogramTimer* timer_;
461 #ifdef DEBUG
462  bool skipped_timer_start_;
463 #endif
464 };
465 
466 // A histogram timer that can aggregate events within a larger scope.
467 //
468 // Intended use of this timer is to have an outer (aggregating) and an inner
469 // (to be aggregated) scope, where the inner scope measures the time of events,
470 // and all those inner scope measurements will be summed up by the outer scope.
471 // An example use might be to aggregate the time spent in lazy compilation
472 // while running a script.
473 //
474 // Helpers:
475 // - AggregatingHistogramTimerScope, the "outer" scope within which
476 // times will be summed up.
477 // - AggregatedHistogramTimerScope, the "inner" scope which defines the
478 // events to be timed.
479 class AggregatableHistogramTimer : public Histogram {
480  public:
481  // Start/stop the "outer" scope.
482  void Start() { time_ = base::TimeDelta(); }
483  void Stop() {
484  if (time_ != base::TimeDelta()) {
485  // Only add non-zero samples, since zero samples represent situations
486  // where there were no aggregated samples added.
487  AddSample(static_cast<int>(time_.InMicroseconds()));
488  }
489  }
490 
491  // Add a time value ("inner" scope).
492  void Add(base::TimeDelta other) { time_ += other; }
493 
494  private:
495  friend class Counters;
496 
497  AggregatableHistogramTimer() = default;
498  AggregatableHistogramTimer(const char* name, int min, int max,
499  int num_buckets, Counters* counters)
500  : Histogram(name, min, max, num_buckets, counters) {}
501 
502  base::TimeDelta time_;
503 };
504 
505 // A helper class for use with AggregatableHistogramTimer. This is the
506 // outer-most timer scope used with an AggregatableHistogramTimer. It will
507 // aggregate the information from the inner AggregatedHistogramTimerScope.
508 class AggregatingHistogramTimerScope {
509  public:
510  explicit AggregatingHistogramTimerScope(AggregatableHistogramTimer* histogram)
511  : histogram_(histogram) {
512  histogram_->Start();
513  }
514  ~AggregatingHistogramTimerScope() { histogram_->Stop(); }
515 
516  private:
517  AggregatableHistogramTimer* histogram_;
518 };
519 
520 // A helper class for use with AggregatableHistogramTimer, the "inner" scope
521 // which defines the events to be timed.
522 class AggregatedHistogramTimerScope {
523  public:
524  explicit AggregatedHistogramTimerScope(AggregatableHistogramTimer* histogram)
525  : histogram_(histogram) {
526  timer_.Start();
527  }
528  ~AggregatedHistogramTimerScope() { histogram_->Add(timer_.Elapsed()); }
529 
530  private:
531  base::ElapsedTimer timer_;
532  AggregatableHistogramTimer* histogram_;
533 };
534 
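Usage sketch (illustrative): the outer scope brackets a whole operation while each inner scope adds the time of one event; only the sum reaches the histogram. compile_lazy() is the accessor generated from AGGREGATABLE_HISTOGRAM_TIMER_LIST below; units and CompileUnit() are hypothetical.

AggregatableHistogramTimer* timer = isolate->counters()->compile_lazy();
{
  AggregatingHistogramTimerScope outer(timer);   // Start(): resets the running sum
  for (auto& unit : units) {
    AggregatedHistogramTimerScope inner(timer);  // adds its elapsed time on exit
    CompileUnit(unit);
  }
}  // ~outer adds the summed time as a single histogram sample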
535 
536 // AggregatedMemoryHistogram collects (time, value) sample pairs and turns
537 // them into time-uniform samples for the backing histogram, such that the
538 // backing histogram receives one sample every T ms, where T is controlled
539 // by FLAG_histogram_interval.
540 //
541 // More formally: let F be a real-valued function that maps time to sample
542 // values. We define F as a linear interpolation between adjacent samples. For
543 // each time interval [x; x + T) the backing histogram gets one sample value
544 // that is the average of F(t) in the interval.
545 template <typename Histogram>
546 class AggregatedMemoryHistogram {
547  public:
548  // Note: public for testing purposes only.
549  explicit AggregatedMemoryHistogram(Histogram* backing_histogram)
550  : AggregatedMemoryHistogram() {
551  backing_histogram_ = backing_histogram;
552  }
553 
554  // Invariants that hold before and after AddSample if
555  // is_initialized_ is true:
556  //
557 // 1) We have processed all samples that came in before start_ms_ and sent
558 // the corresponding aggregated samples to the backing histogram.
559  // 2) (last_ms_, last_value_) is the last received sample.
560  // 3) last_ms_ < start_ms_ + FLAG_histogram_interval.
561  // 4) aggregate_value_ is the average of the function that is constructed by
562  // linearly interpolating samples received between start_ms_ and last_ms_.
563  void AddSample(double current_ms, double current_value);
564 
565  private:
566  friend class Counters;
567 
568  AggregatedMemoryHistogram()
569  : is_initialized_(false),
570  start_ms_(0.0),
571  last_ms_(0.0),
572  aggregate_value_(0.0),
573  last_value_(0.0),
574  backing_histogram_(nullptr) {}
575  double Aggregate(double current_ms, double current_value);
576 
577  bool is_initialized_;
578  double start_ms_;
579  double last_ms_;
580  double aggregate_value_;
581  double last_value_;
582  Histogram* backing_histogram_;
583 };
584 
585 
586 template <typename Histogram>
587 void AggregatedMemoryHistogram<Histogram>::AddSample(double current_ms,
588  double current_value) {
589  if (!is_initialized_) {
590  aggregate_value_ = current_value;
591  start_ms_ = current_ms;
592  last_value_ = current_value;
593  last_ms_ = current_ms;
594  is_initialized_ = true;
595  } else {
596  const double kEpsilon = 1e-6;
597  const int kMaxSamples = 1000;
598  if (current_ms < last_ms_ + kEpsilon) {
599  // Two samples have the same time, remember the last one.
600  last_value_ = current_value;
601  } else {
602  double sample_interval_ms = FLAG_histogram_interval;
603  double end_ms = start_ms_ + sample_interval_ms;
604  if (end_ms <= current_ms + kEpsilon) {
605  // Linearly interpolate between the last_ms_ and the current_ms.
606  double slope = (current_value - last_value_) / (current_ms - last_ms_);
607  int i;
608  // Send aggregated samples to the backing histogram from the start_ms
609  // to the current_ms.
610  for (i = 0; i < kMaxSamples && end_ms <= current_ms + kEpsilon; i++) {
611  double end_value = last_value_ + (end_ms - last_ms_) * slope;
612  double sample_value;
613  if (i == 0) {
614  // Take aggregate_value_ into account.
615  sample_value = Aggregate(end_ms, end_value);
616  } else {
617  // There is no aggregate_value_ for i > 0.
618  sample_value = (last_value_ + end_value) / 2;
619  }
620  backing_histogram_->AddSample(static_cast<int>(sample_value + 0.5));
621  last_value_ = end_value;
622  last_ms_ = end_ms;
623  end_ms += sample_interval_ms;
624  }
625  if (i == kMaxSamples) {
626  // We hit the sample limit, ignore the remaining samples.
627  aggregate_value_ = current_value;
628  start_ms_ = current_ms;
629  } else {
630  aggregate_value_ = last_value_;
631  start_ms_ = last_ms_;
632  }
633  }
634  aggregate_value_ = current_ms > start_ms_ + kEpsilon
635  ? Aggregate(current_ms, current_value)
636  : aggregate_value_;
637  last_value_ = current_value;
638  last_ms_ = current_ms;
639  }
640  }
641 }
642 
643 
644 template <typename Histogram>
645 double AggregatedMemoryHistogram<Histogram>::Aggregate(double current_ms,
646  double current_value) {
647  double interval_ms = current_ms - start_ms_;
648  double value = (current_value + last_value_) / 2;
649  // The aggregate_value_ is the average for [start_ms_; last_ms_].
650  // The value is the average for [last_ms_; current_ms].
651  // Return the weighted average of the aggregate_value_ and the value.
652  return aggregate_value_ * ((last_ms_ - start_ms_) / interval_ms) +
653  value * ((current_ms - last_ms_) / interval_ms);
654 }
655 
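As a worked example of the weighting above (numbers chosen for illustration): with start_ms_ = 0, last_ms_ = 6 and current_ms = 10, suppose aggregate_value_ = 4 (the average over [0; 6]) and (current_value + last_value_) / 2 = 7 (the average over [6; 10]). Aggregate() then returns 4 * (6 / 10) + 7 * (4 / 10) = 5.2, which is the average of the interpolated function over the full interval [0; 10].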
656 class RuntimeCallCounter final {
657  public:
658  RuntimeCallCounter() : RuntimeCallCounter(nullptr) {}
659  explicit RuntimeCallCounter(const char* name)
660  : name_(name), count_(0), time_(0) {}
661  V8_NOINLINE void Reset();
662  V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
663  void Add(RuntimeCallCounter* other);
664 
665  const char* name() const { return name_; }
666  int64_t count() const { return count_; }
667  base::TimeDelta time() const {
668  return base::TimeDelta::FromMicroseconds(time_);
669  }
670  void Increment() { count_++; }
671  void Add(base::TimeDelta delta) { time_ += delta.InMicroseconds(); }
672 
673  private:
674  friend class RuntimeCallStats;
675 
676  const char* name_;
677  int64_t count_;
678  // Stored as int64_t so that its initialization can be deferred.
679  int64_t time_;
680 };
681 
682 // RuntimeCallTimer is used to keep track of the stack of currently active
683 // timers used for properly measuring the own time of a RuntimeCallCounter.
684 class RuntimeCallTimer final {
685  public:
686  RuntimeCallCounter* counter() { return counter_; }
687  void set_counter(RuntimeCallCounter* counter) { counter_ = counter; }
688  RuntimeCallTimer* parent() const { return parent_.Value(); }
689  void set_parent(RuntimeCallTimer* timer) { parent_.SetValue(timer); }
690  const char* name() const { return counter_->name(); }
691 
692  inline bool IsStarted();
693 
694  inline void Start(RuntimeCallCounter* counter, RuntimeCallTimer* parent);
695  void Snapshot();
696  inline RuntimeCallTimer* Stop();
697 
698  // Make the time source configurable for testing purposes.
699  V8_EXPORT_PRIVATE static base::TimeTicks (*Now)();
700 
701  private:
702  inline void Pause(base::TimeTicks now);
703  inline void Resume(base::TimeTicks now);
704  inline void CommitTimeToCounter();
705 
706  RuntimeCallCounter* counter_ = nullptr;
707  base::AtomicValue<RuntimeCallTimer*> parent_;
708  base::TimeTicks start_ticks_;
709  base::TimeDelta elapsed_;
710 };
711 
712 #define FOR_EACH_GC_COUNTER(V) \
713  TRACER_SCOPES(V) \
714  TRACER_BACKGROUND_SCOPES(V)
715 
716 #define FOR_EACH_API_COUNTER(V) \
717  V(ArrayBuffer_Cast) \
718  V(ArrayBuffer_Neuter) \
719  V(ArrayBuffer_New) \
720  V(Array_CloneElementAt) \
721  V(Array_New) \
722  V(BigInt_NewFromWords) \
723  V(BigInt64Array_New) \
724  V(BigUint64Array_New) \
725  V(BigIntObject_New) \
726  V(BigIntObject_BigIntValue) \
727  V(BooleanObject_BooleanValue) \
728  V(BooleanObject_New) \
729  V(Context_New) \
730  V(Context_NewRemoteContext) \
731  V(DataView_New) \
732  V(Date_DateTimeConfigurationChangeNotification) \
733  V(Date_New) \
734  V(Date_NumberValue) \
735  V(Debug_Call) \
736  V(Error_New) \
737  V(External_New) \
738  V(Float32Array_New) \
739  V(Float64Array_New) \
740  V(Function_Call) \
741  V(Function_New) \
742  V(Function_NewInstance) \
743  V(FunctionTemplate_GetFunction) \
744  V(FunctionTemplate_New) \
745  V(FunctionTemplate_NewRemoteInstance) \
746  V(FunctionTemplate_NewWithCache) \
747  V(FunctionTemplate_NewWithFastHandler) \
748  V(Int16Array_New) \
749  V(Int32Array_New) \
750  V(Int8Array_New) \
751  V(JSON_Parse) \
752  V(JSON_Stringify) \
753  V(Map_AsArray) \
754  V(Map_Clear) \
755  V(Map_Delete) \
756  V(Map_Get) \
757  V(Map_Has) \
758  V(Map_New) \
759  V(Map_Set) \
760  V(WeakMap_Get) \
761  V(WeakMap_Set) \
762  V(WeakMap_New) \
763  V(Message_GetEndColumn) \
764  V(Message_GetLineNumber) \
765  V(Message_GetSourceLine) \
766  V(Message_GetStartColumn) \
767  V(Module_Evaluate) \
768  V(Module_InstantiateModule) \
769  V(NumberObject_New) \
770  V(NumberObject_NumberValue) \
771  V(Object_CallAsConstructor) \
772  V(Object_CallAsFunction) \
773  V(Object_CreateDataProperty) \
774  V(Object_DefineOwnProperty) \
775  V(Object_DefineProperty) \
776  V(Object_Delete) \
777  V(Object_DeleteProperty) \
778  V(Object_ForceSet) \
779  V(Object_Get) \
780  V(Object_GetOwnPropertyDescriptor) \
781  V(Object_GetOwnPropertyNames) \
782  V(Object_GetPropertyAttributes) \
783  V(Object_GetPropertyNames) \
784  V(Object_GetRealNamedProperty) \
785  V(Object_GetRealNamedPropertyAttributes) \
786  V(Object_GetRealNamedPropertyAttributesInPrototypeChain) \
787  V(Object_GetRealNamedPropertyInPrototypeChain) \
788  V(Object_Has) \
789  V(Object_HasOwnProperty) \
790  V(Object_HasRealIndexedProperty) \
791  V(Object_HasRealNamedCallbackProperty) \
792  V(Object_HasRealNamedProperty) \
793  V(Object_New) \
794  V(Object_ObjectProtoToString) \
795  V(Object_Set) \
796  V(Object_SetAccessor) \
797  V(Object_SetIntegrityLevel) \
798  V(Object_SetPrivate) \
799  V(Object_SetPrototype) \
800  V(ObjectTemplate_New) \
801  V(ObjectTemplate_NewInstance) \
802  V(Object_ToArrayIndex) \
803  V(Object_ToBigInt) \
804  V(Object_ToDetailString) \
805  V(Object_ToInt32) \
806  V(Object_ToInteger) \
807  V(Object_ToNumber) \
808  V(Object_ToObject) \
809  V(Object_ToString) \
810  V(Object_ToUint32) \
811  V(Persistent_New) \
812  V(Private_New) \
813  V(Promise_Catch) \
814  V(Promise_Chain) \
815  V(Promise_HasRejectHandler) \
816  V(Promise_Resolver_New) \
817  V(Promise_Resolver_Resolve) \
818  V(Promise_Resolver_Reject) \
819  V(Promise_Result) \
820  V(Promise_Status) \
821  V(Promise_Then) \
822  V(Proxy_New) \
823  V(RangeError_New) \
824  V(ReferenceError_New) \
825  V(RegExp_New) \
826  V(ScriptCompiler_Compile) \
827  V(ScriptCompiler_CompileFunctionInContext) \
828  V(ScriptCompiler_CompileUnbound) \
829  V(Script_Run) \
830  V(Set_Add) \
831  V(Set_AsArray) \
832  V(Set_Clear) \
833  V(Set_Delete) \
834  V(Set_Has) \
835  V(Set_New) \
836  V(SharedArrayBuffer_New) \
837  V(String_Concat) \
838  V(String_NewExternalOneByte) \
839  V(String_NewExternalTwoByte) \
840  V(String_NewFromOneByte) \
841  V(String_NewFromTwoByte) \
842  V(String_NewFromUtf8) \
843  V(StringObject_New) \
844  V(StringObject_StringValue) \
845  V(String_Write) \
846  V(String_WriteUtf8) \
847  V(Symbol_New) \
848  V(SymbolObject_New) \
849  V(SymbolObject_SymbolValue) \
850  V(SyntaxError_New) \
851  V(TryCatch_StackTrace) \
852  V(TypeError_New) \
853  V(Uint16Array_New) \
854  V(Uint32Array_New) \
855  V(Uint8Array_New) \
856  V(Uint8ClampedArray_New) \
857  V(UnboundScript_GetId) \
858  V(UnboundScript_GetLineNumber) \
859  V(UnboundScript_GetName) \
860  V(UnboundScript_GetSourceMappingURL) \
861  V(UnboundScript_GetSourceURL) \
862  V(Value_InstanceOf) \
863  V(Value_IntegerValue) \
864  V(Value_Int32Value) \
865  V(Value_NumberValue) \
866  V(Value_TypeOf) \
867  V(Value_Uint32Value) \
868  V(ValueDeserializer_ReadHeader) \
869  V(ValueDeserializer_ReadValue) \
870  V(ValueSerializer_WriteValue)
871 
872 #define FOR_EACH_MANUAL_COUNTER(V) \
873  V(AccessorGetterCallback) \
874  V(AccessorSetterCallback) \
875  V(ArrayLengthGetter) \
876  V(ArrayLengthSetter) \
877  V(BoundFunctionNameGetter) \
878  V(BoundFunctionLengthGetter) \
879  V(CompileBackgroundAnalyse) \
880  V(CompileBackgroundCompileTask) \
881  V(CompileBackgroundEval) \
882  V(CompileBackgroundFunction) \
883  V(CompileBackgroundIgnition) \
884  V(CompileBackgroundScript) \
885  V(CompileBackgroundRewriteReturnResult) \
886  V(CompileBackgroundScopeAnalysis) \
887  V(CompileDeserialize) \
888  V(CompileEval) \
889  V(CompileAnalyse) \
890  V(CompileEnqueueOnDispatcher) \
891  V(CompileFinalizeBackgroundCompileTask) \
892  V(CompileFinishNowOnDispatcher) \
893  V(CompileFunction) \
894  V(CompileGetFromOptimizedCodeMap) \
895  V(CompileIgnition) \
896  V(CompileIgnitionFinalization) \
897  V(CompileRewriteReturnResult) \
898  V(CompileScopeAnalysis) \
899  V(CompileScript) \
900  V(CompileSerialize) \
901  V(CompileWaitForDispatcher) \
902  V(DeoptimizeCode) \
903  V(FunctionCallback) \
904  V(FunctionPrototypeGetter) \
905  V(FunctionPrototypeSetter) \
906  V(FunctionLengthGetter) \
907  V(GC_Custom_AllAvailableGarbage) \
908  V(GC_Custom_IncrementalMarkingObserver) \
909  V(GC_Custom_SlowAllocateRaw) \
910  V(GCEpilogueCallback) \
911  V(GCPrologueCallback) \
912  V(GetMoreDataCallback) \
913  V(NamedDefinerCallback) \
914  V(NamedDeleterCallback) \
915  V(NamedDescriptorCallback) \
916  V(NamedQueryCallback) \
917  V(NamedSetterCallback) \
918  V(NamedGetterCallback) \
919  V(NamedEnumeratorCallback) \
920  V(IndexedDefinerCallback) \
921  V(IndexedDeleterCallback) \
922  V(IndexedDescriptorCallback) \
923  V(IndexedGetterCallback) \
924  V(IndexedQueryCallback) \
925  V(IndexedSetterCallback) \
926  V(IndexedEnumeratorCallback) \
927  V(InvokeApiInterruptCallbacks) \
928  V(InvokeFunctionCallback) \
929  V(JS_Execution) \
930  V(Map_SetPrototype) \
931  V(Map_TransitionToAccessorProperty) \
932  V(Map_TransitionToDataProperty) \
933  V(Object_DeleteProperty) \
934  V(OptimizeCode) \
935  V(ParseArrowFunctionLiteral) \
936  V(ParseBackgroundArrowFunctionLiteral) \
937  V(ParseBackgroundFunctionLiteral) \
938  V(ParseBackgroundProgram) \
939  V(ParseEval) \
940  V(ParseFunction) \
941  V(ParseFunctionLiteral) \
942  V(ParseProgram) \
943  V(PreParseArrowFunctionLiteral) \
944  V(PreParseBackgroundArrowFunctionLiteral) \
945  V(PreParseBackgroundWithVariableResolution) \
946  V(PreParseWithVariableResolution) \
947  V(PropertyCallback) \
948  V(PrototypeMap_TransitionToAccessorProperty) \
949  V(PrototypeMap_TransitionToDataProperty) \
950  V(PrototypeObject_DeleteProperty) \
951  V(RecompileConcurrent) \
952  V(RecompileSynchronous) \
953  V(ReconfigureToDataProperty) \
954  V(StringLengthGetter) \
955  V(TestCounter1) \
956  V(TestCounter2) \
957  V(TestCounter3)
958 
959 #define FOR_EACH_HANDLER_COUNTER(V) \
960  V(KeyedLoadIC_LoadIndexedInterceptorStub) \
961  V(KeyedLoadIC_KeyedLoadSloppyArgumentsStub) \
962  V(KeyedLoadIC_LoadElementDH) \
963  V(KeyedLoadIC_LoadIndexedStringDH) \
964  V(KeyedLoadIC_SlowStub) \
965  V(KeyedStoreIC_ElementsTransitionAndStoreStub) \
966  V(KeyedStoreIC_KeyedStoreSloppyArgumentsStub) \
967  V(KeyedStoreIC_SlowStub) \
968  V(KeyedStoreIC_StoreFastElementStub) \
969  V(KeyedStoreIC_StoreElementStub) \
970  V(StoreInArrayLiteralIC_SlowStub) \
971  V(LoadGlobalIC_LoadScriptContextField) \
972  V(LoadGlobalIC_SlowStub) \
973  V(LoadIC_FunctionPrototypeStub) \
974  V(LoadIC_HandlerCacheHit_Accessor) \
975  V(LoadIC_LoadAccessorDH) \
976  V(LoadIC_LoadAccessorFromPrototypeDH) \
977  V(LoadIC_LoadApiGetterFromPrototypeDH) \
978  V(LoadIC_LoadCallback) \
979  V(LoadIC_LoadConstantDH) \
980  V(LoadIC_LoadConstantFromPrototypeDH) \
981  V(LoadIC_LoadFieldDH) \
982  V(LoadIC_LoadFieldFromPrototypeDH) \
983  V(LoadIC_LoadGlobalDH) \
984  V(LoadIC_LoadGlobalFromPrototypeDH) \
985  V(LoadIC_LoadIntegerIndexedExoticDH) \
986  V(LoadIC_LoadInterceptorDH) \
987  V(LoadIC_LoadNonMaskingInterceptorDH) \
988  V(LoadIC_LoadInterceptorFromPrototypeDH) \
989  V(LoadIC_LoadNativeDataPropertyDH) \
990  V(LoadIC_LoadNativeDataPropertyFromPrototypeDH) \
991  V(LoadIC_LoadNonexistentDH) \
992  V(LoadIC_LoadNormalDH) \
993  V(LoadIC_LoadNormalFromPrototypeDH) \
994  V(LoadIC_NonReceiver) \
995  V(LoadIC_Premonomorphic) \
996  V(LoadIC_SlowStub) \
997  V(LoadIC_StringLength) \
998  V(LoadIC_StringWrapperLength) \
999  V(StoreGlobalIC_StoreScriptContextField) \
1000  V(StoreGlobalIC_SlowStub) \
1001  V(StoreIC_HandlerCacheHit_Accessor) \
1002  V(StoreIC_NonReceiver) \
1003  V(StoreIC_Premonomorphic) \
1004  V(StoreIC_SlowStub) \
1005  V(StoreIC_StoreAccessorDH) \
1006  V(StoreIC_StoreAccessorOnPrototypeDH) \
1007  V(StoreIC_StoreApiSetterOnPrototypeDH) \
1008  V(StoreIC_StoreFieldDH) \
1009  V(StoreIC_StoreGlobalDH) \
1010  V(StoreIC_StoreGlobalTransitionDH) \
1011  V(StoreIC_StoreInterceptorStub) \
1012  V(StoreIC_StoreNativeDataPropertyDH) \
1013  V(StoreIC_StoreNativeDataPropertyOnPrototypeDH) \
1014  V(StoreIC_StoreNormalDH) \
1015  V(StoreIC_StoreTransitionDH)
1016 
1017 enum RuntimeCallCounterId {
1018 #define CALL_RUNTIME_COUNTER(name) kGC_##name,
1019  FOR_EACH_GC_COUNTER(CALL_RUNTIME_COUNTER)
1020 #undef CALL_RUNTIME_COUNTER
1021 #define CALL_RUNTIME_COUNTER(name) k##name,
1022  FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER)
1023 #undef CALL_RUNTIME_COUNTER
1024 #define CALL_RUNTIME_COUNTER(name, nargs, ressize) kRuntime_##name,
1025  FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER)
1026 #undef CALL_RUNTIME_COUNTER
1027 #define CALL_BUILTIN_COUNTER(name) kBuiltin_##name,
1028  BUILTIN_LIST_C(CALL_BUILTIN_COUNTER)
1029 #undef CALL_BUILTIN_COUNTER
1030 #define CALL_BUILTIN_COUNTER(name) kAPI_##name,
1031  FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER)
1032 #undef CALL_BUILTIN_COUNTER
1033 #define CALL_BUILTIN_COUNTER(name) kHandler_##name,
1034  FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER)
1035 #undef CALL_BUILTIN_COUNTER
1036  kNumberOfCounters
1037 };
1038 
1039 class RuntimeCallStats final {
1040  public:
1041  V8_EXPORT_PRIVATE RuntimeCallStats();
1042 
1043 // Start measuring the time for a function. This will establish the
1044  // connection to the parent counter for properly calculating the own times.
1045  V8_EXPORT_PRIVATE void Enter(RuntimeCallTimer* timer,
1046  RuntimeCallCounterId counter_id);
1047 
1048  // Leave a scope for a measured runtime function. This will properly add
1049  // the time delta to the current_counter and subtract the delta from its
1050  // parent.
1051  V8_EXPORT_PRIVATE void Leave(RuntimeCallTimer* timer);
1052 
1053  // Set counter id for the innermost measurement. It can be used to refine
1054  // event kind when a runtime entry counter is too generic.
1055  V8_EXPORT_PRIVATE void CorrectCurrentCounterId(
1056  RuntimeCallCounterId counter_id);
1057 
1058  V8_EXPORT_PRIVATE void Reset();
1059  // Add all entries from another stats object.
1060  void Add(RuntimeCallStats* other);
1061  V8_EXPORT_PRIVATE void Print(std::ostream& os);
1062  V8_EXPORT_PRIVATE void Print();
1063  V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
1064 
1065  ThreadId thread_id() const { return thread_id_; }
1066  RuntimeCallTimer* current_timer() { return current_timer_.Value(); }
1067  RuntimeCallCounter* current_counter() { return current_counter_.Value(); }
1068  bool InUse() { return in_use_; }
1069  bool IsCalledOnTheSameThread();
1070 
1071  static const int kNumberOfCounters =
1072  static_cast<int>(RuntimeCallCounterId::kNumberOfCounters);
1073  RuntimeCallCounter* GetCounter(RuntimeCallCounterId counter_id) {
1074  return &counters_[static_cast<int>(counter_id)];
1075  }
1076  RuntimeCallCounter* GetCounter(int counter_id) {
1077  return &counters_[counter_id];
1078  }
1079 
1080  private:
1081  // Top of a stack of active timers.
1082  base::AtomicValue<RuntimeCallTimer*> current_timer_;
1083  // Active counter object associated with current timer.
1084  base::AtomicValue<RuntimeCallCounter*> current_counter_;
1085  // Used to track nested tracing scopes.
1086  bool in_use_;
1087  ThreadId thread_id_;
1088  RuntimeCallCounter counters_[kNumberOfCounters];
1089 };
1090 
1091 class WorkerThreadRuntimeCallStats final {
1092  public:
1093  WorkerThreadRuntimeCallStats();
1094  ~WorkerThreadRuntimeCallStats();
1095 
1096  // Returns the TLS key associated with this WorkerThreadRuntimeCallStats.
1097  base::Thread::LocalStorageKey GetKey() const { return tls_key_; }
1098 
1099  // Returns a new worker thread runtime call stats table managed by this
1100  // WorkerThreadRuntimeCallStats.
1101  RuntimeCallStats* NewTable();
1102 
1103  // Adds the counters from the worker thread tables to |main_call_stats|.
1104  void AddToMainTable(RuntimeCallStats* main_call_stats);
1105 
1106  private:
1107  base::Mutex mutex_;
1108  std::vector<std::unique_ptr<RuntimeCallStats>> tables_;
1109  base::Thread::LocalStorageKey tls_key_;
1110 };
1111 
1112 // Creating a WorkerThreadRuntimeCallStatsScope will provide a thread-local
1113 // runtime call stats table, and will dump the table to an immediate trace event
1114 // when it is destroyed.
1115 class WorkerThreadRuntimeCallStatsScope final {
1116  public:
1117  WorkerThreadRuntimeCallStatsScope(
1118  WorkerThreadRuntimeCallStats* off_thread_stats);
1119  ~WorkerThreadRuntimeCallStatsScope();
1120 
1121  RuntimeCallStats* Get() const { return table_; }
1122 
1123  private:
1124  RuntimeCallStats* table_;
1125 };
1126 
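Usage sketch on a background thread (illustrative): the scope provides a table that a RuntimeCallTimerScope (declared further below) can record into, and dumps it to a trace event when the scope ends.

WorkerThreadRuntimeCallStatsScope worker_stats_scope(
    isolate->counters()->worker_thread_runtime_call_stats());
RuntimeCallTimerScope timer_scope(
    worker_stats_scope.Get(), RuntimeCallCounterId::kCompileBackgroundScript);
// ... background compilation work ...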
1127 #define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_id) \
1128  do { \
1129  if (V8_UNLIKELY(FLAG_runtime_stats) && runtime_call_stats) { \
1130  runtime_call_stats->CorrectCurrentCounterId(counter_id); \
1131  } \
1132  } while (false)
1133 
1134 #define TRACE_HANDLER_STATS(isolate, counter_name) \
1135  CHANGE_CURRENT_RUNTIME_COUNTER( \
1136  isolate->counters()->runtime_call_stats(), \
1137  RuntimeCallCounterId::kHandler_##counter_name)
1138 
1139 // A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the
1140 // time of a C++ scope.
1141 class RuntimeCallTimerScope {
1142  public:
1143  inline RuntimeCallTimerScope(Isolate* isolate,
1144  RuntimeCallCounterId counter_id);
1145  // This constructor is here just to avoid calling GetIsolate() when the
1146  // stats are disabled and the isolate is not directly available.
1147  inline RuntimeCallTimerScope(Isolate* isolate, HeapObject* heap_object,
1148  RuntimeCallCounterId counter_id);
1149  inline RuntimeCallTimerScope(RuntimeCallStats* stats,
1150  RuntimeCallCounterId counter_id) {
1151  if (V8_LIKELY(!FLAG_runtime_stats || stats == nullptr)) return;
1152  stats_ = stats;
1153  stats_->Enter(&timer_, counter_id);
1154  }
1155 
1156  inline ~RuntimeCallTimerScope() {
1157  if (V8_UNLIKELY(stats_ != nullptr)) {
1158  stats_->Leave(&timer_);
1159  }
1160  }
1161 
1162  private:
1163  RuntimeCallStats* stats_ = nullptr;
1164  RuntimeCallTimer timer_;
1165 
1166  DISALLOW_COPY_AND_ASSIGN(RuntimeCallTimerScope);
1167 };
1168 
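Usage sketch (illustrative): the scope is effectively free unless --runtime-stats (FLAG_runtime_stats) is enabled, in which case it attributes the enclosed own time to the given counter; TRACE_HANDLER_STATS above refines the innermost counter in the same way.

RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kOptimizeCode);
// ... work whose own time (minus nested scopes) is charged to kOptimizeCode ...

// Refining the innermost counter, e.g. from inside an IC handler:
TRACE_HANDLER_STATS(isolate, LoadIC_SlowStub);  // -> kHandler_LoadIC_SlowStub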
1169 #define HISTOGRAM_RANGE_LIST(HR) \
1170  /* Generic range histograms: HR(name, caption, min, max, num_buckets) */ \
1171  HR(background_marking, V8.GCBackgroundMarking, 0, 10000, 101) \
1172  HR(background_scavenger, V8.GCBackgroundScavenger, 0, 10000, 101) \
1173  HR(background_sweeping, V8.GCBackgroundSweeping, 0, 10000, 101) \
1174  HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21) \
1175  HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6) \
1176  HR(errors_thrown_per_context, V8.ErrorsThrownPerContext, 0, 200, 20) \
1177  HR(debug_feature_usage, V8.DebugFeatureUsage, 1, 7, 7) \
1178  HR(incremental_marking_reason, V8.GCIncrementalMarkingReason, 0, 21, 22) \
1179  HR(incremental_marking_sum, V8.GCIncrementalMarkingSum, 0, 10000, 101) \
1180  HR(mark_compact_reason, V8.GCMarkCompactReason, 0, 21, 22) \
1181  HR(gc_finalize_clear, V8.GCFinalizeMC.Clear, 0, 10000, 101) \
1182  HR(gc_finalize_epilogue, V8.GCFinalizeMC.Epilogue, 0, 10000, 101) \
1183  HR(gc_finalize_evacuate, V8.GCFinalizeMC.Evacuate, 0, 10000, 101) \
1184  HR(gc_finalize_finish, V8.GCFinalizeMC.Finish, 0, 10000, 101) \
1185  HR(gc_finalize_mark, V8.GCFinalizeMC.Mark, 0, 10000, 101) \
1186  HR(gc_finalize_prologue, V8.GCFinalizeMC.Prologue, 0, 10000, 101) \
1187  HR(gc_finalize_sweep, V8.GCFinalizeMC.Sweep, 0, 10000, 101) \
1188  HR(gc_scavenger_scavenge_main, V8.GCScavenger.ScavengeMain, 0, 10000, 101) \
1189  HR(gc_scavenger_scavenge_roots, V8.GCScavenger.ScavengeRoots, 0, 10000, 101) \
1190  HR(gc_mark_compactor, V8.GCMarkCompactor, 0, 10000, 101) \
1191  HR(scavenge_reason, V8.GCScavengeReason, 0, 21, 22) \
1192  HR(young_generation_handling, V8.GCYoungGenerationHandling, 0, 2, 3) \
1193  /* Asm/Wasm. */ \
1194  HR(wasm_functions_per_asm_module, V8.WasmFunctionsPerModule.asm, 1, 100000, \
1195  51) \
1196  HR(wasm_functions_per_wasm_module, V8.WasmFunctionsPerModule.wasm, 1, \
1197  100000, 51) \
1198  HR(array_buffer_big_allocations, V8.ArrayBufferLargeAllocations, 0, 4096, \
1199  13) \
1200  HR(array_buffer_new_size_failures, V8.ArrayBufferNewSizeFailures, 0, 4096, \
1201  13) \
1202  HR(shared_array_allocations, V8.SharedArrayAllocationSizes, 0, 4096, 13) \
1203  HR(wasm_asm_function_size_bytes, V8.WasmFunctionSizeBytes.asm, 1, GB, 51) \
1204  HR(wasm_wasm_function_size_bytes, V8.WasmFunctionSizeBytes.wasm, 1, GB, 51) \
1205  HR(wasm_asm_module_size_bytes, V8.WasmModuleSizeBytes.asm, 1, GB, 51) \
1206  HR(wasm_wasm_module_size_bytes, V8.WasmModuleSizeBytes.wasm, 1, GB, 51) \
1207  HR(wasm_asm_min_mem_pages_count, V8.WasmMinMemPagesCount.asm, 1, 2 << 16, \
1208  51) \
1209  HR(wasm_wasm_min_mem_pages_count, V8.WasmMinMemPagesCount.wasm, 1, 2 << 16, \
1210  51) \
1211  HR(wasm_wasm_max_mem_pages_count, V8.WasmMaxMemPagesCount.wasm, 1, 2 << 16, \
1212  51) \
1213  HR(wasm_decode_asm_module_peak_memory_bytes, \
1214  V8.WasmDecodeModulePeakMemoryBytes.asm, 1, GB, 51) \
1215  HR(wasm_decode_wasm_module_peak_memory_bytes, \
1216  V8.WasmDecodeModulePeakMemoryBytes.wasm, 1, GB, 51) \
1217  HR(asm_wasm_translation_peak_memory_bytes, \
1218  V8.AsmWasmTranslationPeakMemoryBytes, 1, GB, 51) \
1219  HR(wasm_compile_function_peak_memory_bytes, \
1220  V8.WasmCompileFunctionPeakMemoryBytes, 1, GB, 51) \
1221  HR(asm_module_size_bytes, V8.AsmModuleSizeBytes, 1, GB, 51) \
1222  HR(asm_wasm_translation_throughput, V8.AsmWasmTranslationThroughput, 1, 100, \
1223  20) \
1224  HR(wasm_lazy_compilation_throughput, V8.WasmLazyCompilationThroughput, 1, \
1225  10000, 50) \
1226  HR(compile_script_cache_behaviour, V8.CompileScript.CacheBehaviour, 0, 20, \
1227  21) \
1228  HR(wasm_memory_allocation_result, V8.WasmMemoryAllocationResult, 0, 3, 4) \
1229  HR(wasm_address_space_usage_mb, V8.WasmAddressSpaceUsageMiB, 0, 1 << 20, \
1230  128) \
1231  HR(wasm_module_code_size_mb, V8.WasmModuleCodeSizeMiB, 0, 1024, 64)
1232 
1233 #define HISTOGRAM_TIMER_LIST(HT) \
1234  /* Garbage collection timers. */ \
1235  HT(gc_context, V8.GCContext, 10000, \
1236  MILLISECOND) /* GC context cleanup time */ \
1237  HT(gc_idle_notification, V8.GCIdleNotification, 10000, MILLISECOND) \
1238  HT(gc_incremental_marking, V8.GCIncrementalMarking, 10000, MILLISECOND) \
1239  HT(gc_incremental_marking_start, V8.GCIncrementalMarkingStart, 10000, \
1240  MILLISECOND) \
1241  HT(gc_incremental_marking_finalize, V8.GCIncrementalMarkingFinalize, 10000, \
1242  MILLISECOND) \
1243  HT(gc_low_memory_notification, V8.GCLowMemoryNotification, 10000, \
1244  MILLISECOND) \
1245  /* Compilation times. */ \
1246  HT(compile, V8.CompileMicroSeconds, 1000000, MICROSECOND) \
1247  HT(compile_eval, V8.CompileEvalMicroSeconds, 1000000, MICROSECOND) \
1248  /* Serialization as part of compilation (code caching) */ \
1249  HT(compile_serialize, V8.CompileSerializeMicroSeconds, 100000, MICROSECOND) \
1250  HT(compile_deserialize, V8.CompileDeserializeMicroSeconds, 1000000, \
1251  MICROSECOND) \
1252  /* Total compilation time incl. caching/parsing */ \
1253  HT(compile_script, V8.CompileScriptMicroSeconds, 1000000, MICROSECOND) \
1254  /* Total JavaScript execution time (including callbacks and runtime calls */ \
1255  HT(execute, V8.Execute, 1000000, MICROSECOND) \
1256  /* Asm/Wasm */ \
1257  HT(asm_wasm_translation_time, V8.AsmWasmTranslationMicroSeconds, 1000000, \
1258  MICROSECOND) \
1259  HT(wasm_lazy_compilation_time, V8.WasmLazyCompilationMicroSeconds, 1000000, \
1260  MICROSECOND) \
1261  HT(wasm_execution_time, V8.WasmExecutionTimeMicroSeconds, 10000000, \
1262  MICROSECOND)
1263 
1264 #define TIMED_HISTOGRAM_LIST(HT) \
1265  /* Garbage collection timers. */ \
1266  HT(gc_compactor, V8.GCCompactor, 10000, MILLISECOND) \
1267  HT(gc_compactor_background, V8.GCCompactorBackground, 10000, MILLISECOND) \
1268  HT(gc_compactor_foreground, V8.GCCompactorForeground, 10000, MILLISECOND) \
1269  HT(gc_finalize, V8.GCFinalizeMC, 10000, MILLISECOND) \
1270  HT(gc_finalize_background, V8.GCFinalizeMCBackground, 10000, MILLISECOND) \
1271  HT(gc_finalize_foreground, V8.GCFinalizeMCForeground, 10000, MILLISECOND) \
1272  HT(gc_finalize_reduce_memory, V8.GCFinalizeMCReduceMemory, 10000, \
1273  MILLISECOND) \
1274  HT(gc_finalize_reduce_memory_background, \
1275  V8.GCFinalizeMCReduceMemoryBackground, 10000, MILLISECOND) \
1276  HT(gc_finalize_reduce_memory_foreground, \
1277  V8.GCFinalizeMCReduceMemoryForeground, 10000, MILLISECOND) \
1278  HT(gc_scavenger, V8.GCScavenger, 10000, MILLISECOND) \
1279  HT(gc_scavenger_background, V8.GCScavengerBackground, 10000, MILLISECOND) \
1280  HT(gc_scavenger_foreground, V8.GCScavengerForeground, 10000, MILLISECOND) \
1281  /* Wasm timers. */ \
1282  HT(wasm_decode_asm_module_time, V8.WasmDecodeModuleMicroSeconds.asm, \
1283  1000000, MICROSECOND) \
1284  HT(wasm_decode_wasm_module_time, V8.WasmDecodeModuleMicroSeconds.wasm, \
1285  1000000, MICROSECOND) \
1286  HT(wasm_decode_asm_function_time, V8.WasmDecodeFunctionMicroSeconds.asm, \
1287  1000000, MICROSECOND) \
1288  HT(wasm_decode_wasm_function_time, V8.WasmDecodeFunctionMicroSeconds.wasm, \
1289  1000000, MICROSECOND) \
1290  HT(wasm_compile_asm_module_time, V8.WasmCompileModuleMicroSeconds.asm, \
1291  10000000, MICROSECOND) \
1292  HT(wasm_compile_wasm_module_time, V8.WasmCompileModuleMicroSeconds.wasm, \
1293  10000000, MICROSECOND) \
1294  HT(wasm_compile_asm_function_time, V8.WasmCompileFunctionMicroSeconds.asm, \
1295  1000000, MICROSECOND) \
1296  HT(wasm_compile_wasm_function_time, V8.WasmCompileFunctionMicroSeconds.wasm, \
1297  1000000, MICROSECOND) \
1298  HT(liftoff_compile_time, V8.LiftoffCompileMicroSeconds, 10000000, \
1299  MICROSECOND) \
1300  HT(wasm_instantiate_wasm_module_time, \
1301  V8.WasmInstantiateModuleMicroSeconds.wasm, 10000000, MICROSECOND) \
1302  HT(wasm_instantiate_asm_module_time, \
1303  V8.WasmInstantiateModuleMicroSeconds.asm, 10000000, MICROSECOND) \
1304  /* Total compilation time incl. caching/parsing for various cache states. */ \
1305  HT(compile_script_with_produce_cache, \
1306  V8.CompileScriptMicroSeconds.ProduceCache, 1000000, MICROSECOND) \
1307  HT(compile_script_with_isolate_cache_hit, \
1308  V8.CompileScriptMicroSeconds.IsolateCacheHit, 1000000, MICROSECOND) \
1309  HT(compile_script_with_consume_cache, \
1310  V8.CompileScriptMicroSeconds.ConsumeCache, 1000000, MICROSECOND) \
1311  HT(compile_script_consume_failed, \
1312  V8.CompileScriptMicroSeconds.ConsumeCache.Failed, 1000000, MICROSECOND) \
1313  HT(compile_script_no_cache_other, \
1314  V8.CompileScriptMicroSeconds.NoCache.Other, 1000000, MICROSECOND) \
1315  HT(compile_script_no_cache_because_inline_script, \
1316  V8.CompileScriptMicroSeconds.NoCache.InlineScript, 1000000, MICROSECOND) \
1317  HT(compile_script_no_cache_because_script_too_small, \
1318  V8.CompileScriptMicroSeconds.NoCache.ScriptTooSmall, 1000000, \
1319  MICROSECOND) \
1320  HT(compile_script_no_cache_because_cache_too_cold, \
1321  V8.CompileScriptMicroSeconds.NoCache.CacheTooCold, 1000000, MICROSECOND) \
1322  HT(compile_script_on_background, \
1323  V8.CompileScriptMicroSeconds.BackgroundThread, 1000000, MICROSECOND) \
1324  HT(compile_function_on_background, \
1325  V8.CompileFunctionMicroSeconds.BackgroundThread, 1000000, MICROSECOND) \
1326  HT(gc_parallel_task_latency, V8.GC.ParallelTaskLatencyMicroSeconds, 1000000, \
1327  MICROSECOND)
1328 
1329 #define AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT) \
1330  AHT(compile_lazy, V8.CompileLazyMicroSeconds)
1331 
1332 #define HISTOGRAM_PERCENTAGE_LIST(HP) \
1333  /* Heap fragmentation. */ \
1334  HP(external_fragmentation_total, V8.MemoryExternalFragmentationTotal) \
1335  HP(external_fragmentation_old_space, V8.MemoryExternalFragmentationOldSpace) \
1336  HP(external_fragmentation_code_space, \
1337  V8.MemoryExternalFragmentationCodeSpace) \
1338  HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
1339  HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace)
1340 
1341 // Note: These use Histogram with options (min=1000, max=500000, buckets=50).
1342 #define HISTOGRAM_LEGACY_MEMORY_LIST(HM) \
1343  HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted) \
1344  HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \
1345  HM(heap_sample_map_space_committed, V8.MemoryHeapSampleMapSpaceCommitted) \
1346  HM(heap_sample_code_space_committed, V8.MemoryHeapSampleCodeSpaceCommitted) \
1347  HM(heap_sample_maximum_committed, V8.MemoryHeapSampleMaximumCommitted)
1348 
1349 // WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
1350 // Intellisense to crash. It was broken into two macros (each of length 40
1351 // lines) rather than one macro (of length about 80 lines) to work around
1352 // this problem. Please avoid using recursive macros of this length when
1353 // possible.
1354 #define STATS_COUNTER_LIST_1(SC) \
1355  /* Global Handle Count*/ \
1356  SC(global_handles, V8.GlobalHandles) \
1357  /* OS Memory allocated */ \
1358  SC(memory_allocated, V8.OsMemoryAllocated) \
1359  SC(maps_normalized, V8.MapsNormalized) \
1360  SC(maps_created, V8.MapsCreated) \
1361  SC(elements_transitions, V8.ObjectElementsTransitions) \
1362  SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \
1363  SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \
1364  SC(alive_after_last_gc, V8.AliveAfterLastGC) \
1365  SC(objs_since_last_young, V8.ObjsSinceLastYoung) \
1366  SC(objs_since_last_full, V8.ObjsSinceLastFull) \
1367  SC(string_table_capacity, V8.StringTableCapacity) \
1368  SC(number_of_symbols, V8.NumberOfSymbols) \
1369  SC(inlined_copied_elements, V8.InlinedCopiedElements) \
1370  SC(compilation_cache_hits, V8.CompilationCacheHits) \
1371  SC(compilation_cache_misses, V8.CompilationCacheMisses) \
1372  /* Amount of evaled source code. */ \
1373  SC(total_eval_size, V8.TotalEvalSize) \
1374  /* Amount of loaded source code. */ \
1375  SC(total_load_size, V8.TotalLoadSize) \
1376  /* Amount of parsed source code. */ \
1377  SC(total_parse_size, V8.TotalParseSize) \
1378  /* Amount of source code skipped over using preparsing. */ \
1379  SC(total_preparse_skipped, V8.TotalPreparseSkipped) \
1380  /* Amount of compiled source code. */ \
1381  SC(total_compile_size, V8.TotalCompileSize) \
1382  /* Amount of source code compiled with the full codegen. */ \
1383  SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize) \
1384  /* Number of contexts created from scratch. */ \
1385  SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch) \
1386  /* Number of contexts created by partial snapshot. */ \
1387  SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot) \
1388  /* Number of code objects found from pc. */ \
1389  SC(pc_to_code, V8.PcToCode) \
1390  SC(pc_to_code_cached, V8.PcToCodeCached) \
1391  /* The store-buffer implementation of the write barrier. */ \
1392  SC(store_buffer_overflows, V8.StoreBufferOverflows)
1393 
1394 #define STATS_COUNTER_LIST_2(SC) \
1395  /* Number of code stubs. */ \
1396  SC(code_stubs, V8.CodeStubs) \
1397  /* Amount of stub code. */ \
1398  SC(total_stubs_code_size, V8.TotalStubsCodeSize) \
1399  /* Amount of (JS) compiled code. */ \
1400  SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \
1401  SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \
1402  SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
1403  SC(gc_compactor_caused_by_oldspace_exhaustion, \
1404  V8.GCCompactorCausedByOldspaceExhaustion) \
1405  SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \
1406  SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \
1407  SC(ic_keyed_load_generic_smi, V8.ICKeyedLoadGenericSmi) \
1408  SC(ic_keyed_load_generic_symbol, V8.ICKeyedLoadGenericSymbol) \
1409  SC(ic_keyed_load_generic_slow, V8.ICKeyedLoadGenericSlow) \
1410  SC(ic_named_load_global_stub, V8.ICNamedLoadGlobalStub) \
1411  SC(ic_store_normal_miss, V8.ICStoreNormalMiss) \
1412  SC(ic_store_normal_hit, V8.ICStoreNormalHit) \
1413  SC(ic_binary_op_miss, V8.ICBinaryOpMiss) \
1414  SC(ic_compare_miss, V8.ICCompareMiss) \
1415  SC(ic_call_miss, V8.ICCallMiss) \
1416  SC(ic_keyed_call_miss, V8.ICKeyedCallMiss) \
1417  SC(ic_store_miss, V8.ICStoreMiss) \
1418  SC(ic_keyed_store_miss, V8.ICKeyedStoreMiss) \
1419  SC(cow_arrays_converted, V8.COWArraysConverted) \
1420  SC(constructed_objects, V8.ConstructedObjects) \
1421  SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \
1422  SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
1423  SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \
1424  SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
1425  SC(enum_cache_hits, V8.EnumCacheHits) \
1426  SC(enum_cache_misses, V8.EnumCacheMisses) \
1427  SC(fast_new_closure_total, V8.FastNewClosureTotal) \
1428  SC(string_add_runtime, V8.StringAddRuntime) \
1429  SC(string_add_native, V8.StringAddNative) \
1430  SC(string_add_runtime_ext_to_one_byte, V8.StringAddRuntimeExtToOneByte) \
1431  SC(sub_string_runtime, V8.SubStringRuntime) \
1432  SC(sub_string_native, V8.SubStringNative) \
1433  SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \
1434  SC(regexp_entry_native, V8.RegExpEntryNative) \
1435  SC(math_exp_runtime, V8.MathExpRuntime) \
1436  SC(math_log_runtime, V8.MathLogRuntime) \
1437  SC(math_pow_runtime, V8.MathPowRuntime) \
1438  SC(stack_interrupts, V8.StackInterrupts) \
1439  SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \
1440  SC(runtime_calls, V8.RuntimeCalls) \
1441  SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \
1442  SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \
1443  SC(soft_deopts_requested, V8.SoftDeoptsRequested) \
1444  SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \
1445  SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \
1446  /* Number of write barriers in generated code. */ \
1447  SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \
1448  SC(write_barriers_static, V8.WriteBarriersStatic) \
1449  SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
1450  SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
1451  SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
1452  SC(old_space_bytes_available, V8.MemoryOldSpaceBytesAvailable) \
1453  SC(old_space_bytes_committed, V8.MemoryOldSpaceBytesCommitted) \
1454  SC(old_space_bytes_used, V8.MemoryOldSpaceBytesUsed) \
1455  SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
1456  SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
1457  SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
1458  SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \
1459  SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \
1460  SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \
1461  SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \
1462  SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \
1463  SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed) \
1464  /* Total code size (including metadata) of baseline code or bytecode. */ \
1465  SC(total_baseline_code_size, V8.TotalBaselineCodeSize) \
1466  /* Total count of functions compiled using the baseline compiler. */ \
1467  SC(total_baseline_compile_count, V8.TotalBaselineCompileCount)
1468 
1469 #define STATS_COUNTER_TS_LIST(SC) \
1470  SC(wasm_generated_code_size, V8.WasmGeneratedCodeBytes) \
1471  SC(wasm_reloc_size, V8.WasmRelocBytes) \
1472  SC(wasm_lazily_compiled_functions, V8.WasmLazilyCompiledFunctions) \
1473  SC(liftoff_compiled_functions, V8.LiftoffCompiledFunctions) \
1474  SC(liftoff_unsupported_functions, V8.LiftoffUnsupportedFunctions)
1475 
1476 // This file contains all the v8 counters that are in use.
1477 class Counters : public std::enable_shared_from_this<Counters> {
1478  public:
1479  explicit Counters(Isolate* isolate);
1480 
1481  // Register an application-defined function for recording
1482  // subsequent counter statistics. Note: Must be called on the main
1483  // thread.
1484  void ResetCounterFunction(CounterLookupCallback f);
1485 
1486  // Register an application-defined function to create histograms for
1487  // recording subsequent histogram samples. Note: Must be called on
1488  // the main thread.
1489  void ResetCreateHistogramFunction(CreateHistogramCallback f);
1490 
1491  // Register an application-defined function to add a sample
1492  // to a histogram. Will be used in all subsequent sample additions.
1493  // Note: Must be called on the main thread.
1494  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
1495  stats_table_.SetAddHistogramSampleFunction(f);
1496  }
1497 
1498 #define HR(name, caption, min, max, num_buckets) \
1499  Histogram* name() { return &name##_; }
1500  HISTOGRAM_RANGE_LIST(HR)
1501 #undef HR
1502 
1503 #define HT(name, caption, max, res) \
1504  HistogramTimer* name() { return &name##_; }
1505  HISTOGRAM_TIMER_LIST(HT)
1506 #undef HT
1507 
1508 #define HT(name, caption, max, res) \
1509  TimedHistogram* name() { return &name##_; }
1510  TIMED_HISTOGRAM_LIST(HT)
1511 #undef HT
1512 
1513 #define AHT(name, caption) \
1514  AggregatableHistogramTimer* name() { return &name##_; }
1515  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
1516 #undef AHT
1517 
1518 #define HP(name, caption) \
1519  Histogram* name() { return &name##_; }
1520  HISTOGRAM_PERCENTAGE_LIST(HP)
1521 #undef HP
1522 
1523 #define HM(name, caption) \
1524  Histogram* name() { return &name##_; }
1525  HISTOGRAM_LEGACY_MEMORY_LIST(HM)
1526 #undef HM
1527 
1528 #define SC(name, caption) \
1529  StatsCounter* name() { return &name##_; }
1530  STATS_COUNTER_LIST_1(SC)
1531  STATS_COUNTER_LIST_2(SC)
1532 #undef SC
1533 
1534 #define SC(name, caption) \
1535  StatsCounterThreadSafe* name() { return &name##_; }
1536  STATS_COUNTER_TS_LIST(SC)
1537 #undef SC
1538 
1539  // clang-format off
1540  enum Id {
1541 #define RATE_ID(name, caption, max, res) k_##name,
1542  HISTOGRAM_TIMER_LIST(RATE_ID)
1543  TIMED_HISTOGRAM_LIST(RATE_ID)
1544 #undef RATE_ID
1545 #define AGGREGATABLE_ID(name, caption) k_##name,
1546  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AGGREGATABLE_ID)
1547 #undef AGGREGATABLE_ID
1548 #define PERCENTAGE_ID(name, caption) k_##name,
1549  HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
1550 #undef PERCENTAGE_ID
1551 #define MEMORY_ID(name, caption) k_##name,
1552  HISTOGRAM_LEGACY_MEMORY_LIST(MEMORY_ID)
1553 #undef MEMORY_ID
1554 #define COUNTER_ID(name, caption) k_##name,
1555  STATS_COUNTER_LIST_1(COUNTER_ID)
1556  STATS_COUNTER_LIST_2(COUNTER_ID)
1557  STATS_COUNTER_TS_LIST(COUNTER_ID)
1558 #undef COUNTER_ID
1559 #define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
1560  INSTANCE_TYPE_LIST(COUNTER_ID)
1561 #undef COUNTER_ID
1562 #define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
1563  kSizeOfCODE_TYPE_##name,
1564  CODE_KIND_LIST(COUNTER_ID)
1565 #undef COUNTER_ID
1566 #define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
1567  kSizeOfFIXED_ARRAY__##name,
1568  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
1569 #undef COUNTER_ID
1570  stats_counter_count
1571  };
1572  // clang-format on
1573 
1574  RuntimeCallStats* runtime_call_stats() { return &runtime_call_stats_; }
1575 
1576  WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats() {
1577  return &worker_thread_runtime_call_stats_;
1578  }
1579 
1580  private:
1581  friend class StatsTable;
1582  friend class StatsCounterBase;
1583  friend class Histogram;
1584  friend class HistogramTimer;
1585 
1586  Isolate* isolate_;
1587  StatsTable stats_table_;
1588 
1589  int* FindLocation(const char* name) {
1590  return stats_table_.FindLocation(name);
1591  }
1592 
1593  void* CreateHistogram(const char* name, int min, int max, size_t buckets) {
1594  return stats_table_.CreateHistogram(name, min, max, buckets);
1595  }
1596 
1597  void AddHistogramSample(void* histogram, int sample) {
1598  stats_table_.AddHistogramSample(histogram, sample);
1599  }
1600 
1601  Isolate* isolate() { return isolate_; }
1602 
1603 #define HR(name, caption, min, max, num_buckets) Histogram name##_;
1604  HISTOGRAM_RANGE_LIST(HR)
1605 #undef HR
1606 
1607 #define HT(name, caption, max, res) HistogramTimer name##_;
1608  HISTOGRAM_TIMER_LIST(HT)
1609 #undef HT
1610 
1611 #define HT(name, caption, max, res) TimedHistogram name##_;
1612  TIMED_HISTOGRAM_LIST(HT)
1613 #undef HT
1614 
1615 #define AHT(name, caption) \
1616  AggregatableHistogramTimer name##_;
1617  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
1618 #undef AHT
1619 
1620 #define HP(name, caption) \
1621  Histogram name##_;
1622  HISTOGRAM_PERCENTAGE_LIST(HP)
1623 #undef HP
1624 
1625 #define HM(name, caption) \
1626  Histogram name##_;
1627  HISTOGRAM_LEGACY_MEMORY_LIST(HM)
1628 #undef HM
1629 
1630 #define SC(name, caption) \
1631  StatsCounter name##_;
1632  STATS_COUNTER_LIST_1(SC)
1633  STATS_COUNTER_LIST_2(SC)
1634 #undef SC
1635 
1636 #define SC(name, caption) StatsCounterThreadSafe name##_;
1637  STATS_COUNTER_TS_LIST(SC)
1638 #undef SC
1639 
1640 #define SC(name) \
1641  StatsCounter size_of_##name##_; \
1642  StatsCounter count_of_##name##_;
1643  INSTANCE_TYPE_LIST(SC)
1644 #undef SC
1645 
1646 #define SC(name) \
1647  StatsCounter size_of_CODE_TYPE_##name##_; \
1648  StatsCounter count_of_CODE_TYPE_##name##_;
1649  CODE_KIND_LIST(SC)
1650 #undef SC
1651 
1652 #define SC(name) \
1653  StatsCounter size_of_FIXED_ARRAY_##name##_; \
1654  StatsCounter count_of_FIXED_ARRAY_##name##_;
1655  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
1656 #undef SC
1657 
1658  RuntimeCallStats runtime_call_stats_;
1659  WorkerThreadRuntimeCallStats worker_thread_runtime_call_stats_;
1660 
1661  DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
1662 };
1663 
1664 void HistogramTimer::Start() {
1665  TimedHistogram::Start(&timer_, counters()->isolate());
1666 }
1667 
1668 void HistogramTimer::Stop() {
1669  TimedHistogram::Stop(&timer_, counters()->isolate());
1670 }
1671 
1672 RuntimeCallTimerScope::RuntimeCallTimerScope(Isolate* isolate,
1673  RuntimeCallCounterId counter_id) {
1674  if (V8_LIKELY(!FLAG_runtime_stats)) return;
1675  stats_ = isolate->counters()->runtime_call_stats();
1676  stats_->Enter(&timer_, counter_id);
1677 }
1678 
1679 } // namespace internal
1680 } // namespace v8
1681 
1682 #endif // V8_COUNTERS_H_