// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COUNTERS_H_
#define V8_COUNTERS_H_

#include "include/v8.h"
#include "src/allocation.h"
#include "src/base/atomic-utils.h"
#include "src/base/optional.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/base/platform/time.h"
#include "src/counters-definitions.h"
#include "src/globals.h"
#include "src/heap-symbols.h"
#include "src/isolate.h"
#include "src/objects.h"
#include "src/runtime/runtime.h"
#include "src/tracing/trace-event.h"
#include "src/tracing/traced-value.h"
#include "src/tracing/tracing-category-observer.h"

namespace v8 {
namespace internal {

// This struct contains a set of flags that can be modified from multiple
// threads at runtime, unlike the normal FLAG_-like flags, which are not
// modified after the V8 instance is initialized.

struct TracingFlags {
  static V8_EXPORT_PRIVATE std::atomic_uint runtime_stats;
  static V8_EXPORT_PRIVATE std::atomic_uint gc_stats;
  static V8_EXPORT_PRIVATE std::atomic_uint ic_stats;

  static bool is_runtime_stats_enabled() {
    return runtime_stats.load(std::memory_order_relaxed) != 0;
  }

  static bool is_gc_stats_enabled() {
    return gc_stats.load(std::memory_order_relaxed) != 0;
  }

  static bool is_ic_stats_enabled() {
    return ic_stats.load(std::memory_order_relaxed) != 0;
  }
};

// StatsCounters is an interface for plugging into external
// counters for monitoring. Counters can be looked up and
// manipulated by name.

class Counters;

class StatsTable {
 public:
  // Register an application-defined function for recording
  // subsequent counter statistics.
  void SetCounterFunction(CounterLookupCallback f);

  // Register an application-defined function to create histograms for
  // recording subsequent histogram samples.
  void SetCreateHistogramFunction(CreateHistogramCallback f) {
    create_histogram_function_ = f;
  }

  // Register an application-defined function to add a sample
  // to a histogram created with the CreateHistogram function.
  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
    add_histogram_sample_function_ = f;
  }

  bool HasCounterFunction() const { return lookup_function_ != nullptr; }

  // Lookup the location of a counter by name. If the lookup
  // is successful, returns a non-nullptr pointer for writing the
  // value of the counter. Each thread calling this function
  // may receive a different location to store its counter.
  // The return value must not be cached and re-used across
  // threads, although a single thread is free to cache it.
  int* FindLocation(const char* name) {
    if (!lookup_function_) return nullptr;
    return lookup_function_(name);
  }

  // Create a histogram by name. If the create is successful,
  // returns a non-nullptr pointer for use with the AddHistogramSample
  // function. min and max define the expected minimum and maximum
  // sample values. buckets is the maximum number of buckets
  // that the samples will be grouped into.
  void* CreateHistogram(const char* name, int min, int max, size_t buckets) {
    if (!create_histogram_function_) return nullptr;
    return create_histogram_function_(name, min, max, buckets);
  }

  // Add a sample to a histogram created with the CreateHistogram
  // function.
  void AddHistogramSample(void* histogram, int sample) {
    if (!add_histogram_sample_function_) return;
    return add_histogram_sample_function_(histogram, sample);
  }

 private:
  friend class Counters;

  explicit StatsTable(Counters* counters);

  CounterLookupCallback lookup_function_;
  CreateHistogramCallback create_histogram_function_;
  AddHistogramSampleCallback add_histogram_sample_function_;

  DISALLOW_COPY_AND_ASSIGN(StatsTable);
};
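
// A minimal sketch of how an embedder typically plugs external counters into
// V8. The callback signatures come from include/v8.h; the My* functions are
// hypothetical embedder code, and the callbacks are normally installed via
// the public v8::Isolate API, which forwards to this StatsTable:
//
//   int* MyLookupCounter(const char* name);   // returns storage or nullptr
//   void* MyCreateHistogram(const char* name, int min, int max,
//                           size_t buckets);
//   void MyAddHistogramSample(void* histogram, int sample);
//
//   isolate->SetCounterFunction(MyLookupCounter);
//   isolate->SetCreateHistogramFunction(MyCreateHistogram);
//   isolate->SetAddHistogramSampleFunction(MyAddHistogramSample);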

// Base class for stats counters.
class StatsCounterBase {
 protected:
  Counters* counters_;
  const char* name_;
  int* ptr_;

  StatsCounterBase() = default;
  StatsCounterBase(Counters* counters, const char* name)
      : counters_(counters), name_(name), ptr_(nullptr) {}

  void SetLoc(int* loc, int value) { *loc = value; }
  void IncrementLoc(int* loc) { (*loc)++; }
  void IncrementLoc(int* loc, int value) { (*loc) += value; }
  void DecrementLoc(int* loc) { (*loc)--; }
  void DecrementLoc(int* loc, int value) { (*loc) -= value; }

  V8_EXPORT_PRIVATE int* FindLocationInStatsTable() const;
};

// StatsCounters are dynamically created values which can be tracked in
// the StatsTable. They are designed to be lightweight to create and
// easy to use.
//
// Internally, a counter represents a value in a row of a StatsTable.
// The row has a 32bit value for each process/thread in the table and also
// a name (stored in the table metadata). Since the storage location can be
// thread-specific, this class cannot be shared across threads. Note: This
// class is not thread safe.
class StatsCounter : public StatsCounterBase {
 public:
  // Sets the counter to a specific value.
  void Set(int value) {
    if (int* loc = GetPtr()) SetLoc(loc, value);
  }

  // Increments the counter.
  void Increment() {
    if (int* loc = GetPtr()) IncrementLoc(loc);
  }

  void Increment(int value) {
    if (int* loc = GetPtr()) IncrementLoc(loc, value);
  }

  // Decrements the counter.
  void Decrement() {
    if (int* loc = GetPtr()) DecrementLoc(loc);
  }

  void Decrement(int value) {
    if (int* loc = GetPtr()) DecrementLoc(loc, value);
  }

  // Is this counter enabled?
  // Returns false if table is full.
  bool Enabled() { return GetPtr() != nullptr; }

  // Get the internal pointer to the counter. This is used
  // by the code generator to emit code that manipulates a
  // given counter without calling the runtime system.
  int* GetInternalPointer() {
    int* loc = GetPtr();
    DCHECK_NOT_NULL(loc);
    return loc;
  }

 private:
  friend class Counters;

  StatsCounter() = default;
  StatsCounter(Counters* counters, const char* name)
      : StatsCounterBase(counters, name), lookup_done_(false) {}

  // Reset the cached internal pointer.
  void Reset() { lookup_done_ = false; }

  // Returns the cached address of this counter location.
  int* GetPtr() {
    if (lookup_done_) return ptr_;
    lookup_done_ = true;
    ptr_ = FindLocationInStatsTable();
    return ptr_;
  }

  bool lookup_done_;
};
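
// A minimal usage sketch, assuming a Counters* obtained from the isolate.
// `example_counter` stands in for any accessor generated from the
// STATS_COUNTER lists below and is purely illustrative:
//
//   StatsCounter* counter = isolate->counters()->example_counter();
//   counter->Increment();  // silently does nothing if the embedder did not
//   counter->Set(42);      // install a counter lookup function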

// Thread safe version of StatsCounter.
class V8_EXPORT_PRIVATE StatsCounterThreadSafe : public StatsCounterBase {
 public:
  void Set(int Value);
  void Increment();
  void Increment(int value);
  void Decrement();
  void Decrement(int value);
  bool Enabled() { return ptr_ != nullptr; }
  int* GetInternalPointer() {
    DCHECK_NOT_NULL(ptr_);
    return ptr_;
  }

 private:
  friend class Counters;

  StatsCounterThreadSafe(Counters* counters, const char* name);
  void Reset() { ptr_ = FindLocationInStatsTable(); }

  base::Mutex mutex_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(StatsCounterThreadSafe);
};

// A Histogram represents a dynamically created histogram in the
// StatsTable. Note: This class is thread safe.
class Histogram {
 public:
  // Add a single sample to this histogram.
  void AddSample(int sample);

  // Returns true if this histogram is enabled.
  bool Enabled() { return histogram_ != nullptr; }

  const char* name() { return name_; }

  int min() const { return min_; }
  int max() const { return max_; }
  int num_buckets() const { return num_buckets_; }

  // Asserts that |expected_counters| is the same as the Counters this
  // Histogram reports to.
  void AssertReportsToCounters(Counters* expected_counters) {
    DCHECK_EQ(counters_, expected_counters);
  }

 protected:
  Histogram() = default;
  Histogram(const char* name, int min, int max, int num_buckets,
            Counters* counters)
      : name_(name),
        min_(min),
        max_(max),
        num_buckets_(num_buckets),
        histogram_(nullptr),
        counters_(counters) {
    DCHECK(counters_);
  }

  Counters* counters() const { return counters_; }

  // Reset the cached internal pointer.
  void Reset() { histogram_ = CreateHistogram(); }

 private:
  friend class Counters;

  void* CreateHistogram() const;

  const char* name_;
  int min_;
  int max_;
  int num_buckets_;
  void* histogram_;
  Counters* counters_;
};

enum class HistogramTimerResolution { MILLISECOND, MICROSECOND };

// A thread safe histogram timer. It also allows distributions of
// nested timed results.
class TimedHistogram : public Histogram {
 public:
  // Start the timer. Log if isolate non-null.
  V8_EXPORT_PRIVATE void Start(base::ElapsedTimer* timer, Isolate* isolate);

  // Stop the timer and record the results. Log if isolate non-null.
  V8_EXPORT_PRIVATE void Stop(base::ElapsedTimer* timer, Isolate* isolate);

  // Records a TimeDelta::Max() result. Useful to record percentage of tasks
  // that never got to run in a given scenario. Log if isolate non-null.
  void RecordAbandon(base::ElapsedTimer* timer, Isolate* isolate);

 protected:
  friend class Counters;
  HistogramTimerResolution resolution_;

  TimedHistogram() = default;
  TimedHistogram(const char* name, int min, int max,
                 HistogramTimerResolution resolution, int num_buckets,
                 Counters* counters)
      : Histogram(name, min, max, num_buckets, counters),
        resolution_(resolution) {}
  void AddTimeSample();
};

// Helper class for scoping a TimedHistogram.
class TimedHistogramScope {
 public:
  explicit TimedHistogramScope(TimedHistogram* histogram,
                               Isolate* isolate = nullptr)
      : histogram_(histogram), isolate_(isolate) {
    histogram_->Start(&timer_, isolate);
  }

  ~TimedHistogramScope() { histogram_->Stop(&timer_, isolate_); }

 private:
  base::ElapsedTimer timer_;
  TimedHistogram* histogram_;
  Isolate* isolate_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(TimedHistogramScope);
};
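
// A minimal usage sketch: time a block of work and report the elapsed time to
// a TimedHistogram. `example_timed_histogram` is a hypothetical accessor
// generated from the TIMED_HISTOGRAM_LIST below:
//
//   {
//     TimedHistogramScope scope(
//         isolate->counters()->example_timed_histogram(), isolate);
//     DoTimedWork();  // the elapsed time is recorded when `scope` is destroyed
//   }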

enum class OptionalTimedHistogramScopeMode { TAKE_TIME, DONT_TAKE_TIME };

// Helper class for scoping a TimedHistogram.
// It will not take time for mode = DONT_TAKE_TIME.
class OptionalTimedHistogramScope {
 public:
  OptionalTimedHistogramScope(TimedHistogram* histogram, Isolate* isolate,
                              OptionalTimedHistogramScopeMode mode)
      : histogram_(histogram), isolate_(isolate), mode_(mode) {
    if (mode == OptionalTimedHistogramScopeMode::TAKE_TIME) {
      histogram_->Start(&timer_, isolate);
    }
  }

  ~OptionalTimedHistogramScope() {
    if (mode_ == OptionalTimedHistogramScopeMode::TAKE_TIME) {
      histogram_->Stop(&timer_, isolate_);
    }
  }

 private:
  base::ElapsedTimer timer_;
  TimedHistogram* const histogram_;
  Isolate* const isolate_;
  const OptionalTimedHistogramScopeMode mode_;
  DISALLOW_IMPLICIT_CONSTRUCTORS(OptionalTimedHistogramScope);
};

// Helper class for recording a TimedHistogram asynchronously with manual
// controls (it will not generate a report if destroyed without explicitly
// triggering a report). |async_counters| should be a shared_ptr to
// |histogram->counters()|, making it safe to report to an
// AsyncTimedHistogram after the associated isolate has been destroyed.
// AsyncTimedHistogram can be moved/copied to avoid computing Now() multiple
// times when the times of multiple tasks are identical; each copy will
// generate its own report.
class AsyncTimedHistogram {
 public:
  explicit AsyncTimedHistogram(TimedHistogram* histogram,
                               std::shared_ptr<Counters> async_counters)
      : histogram_(histogram), async_counters_(std::move(async_counters)) {
    histogram_->AssertReportsToCounters(async_counters_.get());
    histogram_->Start(&timer_, nullptr);
  }

  // Records the time elapsed to |histogram_| and stops |timer_|.
  void RecordDone() { histogram_->Stop(&timer_, nullptr); }

  // Records TimeDelta::Max() to |histogram_| and stops |timer_|.
  void RecordAbandon() { histogram_->RecordAbandon(&timer_, nullptr); }

 private:
  base::ElapsedTimer timer_;
  TimedHistogram* histogram_;
  std::shared_ptr<Counters> async_counters_;
};
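
// A minimal usage sketch, assuming a std::shared_ptr<Counters> `counters` and
// a hypothetical PostTask helper; the histogram accessor is also illustrative:
//
//   AsyncTimedHistogram async_timer(counters->example_timed_histogram(),
//                                   counters);
//   PostTask([async_timer]() mutable {
//     DoBackgroundWork();
//     async_timer.RecordDone();  // or RecordAbandon() if the work is dropped
//   });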

// Helper class for scoping a TimedHistogram, where the histogram is selected
// at stop time rather than start time.
// TODO(leszeks): This is heavily reliant on TimedHistogram::Start() doing
// nothing but starting the timer, and TimedHistogram::Stop() logging the
// sample correctly even if Start() was not called. This happens to be true
// iff Stop() is passed a null isolate, but that's an implementation detail
// of TimedHistogram, and we shouldn't rely on it.
class LazyTimedHistogramScope {
 public:
  LazyTimedHistogramScope() : histogram_(nullptr) { timer_.Start(); }
  ~LazyTimedHistogramScope() {
    // We should set the histogram before this scope exits.
    DCHECK_NOT_NULL(histogram_);
    histogram_->Stop(&timer_, nullptr);
  }

  void set_histogram(TimedHistogram* histogram) { histogram_ = histogram; }

 private:
  base::ElapsedTimer timer_;
  TimedHistogram* histogram_;
};
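
// A minimal usage sketch: the timer starts immediately and the histogram is
// chosen later, once it is known. The accessors shown are hypothetical:
//
//   LazyTimedHistogramScope scope;
//   bool used_cache = DoWorkThatMayHitACache();
//   scope.set_histogram(used_cache ? counters->example_cached_histogram()
//                                  : counters->example_uncached_histogram());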

// A HistogramTimer allows distributions of non-nested timed results
// to be created. WARNING: This class is not thread safe and can only
// be run on the foreground thread.
class HistogramTimer : public TimedHistogram {
 public:
  // Note: public for testing purposes only.
  HistogramTimer(const char* name, int min, int max,
                 HistogramTimerResolution resolution, int num_buckets,
                 Counters* counters)
      : TimedHistogram(name, min, max, resolution, num_buckets, counters) {}

  inline void Start();
  inline void Stop();

  // Returns true if the timer is running.
  bool Running() { return Enabled() && timer_.IsStarted(); }

  // TODO(bmeurer): Remove this when HistogramTimerScope is fixed.
#ifdef DEBUG
  base::ElapsedTimer* timer() { return &timer_; }
#endif

 private:
  friend class Counters;

  base::ElapsedTimer timer_;

  HistogramTimer() = default;
};

// Helper class for scoping a HistogramTimer.
// TODO(bmeurer): The ifdeffery is an ugly hack around the fact that the
// Parser is currently reentrant (when it throws an error, we call back
// into JavaScript and all bets are off), but ElapsedTimer is not
// reentry-safe. Fix this properly and remove |allow_nesting|.
class HistogramTimerScope {
 public:
  explicit HistogramTimerScope(HistogramTimer* timer,
                               bool allow_nesting = false)
#ifdef DEBUG
      : timer_(timer), skipped_timer_start_(false) {
    if (timer_->timer()->IsStarted() && allow_nesting) {
      skipped_timer_start_ = true;
    } else {
      timer_->Start();
    }
  }
#else
      : timer_(timer) {
    timer_->Start();
  }
#endif
  ~HistogramTimerScope() {
#ifdef DEBUG
    if (!skipped_timer_start_) {
      timer_->Stop();
    }
#else
    timer_->Stop();
#endif
  }

 private:
  HistogramTimer* timer_;
#ifdef DEBUG
  bool skipped_timer_start_;
#endif
};
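
// A minimal usage sketch on the foreground thread; the histogram timer
// accessor is hypothetical:
//
//   {
//     HistogramTimerScope timer_scope(isolate->counters()->example_timer());
//     DoForegroundWork();  // the sample is recorded when the scope ends
//   }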

// A histogram timer that can aggregate events within a larger scope.
//
// Intended use of this timer is to have an outer (aggregating) and an inner
// (to be aggregated) scope, where the inner scope measures the time of
// events, and all those inner scope measurements will be summed up by the
// outer scope. An example use might be to aggregate the time spent in lazy
// compilation while running a script.
//
// Helpers:
// - AggregatingHistogramTimerScope, the "outer" scope within which
//     times will be summed up.
// - AggregatedHistogramTimerScope, the "inner" scope which defines the
//     events to be timed.
class AggregatableHistogramTimer : public Histogram {
 public:
  // Start/stop the "outer" scope.
  void Start() { time_ = base::TimeDelta(); }
  void Stop() {
    if (time_ != base::TimeDelta()) {
      // Only add non-zero samples, since zero samples represent situations
      // where there were no aggregated samples added.
      AddSample(static_cast<int>(time_.InMicroseconds()));
    }
  }

  // Add a time value ("inner" scope).
  void Add(base::TimeDelta other) { time_ += other; }

 private:
  friend class Counters;

  AggregatableHistogramTimer() = default;
  AggregatableHistogramTimer(const char* name, int min, int max,
                             int num_buckets, Counters* counters)
      : Histogram(name, min, max, num_buckets, counters) {}

  base::TimeDelta time_;
};

// A helper class for use with AggregatableHistogramTimer. This is the
// outer-most timer scope used with an AggregatableHistogramTimer. It will
// aggregate the information from the inner AggregatedHistogramTimerScope.
class AggregatingHistogramTimerScope {
 public:
  explicit AggregatingHistogramTimerScope(
      AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    histogram_->Start();
  }
  ~AggregatingHistogramTimerScope() { histogram_->Stop(); }

 private:
  AggregatableHistogramTimer* histogram_;
};

// A helper class for use with AggregatableHistogramTimer, the "inner" scope
// which defines the events to be timed.
class AggregatedHistogramTimerScope {
 public:
  explicit AggregatedHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    timer_.Start();
  }
  ~AggregatedHistogramTimerScope() { histogram_->Add(timer_.Elapsed()); }

 private:
  base::ElapsedTimer timer_;
  AggregatableHistogramTimer* histogram_;
};
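
// A minimal usage sketch showing how the outer and inner scopes nest; the
// aggregatable histogram accessor and the compilation helpers are
// hypothetical:
//
//   AggregatableHistogramTimer* total = counters->example_lazy_compile_total();
//   {
//     AggregatingHistogramTimerScope outer(total);   // "outer" scope
//     for (auto& function : lazily_compiled_functions) {
//       AggregatedHistogramTimerScope inner(total);  // "inner" scope
//       Compile(function);
//     }
//   }  // the sum of all inner times is recorded here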


// AggregatedMemoryHistogram collects (time, value) sample pairs and turns
// them into time-uniform samples for the backing histogram, such that the
// backing histogram receives one sample every T ms, where T is controlled
// by FLAG_histogram_interval.
//
// More formally: let F be a real-valued function that maps time to sample
// values. We define F as a linear interpolation between adjacent samples. For
// each time interval [x; x + T) the backing histogram gets one sample value
// that is the average of F(t) in the interval.
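//
// A small worked example (the numbers are purely illustrative): with
// T = 10 ms and raw samples (0 ms, 100), (5 ms, 200) and (20 ms, 300), F is
// the piecewise-linear curve through those points. The backing histogram
// receives one sample for [0; 10) and one for [10; 20), each equal to the
// average of F over that interval, i.e. roughly 183 and 267.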
template <typename Histogram>
class AggregatedMemoryHistogram {
 public:
  // Note: public for testing purposes only.
  explicit AggregatedMemoryHistogram(Histogram* backing_histogram)
      : AggregatedMemoryHistogram() {
    backing_histogram_ = backing_histogram;
  }

  // Invariants that hold before and after AddSample if
  // is_initialized_ is true:
  //
  // 1) We have processed all samples that came in before start_ms_ and sent
  //    the corresponding aggregated samples to the backing histogram.
  // 2) (last_ms_, last_value_) is the last received sample.
  // 3) last_ms_ < start_ms_ + FLAG_histogram_interval.
  // 4) aggregate_value_ is the average of the function that is constructed by
  //    linearly interpolating samples received between start_ms_ and last_ms_.
  void AddSample(double current_ms, double current_value);

 private:
  friend class Counters;

  AggregatedMemoryHistogram()
      : is_initialized_(false),
        start_ms_(0.0),
        last_ms_(0.0),
        aggregate_value_(0.0),
        last_value_(0.0),
        backing_histogram_(nullptr) {}
  double Aggregate(double current_ms, double current_value);

  bool is_initialized_;
  double start_ms_;
  double last_ms_;
  double aggregate_value_;
  double last_value_;
  Histogram* backing_histogram_;
};


template <typename Histogram>
void AggregatedMemoryHistogram<Histogram>::AddSample(double current_ms,
                                                     double current_value) {
  if (!is_initialized_) {
    aggregate_value_ = current_value;
    start_ms_ = current_ms;
    last_value_ = current_value;
    last_ms_ = current_ms;
    is_initialized_ = true;
  } else {
    const double kEpsilon = 1e-6;
    const int kMaxSamples = 1000;
    if (current_ms < last_ms_ + kEpsilon) {
      // Two samples have the same time, remember the last one.
      last_value_ = current_value;
    } else {
      double sample_interval_ms = FLAG_histogram_interval;
      double end_ms = start_ms_ + sample_interval_ms;
      if (end_ms <= current_ms + kEpsilon) {
        // Linearly interpolate between the last_ms_ and the current_ms.
        double slope = (current_value - last_value_) / (current_ms - last_ms_);
        int i;
        // Send aggregated samples to the backing histogram from the start_ms
        // to the current_ms.
        for (i = 0; i < kMaxSamples && end_ms <= current_ms + kEpsilon; i++) {
          double end_value = last_value_ + (end_ms - last_ms_) * slope;
          double sample_value;
          if (i == 0) {
            // Take aggregate_value_ into account.
            sample_value = Aggregate(end_ms, end_value);
          } else {
            // There is no aggregate_value_ for i > 0.
            sample_value = (last_value_ + end_value) / 2;
          }
          backing_histogram_->AddSample(static_cast<int>(sample_value + 0.5));
          last_value_ = end_value;
          last_ms_ = end_ms;
          end_ms += sample_interval_ms;
        }
        if (i == kMaxSamples) {
          // We hit the sample limit, ignore the remaining samples.
          aggregate_value_ = current_value;
          start_ms_ = current_ms;
        } else {
          aggregate_value_ = last_value_;
          start_ms_ = last_ms_;
        }
      }
      aggregate_value_ = current_ms > start_ms_ + kEpsilon
                             ? Aggregate(current_ms, current_value)
                             : aggregate_value_;
      last_value_ = current_value;
      last_ms_ = current_ms;
    }
  }
}


template <typename Histogram>
double AggregatedMemoryHistogram<Histogram>::Aggregate(double current_ms,
                                                       double current_value) {
  double interval_ms = current_ms - start_ms_;
  double value = (current_value + last_value_) / 2;
  // The aggregate_value_ is the average for [start_ms_; last_ms_].
  // The value is the average for [last_ms_; current_ms].
  // Return the weighted average of the aggregate_value_ and the value.
  return aggregate_value_ * ((last_ms_ - start_ms_) / interval_ms) +
         value * ((current_ms - last_ms_) / interval_ms);
}

class RuntimeCallCounter final {
 public:
  RuntimeCallCounter() : RuntimeCallCounter(nullptr) {}
  explicit RuntimeCallCounter(const char* name)
      : name_(name), count_(0), time_(0) {}
  V8_NOINLINE void Reset();
  V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
  void Add(RuntimeCallCounter* other);

  const char* name() const { return name_; }
  int64_t count() const { return count_; }
  base::TimeDelta time() const {
    return base::TimeDelta::FromMicroseconds(time_);
  }
  void Increment() { count_++; }
  void Add(base::TimeDelta delta) { time_ += delta.InMicroseconds(); }

 private:
  friend class RuntimeCallStats;

  const char* name_;
  int64_t count_;
  // Stored as int64_t so that its initialization can be deferred.
  int64_t time_;
};

// RuntimeCallTimer is used to keep track of the stack of currently active
// timers used for properly measuring the own time of a RuntimeCallCounter.
class RuntimeCallTimer final {
 public:
  RuntimeCallCounter* counter() { return counter_; }
  void set_counter(RuntimeCallCounter* counter) { counter_ = counter; }
  RuntimeCallTimer* parent() const { return parent_.Value(); }
  void set_parent(RuntimeCallTimer* timer) { parent_.SetValue(timer); }
  const char* name() const { return counter_->name(); }

  inline bool IsStarted();

  inline void Start(RuntimeCallCounter* counter, RuntimeCallTimer* parent);
  void Snapshot();
  inline RuntimeCallTimer* Stop();

  // Make the time source configurable for testing purposes.
  V8_EXPORT_PRIVATE static base::TimeTicks (*Now)();

 private:
  inline void Pause(base::TimeTicks now);
  inline void Resume(base::TimeTicks now);
  inline void CommitTimeToCounter();

  RuntimeCallCounter* counter_ = nullptr;
  base::AtomicValue<RuntimeCallTimer*> parent_;
  base::TimeTicks start_ticks_;
  base::TimeDelta elapsed_;
};

#define FOR_EACH_GC_COUNTER(V) \
  TRACER_SCOPES(V) \
  TRACER_BACKGROUND_SCOPES(V)

#define FOR_EACH_API_COUNTER(V) \
  V(ArrayBuffer_Cast) \
  V(ArrayBuffer_Detach) \
  V(ArrayBuffer_New) \
  V(Array_CloneElementAt) \
  V(Array_New) \
  V(BigInt64Array_New) \
  V(BigInt_NewFromWords) \
  V(BigIntObject_BigIntValue) \
  V(BigIntObject_New) \
  V(BigUint64Array_New) \
  V(BooleanObject_BooleanValue) \
  V(BooleanObject_New) \
  V(Context_New) \
  V(Context_NewRemoteContext) \
  V(DataView_New) \
  V(Date_New) \
  V(Date_NumberValue) \
  V(Debug_Call) \
  V(debug_GetPrivateFields) \
  V(Error_New) \
  V(External_New) \
  V(Float32Array_New) \
  V(Float64Array_New) \
  V(Function_Call) \
  V(Function_New) \
  V(Function_NewInstance) \
  V(FunctionTemplate_GetFunction) \
  V(FunctionTemplate_New) \
  V(FunctionTemplate_NewRemoteInstance) \
  V(FunctionTemplate_NewWithCache) \
  V(FunctionTemplate_NewWithFastHandler) \
  V(Int16Array_New) \
  V(Int32Array_New) \
  V(Int8Array_New) \
  V(Isolate_DateTimeConfigurationChangeNotification) \
  V(Isolate_LocaleConfigurationChangeNotification) \
  V(JSON_Parse) \
  V(JSON_Stringify) \
  V(Map_AsArray) \
  V(Map_Clear) \
  V(Map_Delete) \
  V(Map_Get) \
  V(Map_Has) \
  V(Map_New) \
  V(Map_Set) \
  V(Message_GetEndColumn) \
  V(Message_GetLineNumber) \
  V(Message_GetSourceLine) \
  V(Message_GetStartColumn) \
  V(Module_Evaluate) \
  V(Module_InstantiateModule) \
  V(NumberObject_New) \
  V(NumberObject_NumberValue) \
  V(Object_CallAsConstructor) \
  V(Object_CallAsFunction) \
  V(Object_CreateDataProperty) \
  V(Object_DefineOwnProperty) \
  V(Object_DefineProperty) \
  V(Object_Delete) \
  V(Object_DeleteProperty) \
  V(Object_ForceSet) \
  V(Object_Get) \
  V(Object_GetOwnPropertyDescriptor) \
  V(Object_GetOwnPropertyNames) \
  V(Object_GetPropertyAttributes) \
  V(Object_GetPropertyNames) \
  V(Object_GetRealNamedProperty) \
  V(Object_GetRealNamedPropertyAttributes) \
  V(Object_GetRealNamedPropertyAttributesInPrototypeChain) \
  V(Object_GetRealNamedPropertyInPrototypeChain) \
  V(Object_Has) \
  V(Object_HasOwnProperty) \
  V(Object_HasRealIndexedProperty) \
  V(Object_HasRealNamedCallbackProperty) \
  V(Object_HasRealNamedProperty) \
  V(Object_New) \
  V(Object_ObjectProtoToString) \
  V(Object_Set) \
  V(Object_SetAccessor) \
  V(Object_SetIntegrityLevel) \
  V(Object_SetPrivate) \
  V(Object_SetPrototype) \
  V(ObjectTemplate_New) \
  V(ObjectTemplate_NewInstance) \
  V(Object_ToArrayIndex) \
  V(Object_ToBigInt) \
  V(Object_ToDetailString) \
  V(Object_ToInt32) \
  V(Object_ToInteger) \
  V(Object_ToNumber) \
  V(Object_ToObject) \
  V(Object_ToString) \
  V(Object_ToUint32) \
  V(Persistent_New) \
  V(Private_New) \
  V(Promise_Catch) \
  V(Promise_Chain) \
  V(Promise_HasRejectHandler) \
  V(Promise_Resolver_New) \
  V(Promise_Resolver_Reject) \
  V(Promise_Resolver_Resolve) \
  V(Promise_Result) \
  V(Promise_Status) \
  V(Promise_Then) \
  V(Proxy_New) \
  V(RangeError_New) \
  V(ReferenceError_New) \
  V(RegExp_New) \
  V(ScriptCompiler_Compile) \
  V(ScriptCompiler_CompileFunctionInContext) \
  V(ScriptCompiler_CompileUnbound) \
  V(Script_Run) \
  V(Set_Add) \
  V(Set_AsArray) \
  V(Set_Clear) \
  V(Set_Delete) \
  V(Set_Has) \
  V(Set_New) \
  V(SharedArrayBuffer_New) \
  V(String_Concat) \
  V(String_NewExternalOneByte) \
  V(String_NewExternalTwoByte) \
  V(String_NewFromOneByte) \
  V(String_NewFromTwoByte) \
  V(String_NewFromUtf8) \
  V(StringObject_New) \
  V(StringObject_StringValue) \
  V(String_Write) \
  V(String_WriteUtf8) \
  V(Symbol_New) \
  V(SymbolObject_New) \
  V(SymbolObject_SymbolValue) \
  V(SyntaxError_New) \
  V(TracedGlobal_New) \
  V(TryCatch_StackTrace) \
  V(TypeError_New) \
  V(Uint16Array_New) \
  V(Uint32Array_New) \
  V(Uint8Array_New) \
  V(Uint8ClampedArray_New) \
  V(UnboundScript_GetId) \
  V(UnboundScript_GetLineNumber) \
  V(UnboundScript_GetName) \
  V(UnboundScript_GetSourceMappingURL) \
  V(UnboundScript_GetSourceURL) \
  V(ValueDeserializer_ReadHeader) \
  V(ValueDeserializer_ReadValue) \
  V(ValueSerializer_WriteValue) \
  V(Value_InstanceOf) \
  V(Value_Int32Value) \
  V(Value_IntegerValue) \
  V(Value_NumberValue) \
  V(Value_TypeOf) \
  V(Value_Uint32Value) \
  V(WeakMap_Get) \
  V(WeakMap_New) \
  V(WeakMap_Set)

#define FOR_EACH_MANUAL_COUNTER(V) \
  V(AccessorGetterCallback) \
  V(AccessorSetterCallback) \
  V(ArrayLengthGetter) \
  V(ArrayLengthSetter) \
  V(BoundFunctionLengthGetter) \
  V(BoundFunctionNameGetter) \
  V(CompileAnalyse) \
  V(CompileBackgroundAnalyse) \
  V(CompileBackgroundCompileTask) \
  V(CompileBackgroundEval) \
  V(CompileBackgroundFunction) \
  V(CompileBackgroundIgnition) \
  V(CompileBackgroundRewriteReturnResult) \
  V(CompileBackgroundScopeAnalysis) \
  V(CompileBackgroundScript) \
  V(CompileCollectSourcePositions) \
  V(CompileDeserialize) \
  V(CompileEnqueueOnDispatcher) \
  V(CompileEval) \
  V(CompileFinalizeBackgroundCompileTask) \
  V(CompileFinishNowOnDispatcher) \
  V(CompileFunction) \
  V(CompileGetFromOptimizedCodeMap) \
  V(CompileIgnition) \
  V(CompileIgnitionFinalization) \
  V(CompileRewriteReturnResult) \
  V(CompileScopeAnalysis) \
  V(CompileScript) \
  V(CompileSerialize) \
  V(CompileWaitForDispatcher) \
  V(DeoptimizeCode) \
  V(DeserializeContext) \
  V(DeserializeIsolate) \
  V(FunctionCallback) \
  V(FunctionLengthGetter) \
  V(FunctionPrototypeGetter) \
  V(FunctionPrototypeSetter) \
  V(GC_Custom_AllAvailableGarbage) \
  V(GC_Custom_IncrementalMarkingObserver) \
  V(GC_Custom_SlowAllocateRaw) \
  V(GCEpilogueCallback) \
  V(GCPrologueCallback) \
  V(Genesis) \
  V(GetMoreDataCallback) \
  V(IndexedDefinerCallback) \
  V(IndexedDeleterCallback) \
  V(IndexedDescriptorCallback) \
  V(IndexedEnumeratorCallback) \
  V(IndexedGetterCallback) \
  V(IndexedQueryCallback) \
  V(IndexedSetterCallback) \
  V(Invoke) \
  V(InvokeApiFunction) \
  V(InvokeApiInterruptCallbacks) \
  V(InvokeFunctionCallback) \
  V(JS_Execution) \
  V(Map_SetPrototype) \
  V(Map_TransitionToAccessorProperty) \
  V(Map_TransitionToDataProperty) \
  V(MessageListenerCallback) \
  V(NamedDefinerCallback) \
  V(NamedDeleterCallback) \
  V(NamedDescriptorCallback) \
  V(NamedEnumeratorCallback) \
  V(NamedGetterCallback) \
  V(NamedQueryCallback) \
  V(NamedSetterCallback) \
  V(Object_DeleteProperty) \
  V(ObjectVerify) \
  V(OptimizeCode) \
  V(ParseArrowFunctionLiteral) \
  V(ParseBackgroundArrowFunctionLiteral) \
  V(ParseBackgroundFunctionLiteral) \
  V(ParseBackgroundProgram) \
  V(ParseEval) \
  V(ParseFunction) \
  V(ParseFunctionLiteral) \
  V(ParseProgram) \
  V(PreParseArrowFunctionLiteral) \
  V(PreParseBackgroundArrowFunctionLiteral) \
  V(PreParseBackgroundWithVariableResolution) \
  V(PreParseWithVariableResolution) \
  V(PropertyCallback) \
  V(PrototypeMap_TransitionToAccessorProperty) \
  V(PrototypeMap_TransitionToDataProperty) \
  V(PrototypeObject_DeleteProperty) \
  V(RecompileConcurrent) \
  V(RecompileSynchronous) \
  V(ReconfigureToDataProperty) \
  V(StringLengthGetter) \
  V(TestCounter1) \
  V(TestCounter2) \
  V(TestCounter3)

#define FOR_EACH_HANDLER_COUNTER(V) \
  V(KeyedLoadIC_KeyedLoadSloppyArgumentsStub) \
  V(KeyedLoadIC_LoadElementDH) \
  V(KeyedLoadIC_LoadIndexedInterceptorStub) \
  V(KeyedLoadIC_LoadIndexedStringDH) \
  V(KeyedLoadIC_SlowStub) \
  V(KeyedStoreIC_ElementsTransitionAndStoreStub) \
  V(KeyedStoreIC_KeyedStoreSloppyArgumentsStub) \
  V(KeyedStoreIC_SlowStub) \
  V(KeyedStoreIC_StoreElementStub) \
  V(KeyedStoreIC_StoreFastElementStub) \
  V(LoadGlobalIC_LoadScriptContextField) \
  V(LoadGlobalIC_SlowStub) \
  V(LoadIC_FunctionPrototypeStub) \
  V(LoadIC_HandlerCacheHit_Accessor) \
  V(LoadIC_LoadAccessorDH) \
  V(LoadIC_LoadAccessorFromPrototypeDH) \
  V(LoadIC_LoadApiGetterFromPrototypeDH) \
  V(LoadIC_LoadCallback) \
  V(LoadIC_LoadConstantDH) \
  V(LoadIC_LoadConstantFromPrototypeDH) \
  V(LoadIC_LoadFieldDH) \
  V(LoadIC_LoadFieldFromPrototypeDH) \
  V(LoadIC_LoadGlobalDH) \
  V(LoadIC_LoadGlobalFromPrototypeDH) \
  V(LoadIC_LoadIntegerIndexedExoticDH) \
  V(LoadIC_LoadInterceptorDH) \
  V(LoadIC_LoadInterceptorFromPrototypeDH) \
  V(LoadIC_LoadNativeDataPropertyDH) \
  V(LoadIC_LoadNativeDataPropertyFromPrototypeDH) \
  V(LoadIC_LoadNonexistentDH) \
  V(LoadIC_LoadNonMaskingInterceptorDH) \
  V(LoadIC_LoadNormalDH) \
  V(LoadIC_LoadNormalFromPrototypeDH) \
  V(LoadIC_NonReceiver) \
  V(LoadIC_Premonomorphic) \
  V(LoadIC_SlowStub) \
  V(LoadIC_StringLength) \
  V(LoadIC_StringWrapperLength) \
  V(StoreGlobalIC_SlowStub) \
  V(StoreGlobalIC_StoreScriptContextField) \
  V(StoreGlobalIC_Premonomorphic) \
  V(StoreIC_HandlerCacheHit_Accessor) \
  V(StoreIC_NonReceiver) \
  V(StoreIC_Premonomorphic) \
  V(StoreIC_SlowStub) \
  V(StoreIC_StoreAccessorDH) \
  V(StoreIC_StoreAccessorOnPrototypeDH) \
  V(StoreIC_StoreApiSetterOnPrototypeDH) \
  V(StoreIC_StoreFieldDH) \
  V(StoreIC_StoreGlobalDH) \
  V(StoreIC_StoreGlobalTransitionDH) \
  V(StoreIC_StoreInterceptorStub) \
  V(StoreIC_StoreNativeDataPropertyDH) \
  V(StoreIC_StoreNativeDataPropertyOnPrototypeDH) \
  V(StoreIC_StoreNormalDH) \
  V(StoreIC_StoreTransitionDH) \
  V(StoreInArrayLiteralIC_SlowStub)

enum RuntimeCallCounterId {
#define CALL_RUNTIME_COUNTER(name) kGC_##name,
  FOR_EACH_GC_COUNTER(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name) k##name,
  FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name, nargs, ressize) kRuntime_##name,
  FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_BUILTIN_COUNTER(name) kBuiltin_##name,
  BUILTIN_LIST_C(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) kAPI_##name,
  FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) kHandler_##name,
  FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
  kNumberOfCounters
};

class RuntimeCallStats final {
 public:
  V8_EXPORT_PRIVATE RuntimeCallStats();

  // Start measuring the time for a function. This will establish the
  // connection to the parent counter for properly calculating the own times.
  V8_EXPORT_PRIVATE void Enter(RuntimeCallTimer* timer,
                               RuntimeCallCounterId counter_id);

  // Leave a scope for a measured runtime function. This will properly add
  // the time delta to the current_counter and subtract the delta from its
  // parent.
  V8_EXPORT_PRIVATE void Leave(RuntimeCallTimer* timer);

  // Set counter id for the innermost measurement. It can be used to refine
  // event kind when a runtime entry counter is too generic.
  V8_EXPORT_PRIVATE void CorrectCurrentCounterId(
      RuntimeCallCounterId counter_id);

  V8_EXPORT_PRIVATE void Reset();
  // Add all entries from another stats object.
  void Add(RuntimeCallStats* other);
  V8_EXPORT_PRIVATE void Print(std::ostream& os);
  V8_EXPORT_PRIVATE void Print();
  V8_NOINLINE void Dump(v8::tracing::TracedValue* value);

  ThreadId thread_id() const { return thread_id_; }
  RuntimeCallTimer* current_timer() { return current_timer_.Value(); }
  RuntimeCallCounter* current_counter() { return current_counter_.Value(); }
  bool InUse() { return in_use_; }
  bool IsCalledOnTheSameThread();

  static const int kNumberOfCounters =
      static_cast<int>(RuntimeCallCounterId::kNumberOfCounters);
  RuntimeCallCounter* GetCounter(RuntimeCallCounterId counter_id) {
    return &counters_[static_cast<int>(counter_id)];
  }
  RuntimeCallCounter* GetCounter(int counter_id) {
    return &counters_[counter_id];
  }

 private:
  // Top of a stack of active timers.
  base::AtomicValue<RuntimeCallTimer*> current_timer_;
  // Active counter object associated with current timer.
  base::AtomicValue<RuntimeCallCounter*> current_counter_;
  // Used to track nested tracing scopes.
  bool in_use_;
  ThreadId thread_id_;
  RuntimeCallCounter counters_[kNumberOfCounters];
};

class WorkerThreadRuntimeCallStats final {
 public:
  WorkerThreadRuntimeCallStats();
  ~WorkerThreadRuntimeCallStats();

  // Returns the TLS key associated with this WorkerThreadRuntimeCallStats.
  base::Thread::LocalStorageKey GetKey();

  // Returns a new worker thread runtime call stats table managed by this
  // WorkerThreadRuntimeCallStats.
  RuntimeCallStats* NewTable();

  // Adds the counters from the worker thread tables to |main_call_stats|.
  void AddToMainTable(RuntimeCallStats* main_call_stats);

 private:
  base::Mutex mutex_;
  std::vector<std::unique_ptr<RuntimeCallStats>> tables_;
  base::Optional<base::Thread::LocalStorageKey> tls_key_;
};

// Creating a WorkerThreadRuntimeCallStatsScope will provide a thread-local
// runtime call stats table, and will dump the table to an immediate trace
// event when it is destroyed.
class WorkerThreadRuntimeCallStatsScope final {
 public:
  WorkerThreadRuntimeCallStatsScope(
      WorkerThreadRuntimeCallStats* off_thread_stats);
  ~WorkerThreadRuntimeCallStatsScope();

  RuntimeCallStats* Get() const { return table_; }

 private:
  RuntimeCallStats* table_;
};

#define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_id) \
  do { \
    if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled()) && \
        runtime_call_stats) { \
      runtime_call_stats->CorrectCurrentCounterId(counter_id); \
    } \
  } while (false)

#define TRACE_HANDLER_STATS(isolate, counter_name) \
  CHANGE_CURRENT_RUNTIME_COUNTER( \
      isolate->counters()->runtime_call_stats(), \
      RuntimeCallCounterId::kHandler_##counter_name)
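
// A usage sketch: an IC handler that wants to attribute its time to a more
// specific bucket passes one of the names from FOR_EACH_HANDLER_COUNTER:
//
//   TRACE_HANDLER_STATS(isolate, LoadIC_SlowStub);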

// A RuntimeCallTimerScope wraps a RuntimeCallTimer to measure the time spent
// in a C++ scope.
class RuntimeCallTimerScope {
 public:
  inline RuntimeCallTimerScope(Isolate* isolate,
                               RuntimeCallCounterId counter_id);
  // This constructor is here just to avoid calling GetIsolate() when the
  // stats are disabled and the isolate is not directly available.
  inline RuntimeCallTimerScope(Isolate* isolate, HeapObject heap_object,
                               RuntimeCallCounterId counter_id);
  inline RuntimeCallTimerScope(RuntimeCallStats* stats,
                               RuntimeCallCounterId counter_id) {
    if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled() ||
                  stats == nullptr))
      return;
    stats_ = stats;
    stats_->Enter(&timer_, counter_id);
  }

  inline ~RuntimeCallTimerScope() {
    if (V8_UNLIKELY(stats_ != nullptr)) {
      stats_->Leave(&timer_);
    }
  }

 private:
  RuntimeCallStats* stats_ = nullptr;
  RuntimeCallTimer timer_;

  DISALLOW_COPY_AND_ASSIGN(RuntimeCallTimerScope);
};
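
// A usage sketch: attribute the time spent in a C++ scope to one of the
// RuntimeCallCounterId buckets defined above. The surrounding function is
// hypothetical; kParseProgram comes from FOR_EACH_MANUAL_COUNTER:
//
//   void ParseTheProgram(Isolate* isolate) {
//     RuntimeCallTimerScope timer_scope(isolate,
//                                       RuntimeCallCounterId::kParseProgram);
//     // ... work; the own time is committed when the scope exits ...
//   }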

// This file contains all the v8 counters that are in use.
class Counters : public std::enable_shared_from_this<Counters> {
 public:
  explicit Counters(Isolate* isolate);

  // Register an application-defined function for recording
  // subsequent counter statistics. Note: Must be called on the main
  // thread.
  void ResetCounterFunction(CounterLookupCallback f);

  // Register an application-defined function to create histograms for
  // recording subsequent histogram samples. Note: Must be called on
  // the main thread.
  void ResetCreateHistogramFunction(CreateHistogramCallback f);

  // Register an application-defined function to add a sample
  // to a histogram. Will be used in all subsequent sample additions.
  // Note: Must be called on the main thread.
  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
    stats_table_.SetAddHistogramSampleFunction(f);
  }

#define HR(name, caption, min, max, num_buckets) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_RANGE_LIST(HR)
#undef HR

#define HT(name, caption, max, res) \
  HistogramTimer* name() { return &name##_; }
  HISTOGRAM_TIMER_LIST(HT)
#undef HT

#define HT(name, caption, max, res) \
  TimedHistogram* name() { return &name##_; }
  TIMED_HISTOGRAM_LIST(HT)
#undef HT

#define AHT(name, caption) \
  AggregatableHistogramTimer* name() { return &name##_; }
  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
#undef AHT

#define HP(name, caption) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_LEGACY_MEMORY_LIST(HM)
#undef HM

#define SC(name, caption) \
  StatsCounter* name() { return &name##_; }
  STATS_COUNTER_LIST_1(SC)
  STATS_COUNTER_LIST_2(SC)
  STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC

#define SC(name, caption) \
  StatsCounterThreadSafe* name() { return &name##_; }
  STATS_COUNTER_TS_LIST(SC)
#undef SC

  // clang-format off
  enum Id {
#define RATE_ID(name, caption, max, res) k_##name,
    HISTOGRAM_TIMER_LIST(RATE_ID)
    TIMED_HISTOGRAM_LIST(RATE_ID)
#undef RATE_ID
#define AGGREGATABLE_ID(name, caption) k_##name,
    AGGREGATABLE_HISTOGRAM_TIMER_LIST(AGGREGATABLE_ID)
#undef AGGREGATABLE_ID
#define PERCENTAGE_ID(name, caption) k_##name,
    HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
#undef PERCENTAGE_ID
#define MEMORY_ID(name, caption) k_##name,
    HISTOGRAM_LEGACY_MEMORY_LIST(MEMORY_ID)
#undef MEMORY_ID
#define COUNTER_ID(name, caption) k_##name,
    STATS_COUNTER_LIST_1(COUNTER_ID)
    STATS_COUNTER_LIST_2(COUNTER_ID)
    STATS_COUNTER_TS_LIST(COUNTER_ID)
    STATS_COUNTER_NATIVE_CODE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
    INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
    kSizeOfCODE_TYPE_##name,
    CODE_KIND_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
    kSizeOfFIXED_ARRAY__##name,
    FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
    stats_counter_count
  };
  // clang-format on

  RuntimeCallStats* runtime_call_stats() { return &runtime_call_stats_; }

  WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats() {
    return &worker_thread_runtime_call_stats_;
  }

 private:
  friend class StatsTable;
  friend class StatsCounterBase;
  friend class Histogram;
  friend class HistogramTimer;

  Isolate* isolate_;
  StatsTable stats_table_;

  int* FindLocation(const char* name) {
    return stats_table_.FindLocation(name);
  }

  void* CreateHistogram(const char* name, int min, int max, size_t buckets) {
    return stats_table_.CreateHistogram(name, min, max, buckets);
  }

  void AddHistogramSample(void* histogram, int sample) {
    stats_table_.AddHistogramSample(histogram, sample);
  }

  Isolate* isolate() { return isolate_; }

#define HR(name, caption, min, max, num_buckets) Histogram name##_;
  HISTOGRAM_RANGE_LIST(HR)
#undef HR

#define HT(name, caption, max, res) HistogramTimer name##_;
  HISTOGRAM_TIMER_LIST(HT)
#undef HT

#define HT(name, caption, max, res) TimedHistogram name##_;
  TIMED_HISTOGRAM_LIST(HT)
#undef HT

#define AHT(name, caption) \
  AggregatableHistogramTimer name##_;
  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
#undef AHT

#define HP(name, caption) \
  Histogram name##_;
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption) \
  Histogram name##_;
  HISTOGRAM_LEGACY_MEMORY_LIST(HM)
#undef HM

#define SC(name, caption) \
  StatsCounter name##_;
  STATS_COUNTER_LIST_1(SC)
  STATS_COUNTER_LIST_2(SC)
  STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC

#define SC(name, caption) StatsCounterThreadSafe name##_;
  STATS_COUNTER_TS_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_##name##_; \
  StatsCounter count_of_##name##_;
  INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_CODE_TYPE_##name##_; \
  StatsCounter count_of_CODE_TYPE_##name##_;
  CODE_KIND_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_FIXED_ARRAY_##name##_; \
  StatsCounter count_of_FIXED_ARRAY_##name##_;
  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC

  RuntimeCallStats runtime_call_stats_;
  WorkerThreadRuntimeCallStats worker_thread_runtime_call_stats_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
};

void HistogramTimer::Start() {
  TimedHistogram::Start(&timer_, counters()->isolate());
}

void HistogramTimer::Stop() {
  TimedHistogram::Stop(&timer_, counters()->isolate());
}

RuntimeCallTimerScope::RuntimeCallTimerScope(Isolate* isolate,
                                             RuntimeCallCounterId counter_id) {
  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
  stats_ = isolate->counters()->runtime_call_stats();
  stats_->Enter(&timer_, counter_id);
}

}  // namespace internal
}  // namespace v8

#endif  // V8_COUNTERS_H_