V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
gc-tracer.cc
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/gc-tracer.h"

#include <cstdarg>

#include "src/base/atomic-utils.h"
#include "src/counters-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/isolate.h"

namespace v8 {
namespace internal {

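// Sums Waste() and Available() over all paged spaces: memory the heap holds
// but that is not occupied by live objects ("holes").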
static size_t CountTotalHolesSize(Heap* heap) {
  size_t holes_size = 0;
  PagedSpaces spaces(heap);
  for (PagedSpace* space = spaces.next(); space != nullptr;
       space = spaces.next()) {
    DCHECK_GE(holes_size + space->Waste() + space->Available(), holes_size);
    holes_size += space->Waste() + space->Available();
  }
  return holes_size;
}

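// Maps a tracer scope to its RuntimeCallStats counter by offsetting from
// kGC_MC_INCREMENTAL. This only works because the scope enum and the counter
// enum are declared in the same order, which the STATIC_ASSERT below (and the
// ones in the GCTracer constructor) guard.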
RuntimeCallCounterId GCTracer::RCSCounterFromScope(Scope::ScopeId id) {
  STATIC_ASSERT(Scope::FIRST_SCOPE == Scope::MC_INCREMENTAL);
  return static_cast<RuntimeCallCounterId>(
      static_cast<int>(RuntimeCallCounterId::kGC_MC_INCREMENTAL) +
      static_cast<int>(id));
}

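// GCTracer::Scope is an RAII timer: the constructor records the start time
// (and enters RuntimeCallStats when --runtime-stats is on), and the
// destructor reports the elapsed wall-clock time via AddScopeSample. A
// hypothetical call site would look like:
//
//   {
//     GCTracer::Scope scope(heap->tracer(), GCTracer::Scope::MC_MARK);
//     // ... marking work; its duration is attributed to MC_MARK ...
//   }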
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope)
    : tracer_(tracer), scope_(scope) {
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
  // TODO(cbruni): remove once we fully moved to a trace-based system.
  if (V8_LIKELY(!FLAG_runtime_stats)) return;
  runtime_stats_ = tracer_->heap_->isolate()->counters()->runtime_call_stats();
  runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
}

GCTracer::Scope::~Scope() {
  tracer_->AddScopeSample(
      scope_, tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_);
  // TODO(cbruni): remove once we fully moved to a trace-based system.
  if (V8_LIKELY(runtime_stats_ == nullptr)) return;
  runtime_stats_->Leave(&timer_);
}

GCTracer::BackgroundScope::BackgroundScope(GCTracer* tracer, ScopeId scope)
    : tracer_(tracer), scope_(scope), runtime_stats_enabled_(false) {
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
  // TODO(cbruni): remove once we fully moved to a trace-based system.
  if (V8_LIKELY(!base::AsAtomic32::Relaxed_Load(&FLAG_runtime_stats))) return;
  timer_.Start(&counter_, nullptr);
  runtime_stats_enabled_ = true;
}

GCTracer::BackgroundScope::~BackgroundScope() {
  double duration_ms =
      tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_;
  // TODO(cbruni): remove once we fully moved to a trace-based system.
  if (V8_LIKELY(!runtime_stats_enabled_)) {
    tracer_->AddBackgroundScopeSample(scope_, duration_ms, nullptr);
  } else {
    timer_.Stop();
    tracer_->AddBackgroundScopeSample(scope_, duration_ms, &counter_);
  }
}

const char* GCTracer::Scope::Name(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_SCOPES(CASE)
    TRACER_BACKGROUND_SCOPES(CASE)
    case Scope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  UNREACHABLE();
  return nullptr;
}

const char* GCTracer::BackgroundScope::Name(ScopeId id) {
#define CASE(scope)            \
  case BackgroundScope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_BACKGROUND_SCOPES(CASE)
    case BackgroundScope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  UNREACHABLE();
  return nullptr;
}

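// An Event is the record of a single garbage-collection cycle: its type and
// reason, start/end timestamps, object/memory/holes sizes before and after,
// and the per-scope time breakdown filled in while the cycle runs.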
GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      reduce_memory(false),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      new_space_object_size(0),
      survived_new_space_object_size(0),
      incremental_marking_bytes(0),
      incremental_marking_duration(0.0) {
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}

const char* GCTracer::Event::TypeName(bool short_name) const {
  switch (type) {
    case SCAVENGER:
      return (short_name) ? "s" : "Scavenge";
    case MARK_COMPACTOR:
    case INCREMENTAL_MARK_COMPACTOR:
      return (short_name) ? "ms" : "Mark-sweep";
    case MINOR_MARK_COMPACTOR:
      return (short_name) ? "mmc" : "Minor Mark-Compact";
    case START:
      return (short_name) ? "st" : "Start";
  }
  return "Unknown Event Type";
}

GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      current_(Event::START, GarbageCollectionReason::kUnknown, nullptr),
      previous_(current_),
      incremental_marking_bytes_(0),
      incremental_marking_duration_(0.0),
      incremental_marking_start_time_(0.0),
      recorded_incremental_marking_speed_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0),
      average_mutator_duration_(0),
      average_mark_compact_duration_(0),
      current_mark_compact_mutator_utilization_(1.0),
      previous_mark_compact_end_time_(0) {
  // All accesses to incremental_marking_scope assume that incremental marking
  // scopes come first.
  STATIC_ASSERT(0 == Scope::FIRST_INCREMENTAL_SCOPE);
  // We assume that MC_INCREMENTAL is the first scope so that we can properly
  // map it to RuntimeCallStats.
  STATIC_ASSERT(0 == Scope::MC_INCREMENTAL);
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  for (int i = 0; i < BackgroundScope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
    background_counter_[i].runtime_call_counter = RuntimeCallCounter(nullptr);
  }
}

void GCTracer::ResetForTesting() {
  current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  previous_ = current_;
  ResetIncrementalMarkingCounters();
  allocation_time_ms_ = 0.0;
  new_space_allocation_counter_bytes_ = 0.0;
  old_generation_allocation_counter_bytes_ = 0.0;
  allocation_duration_since_gc_ = 0.0;
  new_space_allocation_in_bytes_since_gc_ = 0.0;
  old_generation_allocation_in_bytes_since_gc_ = 0.0;
  combined_mark_compact_speed_cache_ = 0.0;
  recorded_minor_gcs_total_.Reset();
  recorded_minor_gcs_survived_.Reset();
  recorded_compactions_.Reset();
  recorded_mark_compacts_.Reset();
  recorded_incremental_mark_compacts_.Reset();
  recorded_new_generation_allocations_.Reset();
  recorded_old_generation_allocations_.Reset();
  recorded_context_disposal_times_.Reset();
  recorded_survival_ratios_.Reset();
  start_counter_ = 0;
  average_mutator_duration_ = 0;
  average_mark_compact_duration_ = 0;
  current_mark_compact_mutator_utilization_ = 1.0;
  previous_mark_compact_end_time_ = 0;
  base::MutexGuard guard(&background_counter_mutex_);
  for (int i = 0; i < BackgroundScope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
    background_counter_[i].runtime_call_counter.Reset();
  }
}

void GCTracer::NotifyYoungGenerationHandling(
    YoungGenerationHandling young_generation_handling) {
  DCHECK(current_.type == Event::SCAVENGER || start_counter_ > 1);
  heap_->isolate()->counters()->young_generation_handling()->AddSample(
      static_cast<int>(young_generation_handling));
}

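// Start/Stop calls may nest (e.g. a collection triggered while another is
// already being traced); start_counter_ tracks the nesting depth, and only
// the outermost Start/Stop pair opens and closes the current_ event.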
void GCTracer::Start(GarbageCollector collector,
                     GarbageCollectionReason gc_reason,
                     const char* collector_reason) {
  start_counter_++;
  if (start_counter_ != 1) return;

  previous_ = current_;
  double start_time = heap_->MonotonicallyIncreasingTimeInMs();
  SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(),
                   heap_->OldGenerationAllocationCounter());

  switch (collector) {
    case SCAVENGER:
      current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
      break;
    case MINOR_MARK_COMPACTOR:
      current_ =
          Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
      break;
    case MARK_COMPACTOR:
      if (heap_->incremental_marking()->WasActivated()) {
        current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
                         collector_reason);
      } else {
        current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
      }
      break;
  }

  current_.reduce_memory = heap_->ShouldReduceMemory();
  current_.start_time = start_time;
  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.new_space_object_size = heap_->new_space()->Size();

  current_.incremental_marking_bytes = 0;
  current_.incremental_marking_duration = 0;

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }

  Counters* counters = heap_->isolate()->counters();

  if (Heap::IsYoungGenerationCollector(collector)) {
    counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason));
  } else {
    counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason));
  }
}

void GCTracer::ResetIncrementalMarkingCounters() {
  if (incremental_marking_duration_ > 0) {
    heap_->isolate()->counters()->incremental_marking_sum()->AddSample(
        static_cast<int>(incremental_marking_duration_));
  }
  incremental_marking_bytes_ = 0;
  incremental_marking_duration_ = 0;
  for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
    incremental_marking_scopes_[i].ResetCurrentCycle();
  }
}

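// Closes the current event: records end sizes, folds the incremental-marking
// totals into the event, pushes throughput samples into the ring buffers for
// the finished collector type, and prints tracing output if requested.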
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    if (FLAG_trace_gc_verbose) {
      heap_->isolate()->PrintWithTimestamp(
          "[Finished reentrant %s during %s.]\n",
          Heap::CollectorName(collector), current_.TypeName(false));
    }
    return;
  }

  DCHECK_LE(0, start_counter_);
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MINOR_MARK_COMPACTOR &&
          current_.type == Event::MINOR_MARK_COMPACTOR) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize();

  AddAllocation(current_.end_time);

  double duration = current_.end_time - current_.start_time;

  switch (current_.type) {
    case Event::SCAVENGER:
    case Event::MINOR_MARK_COMPACTOR:
      recorded_minor_gcs_total_.Push(
          MakeBytesAndDuration(current_.new_space_object_size, duration));
      recorded_minor_gcs_survived_.Push(MakeBytesAndDuration(
          current_.survived_new_space_object_size, duration));
      FetchBackgroundMinorGCCounters();
      break;
    case Event::INCREMENTAL_MARK_COMPACTOR:
      current_.incremental_marking_bytes = incremental_marking_bytes_;
      current_.incremental_marking_duration = incremental_marking_duration_;
      for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
        current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
        current_.scopes[i] = incremental_marking_scopes_[i].duration;
      }

      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
                                    current_.incremental_marking_duration);
      recorded_incremental_mark_compacts_.Push(
          MakeBytesAndDuration(current_.start_object_size, duration));
      RecordGCSumCounters(duration);
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      break;
    case Event::MARK_COMPACTOR:
      DCHECK_EQ(0u, current_.incremental_marking_bytes);
      DCHECK_EQ(0, current_.incremental_marking_duration);
      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      recorded_mark_compacts_.Push(
          MakeBytesAndDuration(current_.start_object_size, duration));
      RecordGCSumCounters(duration);
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      break;
    case Event::START:
      UNREACHABLE();
  }
  FetchBackgroundGeneralCounters();

  heap_->UpdateTotalGCTime(duration);

  if ((current_.type == Event::SCAVENGER ||
       current_.type == Event::MINOR_MARK_COMPACTOR) &&
      FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc_nvp) {
    PrintNVP();
  } else {
    Print();
  }

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }
}

void GCTracer::SampleAllocation(double current_ms,
                                size_t new_space_counter_bytes,
                                size_t old_generation_counter_bytes) {
  if (allocation_time_ms_ == 0) {
    // It is the first sample.
    allocation_time_ms_ = current_ms;
    new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
    return;
  }
  // This assumes that counters are unsigned integers so that the subtraction
  // below works even if the new counter is less than the old counter.
  size_t new_space_allocated_bytes =
      new_space_counter_bytes - new_space_allocation_counter_bytes_;
  size_t old_generation_allocated_bytes =
      old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
  double duration = current_ms - allocation_time_ms_;
  allocation_time_ms_ = current_ms;
  new_space_allocation_counter_bytes_ = new_space_counter_bytes;
  old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
  allocation_duration_since_gc_ += duration;
  new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
  old_generation_allocation_in_bytes_since_gc_ +=
      old_generation_allocated_bytes;
}

void GCTracer::AddAllocation(double current_ms) {
  allocation_time_ms_ = current_ms;
  if (allocation_duration_since_gc_ > 0) {
    recorded_new_generation_allocations_.Push(
        MakeBytesAndDuration(new_space_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
    recorded_old_generation_allocations_.Push(
        MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
  }
  allocation_duration_since_gc_ = 0;
  new_space_allocation_in_bytes_since_gc_ = 0;
  old_generation_allocation_in_bytes_since_gc_ = 0;
}

void GCTracer::AddContextDisposalTime(double time) {
  recorded_context_disposal_times_.Push(time);
}

void GCTracer::AddCompactionEvent(double duration,
                                  size_t live_bytes_compacted) {
  recorded_compactions_.Push(
      MakeBytesAndDuration(live_bytes_compacted, duration));
}

void GCTracer::AddSurvivalRatio(double promotion_ratio) {
  recorded_survival_ratios_.Push(promotion_ratio);
}

void GCTracer::AddIncrementalMarkingStep(double duration, size_t bytes) {
  if (bytes > 0) {
    incremental_marking_bytes_ += bytes;
    incremental_marking_duration_ += duration;
  }
}

void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.start());
}

void GCTracer::Print() const {
  double duration = current_.end_time - current_.start_time;
  const size_t kIncrementalStatsSize = 128;
  char incremental_buffer[kIncrementalStatsSize] = {0};

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    base::OS::SNPrintF(
        incremental_buffer, kIncrementalStatsSize,
        " (+ %.1f ms in %d steps since start of marking, "
        "biggest step %.1f ms, walltime since start of marking %.f ms)",
        current_.scopes[Scope::MC_INCREMENTAL],
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].longest_step,
        current_.end_time - incremental_marking_start_time_);
  }

  // Avoid PrintF as Output also appends the string to the tracing ring buffer
  // that gets printed on OOM failures.
  Output(
      "[%d:%p] "
      "%8.0f ms: "
      "%s %.1f (%.1f) -> %.1f (%.1f) MB, "
      "%.1f / %.1f ms %s (average mu = %.3f, current mu = %.3f) %s %s\n",
      base::OS::GetCurrentProcessId(),
      reinterpret_cast<void*>(heap_->isolate()),
      heap_->isolate()->time_millis_since_init(), current_.TypeName(false),
      static_cast<double>(current_.start_object_size) / MB,
      static_cast<double>(current_.start_memory_size) / MB,
      static_cast<double>(current_.end_object_size) / MB,
      static_cast<double>(current_.end_memory_size) / MB, duration,
      TotalExternalTime(), incremental_buffer,
      AverageMarkCompactMutatorUtilization(),
      CurrentMarkCompactMutatorUtilization(),
      Heap::GarbageCollectionReasonToString(current_.gc_reason),
      current_.collector_reason != nullptr ? current_.collector_reason : "");
}

void GCTracer::PrintNVP() const {
  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = current_.start_time - previous_.end_time;
  size_t allocated_since_last_gc =
      current_.start_object_size - previous_.end_object_size;

  double incremental_walltime_duration = 0;

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    incremental_walltime_duration =
        current_.end_time - incremental_marking_start_time_;
  }

  switch (current_.type) {
    case Event::SCAVENGER:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "heap.prologue=%.2f "
          "heap.epilogue=%.2f "
          "heap.epilogue.reduce_new_space=%.2f "
          "heap.external.prologue=%.2f "
          "heap.external.epilogue=%.2f "
          "heap.external_weak_global_handles=%.2f "
          "fast_promote=%.2f "
          "scavenge=%.2f "
          "scavenge.process_array_buffers=%.2f "
          "scavenge.roots=%.2f "
          "scavenge.weak=%.2f "
          "scavenge.weak_global_handles.identify=%.2f "
          "scavenge.weak_global_handles.process=%.2f "
          "scavenge.parallel=%.2f "
          "scavenge.update_refs=%.2f "
          "background.scavenge.parallel=%.2f "
          "background.array_buffer_free=%.2f "
          "background.store_buffer=%.2f "
          "background.unmapper=%.2f "
          "incremental.steps_count=%d "
          "incremental.steps_took=%.1f "
          "scavenge_throughput=%.f "
          "total_size_before=%" PRIuS
          " "
          "total_size_after=%" PRIuS
          " "
          "holes_size_before=%" PRIuS
          " "
          "holes_size_after=%" PRIuS
          " "
          "allocated=%" PRIuS
          " "
          "promoted=%" PRIuS
          " "
          "semi_space_copied=%" PRIuS
          " "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "unmapper_chunks=%d "
          "context_disposal_rate=%.1f\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::HEAP_PROLOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
          current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.scopes[Scope::SCAVENGER_FAST_PROMOTE],
          current_.scopes[Scope::SCAVENGER_SCAVENGE],
          current_.scopes[Scope::SCAVENGER_PROCESS_ARRAY_BUFFERS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_WEAK],
          current_
              .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY],
          current_
              .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL],
          current_.scopes[Scope::SCAVENGER_SCAVENGE_UPDATE_REFS],
          current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL],
          current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
          current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
          current_.incremental_marking_scopes[GCTracer::Scope::MC_INCREMENTAL]
              .steps,
          current_.scopes[Scope::MC_INCREMENTAL],
          ScavengeSpeedInBytesPerMillisecond(), current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          heap_->memory_allocator()->unmapper()->NumberOfChunks(),
          ContextDisposalRateInMilliseconds());
      break;
    case Event::MINOR_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "minor_mc=%.2f "
          "finish_sweeping=%.2f "
          "mark=%.2f "
          "mark.seed=%.2f "
          "mark.roots=%.2f "
          "mark.weak=%.2f "
          "mark.global_handles=%.2f "
          "clear=%.2f "
          "clear.string_table=%.2f "
          "clear.weak_lists=%.2f "
          "evacuate=%.2f "
          "evacuate.copy=%.2f "
          "evacuate.update_pointers=%.2f "
          "evacuate.update_pointers.to_new_roots=%.2f "
          "evacuate.update_pointers.slots=%.2f "
          "background.mark=%.2f "
          "background.evacuate.copy=%.2f "
          "background.evacuate.update_pointers=%.2f "
          "background.array_buffer_free=%.2f "
          "background.store_buffer=%.2f "
          "background.unmapper=%.2f "
          "update_marking_deque=%.2f "
          "reset_liveness=%.2f\n",
          duration, spent_in_mutator, "mmc", current_.reduce_memory,
          current_.scopes[Scope::MINOR_MC],
          current_.scopes[Scope::MINOR_MC_SWEEPING],
          current_.scopes[Scope::MINOR_MC_MARK],
          current_.scopes[Scope::MINOR_MC_MARK_SEED],
          current_.scopes[Scope::MINOR_MC_MARK_ROOTS],
          current_.scopes[Scope::MINOR_MC_MARK_WEAK],
          current_.scopes[Scope::MINOR_MC_MARK_GLOBAL_HANDLES],
          current_.scopes[Scope::MINOR_MC_CLEAR],
          current_.scopes[Scope::MINOR_MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MINOR_MC_CLEAR_WEAK_LISTS],
          current_.scopes[Scope::MINOR_MC_EVACUATE],
          current_.scopes[Scope::MINOR_MC_EVACUATE_COPY],
          current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS],
          current_
              .scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
          current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_MARKING],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY],
          current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
          current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
          current_.scopes[Scope::MINOR_MC_MARKING_DEQUE],
          current_.scopes[Scope::MINOR_MC_RESET_LIVENESS]);
      break;
    case Event::MARK_COMPACTOR:
    case Event::INCREMENTAL_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "heap.prologue=%.2f "
          "heap.embedder_tracing_epilogue=%.2f "
          "heap.epilogue=%.2f "
          "heap.epilogue.reduce_new_space=%.2f "
          "heap.external.prologue=%.1f "
          "heap.external.epilogue=%.1f "
          "heap.external.weak_global_handles=%.1f "
          "clear=%.1f "
          "clear.dependent_code=%.1f "
          "clear.maps=%.1f "
          "clear.slots_buffer=%.1f "
          "clear.store_buffer=%.1f "
          "clear.string_table=%.1f "
          "clear.weak_collections=%.1f "
          "clear.weak_lists=%.1f "
          "clear.weak_references=%.1f "
          "epilogue=%.1f "
          "evacuate=%.1f "
          "evacuate.candidates=%.1f "
          "evacuate.clean_up=%.1f "
          "evacuate.copy=%.1f "
          "evacuate.prologue=%.1f "
          "evacuate.epilogue=%.1f "
          "evacuate.rebalance=%.1f "
          "evacuate.update_pointers=%.1f "
          "evacuate.update_pointers.to_new_roots=%.1f "
          "evacuate.update_pointers.slots.main=%.1f "
          "evacuate.update_pointers.slots.map_space=%.1f "
          "evacuate.update_pointers.weak=%.1f "
          "finish=%.1f "
          "mark=%.1f "
          "mark.finish_incremental=%.1f "
          "mark.roots=%.1f "
          "mark.main=%.1f "
          "mark.weak_closure=%.1f "
          "mark.weak_closure.ephemeron=%.1f "
          "mark.weak_closure.ephemeron.marking=%.1f "
          "mark.weak_closure.ephemeron.linear=%.1f "
          "mark.weak_closure.weak_handles=%.1f "
          "mark.weak_closure.weak_roots=%.1f "
          "mark.weak_closure.harmony=%.1f "
          "mark.embedder_prologue=%.1f "
          "mark.embedder_tracing=%.1f "
          "prologue=%.1f "
          "sweep=%.1f "
          "sweep.code=%.1f "
          "sweep.map=%.1f "
          "sweep.old=%.1f "
          "incremental=%.1f "
          "incremental.finalize=%.1f "
          "incremental.finalize.body=%.1f "
          "incremental.finalize.external.prologue=%.1f "
          "incremental.finalize.external.epilogue=%.1f "
          "incremental.sweeping=%.1f "
          "incremental.embedder_prologue=%.1f "
          "incremental.embedder_tracing=%.1f "
          "incremental_wrapper_tracing_longest_step=%.1f "
          "incremental_finalize_longest_step=%.1f "
          "incremental_finalize_steps_count=%d "
          "incremental_longest_step=%.1f "
          "incremental_steps_count=%d "
          "incremental_marking_throughput=%.f "
          "incremental_walltime_duration=%.f "
          "background.mark=%.1f "
          "background.sweep=%.1f "
          "background.evacuate.copy=%.1f "
          "background.evacuate.update_pointers=%.1f "
          "background.array_buffer_free=%.2f "
          "background.store_buffer=%.2f "
          "background.unmapper=%.1f "
          "total_size_before=%" PRIuS
          " "
          "total_size_after=%" PRIuS
          " "
          "holes_size_before=%" PRIuS
          " "
          "holes_size_after=%" PRIuS
          " "
          "allocated=%" PRIuS
          " "
          "promoted=%" PRIuS
          " "
          "semi_space_copied=%" PRIuS
          " "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "unmapper_chunks=%d "
          "context_disposal_rate=%.1f "
          "compaction_speed=%.f\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::HEAP_PROLOGUE],
          current_.scopes[Scope::HEAP_EMBEDDER_TRACING_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE],
          current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
          current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_CLEAR],
          current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
          current_.scopes[Scope::MC_CLEAR_MAPS],
          current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STORE_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
          current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
          current_.scopes[Scope::MC_CLEAR_WEAK_REFERENCES],
          current_.scopes[Scope::MC_EPILOGUE],
          current_.scopes[Scope::MC_EVACUATE],
          current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
          current_.scopes[Scope::MC_EVACUATE_CLEAN_UP],
          current_.scopes[Scope::MC_EVACUATE_COPY],
          current_.scopes[Scope::MC_EVACUATE_PROLOGUE],
          current_.scopes[Scope::MC_EVACUATE_EPILOGUE],
          current_.scopes[Scope::MC_EVACUATE_REBALANCE],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAP_SPACE],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK],
          current_.scopes[Scope::MC_FINISH], current_.scopes[Scope::MC_MARK],
          current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
          current_.scopes[Scope::MC_MARK_ROOTS],
          current_.scopes[Scope::MC_MARK_MAIN],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_HARMONY],
          current_.scopes[Scope::MC_MARK_EMBEDDER_PROLOGUE],
          current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING],
          current_.scopes[Scope::MC_PROLOGUE], current_.scopes[Scope::MC_SWEEP],
          current_.scopes[Scope::MC_SWEEP_CODE],
          current_.scopes[Scope::MC_SWEEP_MAP],
          current_.scopes[Scope::MC_SWEEP_OLD],
          current_.scopes[Scope::MC_INCREMENTAL],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_SWEEPING],
          current_.scopes[Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_EMBEDDER_TRACING],
          current_
              .incremental_marking_scopes
                  [Scope::MC_INCREMENTAL_EMBEDDER_TRACING]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .steps,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL]
              .longest_step,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
          IncrementalMarkingSpeedInBytesPerMillisecond(),
          incremental_walltime_duration,
          current_.scopes[Scope::MC_BACKGROUND_MARKING],
          current_.scopes[Scope::MC_BACKGROUND_SWEEPING],
          current_.scopes[Scope::MC_BACKGROUND_EVACUATE_COPY],
          current_.scopes[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
          current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
          current_.scopes[Scope::BACKGROUND_UNMAPPER],
          current_.start_object_size, current_.end_object_size,
          current_.start_holes_size, current_.end_holes_size,
          allocated_since_last_gc, heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          heap_->memory_allocator()->unmapper()->NumberOfChunks(),
          ContextDisposalRateInMilliseconds(),
          CompactionSpeedInBytesPerMillisecond());
      break;
    case Event::START:
      break;
    default:
      UNREACHABLE();
  }
}

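// Computes the average speed over the events recorded in the ring buffer,
// seeded with `initial`. If time_ms is non-zero, summation stops once the
// accumulated duration covers that window. The result is clamped to
// [1, 1024 * MB] bytes/ms. For example (illustrative numbers only): events of
// (100 MB, 50 ms) and (50 MB, 25 ms) average to 150 MB / 75 ms = 2 MB/ms.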
double GCTracer::AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer,
                              const BytesAndDuration& initial,
                              double time_ms) {
  BytesAndDuration sum = buffer.Sum(
      [time_ms](BytesAndDuration a, BytesAndDuration b) {
        if (time_ms != 0 && a.second >= time_ms) return a;
        return std::make_pair(a.first + b.first, a.second + b.second);
      },
      initial);
  uint64_t bytes = sum.first;
  double durations = sum.second;
  if (durations == 0.0) return 0;
  double speed = bytes / durations;
  const int max_speed = 1024 * MB;
  const int min_speed = 1;
  if (speed >= max_speed) return max_speed;
  if (speed <= min_speed) return min_speed;
  return speed;
}

double GCTracer::AverageSpeed(
    const base::RingBuffer<BytesAndDuration>& buffer) {
  return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
}

void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) {
  if (duration == 0 || bytes == 0) return;
  double current_speed = bytes / duration;
  if (recorded_incremental_marking_speed_ == 0) {
    recorded_incremental_marking_speed_ = current_speed;
  } else {
    recorded_incremental_marking_speed_ =
        (recorded_incremental_marking_speed_ + current_speed) / 2;
  }
}

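// Tracks how much wall-clock time the mutator gets between mark-compact
// events. Durations are smoothed with an exponential moving average with
// factor 1/2: new_average = (old_average + sample) / 2. Mutator utilization
// is mutator_duration / total_duration for the window between two
// mark-compact end times.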
void GCTracer::RecordMutatorUtilization(double mark_compact_end_time,
                                        double mark_compact_duration) {
  if (previous_mark_compact_end_time_ == 0) {
    // The first event only contributes to previous_mark_compact_end_time_,
    // because we cannot compute the mutator duration.
    previous_mark_compact_end_time_ = mark_compact_end_time;
  } else {
    double total_duration =
        mark_compact_end_time - previous_mark_compact_end_time_;
    double mutator_duration = total_duration - mark_compact_duration;
    if (average_mark_compact_duration_ == 0 && average_mutator_duration_ == 0) {
      // This is the first event with mutator and mark-compact durations.
      average_mark_compact_duration_ = mark_compact_duration;
      average_mutator_duration_ = mutator_duration;
    } else {
      average_mark_compact_duration_ =
          (average_mark_compact_duration_ + mark_compact_duration) / 2;
      average_mutator_duration_ =
          (average_mutator_duration_ + mutator_duration) / 2;
    }
    current_mark_compact_mutator_utilization_ =
        total_duration ? mutator_duration / total_duration : 0;
    previous_mark_compact_end_time_ = mark_compact_end_time;
  }
}

double GCTracer::AverageMarkCompactMutatorUtilization() const {
  double average_total_duration =
      average_mark_compact_duration_ + average_mutator_duration_;
  if (average_total_duration == 0) return 1.0;
  return average_mutator_duration_ / average_total_duration;
}

double GCTracer::CurrentMarkCompactMutatorUtilization() const {
  return current_mark_compact_mutator_utilization_;
}

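// Prefers the smoothed speed recorded across finished cycles; falls back to
// the raw speed of the cycle in progress, and finally to a conservative
// constant (128 KB/ms) when no incremental marking has happened yet.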
double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
  const int kConservativeSpeedInBytesPerMillisecond = 128 * KB;
  if (recorded_incremental_marking_speed_ != 0) {
    return recorded_incremental_marking_speed_;
  }
  if (incremental_marking_duration_ != 0.0) {
    return incremental_marking_bytes_ / incremental_marking_duration_;
  }
  return kConservativeSpeedInBytesPerMillisecond;
}

double GCTracer::ScavengeSpeedInBytesPerMillisecond(
    ScavengeSpeedMode mode) const {
  if (mode == kForAllObjects) {
    return AverageSpeed(recorded_minor_gcs_total_);
  } else {
    return AverageSpeed(recorded_minor_gcs_survived_);
  }
}

double GCTracer::CompactionSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_compactions_);
}

double GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_mark_compacts_);
}

double GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_incremental_mark_compacts_);
}

double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  const double kMinimumMarkingSpeed = 0.5;
  double speed1 = IncrementalMarkingSpeedInBytesPerMillisecond();
  double speed2 = FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
  if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
    // No data for the incremental marking speed.
    // Return the non-incremental mark-compact speed.
    combined_mark_compact_speed_cache_ =
        MarkCompactSpeedInBytesPerMillisecond();
  } else {
    // Combine the speed of incremental step and the speed of the final step.
    // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
    combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
  }
  return combined_mark_compact_speed_cache_;
}

double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = new_space_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_new_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = old_generation_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_old_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::AllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
         OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
}

double GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
  return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
}

double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
    const {
  return OldGenerationAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}

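// Returns an estimate of the average time between recent context disposals,
// or 0 until the ring buffer has filled up. The fold below effectively keeps
// the last element visited (the oldest buffered timestamp), so
// (now - oldest) / count approximates milliseconds per disposal.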
double GCTracer::ContextDisposalRateInMilliseconds() const {
  if (recorded_context_disposal_times_.Count() <
      recorded_context_disposal_times_.kSize)
    return 0.0;
  double begin = heap_->MonotonicallyIncreasingTimeInMs();
  double end = recorded_context_disposal_times_.Sum(
      [](double a, double b) { return b; }, 0.0);
  return (begin - end) / recorded_context_disposal_times_.Count();
}

double GCTracer::AverageSurvivalRatio() const {
  if (recorded_survival_ratios_.Count() == 0) return 0.0;
  double sum = recorded_survival_ratios_.Sum(
      [](double a, double b) { return a + b; }, 0.0);
  return sum / recorded_survival_ratios_.Count();
}

bool GCTracer::SurvivalEventsRecorded() const {
  return recorded_survival_ratios_.Count() > 0;
}

void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }

void GCTracer::NotifyIncrementalMarkingStart() {
  incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs();
}

void GCTracer::FetchBackgroundMarkCompactCounters() {
  FetchBackgroundCounters(Scope::FIRST_MC_BACKGROUND_SCOPE,
                          Scope::LAST_MC_BACKGROUND_SCOPE,
                          BackgroundScope::FIRST_MC_BACKGROUND_SCOPE,
                          BackgroundScope::LAST_MC_BACKGROUND_SCOPE);
  heap_->isolate()->counters()->background_marking()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_MARKING]));
  heap_->isolate()->counters()->background_sweeping()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_SWEEPING]));
}

void GCTracer::FetchBackgroundMinorGCCounters() {
  FetchBackgroundCounters(Scope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
                          Scope::LAST_MINOR_GC_BACKGROUND_SCOPE,
                          BackgroundScope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
                          BackgroundScope::LAST_MINOR_GC_BACKGROUND_SCOPE);
  heap_->isolate()->counters()->background_scavenger()->AddSample(
      static_cast<int>(
          current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL]));
}

void GCTracer::FetchBackgroundGeneralCounters() {
  FetchBackgroundCounters(Scope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          Scope::LAST_GENERAL_BACKGROUND_SCOPE,
                          BackgroundScope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          BackgroundScope::LAST_GENERAL_BACKGROUND_SCOPE);
}

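// Folds the durations accumulated by background threads (in
// background_counter_) into the matching scopes of the current event, then
// zeroes them. The two enum ranges must be parallel, which the DCHECK below
// enforces.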
void GCTracer::FetchBackgroundCounters(int first_global_scope,
                                       int last_global_scope,
                                       int first_background_scope,
                                       int last_background_scope) {
  DCHECK_EQ(last_global_scope - first_global_scope,
            last_background_scope - first_background_scope);
  base::MutexGuard guard(&background_counter_mutex_);
  int background_mc_scopes = last_background_scope - first_background_scope + 1;
  for (int i = 0; i < background_mc_scopes; i++) {
    current_.scopes[first_global_scope + i] +=
        background_counter_[first_background_scope + i].total_duration_ms;
    background_counter_[first_background_scope + i].total_duration_ms = 0;
  }
  if (V8_LIKELY(!FLAG_runtime_stats)) return;
  RuntimeCallStats* runtime_stats =
      heap_->isolate()->counters()->runtime_call_stats();
  if (!runtime_stats) return;
  for (int i = 0; i < background_mc_scopes; i++) {
    runtime_stats
        ->GetCounter(GCTracer::RCSCounterFromScope(
            static_cast<Scope::ScopeId>(first_global_scope + i)))
        ->Add(&background_counter_[first_background_scope + i]
                   .runtime_call_counter);
    background_counter_[first_background_scope + i]
        .runtime_call_counter.Reset();
  }
}

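// Called from background threads (via BackgroundScope's destructor), so the
// shared counter table is protected by background_counter_mutex_.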
void GCTracer::AddBackgroundScopeSample(
    BackgroundScope::ScopeId scope, double duration,
    RuntimeCallCounter* runtime_call_counter) {
  base::MutexGuard guard(&background_counter_mutex_);
  BackgroundCounter& counter = background_counter_[scope];
  counter.total_duration_ms += duration;
  if (runtime_call_counter) {
    counter.runtime_call_counter.Add(runtime_call_counter);
  }
}

void GCTracer::RecordGCPhasesHistograms(TimedHistogram* gc_timer) {
  Counters* counters = heap_->isolate()->counters();
  if (gc_timer == counters->gc_finalize()) {
    DCHECK_EQ(Scope::FIRST_TOP_MC_SCOPE, Scope::MC_CLEAR);
    counters->gc_finalize_clear()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_CLEAR]));
    counters->gc_finalize_epilogue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EPILOGUE]));
    counters->gc_finalize_evacuate()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EVACUATE]));
    counters->gc_finalize_finish()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_FINISH]));
    counters->gc_finalize_mark()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_MARK]));
    counters->gc_finalize_prologue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_PROLOGUE]));
    counters->gc_finalize_sweep()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_SWEEP]));
    DCHECK_EQ(Scope::LAST_TOP_MC_SCOPE, Scope::MC_SWEEP);
  } else if (gc_timer == counters->gc_scavenger()) {
    counters->gc_scavenger_scavenge_main()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL]));
    counters->gc_scavenger_scavenge_roots()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS]));
  }
}

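// Aggregates the full cost of a mark-compact cycle: the incremental phases
// (start, sweeping, steps, finalize) plus the atomic pause, and the time
// spent on background threads. The totals are reported both as a UMA sample
// and as "V8.GCMarkCompactorSummary" / "V8.GCMarkCompactorMarkingSummary"
// trace-event counters.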
void GCTracer::RecordGCSumCounters(double atomic_pause_duration) {
  base::MutexGuard guard(&background_counter_mutex_);

  const double overall_duration =
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_START]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_SWEEPING]
          .duration +
      incremental_marking_duration_ +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE]
          .duration +
      atomic_pause_duration;
  const double background_duration =
      background_counter_[BackgroundScope::MC_BACKGROUND_EVACUATE_COPY]
          .total_duration_ms +
      background_counter_
          [BackgroundScope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS]
              .total_duration_ms +
      background_counter_[BackgroundScope::MC_BACKGROUND_MARKING]
          .total_duration_ms +
      background_counter_[BackgroundScope::MC_BACKGROUND_SWEEPING]
          .total_duration_ms;

  const double marking_duration =
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_START]
          .duration +
      incremental_marking_duration_ +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE]
          .duration +
      current_.scopes[Scope::MC_MARK];
  const double marking_background_duration =
      background_counter_[BackgroundScope::MC_BACKGROUND_MARKING]
          .total_duration_ms;

  // UMA.
  heap_->isolate()->counters()->gc_mark_compactor()->AddSample(
      static_cast<int>(overall_duration));

  // Emit trace event counters.
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorSummary", TRACE_EVENT_SCOPE_THREAD,
                       "duration", overall_duration, "background_duration",
                       background_duration);
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorMarkingSummary",
                       TRACE_EVENT_SCOPE_THREAD, "duration", marking_duration,
                       "background_duration", marking_background_duration);
}

}  // namespace internal
}  // namespace v8