#include "src/heap/scavenge-job.h"

#include <utility>

#include "src/base/platform/time.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/spaces.h"
#include "src/isolate.h"
#include "src/vm-state-inl.h"

const double ScavengeJob::kMaxAllocationLimitAsFractionOfNewSpace = 0.8;
22 void ScavengeJob::IdleTask::RunInternal(
double deadline_in_seconds) {
23 VMState<GC> state(isolate());
24 TRACE_EVENT_CALL_STATS_SCOPED(isolate(),
"v8",
"V8.Task");
25 Heap* heap = isolate()->heap();
26 double deadline_in_ms =
28 static_cast<double>(base::Time::kMillisecondsPerSecond);
29 double start_ms = heap->MonotonicallyIncreasingTimeInMs();
30 double idle_time_in_ms = deadline_in_ms - start_ms;
31 double scavenge_speed_in_bytes_per_ms =
32 heap->tracer()->ScavengeSpeedInBytesPerMillisecond();
33 size_t new_space_size = heap->new_space()->Size();
34 size_t new_space_capacity = heap->new_space()->Capacity();
36 job_->NotifyIdleTask();
38 if (ReachedIdleAllocationLimit(scavenge_speed_in_bytes_per_ms, new_space_size,
39 new_space_capacity)) {
40 if (EnoughIdleTimeForScavenge(
41 idle_time_in_ms, scavenge_speed_in_bytes_per_ms, new_space_size)) {
42 heap->CollectGarbage(NEW_SPACE, GarbageCollectionReason::kIdleTask);
45 job_->RescheduleIdleTask(heap);
50 bool ScavengeJob::ReachedIdleAllocationLimit(
51 double scavenge_speed_in_bytes_per_ms,
size_t new_space_size,
52 size_t new_space_capacity) {
53 if (scavenge_speed_in_bytes_per_ms == 0) {
54 scavenge_speed_in_bytes_per_ms = kInitialScavengeSpeedInBytesPerMs;
59 double allocation_limit = kAverageIdleTimeMs * scavenge_speed_in_bytes_per_ms;
63 Min<double>(allocation_limit,
64 new_space_capacity * kMaxAllocationLimitAsFractionOfNewSpace);
69 Max<double>(allocation_limit - kBytesAllocatedBeforeNextIdleTask,
72 return allocation_limit <= new_space_size;
75 bool ScavengeJob::EnoughIdleTimeForScavenge(
76 double idle_time_in_ms,
double scavenge_speed_in_bytes_per_ms,
77 size_t new_space_size) {
78 if (scavenge_speed_in_bytes_per_ms == 0) {
79 scavenge_speed_in_bytes_per_ms = kInitialScavengeSpeedInBytesPerMs;
81 return new_space_size <= idle_time_in_ms * scavenge_speed_in_bytes_per_ms;
85 void ScavengeJob::RescheduleIdleTask(Heap* heap) {
88 if (!idle_task_rescheduled_) {
89 ScheduleIdleTask(heap);
90 idle_task_rescheduled_ =
true;
95 void ScavengeJob::ScheduleIdleTaskIfNeeded(Heap* heap,
int bytes_allocated) {
96 bytes_allocated_since_the_last_task_ += bytes_allocated;
97 if (bytes_allocated_since_the_last_task_ >=
98 static_cast<int>(kBytesAllocatedBeforeNextIdleTask)) {
99 ScheduleIdleTask(heap);
100 bytes_allocated_since_the_last_task_ = 0;
101 idle_task_rescheduled_ =
false;
106 void ScavengeJob::ScheduleIdleTask(Heap* heap) {
107 if (!idle_task_pending_ && !heap->IsTearingDown()) {
108 v8::Isolate* isolate =
reinterpret_cast<v8::Isolate*
>(heap->isolate());
109 if (V8::GetCurrentPlatform()->IdleTasksEnabled(isolate)) {
110 idle_task_pending_ =
true;
111 auto task = base::make_unique<IdleTask>(heap->isolate(),
this);
112 V8::GetCurrentPlatform()->GetForegroundTaskRunner(isolate)->PostIdleTask(