5 #include "src/heap/incremental-marking.h" 7 #include "src/code-stubs.h" 8 #include "src/compilation-cache.h" 9 #include "src/conversions.h" 10 #include "src/heap/concurrent-marking.h" 11 #include "src/heap/embedder-tracing.h" 12 #include "src/heap/gc-idle-time-handler.h" 13 #include "src/heap/gc-tracer.h" 14 #include "src/heap/heap-inl.h" 15 #include "src/heap/incremental-marking-inl.h" 16 #include "src/heap/mark-compact-inl.h" 17 #include "src/heap/object-stats.h" 18 #include "src/heap/objects-visiting-inl.h" 19 #include "src/heap/objects-visiting.h" 20 #include "src/heap/sweeper.h" 21 #include "src/objects/hash-table-inl.h" 22 #include "src/objects/slots-inl.h" 23 #include "src/tracing/trace-event.h" 25 #include "src/visitors.h" 26 #include "src/vm-state-inl.h" 31 using IncrementalMarkingMarkingVisitor =
32 MarkingVisitor<FixedArrayVisitationMode::kIncremental,
33 TraceRetainingPathMode::kDisabled,
34 IncrementalMarking::MarkingState>;
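// Observer::Step is invoked by the allocation observers installed on the
// young- and old-generation spaces. It advances incremental marking on
// allocation and, while black allocation is active, ensures that a freshly
// allocated old-generation object (or linear allocation area) is black.
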
void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
                                        size_t size) {
  Heap* heap = incremental_marking_.heap();
  VMState<GC> state(heap->isolate());
  RuntimeCallTimerScope runtime_timer(
      heap->isolate(),
      RuntimeCallCounterId::kGC_Custom_IncrementalMarkingObserver);
  incremental_marking_.AdvanceIncrementalMarkingOnAllocation();
  if (incremental_marking_.black_allocation() && addr != kNullAddress) {
    // AdvanceIncrementalMarkingOnAllocation can start black allocation.
    // Ensure that the new object is marked black.
    HeapObject* object = HeapObject::FromAddress(addr);
    if (incremental_marking_.marking_state()->IsWhite(object) &&
        !(Heap::InNewSpace(object) || heap->new_lo_space()->Contains(object))) {
      if (heap->IsLargeObject(object)) {
        incremental_marking_.marking_state()->WhiteToBlack(object);
      } else {
        Page::FromAddress(addr)->CreateBlackArea(addr, addr + size);
      }
    }
  }
}

IncrementalMarking::IncrementalMarking(
    Heap* heap, MarkCompactCollector::MarkingWorklist* marking_worklist,
    WeakObjects* weak_objects)
    : heap_(heap),
      marking_worklist_(marking_worklist),
      weak_objects_(weak_objects),
      initial_old_generation_size_(0),
      bytes_marked_ahead_of_schedule_(0),
      bytes_marked_concurrently_(0),
      unscanned_bytes_of_large_object_(0),
      is_compacting_(false),
      was_activated_(false),
      black_allocation_(false),
      finalize_marking_completed_(false),
      trace_wrappers_toggle_(false),
      new_generation_observer_(*this, kYoungGenerationAllocatedThreshold),
      old_generation_observer_(*this, kOldGenerationAllocatedThreshold) {
  DCHECK_NOT_NULL(marking_worklist_);
}

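// Write-barrier slow path shared by the RecordWrite* functions below: the
// written value is marked grey and pushed onto the marking worklist, and the
// return value tells the caller whether the slot still has to be recorded
// for the compacting phase.
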
bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
  HeapObject* value_heap_obj = HeapObject::cast(value);
  DCHECK(!marking_state()->IsImpossible(value_heap_obj));
  DCHECK(!marking_state()->IsImpossible(obj));
#ifdef V8_CONCURRENT_MARKING
  const bool need_recording = true;
#else
  const bool need_recording = marking_state()->IsBlack(obj);
#endif

  if (need_recording && WhiteToGreyAndPush(value_heap_obj)) {
    RestartIfNotMarking();
  }
  return is_compacting_ && need_recording;
}

void IncrementalMarking::RecordWriteSlow(HeapObject* obj, HeapObjectSlot slot,
                                         Object* value) {
  if (BaseRecordWrite(obj, value) && slot.address() != kNullAddress) {
    // The object is not going to be rescanned; record the slot.
    heap_->mark_compact_collector()->RecordSlot(obj, slot,
                                                HeapObject::cast(value));
  }
}

int IncrementalMarking::RecordWriteFromCode(HeapObject* obj,
                                            Address slot_address,
                                            Isolate* isolate) {
  DCHECK(obj->IsHeapObject());
  MaybeObjectSlot slot(slot_address);
  isolate->heap()->incremental_marking()->RecordMaybeWeakWrite(obj, slot,
                                                               *slot);
  return 0;
}

void IncrementalMarking::RecordWriteIntoCode(Code host, RelocInfo* rinfo,
                                             HeapObject* value) {
  if (BaseRecordWrite(host, value)) {
    heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
  }
}

bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    marking_worklist()->Push(obj);
    return true;
  }
  return false;
}

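// MarkBlackAndPush colors an object black immediately and pushes it onto the
// bailout worklist (or the regular worklist when concurrent marking is off)
// so that the main-thread marker revisits it. Used by NotifyLeftTrimming
// below.
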
void IncrementalMarking::MarkBlackAndPush(HeapObject* obj) {
  DCHECK(!obj->IsFixedArray() && !obj->IsFixedDoubleArray());
  marking_state()->WhiteToGrey(obj);
  if (marking_state()->GreyToBlack(obj)) {
    if (FLAG_concurrent_marking) {
      marking_worklist()->PushBailout(obj);
    } else {
      marking_worklist()->Push(obj);
    }
  }
}

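// Left-trimming moves the start of an array from |from| to |to| within the
// same page. The mark bits at the new object start have to be updated so
// that both the incremental and the concurrent marker observe a consistent
// color for the trimmed array.
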
void IncrementalMarking::NotifyLeftTrimming(HeapObject* from, HeapObject* to) {
  DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone());
  DCHECK_EQ(MemoryChunk::FromAddress(from->address()),
            MemoryChunk::FromAddress(to->address()));

  MarkBit old_mark_bit = marking_state()->MarkBitFrom(from);
  MarkBit new_mark_bit = marking_state()->MarkBitFrom(to);

  if (black_allocation() && Marking::IsBlack<kAtomicity>(new_mark_bit)) {
    // Nothing to do if the object is in a black allocation area.
    return;
  }

  bool marked_black_due_to_left_trimming = false;
  if (FLAG_concurrent_marking) {
    Marking::WhiteToGrey<kAtomicity>(old_mark_bit);
    if (Marking::GreyToBlack<kAtomicity>(old_mark_bit)) {
      marked_black_due_to_left_trimming = true;
    }
    DCHECK(Marking::IsBlack<kAtomicity>(old_mark_bit));
  }

  if (Marking::IsBlack<kAtomicity>(old_mark_bit) &&
      !marked_black_due_to_left_trimming) {
    if (from->address() + kPointerSize == to->address()) {
      // The old and the new mark bits overlap; set the second bit so that the
      // grey |to| object becomes black.
      DCHECK(new_mark_bit.Get<kAtomicity>());
      new_mark_bit.Next().Set<kAtomicity>();
    } else {
      bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
      DCHECK(success);
      USE(success);
    }
  } else if (Marking::IsGrey<kAtomicity>(old_mark_bit) ||
             marked_black_due_to_left_trimming) {
    if (from->address() + kPointerSize == to->address()) {
      // The old and the new mark bits overlap; set the first bit to make sure
      // the |to| object is grey.
      new_mark_bit.Set<kAtomicity>();
      DCHECK(!new_mark_bit.Next().Get<kAtomicity>());
    } else {
      bool success = Marking::WhiteToGrey<kAtomicity>(new_mark_bit);
      DCHECK(success);
      USE(success);
    }
    DCHECK(Marking::IsGrey<kAtomicity>(new_mark_bit));
    marking_worklist()->PushBailout(to);
    RestartIfNotMarking();
  }
}

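// Root visitor used when (re)scanning the roots: every strong root is marked
// grey and pushed onto the marking worklist.
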
class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
 public:
  explicit IncrementalMarkingRootMarkingVisitor(
      IncrementalMarking* incremental_marking)
      : heap_(incremental_marking->heap()) {}

  void VisitRootPointer(Root root, const char* description,
                        ObjectSlot p) override {
    MarkObjectByPointer(p);
  }

  void VisitRootPointers(Root root, const char* description, ObjectSlot start,
                         ObjectSlot end) override {
    for (ObjectSlot p = start; p < end; ++p) MarkObjectByPointer(p);
  }

 private:
  void MarkObjectByPointer(ObjectSlot p) {
    Object* obj = *p;
    if (!obj->IsHeapObject()) return;
    heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
  }

  Heap* heap_;
};

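// The incremental write barrier is toggled via page flags: every page of the
// old, map, code, new and large object spaces is flagged so that generated
// code and runtime write barriers take the incremental-marking slow path.
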
void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
    PagedSpace* space) {
  for (Page* p : *space) {
    p->SetOldGenerationPageFlags(false);
  }
}

void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
    NewSpace* space) {
  for (Page* p : *space) {
    p->SetYoungGenerationPageFlags(false);
  }
}

void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
  DeactivateIncrementalWriteBarrierForSpace(heap_->old_space());
  DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
  DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
  DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());

  for (LargePage* p : *heap_->lo_space()) {
    p->SetOldGenerationPageFlags(false);
  }

  for (LargePage* p : *heap_->code_lo_space()) {
    p->SetOldGenerationPageFlags(false);
  }
}

void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
  for (Page* p : *space) {
    p->SetOldGenerationPageFlags(true);
  }
}

void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
  for (Page* p : *space) {
    p->SetYoungGenerationPageFlags(true);
  }
}

void IncrementalMarking::ActivateIncrementalWriteBarrier() {
  ActivateIncrementalWriteBarrier(heap_->old_space());
  ActivateIncrementalWriteBarrier(heap_->map_space());
  ActivateIncrementalWriteBarrier(heap_->code_space());
  ActivateIncrementalWriteBarrier(heap_->new_space());

  for (LargePage* p : *heap_->lo_space()) {
    p->SetOldGenerationPageFlags(true);
  }

  for (LargePage* p : *heap_->code_lo_space()) {
    p->SetOldGenerationPageFlags(true);
  }
}

bool IncrementalMarking::WasActivated() { return was_activated_; }

bool IncrementalMarking::CanBeActivated() {
  return FLAG_incremental_marking && heap_->gc_state() == Heap::NOT_IN_GC &&
         heap_->deserialization_complete() &&
         !heap_->isolate()->serializer_enabled();
}

void IncrementalMarking::Deactivate() {
  DeactivateIncrementalWriteBarrier();
}

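// Start() transitions the collector out of the STOPPED state: marking starts
// immediately unless the previous cycle's sweeper is still running, in which
// case the state becomes SWEEPING. It also installs the allocation observers
// and kicks off the incremental marking job.
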
void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
  if (FLAG_trace_incremental_marking) {
    int old_generation_size_mb =
        static_cast<int>(heap()->OldGenerationSizeOfObjects() / MB);
    int old_generation_limit_mb =
        static_cast<int>(heap()->old_generation_allocation_limit() / MB);
    heap()->isolate()->PrintWithTimestamp(
        "[IncrementalMarking] Start (%s): old generation %dMB, limit %dMB, "
        "slack %dMB\n",
        Heap::GarbageCollectionReasonToString(gc_reason),
        old_generation_size_mb, old_generation_limit_mb,
        Max(0, old_generation_limit_mb - old_generation_size_mb));
  }
  DCHECK(FLAG_incremental_marking);
  DCHECK(state_ == STOPPED);
  DCHECK(heap_->gc_state() == Heap::NOT_IN_GC);
  DCHECK(!heap_->isolate()->serializer_enabled());

  Counters* counters = heap_->isolate()->counters();

  counters->incremental_marking_reason()->AddSample(
      static_cast<int>(gc_reason));
  HistogramTimerScope incremental_marking_scope(
      counters->gc_incremental_marking_start());
  TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart");
  TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START);
  heap_->tracer()->NotifyIncrementalMarkingStart();

  start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
  initial_old_generation_size_ = heap_->OldGenerationSizeOfObjects();
  old_generation_allocation_counter_ = heap_->OldGenerationAllocationCounter();
  bytes_allocated_ = 0;
  bytes_marked_ahead_of_schedule_ = 0;
  bytes_marked_concurrently_ = 0;
  should_hurry_ = false;
  was_activated_ = true;

  if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
    StartMarking();
  } else {
    if (FLAG_trace_incremental_marking) {
      heap()->isolate()->PrintWithTimestamp(
          "[IncrementalMarking] Start sweeping.\n");
    }
    SetState(SWEEPING);
  }

  heap_->AddAllocationObserversToAllSpaces(&old_generation_observer_,
                                           &new_generation_observer_);
  incremental_marking_job()->Start(heap_);
}

void IncrementalMarking::StartMarking() {
  if (heap_->isolate()->serializer_enabled()) {
    // Black allocation cannot be enabled while deserializing, so delay the
    // start of incremental marking in that case.
    if (FLAG_trace_incremental_marking) {
      heap()->isolate()->PrintWithTimestamp(
          "[IncrementalMarking] Start delayed - serializer\n");
    }
    return;
  }
  if (FLAG_trace_incremental_marking) {
    heap()->isolate()->PrintWithTimestamp(
        "[IncrementalMarking] Start marking\n");
  }

  is_compacting_ =
      !FLAG_never_compact && heap_->mark_compact_collector()->StartCompaction();

  SetState(MARKING);

  {
    TRACE_GC(heap()->tracer(),
             GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE);
    heap_->local_embedder_heap_tracer()->TracePrologue();
  }

  ActivateIncrementalWriteBarrier();

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
  }
#endif

  heap_->isolate()->compilation_cache()->MarkCompactPrologue();

#ifdef V8_CONCURRENT_MARKING
  if (!black_allocation_) {
    StartBlackAllocation();
  }
#endif

  IncrementalMarkingRootMarkingVisitor visitor(this);
  heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);

  if (FLAG_concurrent_marking && !heap_->IsTearingDown()) {
    heap_->concurrent_marking()->ScheduleTasks();
  }

  if (FLAG_trace_incremental_marking) {
    heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Running\n");
  }
}

void IncrementalMarking::StartBlackAllocation() {
  DCHECK(FLAG_black_allocation);
  DCHECK(!black_allocation_);
  black_allocation_ = true;
  heap()->old_space()->MarkLinearAllocationAreaBlack();
  heap()->map_space()->MarkLinearAllocationAreaBlack();
  heap()->code_space()->MarkLinearAllocationAreaBlack();
  if (FLAG_trace_incremental_marking) {
    heap()->isolate()->PrintWithTimestamp(
        "[IncrementalMarking] Black allocation started\n");
  }
}

void IncrementalMarking::PauseBlackAllocation() {
  DCHECK(FLAG_black_allocation);
  heap()->old_space()->UnmarkLinearAllocationArea();
  heap()->map_space()->UnmarkLinearAllocationArea();
  heap()->code_space()->UnmarkLinearAllocationArea();
  if (FLAG_trace_incremental_marking) {
    heap()->isolate()->PrintWithTimestamp(
        "[IncrementalMarking] Black allocation paused\n");
  }
  black_allocation_ = false;
}

void IncrementalMarking::FinishBlackAllocation() {
  if (black_allocation_) {
    black_allocation_ = false;
    if (FLAG_trace_incremental_marking) {
      heap()->isolate()->PrintWithTimestamp(
          "[IncrementalMarking] Black allocation finished\n");
    }
  }
}

void IncrementalMarking::MarkRoots() {
  DCHECK(!finalize_marking_completed_);

  IncrementalMarkingRootMarkingVisitor visitor(this);
  heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
}

bool IncrementalMarking::ShouldRetainMap(Map map, int age) {
  if (age == 0) return false;
  Object* constructor = map->GetConstructor();
  if (!constructor->IsHeapObject() ||
      marking_state()->IsWhite(HeapObject::cast(constructor))) {
    // The constructor is dead: do not retain the map.
    return false;
  }
  return true;
}

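// RetainMaps ages the maps recorded in heap()->retained_maps() and keeps
// recently used maps alive for up to FLAG_retain_maps_for_n_gc collections,
// so hot object shapes are not thrown away by a single GC.
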
void IncrementalMarking::RetainMaps() {
  // Do not retain dead maps if the flag disables it or there is memory
  // pressure.
  bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
                                   FLAG_retain_maps_for_n_gc == 0;
  WeakArrayList* retained_maps = heap()->retained_maps();
  int length = retained_maps->length();
  // number_of_disposed_maps separates maps in the retained_maps array that
  // were created before and after context disposal.
  int number_of_disposed_maps = heap()->number_of_disposed_maps_;
  for (int i = 0; i < length; i += 2) {
    MaybeObject value = retained_maps->Get(i);
    HeapObject* map_heap_object;
    if (!value->GetHeapObjectIfWeak(&map_heap_object)) {
      continue;
    }
    int age = retained_maps->Get(i + 1).ToSmi().value();
    int new_age;
    Map map = Map::cast(map_heap_object);
    if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
        marking_state()->IsWhite(map)) {
      if (ShouldRetainMap(map, age)) {
        WhiteToGreyAndPush(map);
      }
      Object* prototype = map->prototype();
      if (age > 0 && prototype->IsHeapObject() &&
          marking_state()->IsWhite(HeapObject::cast(prototype))) {
        // The prototype is not marked, age the map.
        new_age = age - 1;
      } else {
        // The prototype is marked, reset the map age.
        new_age = FLAG_retain_maps_for_n_gc;
      }
    } else {
      new_age = FLAG_retain_maps_for_n_gc;
    }
    // Compare and swap the new age.
    if (new_age != age) {
      retained_maps->Set(i + 1, MaybeObject::FromSmi(Smi::FromInt(new_age)));
    }
  }
}

void IncrementalMarking::FinalizeIncrementally() {
  TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE_BODY);
  DCHECK(!finalize_marking_completed_);

  double start = heap_->MonotonicallyIncreasingTimeInMs();

  // Re-scan the roots to catch changes to the root set, and age/retain maps
  // embedded in optimized code.
  MarkRoots();
  RetainMaps();

  finalize_marking_completed_ = true;

  if (FLAG_black_allocation && !heap()->ShouldReduceMemory() &&
      !black_allocation_) {
    StartBlackAllocation();
  }

  if (FLAG_trace_incremental_marking) {
    double end = heap_->MonotonicallyIncreasingTimeInMs();
    double delta = end - start;
    heap()->isolate()->PrintWithTimestamp(
        "[IncrementalMarking] Finalize incrementally spent %.1f ms.\n", delta);
  }
}

void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
  if (!IsMarking()) return;

  Map filler_map = ReadOnlyRoots(heap_).one_pointer_filler_map();

#ifdef ENABLE_MINOR_MC
  MinorMarkCompactCollector::MarkingState* minor_marking_state =
      heap()->minor_mark_compact_collector()->marking_state();
#else
  void* minor_marking_state = nullptr;
#endif  // ENABLE_MINOR_MC

  marking_worklist()->Update([this, filler_map, minor_marking_state](
                                 HeapObject* obj, HeapObject** out) -> bool {
    DCHECK(obj->IsHeapObject());
    // Only pointers to from space have to be updated.
    if (Heap::InFromSpace(obj)) {
      MapWord map_word = obj->map_word();
      if (!map_word.IsForwardingAddress()) {
        // The object died during the scavenge; drop the entry.
        return false;
      }
      HeapObject* dest = map_word.ToForwardingAddress();
      DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
      *out = dest;
      return true;
    } else if (Heap::InToSpace(obj)) {
      DCHECK(Page::FromAddress(obj->address())
                 ->IsFlagSet(Page::SWEEP_TO_ITERATE));
#ifdef ENABLE_MINOR_MC
      if (minor_marking_state->IsGrey(obj)) {
        *out = obj;
        return true;
      }
#endif  // ENABLE_MINOR_MC
      return false;
    } else {
      if (Page::FromAddress(obj->address())
              ->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
#ifdef ENABLE_MINOR_MC
        if (minor_marking_state->IsGrey(obj)) {
          *out = obj;
          return true;
        }
#endif  // ENABLE_MINOR_MC
        return false;
      }
      DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
      if (obj->map() != filler_map) {
        *out = obj;
        return true;
      }
      return false;
    }
  });

  UpdateWeakReferencesAfterScavenge();
}

template <typename T, typename = typename std::enable_if<
                          std::is_base_of<HeapObject, T>::value>::type>
T* ForwardingAddress(T* heap_obj) {
  MapWord map_word = heap_obj->map_word();

  if (map_word.IsForwardingAddress()) {
    return T::cast(map_word.ToForwardingAddress());
  } else if (Heap::InNewSpace(heap_obj)) {
    // Dead new-space object without a forwarding address.
    return nullptr;
  } else {
    return heap_obj;
  }
}

template <typename T, typename = typename std::enable_if<
                          std::is_base_of<HeapObjectPtr, T>::value>::type>
T ForwardingAddress(T heap_obj) {
  MapWord map_word = heap_obj->map_word();

  if (map_word.IsForwardingAddress()) {
    return T::cast(map_word.ToForwardingAddress());
  } else if (Heap::InNewSpace(heap_obj)) {
    // Dead new-space object without a forwarding address.
    return T();
  } else {
    return heap_obj;
  }
}

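// Weak references, weak objects recorded in code, and ephemerons collected by
// the marker also hold new-space pointers; they are updated with the same
// ForwardingAddress helpers.
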
void IncrementalMarking::UpdateWeakReferencesAfterScavenge() {
  weak_objects_->weak_references.Update(
      [](std::pair<HeapObject*, HeapObjectSlot> slot_in,
         std::pair<HeapObject*, HeapObjectSlot>* slot_out) -> bool {
        HeapObject* heap_obj = slot_in.first;
        HeapObject* forwarded = ForwardingAddress(heap_obj);

        if (forwarded) {
          ptrdiff_t distance_to_slot =
              slot_in.second.address() -
              reinterpret_cast<Address>(slot_in.first);
          Address new_slot =
              reinterpret_cast<Address>(forwarded) + distance_to_slot;
          slot_out->first = forwarded;
          slot_out->second = HeapObjectSlot(new_slot);
          return true;
        }

        return false;
      });
  weak_objects_->weak_objects_in_code.Update(
      [](std::pair<HeapObject*, Code> slot_in,
         std::pair<HeapObject*, Code>* slot_out) -> bool {
        HeapObject* heap_obj = slot_in.first;
        HeapObject* forwarded = ForwardingAddress(heap_obj);

        if (forwarded) {
          slot_out->first = forwarded;
          slot_out->second = slot_in.second;
          return true;
        }

        return false;
      });
  weak_objects_->ephemeron_hash_tables.Update(
      [](EphemeronHashTable slot_in, EphemeronHashTable* slot_out) -> bool {
        EphemeronHashTable forwarded = ForwardingAddress(slot_in);

        if (!forwarded.is_null()) {
          *slot_out = forwarded;
          return true;
        }

        return false;
      });

  auto ephemeron_updater = [](Ephemeron slot_in, Ephemeron* slot_out) -> bool {
    HeapObject* key = slot_in.key;
    HeapObject* value = slot_in.value;
    HeapObject* forwarded_key = ForwardingAddress(key);
    HeapObject* forwarded_value = ForwardingAddress(value);

    if (forwarded_key && forwarded_value) {
      *slot_out = Ephemeron{forwarded_key, forwarded_value};
      return true;
    }

    return false;
  };

  weak_objects_->current_ephemerons.Update(ephemeron_updater);
  weak_objects_->next_ephemerons.Update(ephemeron_updater);
  weak_objects_->discovered_ephemerons.Update(ephemeron_updater);
}

void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
    size_t dead_bytes_in_new_space) {
  if (!IsMarking()) return;
  bytes_marked_ahead_of_schedule_ -=
      Min(bytes_marked_ahead_of_schedule_, dead_bytes_in_new_space);
}

bool IncrementalMarking::IsFixedArrayWithProgressBar(HeapObject* obj) {
  if (!obj->IsFixedArray()) return false;
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  return chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR);
}

int IncrementalMarking::VisitObject(Map map, HeapObject* obj) {
  DCHECK(marking_state()->IsGrey(obj) || marking_state()->IsBlack(obj));
  if (!marking_state()->GreyToBlack(obj)) {
    // The object can already be black, e.g. a fixed array with a progress bar
    // or an object that was colored black before an unsafe layout change.
    DCHECK(obj->IsHashTable() || obj->IsPropertyArray() ||
           obj->IsFixedArray() || obj->IsContext() || obj->IsJSObject() ||
           obj->IsString());
  }
  DCHECK(marking_state()->IsBlack(obj));
  WhiteToGreyAndPush(map);
  IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
                                           marking_state());
  return visitor.Visit(map, obj);
}

void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject* obj) {
  if (IsMarking() && marking_state()->IsBlack(obj)) {
    RevisitObject(obj);
  }
}

void IncrementalMarking::RevisitObject(HeapObject* obj) {
  DCHECK(FLAG_concurrent_marking || marking_state()->IsBlack(obj));
  Page* page = Page::FromAddress(obj->address());
  if (page->owner()->identity() == LO_SPACE) {
    page->ResetProgressBar();
  }
  Map map = obj->map();
  WhiteToGreyAndPush(map);
  IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
                                           marking_state());
  visitor.Visit(map, obj);
}

template <WorklistToProcess worklist_to_process>
intptr_t IncrementalMarking::ProcessMarkingWorklist(
    intptr_t bytes_to_process, ForceCompletionAction completion) {
  intptr_t bytes_processed = 0;
  while (bytes_processed < bytes_to_process || completion == FORCE_COMPLETION) {
    HeapObject* obj;
    if (worklist_to_process == WorklistToProcess::kBailout) {
      obj = marking_worklist()->PopBailout();
    } else {
      obj = marking_worklist()->Pop();
    }
    if (obj == nullptr) break;
    // Left trimming may result in filler objects on the marking worklist;
    // ignore them.
    if (obj->IsFiller()) {
      DCHECK(!marking_state()->IsImpossible(obj));
      continue;
    }
    unscanned_bytes_of_large_object_ = 0;
    int size = VisitObject(obj->map(), obj);
    bytes_processed += size - unscanned_bytes_of_large_object_;
  }
  return bytes_processed;
}

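// EmbedderStep drains the embedder (wrapper) portion of the worklist and lets
// the embedder heap tracer (for example Blink) trace for the given duration,
// interrupting every kObjectsToProcessBeforeInterrupt objects to re-check the
// deadline.
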
void IncrementalMarking::EmbedderStep(double duration_ms) {
  constexpr size_t kObjectsToProcessBeforeInterrupt = 500;

  TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_TRACING);
  double deadline = heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
  bool empty_worklist;
  do {
    {
      LocalEmbedderHeapTracer::ProcessingScope scope(
          heap_->local_embedder_heap_tracer());
      HeapObject* object;
      size_t cnt = 0;
      empty_worklist = true;
      while (marking_worklist()->embedder()->Pop(0, &object)) {
        scope.TracePossibleWrapper(JSObject::cast(object));
        if (++cnt == kObjectsToProcessBeforeInterrupt) {
          cnt = 0;
          empty_worklist = false;
          break;
        }
      }
    }
    heap_->local_embedder_heap_tracer()->Trace(deadline);
  } while (!empty_worklist &&
           (heap_->MonotonicallyIncreasingTimeInMs() < deadline));
  heap_->local_embedder_heap_tracer()->SetEmbedderWorklistEmpty(empty_worklist);
}

void IncrementalMarking::Hurry() {
  // A scavenge may have pushed new objects on the marking deque (due to black
  // allocation) even in COMPLETE state.
  if (!marking_worklist()->IsEmpty()) {
    double start = 0.0;
    if (FLAG_trace_incremental_marking) {
      start = heap_->MonotonicallyIncreasingTimeInMs();
      heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Hurry\n");
    }
    ProcessMarkingWorklist(0, FORCE_COMPLETION);
    SetState(COMPLETE);
    if (FLAG_trace_incremental_marking) {
      double end = heap_->MonotonicallyIncreasingTimeInMs();
      double delta = end - start;
      heap()->isolate()->PrintWithTimestamp(
          "[IncrementalMarking] Complete (hurry), spent %d ms.\n",
          static_cast<int>(delta));
    }
  }
}

void IncrementalMarking::Stop() {
  if (IsStopped()) return;
  if (FLAG_trace_incremental_marking) {
    int old_generation_size_mb =
        static_cast<int>(heap()->OldGenerationSizeOfObjects() / MB);
    int old_generation_limit_mb =
        static_cast<int>(heap()->old_generation_allocation_limit() / MB);
    heap()->isolate()->PrintWithTimestamp(
        "[IncrementalMarking] Stopping: old generation %dMB, limit %dMB, "
        "overshoot %dMB\n",
        old_generation_size_mb, old_generation_limit_mb,
        Max(0, old_generation_size_mb - old_generation_limit_mb));
  }

  SpaceIterator it(heap_);
  while (it.has_next()) {
    Space* space = it.next();
    if (space == heap_->new_space()) {
      space->RemoveAllocationObserver(&new_generation_observer_);
    } else {
      space->RemoveAllocationObserver(&old_generation_observer_);
    }
  }

  IncrementalMarking::set_should_hurry(false);
  heap_->isolate()->stack_guard()->ClearGC();
  SetState(STOPPED);
  is_compacting_ = false;
  FinishBlackAllocation();
}

void IncrementalMarking::Finalize() {
  Hurry();
  Stop();
}

void IncrementalMarking::FinalizeMarking(CompletionAction action) {
  DCHECK(!finalize_marking_completed_);
  if (FLAG_trace_incremental_marking) {
    heap()->isolate()->PrintWithTimestamp(
        "[IncrementalMarking] requesting finalization of incremental "
        "marking.\n");
  }
  request_type_ = FINALIZATION;
  if (action == GC_VIA_STACK_GUARD) {
    heap_->isolate()->stack_guard()->RequestGC();
  }
}

void IncrementalMarking::MarkingComplete(CompletionAction action) {
  SetState(COMPLETE);
  // Set the stack guard to request a GC; the should-hurry flag indicates that
  // there cannot be much marking work left to do.
  set_should_hurry(true);
  if (FLAG_trace_incremental_marking) {
    heap()->isolate()->PrintWithTimestamp(
        "[IncrementalMarking] Complete (normal).\n");
  }
  request_type_ = COMPLETE_MARKING;
  if (action == GC_VIA_STACK_GUARD) {
    heap_->isolate()->stack_guard()->RequestGC();
  }
}

void IncrementalMarking::Epilogue() {
  was_activated_ = false;
  finalize_marking_completed_ = false;
}

bool IncrementalMarking::ShouldDoEmbedderStep() {
  return state_ == MARKING && FLAG_incremental_marking_wrappers &&
         heap_->local_embedder_heap_tracer()->InUse();
}

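// AdvanceIncrementalMarking alternates V8 marking steps and embedder tracing
// steps (controlled by trace_wrappers_toggle_) until the deadline is reached,
// marking completes, or the marking worklist runs dry.
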
double IncrementalMarking::AdvanceIncrementalMarking(
    double deadline_in_ms, CompletionAction completion_action,
    StepOrigin step_origin) {
  HistogramTimerScope incremental_marking_scope(
      heap_->isolate()->counters()->gc_incremental_marking());
  TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
  TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
  DCHECK(!IsStopped());

  double remaining_time_in_ms = 0.0;
  do {
    if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
      EmbedderStep(kStepSizeInMs);
    } else {
      const intptr_t step_size_in_bytes =
          GCIdleTimeHandler::EstimateMarkingStepSize(
              kStepSizeInMs,
              heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
      Step(step_size_in_bytes, completion_action, step_origin);
    }
    trace_wrappers_toggle_ = !trace_wrappers_toggle_;
    remaining_time_in_ms =
        deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
  } while (remaining_time_in_ms > kStepSizeInMs && !IsComplete() &&
           !marking_worklist()->IsEmpty());
  return remaining_time_in_ms;
}

void IncrementalMarking::FinalizeSweeping() {
  DCHECK(state_ == SWEEPING);
  if (heap_->mark_compact_collector()->sweeping_in_progress() &&
      (!FLAG_concurrent_sweeping ||
       !heap_->mark_compact_collector()->sweeper()->AreSweeperTasksRunning())) {
    heap_->mark_compact_collector()->EnsureSweepingCompleted();
  }
  if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
#ifdef DEBUG
    heap_->VerifyCountersAfterSweeping();
#endif
    StartMarking();
  }
}

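// The per-allocation step size is the sum of two components: one that keeps
// up with the application's old-generation allocation rate, and one that
// guarantees forward progress (roughly 1/256 of the initial old generation,
// clamped between kMinStepSizeInBytes and 256KB, and more aggressive when the
// old generation is close to its limit).
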
size_t IncrementalMarking::StepSizeToKeepUpWithAllocations() {
  // Update bytes_allocated_ based on the allocation counter.
  size_t current_counter = heap_->OldGenerationAllocationCounter();
  bytes_allocated_ += current_counter - old_generation_allocation_counter_;
  old_generation_allocation_counter_ = current_counter;
  return bytes_allocated_;
}

size_t IncrementalMarking::StepSizeToMakeProgress() {
  const size_t kTargetStepCount = 256;
  const size_t kTargetStepCountAtOOM = 32;
  const size_t kMaxStepSizeInByte = 256 * KB;
  size_t oom_slack = heap()->new_space()->Capacity() + 64 * MB;

  if (!heap()->CanExpandOldGeneration(oom_slack)) {
    return heap()->OldGenerationSizeOfObjects() / kTargetStepCountAtOOM;
  }

  return Min(Max(initial_old_generation_size_ / kTargetStepCount,
                 IncrementalMarking::kMinStepSizeInBytes),
             kMaxStepSizeInByte);
}

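// Called from the allocation observers: performs a marking step sized by the
// helpers above, crediting work already done concurrently or in background
// tasks against the marking schedule.
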
void IncrementalMarking::AdvanceIncrementalMarkingOnAllocation() {
  // Code using an AlwaysAllocateScope assumes that the GC state does not
  // change; that implies that no marking steps must be performed.
  if (heap_->gc_state() != Heap::NOT_IN_GC || !FLAG_incremental_marking ||
      (state_ != SWEEPING && state_ != MARKING) || heap_->always_allocate()) {
    return;
  }

  HistogramTimerScope incremental_marking_scope(
      heap_->isolate()->counters()->gc_incremental_marking());
  TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
  TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);

  if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
    EmbedderStep(kMaxStepSizeInMs);
  } else {
    size_t bytes_to_process =
        StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
    if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes) {
      // The first step after a scavenge can see many allocated bytes; cap the
      // step size to distribute the marking work more uniformly.
      size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
          kMaxStepSizeInMs,
          heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
      bytes_to_process = Min(bytes_to_process, max_step_size);
      size_t bytes_processed = 0;
      if (FLAG_concurrent_marking) {
        bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
                               StepOrigin::kV8, WorklistToProcess::kBailout);
        bytes_to_process = (bytes_processed >= bytes_to_process)
                               ? 0
                               : bytes_to_process - bytes_processed;
        size_t current_bytes_marked_concurrently =
            heap()->concurrent_marking()->TotalMarkedBytes();
        // TotalMarkedBytes() can be briefly non-monotonic while a concurrent
        // marking task is finishing.
        if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
          bytes_marked_ahead_of_schedule_ +=
              current_bytes_marked_concurrently - bytes_marked_concurrently_;
          bytes_marked_concurrently_ = current_bytes_marked_concurrently;
        }
      }
      if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
        // Steps performed in tasks and concurrently have put us ahead of
        // schedule; skip most of the worklist processing here.
        bytes_marked_ahead_of_schedule_ -= bytes_to_process;
        bytes_processed += bytes_to_process;
        bytes_to_process = IncrementalMarking::kMinStepSizeInBytes;
      }
      bytes_processed += Step(bytes_to_process, GC_VIA_STACK_GUARD,
                              StepOrigin::kV8, WorklistToProcess::kAll);
      bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
    }
  }
  trace_wrappers_toggle_ = !trace_wrappers_toggle_;
}

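// Step is the main entry point for a single increment of work: it finalizes
// sweeping if necessary, processes the chosen marking worklist for at most
// bytes_to_process bytes, and finalizes or completes marking once the
// worklist becomes empty.
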
size_t IncrementalMarking::Step(size_t bytes_to_process,
                                CompletionAction action, StepOrigin step_origin,
                                WorklistToProcess worklist_to_process) {
  double start = heap_->MonotonicallyIncreasingTimeInMs();

  if (state_ == SWEEPING) {
    TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL_SWEEPING);
    FinalizeSweeping();
  }

  size_t bytes_processed = 0;
  if (state_ == MARKING) {
    if (FLAG_concurrent_marking) {
      heap_->new_space()->ResetOriginalTop();
      // It is safe to merge back all objects that were on hold to the shared
      // work list at Step because we are at a safepoint where all objects
      // are properly initialized.
      marking_worklist()->shared()->MergeGlobalPool(
          marking_worklist()->on_hold());
    }

#ifdef DEBUG
    if (FLAG_trace_incremental_marking && FLAG_trace_concurrent_marking &&
        FLAG_trace_gc_verbose) {
      marking_worklist()->Print();
    }
#endif

    if (worklist_to_process == WorklistToProcess::kBailout) {
      bytes_processed =
          ProcessMarkingWorklist<WorklistToProcess::kBailout>(bytes_to_process);
    } else {
      bytes_processed =
          ProcessMarkingWorklist<WorklistToProcess::kAll>(bytes_to_process);
    }

    if (step_origin == StepOrigin::kTask) {
      bytes_marked_ahead_of_schedule_ += bytes_processed;
    }

    if (marking_worklist()->IsEmpty()) {
      if (heap_->local_embedder_heap_tracer()
              ->ShouldFinalizeIncrementalMarking()) {
        if (!finalize_marking_completed_) {
          FinalizeMarking(action);
        } else {
          MarkingComplete(action);
        }
      } else {
        heap_->local_embedder_heap_tracer()->NotifyV8MarkingWorklistWasEmpty();
      }
    }
    if (FLAG_concurrent_marking) {
      heap_->concurrent_marking()->RescheduleTasksIfNeeded();
    }
  }

  double end = heap_->MonotonicallyIncreasingTimeInMs();
  double duration = (end - start);
  heap_->tracer()->AddIncrementalMarkingStep(duration, bytes_processed);
  if (FLAG_trace_incremental_marking) {
    heap_->isolate()->PrintWithTimestamp(
        "[IncrementalMarking] Step %s %" PRIuS "KB (%" PRIuS "KB) in %.1f\n",
        step_origin == StepOrigin::kV8 ? "in v8" : "in task",
        bytes_processed / KB, bytes_to_process / KB, duration);
  }
  if (FLAG_trace_concurrent_marking) {
    heap_->isolate()->PrintWithTimestamp(
        "Concurrently marked %" PRIuS "KB\n",
        heap_->concurrent_marking()->TotalMarkedBytes() / KB);
  }
  return bytes_processed;
}