#ifndef V8_HEAP_MARK_COMPACT_H_
#define V8_HEAP_MARK_COMPACT_H_

#include <vector>

#include "src/heap/concurrent-marking.h"
#include "src/heap/marking.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/heap/sweeper.h"
#include "src/heap/worklist.h"

namespace v8 {
namespace internal {

// Forward declarations.
class EvacuationJobTraits;
class HeapObjectVisitor;
class ItemParallelJob;
class MigrationObserver;
class RecordMigratedSlotVisitor;
class UpdatingItem;
class YoungGenerationMarkingVisitor;
template <typename ConcreteState, AccessMode access_mode>
class MarkingStateBase {
 public:
  V8_INLINE MarkBit MarkBitFrom(HeapObject* obj) {
    return MarkBitFrom(MemoryChunk::FromAddress(obj->address()),
                       obj->address());
  }

  V8_INLINE MarkBit MarkBitFrom(MemoryChunk* p, Address addr) {
    return static_cast<ConcreteState*>(this)->bitmap(p)->MarkBitFromIndex(
        p->AddressToMarkbitIndex(addr));
  }

  Marking::ObjectColor Color(HeapObject* obj) {
    return Marking::Color(MarkBitFrom(obj));
  }

  V8_INLINE bool IsImpossible(HeapObject* obj) {
    return Marking::IsImpossible<access_mode>(MarkBitFrom(obj));
  }

  V8_INLINE bool IsBlack(HeapObject* obj) {
    return Marking::IsBlack<access_mode>(MarkBitFrom(obj));
  }

  V8_INLINE bool IsWhite(HeapObject* obj) {
    return Marking::IsWhite<access_mode>(MarkBitFrom(obj));
  }

  V8_INLINE bool IsGrey(HeapObject* obj) {
    return Marking::IsGrey<access_mode>(MarkBitFrom(obj));
  }

  V8_INLINE bool IsBlackOrGrey(HeapObject* obj) {
    return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj));
  }

  void ClearLiveness(MemoryChunk* chunk) {
    static_cast<ConcreteState*>(this)->bitmap(chunk)->Clear();
    static_cast<ConcreteState*>(this)->SetLiveBytes(chunk, 0);
  }
};
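// A minimal sketch of a concrete marking state (hypothetical; the real
// implementations follow below). The CRTP cast in MarkingStateBase resolves
// bitmap() and SetLiveBytes() on the derived class at compile time, avoiding
// virtual dispatch on the marking hot path:
//
//   class ExampleMarkingState final
//       : public MarkingStateBase<ExampleMarkingState, AccessMode::ATOMIC> {
//    public:
//     Bitmap* bitmap(const MemoryChunk* chunk) const {
//       return chunk->marking_bitmap_;
//     }
//     void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
//       chunk->live_byte_count_ = value;
//     }
//   };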
class MarkBitCellIterator {
 public:
  MarkBitCellIterator(MemoryChunk* chunk, Bitmap* bitmap) : chunk_(chunk) {
    last_cell_index_ =
        Bitmap::IndexToCell(chunk_->AddressToMarkbitIndex(chunk_->area_end()));
    cell_base_ = chunk_->address();
    cell_index_ =
        Bitmap::IndexToCell(chunk_->AddressToMarkbitIndex(cell_base_));
    cells_ = bitmap->cells();
  }

  inline bool Done() { return cell_index_ >= last_cell_index_; }

  inline bool HasNext() { return cell_index_ < last_cell_index_ - 1; }

  inline MarkBit::CellType* CurrentCell() {
    DCHECK_EQ(cell_index_, Bitmap::IndexToCell(Bitmap::CellAlignIndex(
                               chunk_->AddressToMarkbitIndex(cell_base_))));
    return &cells_[cell_index_];
  }

  inline Address CurrentCellBase() {
    DCHECK_EQ(cell_index_, Bitmap::IndexToCell(Bitmap::CellAlignIndex(
                               chunk_->AddressToMarkbitIndex(cell_base_))));
    return cell_base_;
  }

  V8_WARN_UNUSED_RESULT inline bool Advance() {
    cell_base_ += Bitmap::kBitsPerCell * kPointerSize;
    return ++cell_index_ != last_cell_index_;
  }

  inline bool Advance(unsigned int new_cell_index) {
    if (new_cell_index != cell_index_) {
      DCHECK_GT(new_cell_index, cell_index_);
      DCHECK_LE(new_cell_index, last_cell_index_);
      unsigned int diff = new_cell_index - cell_index_;
      cell_index_ = new_cell_index;
      cell_base_ += diff * (Bitmap::kBitsPerCell * kPointerSize);
      return true;
    }
    return false;
  }

  // Returns the next mark bit cell, or 0 if there is no next cell.
  inline MarkBit::CellType PeekNext() {
    if (HasNext()) {
      return cells_[cell_index_ + 1];
    }
    return 0;
  }

 private:
  MemoryChunk* chunk_;
  MarkBit::CellType* cells_;
  unsigned int last_cell_index_;
  unsigned int cell_index_;
  Address cell_base_;
};
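// Usage sketch (assumed caller context): walking the mark bitmap of a chunk
// one cell at a time, decoding live-object bits relative to the cell base.
//
//   MarkBitCellIterator it(chunk, bitmap);
//   while (!it.Done()) {
//     MarkBit::CellType cell = *it.CurrentCell();
//     Address cell_base = it.CurrentCellBase();
//     // ... interpret the set bits of |cell| as offsets from |cell_base| ...
//     if (!it.Advance()) break;
//   }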
enum LiveObjectIterationMode {
  kBlackObjects,
  kGreyObjects,
  kAllLiveObjects,
};
template <LiveObjectIterationMode mode>
class LiveObjectRange {
 public:
  class iterator {
   public:
    using value_type = std::pair<HeapObject*, int /* size */>;
    using pointer = const value_type*;
    using reference = const value_type&;
    using iterator_category = std::forward_iterator_tag;

    inline iterator(MemoryChunk* chunk, Bitmap* bitmap, Address start);

    inline iterator& operator++();
    inline iterator operator++(int);

    bool operator==(iterator other) const {
      return current_object_ == other.current_object_;
    }

    bool operator!=(iterator other) const { return !(*this == other); }

    value_type operator*() {
      return std::make_pair(current_object_, current_size_);
    }

   private:
    inline void AdvanceToNextValidObject();

    MemoryChunk* const chunk_;
    Map const one_word_filler_map_;
    Map const two_word_filler_map_;
    Map const free_space_map_;
    MarkBitCellIterator it_;
    Address cell_base_;
    MarkBit::CellType current_cell_;
    HeapObject* current_object_;
    int current_size_;
  };

  LiveObjectRange(MemoryChunk* chunk, Bitmap* bitmap)
      : chunk_(chunk),
        bitmap_(bitmap),
        start_(chunk_->area_start()),
        end_(chunk->area_end()) {}

  inline iterator begin();
  inline iterator end();

 private:
  MemoryChunk* const chunk_;
  Bitmap* bitmap_;
  Address start_;
  Address end_;
};
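// Usage sketch (assumed caller context): LiveObjectRange models a standard
// forward range, so the live objects of a chunk can be traversed with a
// range-based for loop.
//
//   for (auto object_and_size : LiveObjectRange<kBlackObjects>(chunk, bitmap)) {
//     HeapObject* object = object_and_size.first;
//     int size = object_and_size.second;
//     // ... process the black |object| of |size| bytes ...
//   }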
class LiveObjectVisitor : AllStatic {
 public:
  enum IterationMode {
    kKeepMarking,
    kClearMarkbits,
  };

  // Visits black objects on a MemoryChunk until the visitor returns false for
  // an object. If IterationMode::kClearMarkbits is passed, the markbits and
  // slots of successfully visited objects are cleared.
  template <class Visitor, typename MarkingState>
  static bool VisitBlackObjects(MemoryChunk* chunk, MarkingState* state,
                                Visitor* visitor, IterationMode iteration_mode,
                                HeapObject** failed_object);

  // Visits black objects on a MemoryChunk. The visitor is not allowed to fail
  // visitation for an object.
  template <class Visitor, typename MarkingState>
  static void VisitBlackObjectsNoFail(MemoryChunk* chunk, MarkingState* state,
                                      Visitor* visitor,
                                      IterationMode iteration_mode);

  // Visits grey objects on a MemoryChunk. The visitor is not allowed to fail
  // visitation for an object.
  template <class Visitor, typename MarkingState>
  static void VisitGreyObjectsNoFail(MemoryChunk* chunk, MarkingState* state,
                                     Visitor* visitor,
                                     IterationMode iteration_mode);

  template <typename MarkingState>
  static void RecomputeLiveBytes(MemoryChunk* chunk, MarkingState* state);
};
enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD };
enum MarkingTreatmentMode { KEEP, CLEAR };
enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY };
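// Page evacuation moves whole pages instead of copying individual objects:
// NEW_TO_NEW keeps the page in the young generation, while NEW_TO_OLD
// promotes it wholesale into old space.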
// Base class for the minor and the full mark-compact collector.
class MarkCompactCollectorBase {
 public:
  static const int kMainThread = 0;

  virtual ~MarkCompactCollectorBase() = default;

  virtual void SetUp() = 0;
  virtual void TearDown() = 0;
  virtual void CollectGarbage() = 0;

  inline Heap* heap() const { return heap_; }
  inline Isolate* isolate();

 protected:
  explicit MarkCompactCollectorBase(Heap* heap)
      : heap_(heap), old_to_new_slots_(0) {}

  // Marking operations for objects reachable from roots.
  virtual void MarkLiveObjects() = 0;
  // Marks objects reachable (transitively) from objects in the marking
  // work list.
  virtual void ProcessMarkingWorklist() = 0;
  // Clears references to non-live objects held in side data structures.
  virtual void ClearNonLiveReferences() = 0;
  virtual void EvacuatePrologue() = 0;
  virtual void EvacuateEpilogue() = 0;
  virtual void Evacuate() = 0;
  virtual void EvacuatePagesInParallel() = 0;
  virtual void UpdatePointersAfterEvacuation() = 0;
  virtual UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk,
                                                  Address start,
                                                  Address end) = 0;
  virtual UpdatingItem* CreateRememberedSetUpdatingItem(
      MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) = 0;

  template <class Evacuator, class Collector>
  void CreateAndExecuteEvacuationTasks(
      Collector* collector, ItemParallelJob* job,
      RecordMigratedSlotVisitor* record_visitor,
      MigrationObserver* migration_observer, const intptr_t live_bytes);

  // Returns whether this page should be moved according to heuristics.
  bool ShouldMovePage(Page* p, intptr_t live_bytes);

  int CollectToSpaceUpdatingItems(ItemParallelJob* job);
  template <typename IterateableSpace>
  int CollectRememberedSetUpdatingItems(ItemParallelJob* job,
                                        IterateableSpace* space,
                                        RememberedSetUpdatingMode mode);

  int NumberOfParallelCompactionTasks(int pages);
  int NumberOfParallelPointerUpdateTasks(int pages, int slots);
  int NumberOfParallelToSpacePointerUpdateTasks(int pages);

  Heap* heap_;
  // Number of old-to-new slots, computed during MarkLiveObjects.
  int old_to_new_slots_;
};
class MinorMarkingState final
    : public MarkingStateBase<MinorMarkingState, AccessMode::ATOMIC> {
 public:
  Bitmap* bitmap(const MemoryChunk* chunk) const {
    return chunk->young_generation_bitmap_;
  }

  void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
    chunk->young_generation_live_byte_count_ += by;
  }

  intptr_t live_bytes(MemoryChunk* chunk) const {
    return chunk->young_generation_live_byte_count_;
  }

  void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
    chunk->young_generation_live_byte_count_ = value;
  }
};

class MinorNonAtomicMarkingState final
    : public MarkingStateBase<MinorNonAtomicMarkingState,
                              AccessMode::NON_ATOMIC> {
 public:
  Bitmap* bitmap(const MemoryChunk* chunk) const {
    return chunk->young_generation_bitmap_;
  }

  void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
    chunk->young_generation_live_byte_count_ += by;
  }

  intptr_t live_bytes(MemoryChunk* chunk) const {
    return chunk->young_generation_live_byte_count_;
  }

  void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
    chunk->young_generation_live_byte_count_ = value;
  }
};
// This marking state is used when concurrent marking is running.
class IncrementalMarkingState final
    : public MarkingStateBase<IncrementalMarkingState, AccessMode::ATOMIC> {
 public:
  Bitmap* bitmap(const MemoryChunk* chunk) const {
    DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
                  reinterpret_cast<intptr_t>(chunk),
              MemoryChunk::kMarkBitmapOffset);
    return chunk->marking_bitmap_;
  }

  void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
    chunk->live_byte_count_ += by;
  }

  intptr_t live_bytes(MemoryChunk* chunk) const {
    return chunk->live_byte_count_;
  }

  void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
    chunk->live_byte_count_ = value;
  }
};
class MajorAtomicMarkingState final
    : public MarkingStateBase<MajorAtomicMarkingState, AccessMode::ATOMIC> {
 public:
  Bitmap* bitmap(const MemoryChunk* chunk) const {
    DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
                  reinterpret_cast<intptr_t>(chunk),
              MemoryChunk::kMarkBitmapOffset);
    return chunk->marking_bitmap_;
  }

  void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
    chunk->live_byte_count_ += by;
  }

  intptr_t live_bytes(MemoryChunk* chunk) const {
    return chunk->live_byte_count_;
  }

  void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
    chunk->live_byte_count_ = value;
  }
};
class MajorNonAtomicMarkingState final
    : public MarkingStateBase<MajorNonAtomicMarkingState,
                              AccessMode::NON_ATOMIC> {
 public:
  Bitmap* bitmap(const MemoryChunk* chunk) const {
    DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
                  reinterpret_cast<intptr_t>(chunk),
              MemoryChunk::kMarkBitmapOffset);
    return chunk->marking_bitmap_;
  }

  void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
    chunk->live_byte_count_ += by;
  }

  intptr_t live_bytes(MemoryChunk* chunk) const {
    return chunk->live_byte_count_;
  }

  void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
    chunk->live_byte_count_ = value;
  }
};
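// The marking states above differ only in which bitmap and live-byte counter
// they expose and in the access mode: the ATOMIC variants are safe against
// racing concurrent-marking tasks, while the NON_ATOMIC variants are for
// phases in which the main thread owns the data exclusively.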
struct Ephemeron {
  HeapObject* key;
  HeapObject* value;
};

typedef Worklist<Ephemeron, 64> EphemeronWorklist;

// Weak objects encountered during marking.
struct WeakObjects {
  Worklist<TransitionArray*, 64> transition_arrays;
  // Keeps track of all EphemeronHashTables in the heap so they can be
  // processed in the atomic pause.
  Worklist<EphemeronHashTable, 64> ephemeron_hash_tables;
  EphemeronWorklist current_ephemerons;
  EphemeronWorklist next_ephemerons;
  EphemeronWorklist discovered_ephemerons;
  Worklist<std::pair<HeapObject*, HeapObjectSlot>, 64> weak_references;
  Worklist<std::pair<HeapObject*, Code>, 64> weak_objects_in_code;
  Worklist<JSWeakCell*, 64> js_weak_cells;
};

struct EphemeronMarking {
  std::vector<HeapObject*> newly_discovered;
  bool newly_discovered_overflowed;
  size_t newly_discovered_limit;
};
// Collector for young and old generation.
class MarkCompactCollector final : public MarkCompactCollectorBase {
 public:
#ifdef V8_CONCURRENT_MARKING
  using MarkingState = IncrementalMarkingState;
#else
  using MarkingState = MajorNonAtomicMarkingState;
#endif  // V8_CONCURRENT_MARKING

  using NonAtomicMarkingState = MajorNonAtomicMarkingState;

  // Wrapper over the worklists used by the marker.
  class MarkingWorklist {
   public:
    using ConcurrentMarkingWorklist = Worklist<HeapObject*, 64>;
    using EmbedderTracingWorklist = Worklist<HeapObject*, 16>;

    void Push(HeapObject* object) {
      bool success = shared_.Push(kMainThread, object);
      USE(success);
      DCHECK(success);
    }

    void PushBailout(HeapObject* object) {
      bool success = bailout_.Push(kMainThread, object);
      USE(success);
      DCHECK(success);
    }
    HeapObject* Pop() {
      HeapObject* result;
#ifdef V8_CONCURRENT_MARKING
      if (bailout_.Pop(kMainThread, &result)) return result;
#endif
      if (shared_.Pop(kMainThread, &result)) return result;
#ifdef V8_CONCURRENT_MARKING
      // The on-hold work list is empty almost all the time, so checking it
      // last avoids its emptiness check in the common case.
      if (on_hold_.Pop(kMainThread, &result)) return result;
#endif
      return nullptr;
    }

    HeapObject* PopBailout() {
#ifdef V8_CONCURRENT_MARKING
      HeapObject* result;
      if (bailout_.Pop(kMainThread, &result)) return result;
#endif
      return nullptr;
    }
    bool IsBailoutEmpty() { return bailout_.IsLocalEmpty(kMainThread); }

    bool IsEmpty() {
      return bailout_.IsLocalEmpty(kMainThread) &&
             shared_.IsLocalEmpty(kMainThread) &&
             on_hold_.IsLocalEmpty(kMainThread) &&
             bailout_.IsGlobalPoolEmpty() && shared_.IsGlobalPoolEmpty() &&
             on_hold_.IsGlobalPoolEmpty();
    }

    bool IsEmbedderEmpty() {
      return embedder_.IsLocalEmpty(kMainThread) &&
             embedder_.IsGlobalPoolEmpty();
    }

    int Size() {
      return static_cast<int>(bailout_.LocalSize(kMainThread) +
                              shared_.LocalSize(kMainThread) +
                              on_hold_.LocalSize(kMainThread));
    }
    // Calls the specified callback on each element of the deques and replaces
    // the element with the value the callback writes through its output
    // parameter. Elements for which the callback returns false are removed
    // from the deque.
    template <typename Callback>
    void Update(Callback callback) {
      bailout_.Update(callback);
      shared_.Update(callback);
      on_hold_.Update(callback);
      embedder_.Update(callback);
    }
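    // Sketch of a typical callback shape (assumed; matches Worklist::Update):
    // after evacuation, entries are rewritten to the objects' new locations
    // and dead entries are dropped.
    //
    //   worklist->Update([](HeapObject* obj, HeapObject** out) -> bool {
    //     MapWord map_word = obj->map_word();
    //     if (map_word.IsForwardingAddress()) {
    //       *out = map_word.ToForwardingAddress();  // moved: keep new address
    //       return true;
    //     }
    //     return false;  // dead: drop the entry
    //   });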
    void Print() {
      PrintWorklist("shared", &shared_);
      PrintWorklist("bailout", &bailout_);
      PrintWorklist("on_hold", &on_hold_);
    }

   private:
    // Prints the stats about the global pool of the worklist.
    void PrintWorklist(const char* worklist_name,
                       ConcurrentMarkingWorklist* worklist);

    ConcurrentMarkingWorklist shared_;
    ConcurrentMarkingWorklist bailout_;
    ConcurrentMarkingWorklist on_hold_;
    EmbedderTracingWorklist embedder_;
  };
  MarkingState* marking_state() { return &marking_state_; }

  NonAtomicMarkingState* non_atomic_marking_state() {
    return &non_atomic_marking_state_;
  }
  void SetUp() override;
  void TearDown() override;
  void CollectGarbage() override;

  void CollectEvacuationCandidates(PagedSpace* space);

  void AddEvacuationCandidate(Page* p);

  // Stops concurrent marking, either by preempting it right away or by
  // waiting for it to complete as requested by |stop_request|.
  void FinishConcurrentMarking(ConcurrentMarking::StopRequest stop_request);

  bool StartCompaction();

  void AbortCompaction();
  static inline bool IsOnEvacuationCandidate(Object* obj) {
    return Page::FromAddress(reinterpret_cast<Address>(obj))
        ->IsEvacuationCandidate();
  }

  static bool IsOnEvacuationCandidate(MaybeObject obj);
  void RecordRelocSlot(Code host, RelocInfo* rinfo, Object* target);
  V8_INLINE static void RecordSlot(HeapObject* object, ObjectSlot slot,
                                   HeapObject* target);
  V8_INLINE static void RecordSlot(HeapObject* object, HeapObjectSlot slot,
                                   HeapObject* target);
  void RecordLiveSlotsOnPage(Page* page);

  void UpdateSlots(SlotsBuffer* buffer);
  void UpdateSlotsRecordedIn(SlotsBuffer* buffer);

  bool is_compacting() const { return compacting_; }
  // Ensures that sweeping is finished.
  void EnsureSweepingCompleted();

  // Checks if sweeping is in progress right now on any space.
  bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); }

  void set_evacuation(bool evacuation) { evacuation_ = evacuation; }

  bool evacuation() const { return evacuation_; }

  MarkingWorklist* marking_worklist() { return &marking_worklist_; }

  WeakObjects* weak_objects() { return &weak_objects_; }
  void AddTransitionArray(TransitionArray* array) {
    weak_objects_.transition_arrays.Push(kMainThread, array);
  }

  void AddEphemeronHashTable(EphemeronHashTable table) {
    weak_objects_.ephemeron_hash_tables.Push(kMainThread, table);
  }

  void AddEphemeron(HeapObject* key, HeapObject* value) {
    weak_objects_.discovered_ephemerons.Push(kMainThread,
                                             Ephemeron{key, value});
  }

  void AddWeakReference(HeapObject* host, HeapObjectSlot slot) {
    weak_objects_.weak_references.Push(kMainThread,
                                       std::make_pair(host, slot));
  }

  void AddWeakObjectInCode(HeapObject* object, Code code) {
    weak_objects_.weak_objects_in_code.Push(kMainThread,
                                            std::make_pair(object, code));
  }

  void AddWeakCell(JSWeakCell* weak_cell) {
    weak_objects_.js_weak_cells.Push(kMainThread, weak_cell);
  }
  void AddNewlyDiscovered(HeapObject* object) {
    if (ephemeron_marking_.newly_discovered_overflowed) return;

    if (ephemeron_marking_.newly_discovered.size() <
        ephemeron_marking_.newly_discovered_limit) {
      ephemeron_marking_.newly_discovered.push_back(object);
    } else {
      ephemeron_marking_.newly_discovered_overflowed = true;
    }
  }

  void ResetNewlyDiscovered() {
    ephemeron_marking_.newly_discovered_overflowed = false;
    ephemeron_marking_.newly_discovered.clear();
  }
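  // Once |newly_discovered| reaches |newly_discovered_limit|, the overflow
  // flag is set and further discoveries are dropped; callers then have to
  // fall back to a strategy that does not rely on the list being complete.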
  Sweeper* sweeper() { return sweeper_; }

#ifdef DEBUG
  // Checks whether a mark-compact collection is currently in progress.
  bool in_use() { return state_ > PREPARE_GC; }
  bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
#endif

  void VerifyMarking();
#ifdef VERIFY_HEAP
  void VerifyValidStoreAndSlotsBufferEntries();
  void VerifyMarkbitsAreClean();
  void VerifyMarkbitsAreDirty(PagedSpace* space);
  void VerifyMarkbitsAreClean(PagedSpace* space);
  void VerifyMarkbitsAreClean(NewSpace* space);
  void VerifyMarkbitsAreClean(LargeObjectSpace* space);
#endif
 private:
  explicit MarkCompactCollector(Heap* heap);
  ~MarkCompactCollector() override;
  void ComputeEvacuationHeuristics(size_t area_size,
                                   int* target_fragmentation_percent,
                                   size_t* max_evacuated_bytes);

  void RecordObjectStats();
  void MarkLiveObjects() override;

  // Marks the object black and adds it to the marking work list.
  // This is for non-incremental marking only.
  V8_INLINE void MarkObject(HeapObject* host, HeapObject* obj);

  // Marks the object black and adds it to the marking work list.
  // This is for non-incremental marking only.
  V8_INLINE void MarkRootObject(Root root, HeapObject* obj);

  // Used by wrapper tracing.
  V8_INLINE void MarkExternallyReferencedObject(HeapObject* obj);

  // Marks the heap roots and objects reachable from them.
  void MarkRoots(RootVisitor* root_visitor,
                 ObjectVisitor* custom_root_body_visitor);

  // Marks the string table specially: references to internalized strings
  // from the string table are weak.
  void MarkStringTable(ObjectVisitor* visitor);

  // Marks objects reachable from harmony weak maps and wrapper tracing.
  void ProcessEphemeronMarking();

  void ProcessTopOptimizedFrame(ObjectVisitor* visitor);

  DependentCode* DependentCodeListFromNonLiveMaps();

  // Drains the main thread's marking work list until it is empty.
  void ProcessMarkingWorklist() override;
  enum class MarkingWorklistProcessingMode {
    kDefault,
    kTrackNewlyDiscoveredObjects
  };

  template <MarkingWorklistProcessingMode mode>
  void ProcessMarkingWorklistInternal();
  // Implements ephemeron semantics: marks the value if the key is marked.
  // Returns true if the value was actually marked.
  bool VisitEphemeron(HeapObject* key, HeapObject* value);

  // Marks ephemerons and drains the marking work list iteratively until a
  // fixpoint is reached.
  void ProcessEphemeronsUntilFixpoint();

  // Single iteration of the fixpoint: drains the ephemeron and marking
  // work lists.
  bool ProcessEphemerons();

  // Fallback for when the fixpoint iteration does not terminate quickly:
  // processes ephemerons in linear order.
  void ProcessEphemeronsLinear();

  void PerformWrapperTracing();

  // Callback telling whether the object *p is an unmarked heap object.
  static bool IsUnmarkedHeapObject(Heap* heap, ObjectSlot p);
  void ClearNonLiveReferences() override;
  void MarkDependentCodeForDeoptimization();

  void ClearPotentialSimpleMapTransition(Map dead_target);
  void ClearPotentialSimpleMapTransition(Map map, Map dead_target);

  void ClearFullMapTransitions();
  bool CompactTransitionArray(Map map, TransitionArray* transitions,
                              DescriptorArray* descriptors);
  void TrimDescriptorArray(Map map, DescriptorArray* descriptors);
  void TrimEnumCache(Map map, DescriptorArray* descriptors);

  void ClearWeakCollections();

  void ClearWeakReferences();

  void ClearJSWeakCells();

  void AbortWeakObjects();

  void StartSweepSpaces();
  void StartSweepSpace(PagedSpace* space);

  void EvacuatePrologue() override;
  void EvacuateEpilogue() override;
  void Evacuate() override;
  void EvacuatePagesInParallel() override;
  void UpdatePointersAfterEvacuation() override;

  UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk, Address start,
                                          Address end) override;
  UpdatingItem* CreateRememberedSetUpdatingItem(
      MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
  int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
  int CollectOldSpaceArrayBufferTrackerItems(ItemParallelJob* job);

  void ReleaseEvacuationCandidates();
  void PostProcessEvacuationCandidates();
  void ReportAbortedEvacuationCandidate(HeapObject* failed_object,
                                        Page* page);

  static const int kEphemeronChunkSize = 8 * KB;

  int NumberOfParallelEphemeronVisitingTasks(size_t elements);

  void RightTrimDescriptorArray(DescriptorArray* array,
                                int descriptors_to_trim);
  base::Semaphore page_parallel_job_semaphore_;

#ifdef DEBUG
  enum CollectorState {
    IDLE,
    PREPARE_GC,
    MARK_LIVE_OBJECTS,
    SWEEP_SPACES,
    ENCODE_FORWARDING_ADDRESSES,
    UPDATE_POINTERS,
    RELOCATE_OBJECTS
  };

  // The current stage of the collector.
  CollectorState state_;
#endif

  bool was_marked_incrementally_;

  bool evacuation_;

  // True if we are collecting slots to perform evacuation from evacuation
  // candidates.
  bool compacting_;

  bool black_allocation_;

  bool have_code_to_deoptimize_;

  MarkingWorklist marking_worklist_;
  WeakObjects weak_objects_;
  EphemeronMarking ephemeron_marking_;

  // Candidates for pages that should be evacuated.
  std::vector<Page*> evacuation_candidates_;
  // Pages that are actually processed during evacuation.
  std::vector<Page*> old_space_evacuation_pages_;
  std::vector<Page*> new_space_evacuation_pages_;
  std::vector<std::pair<HeapObject*, Page*>> aborted_evacuation_candidates_;

  Sweeper* sweeper_;

  MarkingState marking_state_;
  NonAtomicMarkingState non_atomic_marking_state_;
  friend class EphemeronHashTableMarkingTask;
  friend class FullEvacuator;
  friend class Heap;
  friend class RecordMigratedSlotVisitor;
};
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
class MarkingVisitor final
    : public HeapVisitor<
          int,
          MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>> {
 public:
  V8_INLINE MarkingVisitor(MarkCompactCollector* collector,
                           MarkingState* marking_state);

  V8_INLINE bool ShouldVisitMapPointer() { return false; }
  V8_INLINE int VisitJSApiObject(Map map, JSObject* object);
  V8_INLINE int VisitMap(Map map, Map object);

  V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final;
  V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final;
 private:
  // Size of the chunks scanned at a time when visiting a FixedArray with a
  // progress bar.
  static const int kProgressBarScanningChunk = 32 * 1024;

  V8_INLINE int VisitFixedArrayIncremental(Map map, FixedArray object);

  template <typename T>
  V8_INLINE int VisitEmbedderTracingSubclass(Map map, T* object);

  V8_INLINE void MarkMapContents(Map map);
  MarkingState* marking_state() { return marking_state_; }

  MarkCompactCollector::MarkingWorklist* marking_worklist() const {
    return collector_->marking_worklist();
  }

  Heap* const heap_;
  MarkCompactCollector* const collector_;
  MarkingState* const marking_state_;
};
// Sets the evacuation flag on the collector for the duration of the scope.
class EvacuationScope {
 public:
  explicit EvacuationScope(MarkCompactCollector* collector)
      : collector_(collector) {
    collector_->set_evacuation(true);
  }

  ~EvacuationScope() { collector_->set_evacuation(false); }

 private:
  MarkCompactCollector* collector_;
};
#ifdef ENABLE_MINOR_MC

// Collector for the young generation.
class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
 public:
  using MarkingState = MinorMarkingState;
  using NonAtomicMarkingState = MinorNonAtomicMarkingState;

  explicit MinorMarkCompactCollector(Heap* heap);
  ~MinorMarkCompactCollector() override;
  MarkingState* marking_state() { return &marking_state_; }

  NonAtomicMarkingState* non_atomic_marking_state() {
    return &non_atomic_marking_state_;
  }

  void SetUp() override;
  void TearDown() override;
  void CollectGarbage() override;

  void MakeIterable(Page* page, MarkingTreatmentMode marking_mode,
                    FreeSpaceTreatmentMode free_space_mode);
  void CleanupSweepToIteratePages();
 private:
  using MarkingWorklist = Worklist<HeapObject*, 64 /* segment size */>;
  class RootMarkingVisitor;

  static const int kNumMarkers = 8;
  static const int kMainMarker = 0;
  inline MarkingWorklist* worklist() { return worklist_; }

  inline YoungGenerationMarkingVisitor* main_marking_visitor() {
    return main_marking_visitor_;
  }

  void MarkLiveObjects() override;
  void MarkRootSetInParallel(RootMarkingVisitor* root_visitor);
  V8_INLINE void MarkRootObject(HeapObject* obj);
  void ProcessMarkingWorklist() override;
  void ClearNonLiveReferences() override;

  void EvacuatePrologue() override;
  void EvacuateEpilogue() override;
  void Evacuate() override;
  void EvacuatePagesInParallel() override;
  void UpdatePointersAfterEvacuation() override;

  UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk, Address start,
                                          Address end) override;
  UpdatingItem* CreateRememberedSetUpdatingItem(
      MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;

  int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);

  int NumberOfParallelMarkingTasks(int pages);

  MarkingWorklist* worklist_;

  YoungGenerationMarkingVisitor* main_marking_visitor_;
  base::Semaphore page_parallel_job_semaphore_;
  std::vector<Page*> new_space_evacuation_pages_;
  std::vector<Page*> sweep_to_iterate_pages_;

  MarkingState marking_state_;
  NonAtomicMarkingState non_atomic_marking_state_;
  friend class YoungGenerationMarkingTask;
  friend class YoungGenerationMarkingVisitor;
};

#endif  // ENABLE_MINOR_MC

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_MARK_COMPACT_H_