#ifndef V8_HEAP_INCREMENTAL_MARKING_H_
#define V8_HEAP_INCREMENTAL_MARKING_H_

#include "src/cancelable-task.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-job.h"
#include "src/heap/mark-compact.h"

// Origin of a marking step: a V8-internal call site or a posted task.
enum class StepOrigin { kV8, kTask };

// Which marking worklist(s) a step should drain (see Step()).
enum class WorklistToProcess { kAll, kBailout };

// Phases of incremental marking. The values are ordered so that
// `state() >= MARKING` means marking work is in progress (see IsMarking()).
enum State { STOPPED, SWEEPING, MARKING, COMPLETE };

// Whether completing marking is allowed to request a GC via the stack guard.
enum CompletionAction { GC_VIA_STACK_GUARD, NO_GC_VIA_STACK_GUARD };

// Whether worklist processing must drain everything in one go
// (see ProcessMarkingWorklist()).
enum ForceCompletionAction { FORCE_COMPLETION, DO_NOT_FORCE_COMPLETION };

// Kind of GC request raised while marking (see request_type()).
enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION };
#ifdef V8_CONCURRENT_MARKING
#endif  // V8_CONCURRENT_MARKING

// NOTE(review): the lines below are fragments of a scope object that pauses
// black allocation while alive: the constructor pauses it iff it was active,
// and presumably the (elided) destructor restarts it via
// StartBlackAllocation() — intervening lines are not visible here; confirm
// against the full header.
    : marking_(marking), paused_(
false) {
  if (marking_->black_allocation()) {
    marking_->PauseBlackAllocation();
    marking_->StartBlackAllocation();
// Allocation thresholds (in bytes) after which a marking step is scheduled
// for the respective generation — presumably consumed by
// StepSizeToKeepUpWithAllocations(); confirm against the .cc file.
static const size_t kYoungGenerationAllocatedThreshold = 64 * KB;
static const size_t kOldGenerationAllocatedThreshold = 256 * KB;
// Lower bound on the number of bytes processed by a single step.
static const size_t kMinStepSizeInBytes = 64 * KB;

// Per-step wall-clock budget and its hard cap, in milliseconds.
static const int kStepSizeInMs = 1;
static const int kMaxStepSizeInMs = 5;

// Old-generation size at which incremental marking may be activated.
// NOTE(review): the two definitions below sit in opposite branches of a
// preprocessor conditional whose #if/#else lines are elided in this view.
static const intptr_t kActivationThreshold = 8 * MB;
static const intptr_t kActivationThreshold = 0;

// Mark-bit access mode: atomic when concurrent marking is compiled in.
// NOTE(review): the #else/#endif lines are elided in this view.
#ifdef V8_CONCURRENT_MARKING
static const AccessMode kAtomicity = AccessMode::ATOMIC;
static const AccessMode kAtomicity = AccessMode::NON_ATOMIC;
93 MarkingState* marking_state() {
return &marking_state_; }
95 AtomicMarkingState* atomic_marking_state() {
return &atomic_marking_state_; }
// Accessor for the marking state that never uses atomic accesses.
NonAtomicMarkingState* non_atomic_marking_state() {
  return &non_atomic_marking_state_;

// Called when an object is left-trimmed from 'from' to 'to'; presumably
// transfers the marking information to the new object start — confirm
// against the implementation.
void NotifyLeftTrimming(HeapObject* from, HeapObject* to);

// Presumably copies the mark-bit colour of 'from' onto 'to' — confirm.
V8_INLINE void TransferColor(HeapObject* from, HeapObject* to);

// Current phase. The DCHECK documents that a non-STOPPED state is only
// possible when the incremental-marking flag is on.
State state() const {
  DCHECK(state_ == STOPPED || FLAG_incremental_marking);
110 bool should_hurry()
const {
return should_hurry_; }
111 void set_should_hurry(
bool val) { should_hurry_ = val; }
// Whether the finalization (weak-closure over-approximation) pass has run.
bool finalize_marking_completed() const {
  return finalize_marking_completed_;

// Test-only setter for the finalization-completed flag.
void SetWeakClosureWasOverApproximatedForTesting(bool val) {
  finalize_marking_completed_ = val;
121 inline bool IsStopped()
const {
return state() == STOPPED; }
123 inline bool IsSweeping()
const {
return state() == SWEEPING; }
125 inline bool IsMarking()
const {
return state() >= MARKING; }
127 inline bool IsMarkingIncomplete()
const {
return state() == MARKING; }
129 inline bool IsComplete()
const {
return state() == COMPLETE; }
// True once finalization has been requested but has not run yet.
inline bool IsReadyToOverApproximateWeakClosure() const {
  return request_type_ == FINALIZATION && !finalize_marking_completed_;

// True while marking is active and some GC request (finalization or full
// completion) is pending.
inline bool NeedsFinalization() {
  return IsMarking() &&
         (request_type_ == FINALIZATION || request_type_ == COMPLETE_MARKING);
140 GCRequestType request_type()
const {
return request_type_; }
142 void reset_request_type() { request_type_ = NONE; }
bool CanBeActivated();

void Start(GarbageCollectionReason gc_reason);

void FinalizeIncrementally();

// Scavenge support: fix up marking data after a young-generation GC.
void UpdateMarkingWorklistAfterScavenge();
void UpdateWeakReferencesAfterScavenge();
void UpdateMarkedBytesAfterScavenge(size_t dead_bytes_in_new_space);

void FinalizeMarking(CompletionAction action);

void MarkingComplete(CompletionAction action);

// Performs marking steps until the deadline; presumably returns the time
// remaining under the deadline that could not be used — confirm against
// the implementation.
double AdvanceIncrementalMarking(double deadline_in_ms,
                                 CompletionAction completion_action,
                                 StepOrigin step_origin);

void FinalizeSweeping();

// Performs a single marking step of at most 'bytes_to_process' bytes.
size_t Step(size_t bytes_to_process, CompletionAction action,
            StepOrigin step_origin,
            WorklistToProcess worklist_to_process = WorklistToProcess::kAll);

bool ShouldDoEmbedderStep();
void EmbedderStep(double duration);

inline void RestartIfNotMarking();

// Write-barrier entry points.
// NOTE(review): some parameter lists below are truncated in this view.
static int RecordWriteFromCode(HeapObject* obj, Address slot_address,
V8_INLINE bool BaseRecordWrite(HeapObject* obj, Object* value);
V8_INLINE void RecordWrite(HeapObject* obj, ObjectSlot slot, Object* value);
V8_INLINE void RecordMaybeWeakWrite(HeapObject* obj, MaybeObjectSlot slot,
void RevisitObject(HeapObject* obj);
void RecordWriteSlow(HeapObject* obj, HeapObjectSlot slot, Object* value);
void RecordWriteIntoCode(Code host, RelocInfo* rinfo, HeapObject* value);

// Presumably marks a white object grey and pushes it on the marking
// worklist, returning whether the transition happened — confirm.
bool WhiteToGreyAndPush(HeapObject* obj);

void MarkBlackAndPush(HeapObject* obj);
215 bool IsCompacting() {
return IsMarking() && is_compacting_; }
// Records that a large object was only partially scanned; the unscanned
// remainder is tracked in unscanned_bytes_of_large_object_.
void NotifyIncompleteScanOfObject(int unscanned_bytes) {
  unscanned_bytes_of_large_object_ = unscanned_bytes;

void ProcessBlackAllocatedObject(HeapObject* obj);
223 Heap* heap()
const {
return heap_; }
// Accessor for the embedded job that posts incremental-marking tasks.
IncrementalMarkingJob* incremental_marking_job() {
  return &incremental_marking_job_;
229 bool black_allocation() {
return black_allocation_; }
// Test-only: force-starts black allocation if it is not already active.
void StartBlackAllocationForTesting() {
  if (!black_allocation_) {
    StartBlackAllocation();

// The marking worklist shared with the mark-compact collector.
MarkCompactCollector::MarkingWorklist* marking_worklist() const {
  return marking_worklist_;
// Allocation observer that drives incremental marking from the allocation
// path; one instance each is installed for the new and old generation
// (see new_generation_observer_ / old_generation_observer_).
// NOTE(review): access specifiers and the closing brace are elided in
// this view.
class Observer : public AllocationObserver {
  Observer(IncrementalMarking& incremental_marking, intptr_t step_size)
      : AllocationObserver(step_size),
        incremental_marking_(incremental_marking) {}

  // Invoked after 'bytes_allocated' bytes have been allocated.
  void Step(int bytes_allocated, Address, size_t) override;

  IncrementalMarking& incremental_marking_;
// Black-allocation control (see black_allocation()).
void StartBlackAllocation();
void PauseBlackAllocation();
void FinishBlackAllocation();

bool ShouldRetainMap(Map map, int age);

// Turns the incremental write barrier on for the given space(s).
void ActivateIncrementalWriteBarrier(PagedSpace* space);
void ActivateIncrementalWriteBarrier(NewSpace* space);
void ActivateIncrementalWriteBarrier();

// Turns the incremental write barrier off again.
void DeactivateIncrementalWriteBarrierForSpace(PagedSpace* space);
void DeactivateIncrementalWriteBarrierForSpace(NewSpace* space);
void DeactivateIncrementalWriteBarrier();

// Drains the selected worklist, processing up to 'bytes_to_process'
// bytes unless completion is forced.
template <WorklistToProcess worklist_to_process = WorklistToProcess::kAll>
V8_INLINE intptr_t ProcessMarkingWorklist(
    intptr_t bytes_to_process,
    ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION);

V8_INLINE bool IsFixedArrayWithProgressBar(HeapObject* object);

// Visits 'obj' with the given map; presumably returns the object's size
// in bytes — confirm against the implementation.
V8_INLINE int VisitObject(Map map, HeapObject* obj);

void IncrementIdleMarkingDelayCounter();

void AdvanceIncrementalMarkingOnAllocation();

// Step-size heuristics (see the k*Threshold constants above).
size_t StepSizeToKeepUpWithAllocations();
size_t StepSizeToMakeProgress();

// Updates the phase and mirrors the "is marking" condition into the heap
// so the write barrier can test it cheaply.
// NOTE(review): the assignment to state_ is elided in this view.
void SetState(State s) {
  heap_->SetIsMarkingFlag(s >= MARKING);
// Worklist shared with the mark-compact collector (not owned).
MarkCompactCollector::MarkingWorklist* const marking_worklist_;
WeakObjects* weak_objects_;

double start_time_ms_;
size_t initial_old_generation_size_;
size_t old_generation_allocation_counter_;
size_t bytes_allocated_;
size_t bytes_marked_ahead_of_schedule_;
// Bytes marked by concurrent marker threads, presumably used to credit
// the main thread's schedule — confirm against the implementation.
size_t bytes_marked_concurrently_;
// Remainder of a partially scanned large object
// (see NotifyIncompleteScanOfObject()).
size_t unscanned_bytes_of_large_object_;

bool black_allocation_;
bool finalize_marking_completed_;
bool trace_wrappers_toggle_;
IncrementalMarkingJob incremental_marking_job_;

// Pending GC request (see request_type()/reset_request_type()).
GCRequestType request_type_;

// Allocation observers driving marking steps per generation.
Observer new_generation_observer_;
Observer old_generation_observer_;

MarkingState marking_state_;
AtomicMarkingState atomic_marking_state_;
NonAtomicMarkingState non_atomic_marking_state_;

DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking);
338 #endif // V8_HEAP_INCREMENTAL_MARKING_H_