#ifndef V8_HEAP_SCAVENGER_INL_H_
#define V8_HEAP_SCAVENGER_INL_H_

#include "src/heap/scavenger.h"

#include "src/heap/incremental-marking-inl.h"
#include "src/heap/local-allocator-inl.h"
#include "src/objects-inl.h"
#include "src/objects/map.h"
#include "src/objects/slots-inl.h"

namespace v8 {
namespace internal {

void Scavenger::PromotionList::View::PushRegularObject(HeapObject* object,
                                                       int size) {
  promotion_list_->PushRegularObject(task_id_, object, size);
}
void Scavenger::PromotionList::View::PushLargeObject(HeapObject* object,
                                                     Map map, int size) {
  promotion_list_->PushLargeObject(task_id_, object, map, size);
}
bool Scavenger::PromotionList::View::IsEmpty() {
  return promotion_list_->IsEmpty();
}
size_t Scavenger::PromotionList::View::LocalPushSegmentSize() {
  return promotion_list_->LocalPushSegmentSize(task_id_);
}
bool Scavenger::PromotionList::View::Pop(struct PromotionListEntry* entry) {
  return promotion_list_->Pop(task_id_, entry);
}
bool Scavenger::PromotionList::View::IsGlobalPoolEmpty() {
  return promotion_list_->IsGlobalPoolEmpty();
}
bool Scavenger::PromotionList::View::ShouldEagerlyProcessPromotionList() {
  return promotion_list_->ShouldEagerlyProcessPromotionList(task_id_);
}
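// Note on the structure below: the promotion list is backed by two worklists,
// one for regular objects (stored as ObjectAndSize pairs) and one for large
// objects (stored as full PromotionListEntry records, since a large object's
// map slot is overwritten with a forwarding word before the entry is
// processed). Each parallel scavenger task goes through a View bound to its
// task_id, so pushes and pops typically hit task-local segments and only
// segment handover synchronizes with other tasks.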
void Scavenger::PromotionList::PushRegularObject(int task_id,
                                                 HeapObject* object,
                                                 int size) {
  regular_object_promotion_list_.Push(task_id, ObjectAndSize(object, size));
}
void Scavenger::PromotionList::PushLargeObject(int task_id, HeapObject* object,
                                               Map map, int size) {
  large_object_promotion_list_.Push(task_id, {object, map, size});
}
bool Scavenger::PromotionList::IsEmpty() {
  return regular_object_promotion_list_.IsEmpty() &&
         large_object_promotion_list_.IsEmpty();
}
size_t Scavenger::PromotionList::LocalPushSegmentSize(int task_id) {
  return regular_object_promotion_list_.LocalPushSegmentSize(task_id) +
         large_object_promotion_list_.LocalPushSegmentSize(task_id);
}
bool Scavenger::PromotionList::Pop(int task_id,
                                   struct PromotionListEntry* entry) {
  ObjectAndSize regular_object;
  // Prefer the regular-object list; fall back to the large-object list.
  if (regular_object_promotion_list_.Pop(task_id, &regular_object)) {
    entry->heap_object = regular_object.first;
    entry->size = regular_object.second;
    entry->map = entry->heap_object->map();
    return true;
  }
  return large_object_promotion_list_.Pop(task_id, entry);
}
bool Scavenger::PromotionList::IsGlobalPoolEmpty() {
  return regular_object_promotion_list_.IsGlobalPoolEmpty() &&
         large_object_promotion_list_.IsGlobalPoolEmpty();
}
bool Scavenger::PromotionList::ShouldEagerlyProcessPromotionList(int task_id) {
  // Threshold when to prioritize processing of the promotion list. Right
  // now we only look into the regular object list.
  const int kProcessPromotionListThreshold =
      kRegularObjectPromotionListSegmentSize / 2;
  return LocalPushSegmentSize(task_id) < kProcessPromotionListThreshold;
}
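// The hint above is meant for the scavenging driver (in scavenger.cc, not
// part of this header). A plausible use, sketched here only for illustration:
//
//   ObjectAndSize object_and_size;
//   while (promotion_list.ShouldEagerlyProcessPromotionList() &&
//          copied_list.Pop(&object_and_size)) {
//     // Keep visiting freshly copied objects while the local promotion
//     // segment is still small ...
//   }
//   struct PromotionListEntry entry;
//   while (promotion_list.Pop(&entry)) {
//     // ... then drain the promotion list before it grows further.
//   }
//
// Capping the local push segment at half its capacity before switching
// bounds the promotion list's memory footprint.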
// White list for objects that for sure only contain data.
bool Scavenger::ContainsOnlyData(VisitorId visitor_id) {
  switch (visitor_id) {
    case kVisitSeqOneByteString:
    case kVisitSeqTwoByteString:
    case kVisitByteArray:
    case kVisitFixedDoubleArray:
    case kVisitDataObject:
      return true;
    default:
      return false;
  }
}
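// Data-only objects contain no tagged fields, so the scavenger never needs to
// re-visit them after evacuation; this is why PromoteObject() and
// HandleLargeObject() below consult ContainsOnlyData() before pushing onto
// the promotion list.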
void Scavenger::PageMemoryFence(MaybeObject object) {
#ifdef THREAD_SANITIZER
  // Perform a dummy acquire load to tell TSAN that there is no data race
  // with page initialization.
  HeapObject* heap_object;
  if (object->GetHeapObject(&heap_object)) {
    MemoryChunk* chunk = MemoryChunk::FromAddress(heap_object->address());
    CHECK_NOT_NULL(chunk->synchronized_heap());
  }
#endif
}
bool Scavenger::MigrateObject(Map map, HeapObject* source, HeapObject* target,
                              int size) {
  // Copy the content of source to target.
  target->set_map_word(MapWord::FromMap(map));
  heap()->CopyBlock(target->address() + kPointerSize,
                    source->address() + kPointerSize, size - kPointerSize);

  ObjectPtr old = source->map_slot().Release_CompareAndSwap(
      map, MapWord::FromForwardingAddress(target).ToMap());
  if (old != map) {
    // Other task migrated the object.
    return false;
  }

  if (V8_UNLIKELY(is_logging_)) {
    heap()->OnMoveEvent(target, source, size);
  }
  if (is_incremental_marking_) {
    heap()->incremental_marking()->TransferColor(source, target);
  }
  heap()->UpdateAllocationSite(map, source, &local_pretenuring_feedback_);
  return true;
}
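// Migration protocol: every task that finds an unforwarded object copies it
// into its own allocation and then tries to publish the copy with a release
// CAS that turns the source's map word into a forwarding address. Exactly one
// task wins the race; a loser observes old != map, returns false, and its
// caller (SemiSpaceCopyObject/PromoteObject below) frees the speculative copy
// and re-reads the winner's forwarding address instead.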
CopyAndForwardResult Scavenger::SemiSpaceCopyObject(Map map,
                                                    HeapObjectSlot slot,
                                                    HeapObject* object,
                                                    int object_size) {
  DCHECK(heap()->AllowedToBeMigrated(object, NEW_SPACE));
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation =
      allocator_.Allocate(NEW_SPACE, object_size, alignment);

  HeapObject* target = nullptr;
  if (allocation.To(&target)) {
    DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
        target));
    const bool self_success = MigrateObject(map, object, target, object_size);
    if (!self_success) {
      // Another task forwarded the object first: free the speculative copy
      // and reuse the winner's forwarding address.
      allocator_.FreeLast(NEW_SPACE, target, object_size);
      MapWord map_word = object->synchronized_map_word();
      HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
      DCHECK(!Heap::InFromSpace(*slot));
      return Heap::InToSpace(*slot)
                 ? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
                 : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
    }
    HeapObjectReference::Update(slot, target);

    copied_list_.Push(ObjectAndSize(target, object_size));
    copied_size_ += object_size;
    return CopyAndForwardResult::SUCCESS_YOUNG_GENERATION;
  }
  return CopyAndForwardResult::FAILURE;
}
CopyAndForwardResult Scavenger::PromoteObject(Map map, HeapObjectSlot slot,
                                              HeapObject* object,
                                              int object_size) {
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation =
      allocator_.Allocate(OLD_SPACE, object_size, alignment);

  HeapObject* target = nullptr;
  if (allocation.To(&target)) {
    DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
        target));
    const bool self_success = MigrateObject(map, object, target, object_size);
    if (!self_success) {
      // Another task forwarded the object first: free the speculative copy
      // and reuse the winner's forwarding address.
      allocator_.FreeLast(OLD_SPACE, target, object_size);
      MapWord map_word = object->synchronized_map_word();
      HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
      DCHECK(!Heap::InFromSpace(*slot));
      return Heap::InToSpace(*slot)
                 ? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
                 : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
    }
    HeapObjectReference::Update(slot, target);
    if (!ContainsOnlyData(map->visitor_id())) {
      promotion_list_.PushRegularObject(target, object_size);
    }
    promoted_size_ += object_size;
    return CopyAndForwardResult::SUCCESS_OLD_GENERATION;
  }
  return CopyAndForwardResult::FAILURE;
}
SlotCallbackResult Scavenger::RememberedSetEntryNeeded(
    CopyAndForwardResult result) {
  DCHECK_NE(CopyAndForwardResult::FAILURE, result);
  return result == CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
             ? KEEP_SLOT
             : REMOVE_SLOT;
}
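// KEEP_SLOT tells the caller that the evacuated value still lives in the
// young generation, so the corresponding old-to-new remembered-set entry must
// be retained; REMOVE_SLOT means the value ended up in the old generation and
// the entry would be stale.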
bool Scavenger::HandleLargeObject(Map map, HeapObject* object,
                                  int object_size) {
  if (V8_UNLIKELY(
          FLAG_young_generation_large_objects &&
          MemoryChunk::FromHeapObject(object)->IsInNewLargeObjectSpace())) {
    DCHECK_EQ(NEW_LO_SPACE,
              MemoryChunk::FromHeapObject(object)->owner()->identity());
    if (object->map_slot().Release_CompareAndSwap(
            map, MapWord::FromForwardingAddress(object).ToMap()) == map) {
      surviving_new_large_objects_.insert({object, map});

      if (!ContainsOnlyData(map->visitor_id())) {
        promotion_list_.PushLargeObject(object, map, object_size);
      }
    }
    return true;
  }
  return false;
}
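// Young large objects are never copied. A surviving one is published by
// installing a forwarding pointer to itself, and is recorded together with
// its original map (the map slot now holds the forwarding word) in
// surviving_new_large_objects_ so it can later be promoted in place.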
SlotCallbackResult Scavenger::EvacuateObjectDefault(Map map,
                                                    HeapObjectSlot slot,
                                                    HeapObject* object,
                                                    int object_size) {
  SLOW_DCHECK(object->SizeFromMap(map) == object_size);
  CopyAndForwardResult result;

  if (HandleLargeObject(map, object, object_size)) {
    return REMOVE_SLOT;
  }

  SLOW_DCHECK(static_cast<size_t>(object_size) <=
              MemoryChunkLayout::AllocatableMemoryInDataPage());

  if (!heap()->ShouldBePromoted(object->address())) {
    // A semi-space copy may fail due to fragmentation. In that case, we
    // try to promote the object.
    result = SemiSpaceCopyObject(map, slot, object, object_size);
    if (result != CopyAndForwardResult::FAILURE) {
      return RememberedSetEntryNeeded(result);
    }
  }

  // We may want to promote this object if the object was already semi-space
  // copied in a previous young generation GC or if the semi-space copy
  // above failed.
  result = PromoteObject(map, slot, object, object_size);
  if (result != CopyAndForwardResult::FAILURE) {
    return RememberedSetEntryNeeded(result);
  }

  // If promotion failed, we try to copy the object to the other semi-space.
  result = SemiSpaceCopyObject(map, slot, object, object_size);
  if (result != CopyAndForwardResult::FAILURE) {
    return RememberedSetEntryNeeded(result);
  }

  heap()->FatalProcessOutOfMemory("Scavenger: semi-space copy");
  UNREACHABLE();
}
SlotCallbackResult Scavenger::EvacuateThinString(Map map, HeapObjectSlot slot,
                                                 ThinString object,
                                                 int object_size) {
  if (!is_incremental_marking_) {
    // The ThinString should die after Scavenge, so avoid writing the proper
    // forwarding pointer and instead just signal the actual object as
    // forwarded reference.
    String actual = object->actual();
    // ThinStrings always refer to internalized strings, which are always
    // in old space.
    DCHECK(!Heap::InNewSpace(actual));
    slot.StoreHeapObject(actual);
    return REMOVE_SLOT;
  }

  return EvacuateObjectDefault(map, slot, object, object_size);
}
SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,
                                                        HeapObjectSlot slot,
                                                        ConsString object,
                                                        int object_size) {
  DCHECK(IsShortcutCandidate(map->instance_type()));
  if (!is_incremental_marking_ &&
      object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
    HeapObject* first = HeapObject::cast(object->unchecked_first());

    slot.StoreHeapObject(first);

    if (!Heap::InNewSpace(first)) {
      object->map_slot().Release_Store(
          MapWord::FromForwardingAddress(first).ToMap());
      return REMOVE_SLOT;
    }

    MapWord first_word = first->synchronized_map_word();
    if (first_word.IsForwardingAddress()) {
      HeapObject* target = first_word.ToForwardingAddress();

      slot.StoreHeapObject(target);
      object->map_slot().Release_Store(
          MapWord::FromForwardingAddress(target).ToMap());
      return Heap::InToSpace(target) ? KEEP_SLOT : REMOVE_SLOT;
    }
    Map map = first_word.ToMap();
    SlotCallbackResult result =
        EvacuateObjectDefault(map, slot, first, first->SizeFromMap(map));
    object->map_slot().Release_Store(
        MapWord::FromForwardingAddress(slot.ToHeapObject()).ToMap());
    return result;
  }

  return EvacuateObjectDefault(map, slot, object, object_size);
}
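// Shortcutting: a ConsString whose second part is the empty string is
// semantically identical to its first part, so the slot is redirected to
// `first` and the ConsString itself only serves as a forwarding stub. The
// shortcut is skipped during incremental marking, presumably so the marker's
// view of the object graph stays consistent with the strings it has already
// visited.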
SlotCallbackResult Scavenger::EvacuateObject(HeapObjectSlot slot, Map map,
                                             HeapObject* source) {
  SLOW_DCHECK(Heap::InFromSpace(source));
  SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
  int size = source->SizeFromMap(map);
  switch (map->visitor_id()) {
    case kVisitThinString:
      // At the moment we don't allow weak pointers to thin strings.
      DCHECK(!(*slot)->IsWeak());
      return EvacuateThinString(map, slot, ThinString::unchecked_cast(source),
                                size);
    case kVisitShortcutCandidate:
      DCHECK(!(*slot)->IsWeak());
      // At the moment we don't allow weak pointers to cons strings.
      return EvacuateShortcutCandidate(
          map, slot, ConsString::unchecked_cast(source), size);
    default:
      return EvacuateObjectDefault(map, slot, source, size);
  }
}
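// Only thin strings and shortcut candidates receive specialized evacuation;
// every other visitor id goes through EvacuateObjectDefault(). Note that weak
// references are not expected to point at either kind of string, hence the
// IsWeak() DCHECKs above.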
SlotCallbackResult Scavenger::ScavengeObject(HeapObjectSlot p,
                                             HeapObject* object) {
  DCHECK(Heap::InFromSpace(object));

  // Synchronized load that consumes the publishing CAS of MigrateObject.
  MapWord first_word = object->synchronized_map_word();

  // If the first word is a forwarding address, the object was already copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(Heap::InFromSpace(*p));
    if ((*p)->IsWeak()) {
      p.store(HeapObjectReference::Weak(dest));
    } else {
      DCHECK((*p)->IsStrong());
      p.store(HeapObjectReference::Strong(dest));
    }
    DCHECK_IMPLIES(Heap::InNewSpace(dest),
                   (Heap::InToSpace(dest) ||
                    MemoryChunk::FromHeapObject(dest)->owner()->identity() ==
                        NEW_LO_SPACE));

    return Heap::InToSpace(dest) ? KEEP_SLOT : REMOVE_SLOT;
  }

  Map map = first_word.ToMap();
  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  DCHECK_NE(ReadOnlyRoots(heap()).allocation_memento_map(), map);
  // Call the slow part of scavenge object.
  return EvacuateObject(p, map, object);
}
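// ScavengeObject is the common entry point for young-generation pointers. The
// fast path merely rewires the slot (preserving its weakness) when the object
// was already forwarded by this or another task; the slow path,
// EvacuateObject, performs the actual copy or promotion.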
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
                                                     MaybeObjectSlot slot) {
  MaybeObject object = *slot;
  if (Heap::InFromSpace(object)) {
    HeapObject* heap_object = object->GetHeapObject();
    DCHECK(heap_object->IsHeapObject());

    SlotCallbackResult result =
        ScavengeObject(HeapObjectSlot(slot), heap_object);
    DCHECK_IMPLIES(result == REMOVE_SLOT,
                   !heap->IsInYoungGeneration((*slot)->GetHeapObject()));
    return result;
  } else if (Heap::InToSpace(object)) {
    // Already updated slot. This can happen when processing of the work list
    // is interleaved with processing roots.
    return KEEP_SLOT;
  }
  // The slot does not point into the young generation, so it is stale.
  return REMOVE_SLOT;
}
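// CheckAndScavengeObject is the slot callback used while iterating old-to-new
// remembered-set entries: its SlotCallbackResult tells the iterator whether
// the entry must be kept (target may still be young) or can be dropped.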
void ScavengeVisitor::VisitPointers(HeapObject* host, ObjectSlot start,
                                    ObjectSlot end) {
  for (ObjectSlot p = start; p < end; ++p) {
    Object* object = *p;
    if (!Heap::InNewSpace(object)) continue;
    scavenger_->ScavengeObject(HeapObjectSlot(p),
                               reinterpret_cast<HeapObject*>(object));
  }
}
void ScavengeVisitor::VisitPointers(HeapObject* host, MaybeObjectSlot start,
                                    MaybeObjectSlot end) {
  for (MaybeObjectSlot p = start; p < end; ++p) {
    MaybeObject object = *p;
    if (!Heap::InNewSpace(object)) continue;
    HeapObject* heap_object;
    if (object->GetHeapObject(&heap_object)) {
      scavenger_->ScavengeObject(HeapObjectSlot(p), heap_object);
    }
  }
}

}  // namespace internal
}  // namespace v8
#endif  // V8_HEAP_SCAVENGER_INL_H_