#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/base/atomic-utils.h"
#include "src/base/bounded-page-allocator.h"
#include "src/base/v8-fallthrough.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/msan.h"
#include "src/objects/code-inl.h"

namespace v8 {
namespace internal {

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> tmp(*this);
  operator++();
  return tmp;
}
PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}
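
// -----------------------------------------------------------------------------
// SemiSpaceIterator
//
// Walks the objects of the iterated semispace page by page, skipping filler
// objects. Illustrative usage sketch only (assumes the SemiSpaceIterator
// constructor that takes the new space, as used elsewhere in the heap code):
//
//   SemiSpaceIterator it(heap->new_space());
//   for (HeapObject* obj = it.Next(); obj != nullptr; obj = it.Next()) {
//     // ... visit obj ...
//   }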
HeapObject* SemiSpaceIterator::Next() {
  while (current_ != limit_) {
    if (Page::IsAlignedToPageSize(current_)) {
      Page* page = Page::FromAllocationAreaAddress(current_);
      page = page->next_page();
      current_ = page->area_start();
      if (current_ == limit_) return nullptr;
    }
    HeapObject* object = HeapObject::FromAddress(current_);
    current_ += object->Size();
    if (!object->IsFiller()) {
      return object;
    }
  }
  return nullptr;
}
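
// -----------------------------------------------------------------------------
// HeapObjectIterator
//
// Iterates over the live objects of a paged space, advancing page by page and
// skipping fillers as well as the currently open linear allocation area.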
HeapObject* HeapObjectIterator::Next() {
  do {
    HeapObject* next_obj = FromCurrentPage();
    if (next_obj != nullptr) return next_obj;
  } while (AdvanceToNextPage());
  return nullptr;
}
HeapObject* HeapObjectIterator::FromCurrentPage() {
  while (cur_addr_ != cur_end_) {
    if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) {
      cur_addr_ = space_->limit();
      continue;
    }
    HeapObject* obj = HeapObject::FromAddress(cur_addr_);
    const int obj_size = obj->Size();
    cur_addr_ += obj_size;
    DCHECK_LE(cur_addr_, cur_end_);
    if (!obj->IsFiller()) {
      if (obj->IsCode()) {
        DCHECK_EQ(space_, space_->heap()->code_space());
        DCHECK_CODEOBJECT_SIZE(obj_size, space_);
      } else {
        DCHECK_OBJECT_SIZE(obj_size);
      }
      return obj;
    }
  }
  return nullptr;
}
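
// -----------------------------------------------------------------------------
// External backing store accounting
//
// Spaces and memory chunks keep per-type counters of external (off-heap)
// backing store bytes. Increments and decrements are forwarded upwards
// (chunk -> owning space -> heap) so all levels stay consistent.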
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  heap()->IncrementExternalBackingStoreBytes(type, amount);
}

void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  heap()->DecrementExternalBackingStoreBytes(type, amount);
}

void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          Space* from, Space* to,
                                          size_t amount) {
  if (from == to) return;

  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
}
bool SemiSpace::Contains(HeapObject* o) {
  return id_ == kToSpace
             ? MemoryChunk::FromAddress(o->address())->InToSpace()
             : MemoryChunk::FromAddress(o->address())->InFromSpace();
}

bool SemiSpace::Contains(Object* o) {
  return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool SemiSpace::ContainsSlow(Address a) {
  for (Page* p : *this) {
    if (p == MemoryChunk::FromAddress(a)) return true;
  }
  return false;
}
bool NewSpace::Contains(HeapObject* o) {
  return MemoryChunk::FromAddress(o->address())->InNewSpace();
}

bool NewSpace::Contains(Object* o) {
  return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool NewSpace::ContainsSlow(Address a) {
  return from_space_.ContainsSlow(a) || to_space_.ContainsSlow(a);
}

bool NewSpace::ToSpaceContainsSlow(Address a) {
  return to_space_.ContainsSlow(a);
}

bool NewSpace::ToSpaceContains(Object* o) { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object* o) { return from_space_.Contains(o); }
bool PagedSpace::Contains(Address addr) {
  if (heap()->IsWithinLargeObject(addr)) return false;
  return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this;
}

bool PagedSpace::Contains(Object* o) {
  if (!o->IsHeapObject()) return false;
  return Page::FromAddress(HeapObject::cast(o)->address())->owner() == this;
}
void PagedSpace::UnlinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  page->ForAllFreeListCategories([this](FreeListCategory* category) {
    DCHECK_EQ(free_list(), category->owner());
    category->set_free_list(nullptr);
    free_list()->RemoveCategory(category);
  });
}
size_t PagedSpace::RelinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  size_t added = 0;
  page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
    category->set_free_list(&free_list_);
    added += category->available();
    category->Relink();
  });
  DCHECK_EQ(page->AvailableInFreeList(),
            page->AvailableInFreeListFromAllocatedBytes());
  return added;
}
bool PagedSpace::TryFreeLast(HeapObject* object, int object_size) {
  if (allocation_info_.top() != kNullAddress) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}
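
// Resolving an arbitrary interior pointer to its MemoryChunk: the large
// object space is consulted first, because an address inside a large page
// cannot simply be masked down to a chunk boundary.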
MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) {
  MemoryChunk* chunk = heap->lo_space()->FindPage(addr);
  if (chunk == nullptr) {
    chunk = MemoryChunk::FromAddress(addr);
  }
  return chunk;
}
void MemoryChunk::IncrementExternalBackingStoreBytes(
    ExternalBackingStoreType type, size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  owner()->IncrementExternalBackingStoreBytes(type, amount);
}

void MemoryChunk::DecrementExternalBackingStoreBytes(
    ExternalBackingStoreType type, size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  owner()->DecrementExternalBackingStoreBytes(type, amount);
}

void MemoryChunk::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                                MemoryChunk* from,
                                                MemoryChunk* to,
                                                size_t amount) {
  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
  Space::MoveExternalBackingStoreBytes(type, from->owner(), to->owner(),
                                       amount);
}
bool MemoryChunk::IsInNewLargeObjectSpace() const {
  return owner()->identity() == NEW_LO_SPACE;
}
void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner()->identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(
      this);
}

void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(
      this);
}

void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}
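
// OldGenerationMemoryChunkIterator walks every old-generation chunk: old
// space, map space, code space, large object space, and code large object
// space, in the order encoded by the state machine in next() below.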
OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space()->begin()),
      lo_iterator_(heap->lo_space()->begin()),
      code_lo_iterator_(heap->code_lo_space()->begin()) {}
MemoryChunk* OldGenerationMemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kCodeLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kCodeLargeObjectState: {
      if (code_lo_iterator_ != heap_->code_lo_space()->end())
        return *(code_lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
  }
  UNREACHABLE();
}
Page* FreeList::GetPageForCategoryType(FreeListCategoryType type) {
  return top(type) ? top(type)->page() : nullptr;
}

FreeList* FreeListCategory::owner() { return free_list_; }

bool FreeListCategory::is_linked() {
  return prev_ != nullptr || next_ != nullptr;
}
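
// LocalAllocationBuffer (LAB) allocation: bump-pointer allocation within a
// pre-reserved buffer. AllocateRawAligned prepends a filler object when the
// requested alignment needs padding and returns a retry result once the
// buffer is exhausted.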
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + size_in_bytes;
  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    return heap_->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                    filler_size);
  }

  return AllocationResult(HeapObject::FromAddress(current_top));
}
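
// PagedSpace linear allocation: EnsureLinearAllocationArea() checks whether
// the current bump-pointer region [top, limit) fits the request and falls
// back to the slow refill path otherwise; AllocateLinearly() then simply
// bumps the top pointer.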
bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes) {
  if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
    return true;
  }
  return SlowRefillLinearAllocationArea(size_in_bytes);
}
HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
  Address current_top = allocation_info_.top();
  Address new_top = current_top + size_in_bytes;
  DCHECK_LE(new_top, allocation_info_.limit());
  allocation_info_.set_top(new_top);
  return HeapObject::FromAddress(current_top);
}
HeapObject* PagedSpace::TryAllocateLinearlyAligned(
    int* size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + *size_in_bytes;
  if (new_top > allocation_info_.limit()) return nullptr;

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    *size_in_bytes += filler_size;
    return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                     filler_size);
  }

  return HeapObject::FromAddress(current_top);
}
AllocationResult PagedSpace::AllocateRawUnaligned(
    int size_in_bytes, UpdateSkipList update_skip_list) {
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  if (!EnsureLinearAllocationArea(size_in_bytes)) {
    return AllocationResult::Retry(identity());
  }
  HeapObject* object = AllocateLinearly(size_in_bytes);
  DCHECK_NOT_NULL(object);
  if (update_skip_list == UPDATE_SKIP_LIST && identity() == CODE_SPACE) {
    SkipList::Update(object->address(), size_in_bytes);
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}
AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                AllocationAlignment alignment) {
  DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  int allocation_size = size_in_bytes;
  HeapObject* object = TryAllocateLinearlyAligned(&allocation_size, alignment);
  if (object == nullptr) {
    // We don't know exactly how much filler we need to align until space is
    // allocated, so assume the worst case.
    int filler_size = Heap::GetMaximumFillToAlign(alignment);
    allocation_size += filler_size;
    if (!EnsureLinearAllocationArea(allocation_size)) {
      return AllocationResult::Retry(identity());
    }
    allocation_size = size_in_bytes;
    object = TryAllocateLinearlyAligned(&allocation_size, alignment);
    DCHECK_NOT_NULL(object);
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}
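
// AllocateRaw is the common entry point for paged-space allocation: it takes
// the aligned path only on 32-bit hosts (where doubles need explicit
// alignment), accounts for bytes allocated by generated code via
// top_on_previous_step_, and notifies allocation observers on success.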
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
                                         AllocationAlignment alignment) {
  if (top_on_previous_step_ && top() < top_on_previous_step_ &&
      SupportsInlineAllocation()) {
    // Generated code decreased the top() pointer to do folded allocations.
    DCHECK_NE(top(), kNullAddress);
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
    top_on_previous_step_ = top();
  }
  size_t bytes_since_last =
      top_on_previous_step_ ? top() - top_on_previous_step_ : 0;

  DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
#ifdef V8_HOST_ARCH_32_BIT
  AllocationResult result =
      alignment == kDoubleAligned
          ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
          : AllocateRawUnaligned(size_in_bytes);
#else
  AllocationResult result = AllocateRawUnaligned(size_in_bytes);
#endif
  HeapObject* heap_obj = nullptr;
  if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
    DCHECK_IMPLIES(
        heap()->incremental_marking()->black_allocation(),
        heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
    AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
                   heap_obj->address(), size_in_bytes);
    StartNextInlineAllocationStep();
  }
  return result;
}
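
// -----------------------------------------------------------------------------
// NewSpace allocation
//
// New-space allocation bumps the top pointer of the active semispace page and
// calls EnsureAllocation() to make room when the current linear area is too
// small for the request.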
AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                              AllocationAlignment alignment) {
  Address top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(top, alignment);
  int aligned_size_in_bytes = size_in_bytes + filler_size;

  if (allocation_info_.limit() - top <
      static_cast<uintptr_t>(aligned_size_in_bytes)) {
    // Not enough room in the current linear area; see if we can create room.
    if (!EnsureAllocation(size_in_bytes, alignment)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
    filler_size = Heap::GetFillToAlign(top, alignment);
    aligned_size_in_bytes = size_in_bytes + filler_size;
  }

  HeapObject* obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + aligned_size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  if (filler_size > 0) {
    obj = heap()->PrecedeWithFiller(obj, filler_size);
  }

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}
AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
  Address top = allocation_info_.top();
  if (allocation_info_.limit() < top + size_in_bytes) {
    // Not enough room in the current linear area; see if we can create room.
    if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
  }

  HeapObject* obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}
AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
                                       AllocationAlignment alignment) {
  if (top() < top_on_previous_step_) {
    // Generated code decreased the top() pointer to do folded allocations.
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_));
    top_on_previous_step_ = top();
  }
#ifdef V8_HOST_ARCH_32_BIT
  return alignment == kDoubleAligned
             ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
             : AllocateRawUnaligned(size_in_bytes);
#else
  return AllocateRawUnaligned(size_in_bytes);
#endif
}
V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
    int size_in_bytes, AllocationAlignment alignment) {
  base::MutexGuard guard(&mutex_);
  return AllocateRaw(size_in_bytes, alignment);
}
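
// LocalAllocationBuffer helpers: FromResult() turns a successful raw
// allocation into a LAB covering [top, top + size); TryMerge() appends an
// adjacent buffer; TryFreeLast() undoes the most recent bump-pointer
// allocation if the freed object is still at the top.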
LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsRetry()) return InvalidBuffer();
  HeapObject* obj = nullptr;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = HeapObject::cast(obj)->address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}
bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  if (allocation_info_.top() == other->allocation_info_.limit()) {
    allocation_info_.set_top(other->allocation_info_.top());
    other->allocation_info_.Reset(kNullAddress, kNullAddress);
    return true;
  }
  return false;
}
bool LocalAllocationBuffer::TryFreeLast(HeapObject* object, int object_size) {
  if (IsValid()) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_INL_H_