V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
spaces-inl.h
// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/base/atomic-utils.h"
#include "src/base/bounded-page-allocator.h"
#include "src/base/v8-fallthrough.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/msan.h"
#include "src/objects/code-inl.h"

namespace v8 {
namespace internal {

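// Pre- and post-increment for the templated page iterator: each step follows
// the page's next_page() link. Spaces and page ranges are walked with these
// iterators, typically via a range-based for loop as in
// SemiSpace::ContainsSlow() below. An illustrative sketch, not code from this
// header:
//
//   for (Page* page : *space) {
//     // visit |page|
//   }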
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> tmp(*this);
  operator++();
  return tmp;
}

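// A PageRange spans the pages from the one containing |start| up to and
// including the page whose allocation area contains |limit|. In debug builds,
// new-space ranges are additionally validated against the semispace bounds.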
PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}

// -----------------------------------------------------------------------------
// SemiSpaceIterator

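// Returns the next heap object in the semispace, or nullptr once the iterator
// reaches |limit_|. When the cursor reaches the end of a page it advances to
// the next page's object area; filler objects are never returned.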
HeapObject* SemiSpaceIterator::Next() {
  while (current_ != limit_) {
    if (Page::IsAlignedToPageSize(current_)) {
      Page* page = Page::FromAllocationAreaAddress(current_);
      page = page->next_page();
      DCHECK(page);
      current_ = page->area_start();
      if (current_ == limit_) return nullptr;
    }
    HeapObject* object = HeapObject::FromAddress(current_);
    current_ += object->Size();
    if (!object->IsFiller()) {
      return object;
    }
  }
  return nullptr;
}

// -----------------------------------------------------------------------------
// HeapObjectIterator

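// Next() returns objects from the current page until it is exhausted, then
// advances page by page through the space. FromCurrentPage() skips the unused
// part of the linear allocation area (between top() and limit()) as well as
// filler objects, and debug-checks that object sizes are plausible for the
// space they live in.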
HeapObject* HeapObjectIterator::Next() {
  do {
    HeapObject* next_obj = FromCurrentPage();
    if (next_obj != nullptr) return next_obj;
  } while (AdvanceToNextPage());
  return nullptr;
}

HeapObject* HeapObjectIterator::FromCurrentPage() {
  while (cur_addr_ != cur_end_) {
    if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) {
      cur_addr_ = space_->limit();
      continue;
    }
    HeapObject* obj = HeapObject::FromAddress(cur_addr_);
    const int obj_size = obj->Size();
    cur_addr_ += obj_size;
    DCHECK_LE(cur_addr_, cur_end_);
    if (!obj->IsFiller()) {
      if (obj->IsCode()) {
        DCHECK_EQ(space_, space_->heap()->code_space());
        DCHECK_CODEOBJECT_SIZE(obj_size, space_);
      } else {
        DCHECK_OBJECT_SIZE(obj_size);
      }
      return obj;
    }
  }
  return nullptr;
}

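// Accounting for external backing-store bytes (e.g. ArrayBuffer backing
// stores) attributed to this space. The per-space counters are adjusted via
// base::CheckedIncrement/CheckedDecrement and mirrored into the heap-wide
// totals; moving bytes between spaces adjusts only the per-space counters,
// since the heap-wide total is unchanged (and it is a no-op if from == to).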
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  heap()->IncrementExternalBackingStoreBytes(type, amount);
}

void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  heap()->DecrementExternalBackingStoreBytes(type, amount);
}

void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          Space* from, Space* to,
                                          size_t amount) {
  if (from == to) return;

  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
}

// -----------------------------------------------------------------------------
// SemiSpace

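// Containment checks: a semispace identifies itself as to-space or from-space
// via |id_| and tests the corresponding flag on the object's MemoryChunk;
// ContainsSlow() falls back to a linear walk over the pages.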
bool SemiSpace::Contains(HeapObject* o) {
  return id_ == kToSpace
             ? MemoryChunk::FromAddress(o->address())->InToSpace()
             : MemoryChunk::FromAddress(o->address())->InFromSpace();
}

bool SemiSpace::Contains(Object* o) {
  return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool SemiSpace::ContainsSlow(Address a) {
  for (Page* p : *this) {
    if (p == MemoryChunk::FromAddress(a)) return true;
  }
  return false;
}

// --------------------------------------------------------------------------
// NewSpace

bool NewSpace::Contains(HeapObject* o) {
  return MemoryChunk::FromAddress(o->address())->InNewSpace();
}

bool NewSpace::Contains(Object* o) {
  return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool NewSpace::ContainsSlow(Address a) {
  return from_space_.ContainsSlow(a) || to_space_.ContainsSlow(a);
}

bool NewSpace::ToSpaceContainsSlow(Address a) {
  return to_space_.ContainsSlow(a);
}

bool NewSpace::ToSpaceContains(Object* o) { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object* o) { return from_space_.Contains(o); }

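// Containment checks for paged spaces: addresses inside large objects can
// never belong to a regular paged space, so they are rejected up front;
// otherwise the owner of the address's chunk (or page) is compared against
// this space.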
bool PagedSpace::Contains(Address addr) {
  if (heap()->IsWithinLargeObject(addr)) return false;
  return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this;
}

bool PagedSpace::Contains(Object* o) {
  if (!o->IsHeapObject()) return false;
  return Page::FromAddress(HeapObject::cast(o)->address())->owner() == this;
}

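// UnlinkFreeListCategories() detaches all of a page's free-list categories
// from this space's free list; RelinkFreeListCategories() reattaches them and
// returns the number of bytes they make available again. TryFreeLast() rolls
// back the most recent linear allocation if |object| ends exactly at the
// current allocation top.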
void PagedSpace::UnlinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  page->ForAllFreeListCategories([this](FreeListCategory* category) {
    DCHECK_EQ(free_list(), category->owner());
    category->set_free_list(nullptr);
    free_list()->RemoveCategory(category);
  });
}

size_t PagedSpace::RelinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  size_t added = 0;
  page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
    category->set_free_list(&free_list_);
    added += category->available();
    category->Relink();
  });
  DCHECK_EQ(page->AvailableInFreeList(),
            page->AvailableInFreeListFromAllocatedBytes());
  return added;
}

bool PagedSpace::TryFreeLast(HeapObject* object, int object_size) {
  if (allocation_info_.top() != kNullAddress) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

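// Maps an arbitrary pointer into the heap to its MemoryChunk. Large-object
// pages are looked up explicitly via the large-object space, since their
// chunk start cannot in general be derived by masking the address; everything
// else uses the ordinary alignment-based FromAddress().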
MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) {
  MemoryChunk* chunk = heap->lo_space()->FindPage(addr);
  if (chunk == nullptr) {
    chunk = MemoryChunk::FromAddress(addr);
  }
  return chunk;
}

void MemoryChunk::IncrementExternalBackingStoreBytes(
    ExternalBackingStoreType type, size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  owner()->IncrementExternalBackingStoreBytes(type, amount);
}

void MemoryChunk::DecrementExternalBackingStoreBytes(
    ExternalBackingStoreType type, size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  owner()->DecrementExternalBackingStoreBytes(type, amount);
}

void MemoryChunk::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                                MemoryChunk* from,
                                                MemoryChunk* to,
                                                size_t amount) {
  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
  Space::MoveExternalBackingStoreBytes(type, from->owner(), to->owner(),
                                       amount);
}

bool MemoryChunk::IsInNewLargeObjectSpace() const {
  return owner()->identity() == NEW_LO_SPACE;
}

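// Page flag helpers. Marking a page as never-allocate (testing only) or as an
// evacuation candidate also evicts its entries from the owning space's free
// list so that no further allocation happens on the page; clearing the
// evacuation-candidate flag re-initializes the page's free-list categories.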
void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner()->identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}

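// Iterates all old-generation memory chunks: old space, then map space, code
// space, the large-object space and the code large-object space, using a
// simple state machine. next() returns nullptr once every space has been
// exhausted. An illustrative sketch of the calling pattern, not code from
// this header:
//
//   OldGenerationMemoryChunkIterator it(heap);
//   while (MemoryChunk* chunk = it.next()) {
//     // visit |chunk|
//   }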
OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space()->begin()),
      lo_iterator_(heap->lo_space()->begin()),
      code_lo_iterator_(heap->code_lo_space()->begin()) {}

MemoryChunk* OldGenerationMemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kCodeLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kCodeLargeObjectState: {
      if (code_lo_iterator_ != heap_->code_lo_space()->end())
        return *(code_lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
    default:
      break;
  }
  UNREACHABLE();
}

Page* FreeList::GetPageForCategoryType(FreeListCategoryType type) {
  return top(type) ? top(type)->page() : nullptr;
}

FreeList* FreeListCategory::owner() { return free_list_; }

bool FreeListCategory::is_linked() {
  return prev_ != nullptr || next_ != nullptr;
}

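// Bump-pointer allocation inside a LocalAllocationBuffer: if the aligned
// request does not fit before limit(), the allocation must be retried;
// otherwise top() is advanced and any alignment gap is filled with a filler
// object preceding the result.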
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + size_in_bytes;
  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    return heap_->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                    filler_size);
  }

  return AllocationResult(HeapObject::FromAddress(current_top));
}

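// Fast-path linear allocation for paged spaces. EnsureLinearAllocationArea()
// checks whether the current linear allocation area can hold the request and
// otherwise defers to the slow refill path. AllocateLinearly() simply bumps
// top(); TryAllocateLinearlyAligned() additionally inserts an alignment
// filler and reports the filler bytes back through |size_in_bytes|.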
bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes) {
  if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
    return true;
  }
  return SlowRefillLinearAllocationArea(size_in_bytes);
}

HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
  Address current_top = allocation_info_.top();
  Address new_top = current_top + size_in_bytes;
  DCHECK_LE(new_top, allocation_info_.limit());
  allocation_info_.set_top(new_top);
  return HeapObject::FromAddress(current_top);
}

HeapObject* PagedSpace::TryAllocateLinearlyAligned(
    int* size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + *size_in_bytes;
  if (new_top > allocation_info_.limit()) return nullptr;

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    *size_in_bytes += filler_size;
    return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                     filler_size);
  }

  return HeapObject::FromAddress(current_top);
}

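// Unaligned raw allocation: refills the linear allocation area if needed,
// bumps top(), keeps the code-space skip list up to date, and tells MSan that
// the returned memory is uninitialized.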
AllocationResult PagedSpace::AllocateRawUnaligned(
    int size_in_bytes, UpdateSkipList update_skip_list) {
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  if (!EnsureLinearAllocationArea(size_in_bytes)) {
    return AllocationResult::Retry(identity());
  }
  HeapObject* object = AllocateLinearly(size_in_bytes);
  DCHECK_NOT_NULL(object);
  if (update_skip_list == UPDATE_SKIP_LIST && identity() == CODE_SPACE) {
    SkipList::Update(object->address(), size_in_bytes);
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}


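// Aligned raw allocation (old and read-only space only). The first attempt
// uses whatever filler the current top() requires; if that fails, enough room
// for the worst-case filler is reserved before retrying.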
AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                AllocationAlignment alignment) {
  DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  int allocation_size = size_in_bytes;
  HeapObject* object = TryAllocateLinearlyAligned(&allocation_size, alignment);
  if (object == nullptr) {
    // We don't know exactly how much filler we need to align until space is
    // allocated, so assume the worst case.
    int filler_size = Heap::GetMaximumFillToAlign(alignment);
    allocation_size += filler_size;
    if (!EnsureLinearAllocationArea(allocation_size)) {
      return AllocationResult::Retry(identity());
    }
    allocation_size = size_in_bytes;
    object = TryAllocateLinearlyAligned(&allocation_size, alignment);
    DCHECK_NOT_NULL(object);
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}


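// Main allocation entry point for paged spaces. On 32-bit hosts a
// double-aligned request takes the aligned path; otherwise the unaligned path
// is used. For successful allocations in non-local spaces, AllocationStep()
// and StartNextInlineAllocationStep() are invoked to account for the bytes
// allocated since the previous step.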
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
                                         AllocationAlignment alignment) {
  if (top_on_previous_step_ && top() < top_on_previous_step_ &&
      SupportsInlineAllocation()) {
    // Generated code decreased the top() pointer to do folded allocations.
    // The top_on_previous_step_ can be one byte beyond the current page.
    DCHECK_NE(top(), kNullAddress);
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
    top_on_previous_step_ = top();
  }
  size_t bytes_since_last =
      top_on_previous_step_ ? top() - top_on_previous_step_ : 0;

  DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
#ifdef V8_HOST_ARCH_32_BIT
  AllocationResult result =
      alignment == kDoubleAligned
          ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
          : AllocateRawUnaligned(size_in_bytes);
#else
  AllocationResult result = AllocateRawUnaligned(size_in_bytes);
#endif
  HeapObject* heap_obj = nullptr;
  if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
    DCHECK_IMPLIES(
        heap()->incremental_marking()->black_allocation(),
        heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
    AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
                   heap_obj->address(), size_in_bytes);
    StartNextInlineAllocationStep();
  }
  return result;
}


// -----------------------------------------------------------------------------
// NewSpace


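// New-space allocation bumps top() within the linear allocation area of
// to-space. If the request does not fit, EnsureAllocation() tries to make
// room; if that also fails the result signals a retry. Alignment gaps are
// filled with filler objects and the returned memory is flagged as
// uninitialized for MSan.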
AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                              AllocationAlignment alignment) {
  Address top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(top, alignment);
  int aligned_size_in_bytes = size_in_bytes + filler_size;

  if (allocation_info_.limit() - top <
      static_cast<uintptr_t>(aligned_size_in_bytes)) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, alignment)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
    filler_size = Heap::GetFillToAlign(top, alignment);
    aligned_size_in_bytes = size_in_bytes + filler_size;
  }

  HeapObject* obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + aligned_size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  if (filler_size > 0) {
    obj = heap()->PrecedeWithFiller(obj, filler_size);
  }

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}


AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
  Address top = allocation_info_.top();
  if (allocation_info_.limit() < top + size_in_bytes) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
  }

  HeapObject* obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}


AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
                                       AllocationAlignment alignment) {
  if (top() < top_on_previous_step_) {
    // Generated code decreased the top() pointer to do folded allocations
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_));
    top_on_previous_step_ = top();
  }
#ifdef V8_HOST_ARCH_32_BIT
  return alignment == kDoubleAligned
             ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
             : AllocateRawUnaligned(size_in_bytes);
#else
  return AllocateRawUnaligned(size_in_bytes);
#endif
}

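// Thread-safe variant of AllocateRaw() that serializes allocations through
// the space's mutex.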
V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
    int size_in_bytes, AllocationAlignment alignment) {
  base::MutexGuard guard(&mutex_);
  return AllocateRaw(size_in_bytes, alignment);
}

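// LocalAllocationBuffer helpers: FromResult() turns a successful allocation
// of |size| bytes into a buffer (or an invalid buffer on retry); TryMerge()
// absorbs an adjacent buffer whose limit coincides with this buffer's top;
// TryFreeLast() undoes the most recent allocation if |object| ends exactly at
// top().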
LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsRetry()) return InvalidBuffer();
  HeapObject* obj = nullptr;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = HeapObject::cast(obj)->address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}


bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  if (allocation_info_.top() == other->allocation_info_.limit()) {
    allocation_info_.set_top(other->allocation_info_.top());
    other->allocation_info_.Reset(kNullAddress, kNullAddress);
    return true;
  }
  return false;
}

bool LocalAllocationBuffer::TryFreeLast(HeapObject* object, int object_size) {
  if (IsValid()) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_INL_H_