// V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
// mark-compact-inl.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_HEAP_MARK_COMPACT_INL_H_
6 #define V8_HEAP_MARK_COMPACT_INL_H_
7 
8 #include "src/base/bits.h"
9 #include "src/heap/mark-compact.h"
10 #include "src/heap/objects-visiting-inl.h"
11 #include "src/heap/remembered-set.h"
12 #include "src/objects/js-collection-inl.h"
13 #include "src/objects/js-weak-refs-inl.h"
14 #include "src/objects/slots-inl.h"
15 
16 namespace v8 {
17 namespace internal {
18 
19 template <typename ConcreteState, AccessMode access_mode>
20 bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(
21  HeapObject* obj) {
22  MemoryChunk* p = MemoryChunk::FromAddress(obj->address());
23  MarkBit markbit = MarkBitFrom(p, obj->address());
24  if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
25  static_cast<ConcreteState*>(this)->IncrementLiveBytes(p, obj->Size());
26  return true;
27 }
28 
29 template <typename ConcreteState, AccessMode access_mode>
30 bool MarkingStateBase<ConcreteState, access_mode>::WhiteToGrey(
31  HeapObject* obj) {
32  return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj));
33 }
34 
35 template <typename ConcreteState, AccessMode access_mode>
36 bool MarkingStateBase<ConcreteState, access_mode>::WhiteToBlack(
37  HeapObject* obj) {
38  return WhiteToGrey(obj) && GreyToBlack(obj);
39 }
40 
// Caches the heap, the owning collector, and the marking state that this
// visitor uses for all mark-bit transitions.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
MarkingVisitor<fixed_array_mode, retaining_path_mode,
               MarkingState>::MarkingVisitor(MarkCompactCollector* collector,
                                             MarkingState* marking_state)
    : heap_(collector->heap()),
      collector_(collector),
      marking_state_(marking_state) {}
49 
50 template <FixedArrayVisitationMode fixed_array_mode,
51  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
52 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
53  MarkingState>::VisitBytecodeArray(Map map,
54  BytecodeArray array) {
55  int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
56  BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);
57  array->MakeOlder();
58  return size;
59 }
60 
61 template <FixedArrayVisitationMode fixed_array_mode,
62  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
63 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
64  MarkingState>::VisitFixedArray(Map map, FixedArray object) {
65  return (fixed_array_mode == FixedArrayVisitationMode::kRegular)
66  ? Parent::VisitFixedArray(map, object)
67  : VisitFixedArrayIncremental(map, object);
68 }
69 
70 template <FixedArrayVisitationMode fixed_array_mode,
71  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
72 template <typename T>
73 V8_INLINE int
74 MarkingVisitor<fixed_array_mode, retaining_path_mode,
75  MarkingState>::VisitEmbedderTracingSubclass(Map map, T* object) {
76  if (heap_->local_embedder_heap_tracer()->InUse()) {
77  marking_worklist()->embedder()->Push(MarkCompactCollectorBase::kMainThread,
78  object);
79  }
80  int size = T::BodyDescriptor::SizeOf(map, object);
81  T::BodyDescriptor::IterateBody(map, object, size, this);
82  return size;
83 }
84 
// JSApiObject is visited via the embedder-tracing path so an active
// embedder heap tracer is notified of it.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSApiObject(Map map, JSObject* object) {
  return VisitEmbedderTracingSubclass(map, object);
}
91 
// JSArrayBuffer is visited via the embedder-tracing path so an active
// embedder heap tracer is notified of it.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSArrayBuffer(Map map,
                                                     JSArrayBuffer* object) {
  return VisitEmbedderTracingSubclass(map, object);
}
99 
// JSDataView is visited via the embedder-tracing path so an active
// embedder heap tracer is notified of it.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSDataView(Map map, JSDataView* object) {
  return VisitEmbedderTracingSubclass(map, object);
}
106 
// JSTypedArray is visited via the embedder-tracing path so an active
// embedder heap tracer is notified of it.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSTypedArray(Map map,
                                                    JSTypedArray* object) {
  return VisitEmbedderTracingSubclass(map, object);
}
114 
115 template <FixedArrayVisitationMode fixed_array_mode,
116  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
117 int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
118  VisitEphemeronHashTable(Map map, EphemeronHashTable table) {
119  collector_->AddEphemeronHashTable(table);
120 
121  for (int i = 0; i < table->Capacity(); i++) {
122  ObjectSlot key_slot =
123  table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
124  HeapObject* key = HeapObject::cast(table->KeyAt(i));
125  collector_->RecordSlot(table, key_slot, key);
126 
127  ObjectSlot value_slot =
128  table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
129 
130  if (marking_state()->IsBlackOrGrey(key)) {
131  VisitPointer(table, value_slot);
132 
133  } else {
134  Object* value_obj = *value_slot;
135 
136  if (value_obj->IsHeapObject()) {
137  HeapObject* value = HeapObject::cast(value_obj);
138  collector_->RecordSlot(table, value_slot, value);
139 
140  // Revisit ephemerons with both key and value unreachable at end
141  // of concurrent marking cycle.
142  if (marking_state()->IsWhite(value)) {
143  collector_->AddEphemeron(key, value);
144  }
145  }
146  }
147  }
148 
149  return table->SizeFromMap(map);
150 }
151 
152 template <FixedArrayVisitationMode fixed_array_mode,
153  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
154 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
155  MarkingState>::VisitMap(Map map, Map object) {
156  // When map collection is enabled we have to mark through map's transitions
157  // and back pointers in a special way to make these links weak.
158  int size = Map::BodyDescriptor::SizeOf(map, object);
159  if (object->CanTransition()) {
160  MarkMapContents(object);
161  } else {
162  Map::BodyDescriptor::IterateBody(map, object, size, this);
163  }
164  return size;
165 }
166 
167 template <FixedArrayVisitationMode fixed_array_mode,
168  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
169 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
170  MarkingState>::VisitTransitionArray(Map map,
171  TransitionArray* array) {
172  int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
173  TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
174  collector_->AddTransitionArray(array);
175  return size;
176 }
177 
178 template <FixedArrayVisitationMode fixed_array_mode,
179  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
180 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
181  MarkingState>::VisitJSWeakCell(Map map,
182  JSWeakCell* weak_cell) {
183  if (weak_cell->target()->IsHeapObject()) {
184  HeapObject* target = HeapObject::cast(weak_cell->target());
185  if (marking_state()->IsBlackOrGrey(target)) {
186  // Record the slot inside the JSWeakCell, since the IterateBody below
187  // won't visit it.
188  ObjectSlot slot =
189  HeapObject::RawField(weak_cell, JSWeakCell::kTargetOffset);
190  collector_->RecordSlot(weak_cell, slot, target);
191  } else {
192  // JSWeakCell points to a potentially dead object. We have to process
193  // them when we know the liveness of the whole transitive closure.
194  collector_->AddWeakCell(weak_cell);
195  }
196  }
197  int size = JSWeakCell::BodyDescriptor::SizeOf(map, weak_cell);
198  JSWeakCell::BodyDescriptor::IterateBody(map, weak_cell, size, this);
199  return size;
200 }
201 
202 template <FixedArrayVisitationMode fixed_array_mode,
203  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
204 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
205  MarkingState>::VisitPointer(HeapObject* host,
206  ObjectSlot p) {
207  if (!(*p)->IsHeapObject()) return;
208  HeapObject* target_object = HeapObject::cast(*p);
209  collector_->RecordSlot(host, p, target_object);
210  MarkObject(host, target_object);
211 }
212 
213 template <FixedArrayVisitationMode fixed_array_mode,
214  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
215 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
216  MarkingState>::VisitPointer(HeapObject* host,
217  MaybeObjectSlot p) {
218  HeapObject* target_object;
219  if ((*p)->GetHeapObjectIfStrong(&target_object)) {
220  collector_->RecordSlot(host, HeapObjectSlot(p), target_object);
221  MarkObject(host, target_object);
222  } else if ((*p)->GetHeapObjectIfWeak(&target_object)) {
223  if (marking_state()->IsBlackOrGrey(target_object)) {
224  // Weak references with live values are directly processed here to reduce
225  // the processing time of weak cells during the main GC pause.
226  collector_->RecordSlot(host, HeapObjectSlot(p), target_object);
227  } else {
228  // If we do not know about liveness of values of weak cells, we have to
229  // process them when we know the liveness of the whole transitive
230  // closure.
231  collector_->AddWeakReference(host, HeapObjectSlot(p));
232  }
233  }
234 }
235 
236 template <FixedArrayVisitationMode fixed_array_mode,
237  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
238 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
239  MarkingState>::VisitPointers(HeapObject* host,
240  ObjectSlot start,
241  ObjectSlot end) {
242  for (ObjectSlot p = start; p < end; ++p) {
243  VisitPointer(host, p);
244  }
245 }
246 
247 template <FixedArrayVisitationMode fixed_array_mode,
248  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
249 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
250  MarkingState>::VisitPointers(HeapObject* host,
251  MaybeObjectSlot start,
252  MaybeObjectSlot end) {
253  for (MaybeObjectSlot p = start; p < end; ++p) {
254  VisitPointer(host, p);
255  }
256 }
257 
258 template <FixedArrayVisitationMode fixed_array_mode,
259  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
260 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
261  MarkingState>::VisitEmbeddedPointer(Code host,
262  RelocInfo* rinfo) {
263  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
264  HeapObject* object = HeapObject::cast(rinfo->target_object());
265  collector_->RecordRelocSlot(host, rinfo, object);
266  if (!host->IsWeakObject(object)) {
267  MarkObject(host, object);
268  } else if (!marking_state()->IsBlackOrGrey(object)) {
269  collector_->AddWeakObjectInCode(object, host);
270  }
271 }
272 
273 template <FixedArrayVisitationMode fixed_array_mode,
274  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
275 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
276  MarkingState>::VisitCodeTarget(Code host,
277  RelocInfo* rinfo) {
278  DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
279  Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
280  collector_->RecordRelocSlot(host, rinfo, target);
281  MarkObject(host, target);
282 }
283 
284 template <FixedArrayVisitationMode fixed_array_mode,
285  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
286 bool MarkingVisitor<fixed_array_mode, retaining_path_mode,
287  MarkingState>::MarkObjectWithoutPush(HeapObject* host,
288  HeapObject* object) {
289  if (marking_state()->WhiteToBlack(object)) {
290  if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
291  V8_UNLIKELY(FLAG_track_retaining_path)) {
292  heap_->AddRetainer(host, object);
293  }
294  return true;
295  }
296  return false;
297 }
298 
299 template <FixedArrayVisitationMode fixed_array_mode,
300  TraceRetainingPathMode retaining_path_mode, typename MarkingState>
301 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
302  MarkingState>::MarkObject(HeapObject* host,
303  HeapObject* object) {
304  if (marking_state()->WhiteToGrey(object)) {
305  marking_worklist()->Push(object);
306  if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
307  V8_UNLIKELY(FLAG_track_retaining_path)) {
308  heap_->AddRetainer(host, object);
309  }
310  }
311 }
312 
// Incremental visitation of a FixedArray. Arrays on chunks with a progress
// bar (large-object space) are scanned one kProgressBarScanningChunk at a
// time; the progress bar stores the next offset to scan and the array is
// re-pushed onto the worklist until fully scanned. Other arrays are scanned
// in a single pass. Returns the full object size in either case.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitFixedArrayIncremental(Map map, FixedArray object) {
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
  int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
  if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
    DCHECK(!FLAG_use_marking_progress_bar ||
           chunk->owner()->identity() == LO_SPACE);
    // When using a progress bar for large fixed arrays, scan only a chunk of
    // the array and try to push it onto the marking deque again until it is
    // fully scanned. Fall back to scanning it through to the end in case this
    // fails because of a full deque.
    int start_offset =
        Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
    if (start_offset < object_size) {
      // Ensure that the object is either grey or black before pushing it
      // into marking worklist.
      marking_state()->WhiteToGrey(object);
      if (FLAG_concurrent_marking || FLAG_parallel_marking) {
        marking_worklist()->PushBailout(object);
      } else {
        marking_worklist()->Push(object);
      }
      DCHECK(marking_state()->IsGrey(object) ||
             marking_state()->IsBlack(object));

      // Scan one chunk of the array: [start_offset, end_offset).
      int end_offset =
          Min(object_size, start_offset + kProgressBarScanningChunk);
      int already_scanned_offset = start_offset;
      VisitPointers(object, HeapObject::RawField(object, start_offset),
                    HeapObject::RawField(object, end_offset));
      // Advance the progress bar past the scanned chunk.
      start_offset = end_offset;
      end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
      chunk->set_progress_bar(start_offset);
      if (start_offset < object_size) {
        // Not done yet: tell incremental marking how much is still unscanned.
        heap_->incremental_marking()->NotifyIncompleteScanOfObject(
            object_size - (start_offset - already_scanned_offset));
      }
    }
  } else {
    // No progress bar: visit the whole array body in one pass.
    FixedArray::BodyDescriptor::IterateBody(map, object, object_size, this);
  }
  return object_size;
}
358 
// Marks the contents of a transitioning Map, taking care that the shared
// descriptor array is only marked strongly for the descriptors this map
// actually owns.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::MarkMapContents(Map map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a non-empty
  // descriptor array is marked, its header is also visited. The slot holding
  // the descriptor array will be implicitly recorded when the pointer fields of
  // this map are visited. Prototype maps don't keep track of transitions, so
  // just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (MarkObjectWithoutPush(map, descriptors)) {
      // First time this descriptor array is marked: visit its header fields.
      VisitPointers(descriptors, descriptors->GetFirstPointerSlot(),
                    descriptors->GetDescriptorSlot(0));
    }
    // Visit only the descriptor slots owned by this map.
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      VisitPointers(descriptors,
                    MaybeObjectSlot(descriptors->GetDescriptorSlot(start)),
                    MaybeObjectSlot(descriptors->GetDescriptorSlot(end)));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  Map::BodyDescriptor::IterateBody(
      map->map(), map, Map::BodyDescriptor::SizeOf(map->map(), map), this);
}
390 
391 void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
392  if (marking_state()->WhiteToGrey(obj)) {
393  marking_worklist()->Push(obj);
394  if (V8_UNLIKELY(FLAG_track_retaining_path)) {
395  heap_->AddRetainer(host, obj);
396  }
397  }
398 }
399 
400 void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
401  if (marking_state()->WhiteToGrey(obj)) {
402  marking_worklist()->Push(obj);
403  if (V8_UNLIKELY(FLAG_track_retaining_path)) {
404  heap_->AddRetainingRoot(root, obj);
405  }
406  }
407 }
408 
#ifdef ENABLE_MINOR_MC

// Minor (young-generation) collector: only new-space objects are marked;
// objects outside new space are ignored entirely.
void MinorMarkCompactCollector::MarkRootObject(HeapObject* obj) {
  if (Heap::InNewSpace(obj) && non_atomic_marking_state_.WhiteToGrey(obj)) {
    worklist_->Push(kMainThread, obj);
  }
}

#endif
418 
419 void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
420  if (marking_state()->WhiteToGrey(obj)) {
421  marking_worklist()->Push(obj);
422  if (V8_UNLIKELY(FLAG_track_retaining_path)) {
423  heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
424  }
425  }
426 }
427 
// Convenience overload: wraps the ObjectSlot as a HeapObjectSlot and
// forwards to the main RecordSlot implementation.
void MarkCompactCollector::RecordSlot(HeapObject* object, ObjectSlot slot,
                                      HeapObject* target) {
  RecordSlot(object, HeapObjectSlot(slot), target);
}
432 
433 void MarkCompactCollector::RecordSlot(HeapObject* object, HeapObjectSlot slot,
434  HeapObject* target) {
435  Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
436  Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
437  if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
438  !source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
439  RememberedSet<OLD_TO_OLD>::Insert(source_page, slot.address());
440  }
441 }
442 
// Positions the iterator at the first live object at or after |start| on
// |chunk|. The filler maps are cached up front so filler objects can be
// filtered by map identity (see AdvanceToNextValidObject) instead of
// reading instance types.
template <LiveObjectIterationMode mode>
LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
                                          Address start)
    : chunk_(chunk),
      one_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).one_pointer_filler_map()),
      two_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).two_pointer_filler_map()),
      free_space_map_(ReadOnlyRoots(chunk->heap()).free_space_map()),
      it_(chunk, bitmap) {
  // Jump the cell iterator to the cell holding |start|'s mark bit.
  it_.Advance(Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(start))));
  if (!it_.Done()) {
    cell_base_ = it_.CurrentCellBase();
    current_cell_ = *it_.CurrentCell();
    AdvanceToNextValidObject();
  } else {
    // Empty range: the iterator is immediately exhausted.
    current_object_ = nullptr;
  }
}
463 
// Pre-increment: advances to the next live object in the range.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator& LiveObjectRange<mode>::iterator::
operator++() {
  AdvanceToNextValidObject();
  return *this;
}
470 
471 template <LiveObjectIterationMode mode>
472 typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::iterator::
473 operator++(int) {
474  iterator retval = *this;
475  ++(*this);
476  return retval;
477 }
478 
// Walks the mark bitmap forward from the current cell until it finds the
// next object this iteration mode should yield (black and/or grey, per
// |mode|), skipping filler objects, or exhausts the chunk. On success sets
// current_object_/current_size_; on exhaustion sets current_object_ to
// nullptr. Marking encodes color in two consecutive bits (grey = first bit
// only, black = both), which is why the bit after the object-start bit is
// inspected below.
template <LiveObjectIterationMode mode>
void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
  while (!it_.Done()) {
    HeapObject* object = nullptr;
    int size = 0;
    while (current_cell_ != 0) {
      // Lowest set bit in the cell is the next candidate object start.
      uint32_t trailing_zeros = base::bits::CountTrailingZeros(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kPointerSize;

      // Clear the first bit of the found object..
      current_cell_ &= ~(1u << trailing_zeros);

      uint32_t second_bit_index = 0;
      if (trailing_zeros >= Bitmap::kBitIndexMask) {
        // The second mark bit lives in the NEXT cell.
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (!it_.Advance()) {
          DCHECK(HeapObject::FromAddress(addr)->map() == one_word_filler_map_);
          current_object_ = nullptr;
          return;
        }
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      } else {
        second_bit_index = 1u << (trailing_zeros + 1);
      }

      Map map;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject* black_object = HeapObject::FromAddress(addr);
        // Acquire load: the map may be written concurrently.
        map = Map::cast(ObjectSlot(addr).Acquire_Load());
        size = black_object->SizeFromMap(map);
        Address end = addr + size - kPointerSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kPointerSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (mode == kBlackObjects || mode == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
        // Grey object (only the first mark bit is set).
        map = Map::cast(ObjectSlot(addr).Acquire_Load());
        object = HeapObject::FromAddress(addr);
        size = object->SizeFromMap(map);
      }

      // We found a live object.
      if (object != nullptr) {
        // Do not use IsFiller() here. This may cause a data race for reading
        // out the instance type when a new map concurrently is written into
        // this object while iterating over the object.
        if (map == one_word_filler_map_ || map == two_word_filler_map_ ||
            map == free_space_map_) {
          // There are two reasons why we can get black or grey fillers:
          // 1) Black areas together with slack tracking may result in black one
          // word filler objects.
          // 2) Left trimming may leave black or grey fillers behind because we
          // do not clear the old location of the object start.
          // We filter these objects out in the iterator.
          object = nullptr;
        } else {
          break;
        }
      }
    }

    // Current cell exhausted: move on to the next cell, if any.
    if (current_cell_ == 0) {
      if (it_.Advance()) {
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (object != nullptr) {
      current_object_ = object;
      current_size_ = size;
      return;
    }
  }
  // No more live objects on this chunk.
  current_object_ = nullptr;
}
582 
// Returns an iterator positioned at the first live object at or after
// start_ on this chunk.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::begin() {
  return iterator(chunk_, bitmap_, start_);
}
587 
// Returns the past-the-end iterator, positioned at end_.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
  return iterator(chunk_, bitmap_, end_);
}
592 
// Convenience accessor for the isolate that owns the collected heap.
Isolate* MarkCompactCollectorBase::isolate() { return heap()->isolate(); }
594 
595 } // namespace internal
596 } // namespace v8
597 
598 #endif // V8_HEAP_MARK_COMPACT_INL_H_
// Definition: libplatform.h:13