#include "src/heap/heap.h"

#include <unordered_map>
#include <unordered_set>

#include "src/accessors.h"
#include "src/api-inl.h"
#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/base/once.h"
#include "src/base/utils/random-number-generator.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/conversions.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/feedback-vector.h"
#include "src/global-handles.h"
#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/barrier.h"
#include "src/heap/code-stats.h"
#include "src/heap/concurrent-marking.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/gc-idle-time-handler.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-controller.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/memory-reducer.h"
#include "src/heap/object-stats.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/remembered-set.h"
#include "src/heap/scavenge-job.h"
#include "src/heap/scavenger-inl.h"
#include "src/heap/store-buffer.h"
#include "src/heap/stress-marking-observer.h"
#include "src/heap/stress-scavenge-observer.h"
#include "src/heap/sweeper.h"
#include "src/interpreter/interpreter.h"
#include "src/microtask-queue.h"
#include "src/objects/data-handler.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/maybe-object.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/slots-inl.h"
#include "src/regexp/jsregexp.h"
#include "src/runtime-profiler.h"
#include "src/snapshot/embedded-data.h"
#include "src/snapshot/natives.h"
#include "src/snapshot/serializer-common.h"
#include "src/snapshot/snapshot.h"
#include "src/tracing/trace-event.h"
#include "src/unicode-decoder.h"
#include "src/unicode-inl.h"
#include "src/utils-inl.h"
#include "src/utils.h"
#include "src/vm-state-inl.h"

#include "src/objects/object-macros.h"

void Heap::SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
  DCHECK_EQ(Smi::kZero, arguments_adaptor_deopt_pc_offset());
  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
}
void Heap::SetConstructStubCreateDeoptPCOffset(int pc_offset) {
  DCHECK(construct_stub_create_deopt_pc_offset() == Smi::kZero);
  set_construct_stub_create_deopt_pc_offset(Smi::FromInt(pc_offset));
}

void Heap::SetConstructStubInvokeDeoptPCOffset(int pc_offset) {
  DCHECK(construct_stub_invoke_deopt_pc_offset() == Smi::kZero);
  set_construct_stub_invoke_deopt_pc_offset(Smi::FromInt(pc_offset));
}

void Heap::SetInterpreterEntryReturnPCOffset(int pc_offset) {
  DCHECK_EQ(Smi::kZero, interpreter_entry_return_pc_offset());
  set_interpreter_entry_return_pc_offset(Smi::FromInt(pc_offset));
}
void Heap::SetSerializedObjects(FixedArray objects) {
  DCHECK(isolate()->serializer_enabled());
  set_serialized_objects(objects);
}

void Heap::SetSerializedGlobalProxySizes(FixedArray sizes) {
  DCHECK(isolate()->serializer_enabled());
  set_serialized_global_proxy_sizes(sizes);
}
bool Heap::GCCallbackTuple::operator==(
    const Heap::GCCallbackTuple& other) const {
  return other.callback == callback && other.data == data;
}

Heap::GCCallbackTuple& Heap::GCCallbackTuple::operator=(
    const Heap::GCCallbackTuple& other) = default;
// Observer that asks the heap to schedule an idle-time Scavenge after enough
// bytes have been allocated. (The surrounding class was lost in extraction and
// is reconstructed here.)
class IdleScavengeObserver : public AllocationObserver {
 public:
  IdleScavengeObserver(Heap& heap, intptr_t step_size)
      : AllocationObserver(step_size), heap_(heap) {}

  void Step(int bytes_allocated, Address, size_t) override {
    heap_.ScheduleIdleScavengeIfNeeded(bytes_allocated);
  }

 private:
  Heap& heap_;
};
Heap::Heap()
    : isolate_(isolate()),
      initial_max_old_generation_size_(max_old_generation_size_),
      initial_old_generation_size_(max_old_generation_size_ /
                                   kInitalOldGenerationLimitFactor),
      memory_pressure_level_(MemoryPressureLevel::kNone),
      old_generation_allocation_limit_(initial_old_generation_size_),
      global_pretenuring_feedback_(kInitialFeedbackCapacity),
      current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags),
      external_string_table_(this) {
  // Ensure old_generation_size_ is a multiple of kPageSize.
  DCHECK_EQ(0, max_old_generation_size_ & (Page::kPageSize - 1));

  set_native_contexts_list(nullptr);
  set_allocation_sites_list(Smi::kZero);
  RememberUnmappedPage(kNullAddress, false);
}
size_t Heap::MaxReserved() {
  return static_cast<size_t>(2 * max_semi_space_size_ +
                             max_old_generation_size_);
}
size_t Heap::ComputeMaxOldGenerationSize(uint64_t physical_memory) {
  const size_t old_space_physical_memory_factor = 4;
  size_t computed_size = static_cast<size_t>(physical_memory / i::MB /
                                             old_space_physical_memory_factor *
                                             kPointerMultiplier);
  return Max(Min(computed_size, HeapController::kMaxSize),
             HeapController::kMinSize);
}
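// Rough worked example of the formula above, assuming kPointerMultiplier == 2
// on a 64-bit build (an assumption, not something this file guarantees): with
// 8 GB of physical memory, computed_size = 8192 MB / 4 * 2 = 4096 MB, which is
// then clamped into [HeapController::kMinSize, HeapController::kMaxSize].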
size_t Heap::Capacity() {
  if (!HasBeenSetUp()) return 0;

  return new_space_->Capacity() + OldGenerationCapacity();
}
size_t Heap::OldGenerationCapacity() {
  if (!HasBeenSetUp()) return 0;
  PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
  size_t total = 0;
  for (PagedSpace* space = spaces.next(); space != nullptr;
       space = spaces.next()) {
    total += space->Capacity();
  }
  return total + lo_space_->SizeOfObjects() + code_lo_space_->SizeOfObjects();
}
size_t Heap::CommittedOldGenerationMemory() {
  if (!HasBeenSetUp()) return 0;

  PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
  size_t total = 0;
  for (PagedSpace* space = spaces.next(); space != nullptr;
       space = spaces.next()) {
    total += space->CommittedMemory();
  }
  return total + lo_space_->Size() + code_lo_space_->Size();
}
size_t Heap::CommittedMemoryOfUnmapper() {
  if (!HasBeenSetUp()) return 0;

  return memory_allocator()->unmapper()->CommittedBufferedMemory();
}

size_t Heap::CommittedMemory() {
  if (!HasBeenSetUp()) return 0;

  return new_space_->CommittedMemory() + CommittedOldGenerationMemory();
}
size_t Heap::CommittedPhysicalMemory() {
  if (!HasBeenSetUp()) return 0;

  size_t total = 0;
  for (SpaceIterator it(this); it.has_next();) {
    total += it.next()->CommittedPhysicalMemory();
  }

  return total;
}
size_t Heap::CommittedMemoryExecutable() {
  if (!HasBeenSetUp()) return 0;

  return static_cast<size_t>(memory_allocator()->SizeExecutable());
}
void Heap::UpdateMaximumCommitted() {
  if (!HasBeenSetUp()) return;

  const size_t current_committed_memory = CommittedMemory();
  if (current_committed_memory > maximum_committed_) {
    maximum_committed_ = current_committed_memory;
  }
}
size_t Heap::Available() {
  if (!HasBeenSetUp()) return 0;

  size_t total = 0;
  for (SpaceIterator it(this); it.has_next();) {
    total += it.next()->Available();
  }

  total += memory_allocator()->Available();
  return total;
}
bool Heap::CanExpandOldGeneration(size_t size) {
  if (force_oom_) return false;
  if (OldGenerationCapacity() + size > MaxOldGenerationSize()) return false;
  return memory_allocator()->Size() + size <= MaxReserved();
}
bool Heap::HasBeenSetUp() {
  // We will always have a new space when the heap is set up.
  return new_space_ != nullptr;
}
GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
                                              const char** reason) {
  // Is global GC requested?
  if (space != NEW_SPACE) {
    isolate_->counters()->gc_compactor_caused_by_request()->Increment();
    *reason = "GC in old space requested";
    return MARK_COMPACTOR;
  }

  if (FLAG_gc_global || (FLAG_stress_compaction && (gc_count_ & 1) != 0)) {
    *reason = "GC in old space forced by flags";
    return MARK_COMPACTOR;
  }

  if (incremental_marking()->NeedsFinalization() &&
      AllocationLimitOvershotByLargeMargin()) {
    *reason = "Incremental marking needs finalization";
    return MARK_COMPACTOR;
  }

  // Over-estimate the new space size using capacity to allow some slack.
  if (!CanExpandOldGeneration(new_space_->TotalCapacity())) {
    isolate_->counters()
        ->gc_compactor_caused_by_oldspace_exhaustion()
        ->Increment();
    *reason = "scavenge might not succeed";
    return MARK_COMPACTOR;
  }

  // Default.
  *reason = nullptr;
  return YoungGenerationCollector();
}
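// Summary of the selection above: a request for anything other than NEW_SPACE,
// the --gc-global / --stress-compaction flags, pending incremental-marking
// finalization with a large allocation overshoot, or an old generation that
// cannot absorb a full semi-space all force MARK_COMPACTOR; otherwise the
// young-generation collector is used.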
void Heap::SetGCState(HeapState state) {
  gc_state_ = state;
}
void Heap::PrintShortHeapStatistics() {
  if (!FLAG_trace_gc_verbose) return;
  PrintIsolate(isolate_,
               "Memory allocator,        used: %6" PRIuS
               " KB, available: %6" PRIuS " KB\n",
               memory_allocator()->Size() / KB,
               memory_allocator()->Available() / KB);
  PrintIsolate(isolate_,
               "Read-only space,         used: %6" PRIuS
               " KB, available: %6" PRIuS " KB, committed: %6" PRIuS " KB\n",
               read_only_space_->Size() / KB,
               read_only_space_->Available() / KB,
               read_only_space_->CommittedMemory() / KB);
  PrintIsolate(isolate_,
               "New space,               used: %6" PRIuS
               " KB, available: %6" PRIuS " KB, committed: %6" PRIuS " KB\n",
               new_space_->Size() / KB, new_space_->Available() / KB,
               new_space_->CommittedMemory() / KB);
  PrintIsolate(isolate_,
               "New large object space,  used: %6" PRIuS
               " KB, available: %6" PRIuS " KB, committed: %6" PRIuS " KB\n",
               new_lo_space_->SizeOfObjects() / KB,
               new_lo_space_->Available() / KB,
               new_lo_space_->CommittedMemory() / KB);
  PrintIsolate(isolate_,
               "Old space,               used: %6" PRIuS
               " KB, available: %6" PRIuS " KB, committed: %6" PRIuS " KB\n",
               old_space_->SizeOfObjects() / KB, old_space_->Available() / KB,
               old_space_->CommittedMemory() / KB);
  PrintIsolate(isolate_,
               "Code space,              used: %6" PRIuS
               " KB, available: %6" PRIuS " KB, committed: %6" PRIuS " KB\n",
               code_space_->SizeOfObjects() / KB, code_space_->Available() / KB,
               code_space_->CommittedMemory() / KB);
  PrintIsolate(isolate_,
               "Map space,               used: %6" PRIuS
               " KB, available: %6" PRIuS " KB, committed: %6" PRIuS " KB\n",
               map_space_->SizeOfObjects() / KB, map_space_->Available() / KB,
               map_space_->CommittedMemory() / KB);
  PrintIsolate(isolate_,
               "Large object space,      used: %6" PRIuS
               " KB, available: %6" PRIuS " KB, committed: %6" PRIuS " KB\n",
               lo_space_->SizeOfObjects() / KB, lo_space_->Available() / KB,
               lo_space_->CommittedMemory() / KB);
  PrintIsolate(isolate_,
               "Code large object space, used: %6" PRIuS
               " KB, available: %6" PRIuS " KB, committed: %6" PRIuS " KB\n",
               code_lo_space_->SizeOfObjects() / KB,
               code_lo_space_->Available() / KB,
               code_lo_space_->CommittedMemory() / KB);
  PrintIsolate(isolate_,
               "All spaces,              used: %6" PRIuS
               " KB, available: %6" PRIuS " KB, committed: %6" PRIuS " KB\n",
               this->SizeOfObjects() / KB, this->Available() / KB,
               this->CommittedMemory() / KB);
  PrintIsolate(isolate_,
               "Unmapper buffering %zu chunks of committed: %6" PRIuS " KB\n",
               memory_allocator()->unmapper()->NumberOfCommittedChunks(),
               CommittedMemoryOfUnmapper() / KB);
  PrintIsolate(isolate_, "External memory reported: %6" PRId64 " KB\n",
               isolate()->isolate_data()->external_memory_ / KB);
  PrintIsolate(isolate_, "Backing store memory: %6" PRIuS " KB\n",
               backing_store_bytes_ / KB);
  PrintIsolate(isolate_, "External memory global %zu KB\n",
               external_memory_callback_() / KB);
  PrintIsolate(isolate_, "Total time spent in GC  : %.1f ms\n",
               total_gc_time_ms_);
}
void Heap::ReportStatisticsAfterGC() {
  for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
       ++i) {
    int count = deferred_counters_[i];
    deferred_counters_[i] = 0;
    while (count > 0) {
      count--;
      isolate()->CountUsage(static_cast<v8::Isolate::UseCounterFeature>(i));
    }
  }
}
void Heap::AddHeapObjectAllocationTracker(
    HeapObjectAllocationTracker* tracker) {
  if (allocation_trackers_.empty()) DisableInlineAllocation();
  allocation_trackers_.push_back(tracker);
}

void Heap::RemoveHeapObjectAllocationTracker(
    HeapObjectAllocationTracker* tracker) {
  allocation_trackers_.erase(std::remove(allocation_trackers_.begin(),
                                         allocation_trackers_.end(), tracker),
                             allocation_trackers_.end());
  if (allocation_trackers_.empty()) EnableInlineAllocation();
}
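// Note: the erase(std::remove(...), end()) pattern above removes every
// occurrence of |tracker| from the vector; inline allocation is re-enabled
// only once the last tracker is gone.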
void Heap::AddRetainingPathTarget(Handle<HeapObject> object,
                                  RetainingPathOption option) {
  if (!FLAG_track_retaining_path) {
    PrintF("Retaining path tracking requires --track-retaining-path\n");
  } else {
    Handle<WeakArrayList> array(retaining_path_targets(), isolate());
    int index = array->length();
    array = WeakArrayList::AddToEnd(isolate(), array,
                                    MaybeObjectHandle::Weak(object));
    set_retaining_path_targets(*array);
    DCHECK_EQ(array->length(), index + 1);
    retaining_path_target_option_[index] = option;
  }
}
bool Heap::IsRetainingPathTarget(HeapObject* object,
                                 RetainingPathOption* option) {
  WeakArrayList* targets = retaining_path_targets();
  int length = targets->length();
  MaybeObject object_to_check = HeapObjectReference::Weak(object);
  for (int i = 0; i < length; i++) {
    MaybeObject target = targets->Get(i);
    DCHECK(target->IsWeakOrCleared());
    if (target == object_to_check) {
      DCHECK(retaining_path_target_option_.count(i));
      *option = retaining_path_target_option_[i];
      return true;
    }
  }
  return false;
}
void Heap::PrintRetainingPath(HeapObject* target, RetainingPathOption option) {
  PrintF("#################################################\n");
  PrintF("Retaining path for %p:\n", static_cast<void*>(target));
  HeapObject* object = target;
  std::vector<std::pair<HeapObject*, bool>> retaining_path;
  Root root = Root::kUnknown;
  bool ephemeron = false;
  // Walk the recorded retainer chain from the target back towards a root.
  while (true) {
    retaining_path.push_back(std::make_pair(object, ephemeron));
    if (option == RetainingPathOption::kTrackEphemeronPath &&
        ephemeron_retainer_.count(object)) {
      object = ephemeron_retainer_[object];
      ephemeron = true;
    } else if (retainer_.count(object)) {
      object = retainer_[object];
      ephemeron = false;
    } else {
      if (retaining_root_.count(object)) {
        root = retaining_root_[object];
      }
      break;
    }
  }
  int distance = static_cast<int>(retaining_path.size());
  for (auto node : retaining_path) {
    HeapObject* object = node.first;
    bool ephemeron = node.second;
    PrintF("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n");
    PrintF("Distance from root %d%s: ", distance,
           ephemeron ? " (ephemeron)" : "");
    object->ShortPrint();
    PrintF("\n");
    --distance;
  }
  PrintF("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n");
  PrintF("Root: %s\n", RootVisitor::RootName(root));
  PrintF("-------------------------------------------------\n");
}
void Heap::AddRetainer(HeapObject* retainer, HeapObject* object) {
  if (retainer_.count(object)) return;
  retainer_[object] = retainer;
  RetainingPathOption option = RetainingPathOption::kDefault;
  if (IsRetainingPathTarget(object, &option)) {
    if (ephemeron_retainer_.count(object) == 0 ||
        option == RetainingPathOption::kDefault) {
      PrintRetainingPath(object, option);
    }
  }
}
void Heap::AddEphemeronRetainer(HeapObject* retainer, HeapObject* object) {
  if (ephemeron_retainer_.count(object)) return;
  ephemeron_retainer_[object] = retainer;
  RetainingPathOption option = RetainingPathOption::kDefault;
  if (IsRetainingPathTarget(object, &option) &&
      option == RetainingPathOption::kTrackEphemeronPath) {
    if (retainer_.count(object) == 0) {
      PrintRetainingPath(object, option);
    }
  }
}
void Heap::AddRetainingRoot(Root root, HeapObject* object) {
  if (retaining_root_.count(object)) return;
  retaining_root_[object] = root;
  RetainingPathOption option = RetainingPathOption::kDefault;
  if (IsRetainingPathTarget(object, &option)) {
    PrintRetainingPath(object, option);
  }
}
void Heap::IncrementDeferredCount(v8::Isolate::UseCounterFeature feature) {
  deferred_counters_[feature]++;
}

bool Heap::UncommitFromSpace() { return new_space_->UncommitFromSpace(); }
void Heap::GarbageCollectionPrologue() {
  TRACE_GC(tracer(), GCTracer::Scope::HEAP_PROLOGUE);
  {
    AllowHeapAllocation for_the_first_part_of_prologue;
    gc_count_++;

#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) {
      Verify();
    }
#endif
  }

  // Reset GC statistics.
  promoted_objects_size_ = 0;
  previous_semi_space_copied_object_size_ = semi_space_copied_object_size_;
  semi_space_copied_object_size_ = 0;
  nodes_died_in_new_space_ = 0;
  nodes_copied_in_new_space_ = 0;

  UpdateMaximumCommitted();

#ifdef DEBUG
  DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);

  if (FLAG_gc_verbose) Print();
#endif  // DEBUG

  if (new_space_->IsAtMaximumCapacity()) {
    maximum_size_scavenges_++;
  } else {
    maximum_size_scavenges_ = 0;
  }
  CheckNewSpaceExpansionCriteria();
  UpdateNewSpaceAllocationCounter();
  if (FLAG_track_retaining_path) {
    retainer_.clear();
    ephemeron_retainer_.clear();
    retaining_root_.clear();
  }
}
size_t Heap::SizeOfObjects() {
  size_t total = 0;

  for (SpaceIterator it(this); it.has_next();) {
    total += it.next()->SizeOfObjects();
  }
  return total;
}
const char* Heap::GetSpaceName(int idx) {
  switch (idx) {
    case NEW_SPACE:
      return "new_space";
    case OLD_SPACE:
      return "old_space";
    case MAP_SPACE:
      return "map_space";
    case CODE_SPACE:
      return "code_space";
    case LO_SPACE:
      return "large_object_space";
    case NEW_LO_SPACE:
      return "new_large_object_space";
    case CODE_LO_SPACE:
      return "code_large_object_space";
    case RO_SPACE:
      return "read_only_space";
    default:
      UNREACHABLE();
  }
  return nullptr;
}
void Heap::MergeAllocationSitePretenuringFeedback(
    const PretenuringFeedbackMap& local_pretenuring_feedback) {
  AllocationSite* site = nullptr;
  for (auto& site_and_count : local_pretenuring_feedback) {
    site = site_and_count.first;
    MapWord map_word = site_and_count.first->map_word();
    if (map_word.IsForwardingAddress()) {
      site = AllocationSite::cast(map_word.ToForwardingAddress());
    }

    // This is an inlined check of AllocationMemento::IsValid.
    if (!site->IsAllocationSite() || site->IsZombie()) continue;

    const int value = static_cast<int>(site_and_count.second);
    if (site->IncrementMementoFoundCount(value)) {
      global_pretenuring_feedback_.insert(std::make_pair(site, 0));
    }
  }
}
void Heap::AddAllocationObserversToAllSpaces(
    AllocationObserver* observer, AllocationObserver* new_space_observer) {
  DCHECK(observer && new_space_observer);

  for (SpaceIterator it(this); it.has_next();) {
    Space* space = it.next();
    if (space == new_space()) {
      space->AddAllocationObserver(new_space_observer);
    } else {
      space->AddAllocationObserver(observer);
    }
  }
}

void Heap::RemoveAllocationObserversFromAllSpaces(
    AllocationObserver* observer, AllocationObserver* new_space_observer) {
  DCHECK(observer && new_space_observer);

  for (SpaceIterator it(this); it.has_next();) {
    Space* space = it.next();
    if (space == new_space()) {
      space->RemoveAllocationObserver(new_space_observer);
    } else {
      space->RemoveAllocationObserver(observer);
    }
  }
}
// Scope that routes store-buffer entries into the remembered set for the
// duration of a GC. (The surrounding class definition was lost in extraction
// and is reconstructed here.)
class Heap::SkipStoreBufferScope {
 public:
  explicit SkipStoreBufferScope(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer) {
    store_buffer_->MoveAllEntriesToRememberedSet();
    store_buffer_->SetMode(StoreBuffer::IN_GC);
  }

  ~SkipStoreBufferScope() {
    DCHECK(store_buffer_->Empty());
    store_buffer_->SetMode(StoreBuffer::NOT_IN_GC);
  }

 private:
  StoreBuffer* store_buffer_;
};
inline bool MakePretenureDecision(
    AllocationSite* site, AllocationSite::PretenureDecision current_decision,
    double ratio, bool maximum_size_scavenge) {
  // Only allow state transitions from undecided or maybe-tenure.
  if ((current_decision == AllocationSite::kUndecided ||
       current_decision == AllocationSite::kMaybeTenure)) {
    if (ratio >= AllocationSite::kPretenureRatio) {
      // We only transition into the tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        site->set_deopt_dependent_code(true);
        site->set_pretenure_decision(AllocationSite::kTenure);
        // A state transition to tenure requires a deopt.
        return true;
      }
      site->set_pretenure_decision(AllocationSite::kMaybeTenure);
    } else {
      site->set_pretenure_decision(AllocationSite::kDontTenure);
    }
  }
  return false;
}
inline bool DigestPretenuringFeedback(Isolate* isolate, AllocationSite* site,
                                      bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = site->memento_create_count();
  int found_count = site->memento_found_count();
  bool minimum_mementos_created =
      create_count >= AllocationSite::kPretenureMinimumCreated;
  double ratio = minimum_mementos_created || FLAG_trace_pretenuring_statistics
                     ? static_cast<double>(found_count) / create_count
                     : 0.0;
  AllocationSite::PretenureDecision current_decision =
      site->pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(site, current_decision, ratio,
                                  maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintIsolate(isolate,
                 "pretenuring: AllocationSite(%p): (created, found, ratio) "
                 "(%d, %d, %f) %s => %s\n",
                 static_cast<void*>(site), create_count, found_count, ratio,
                 site->PretenureDecisionName(current_decision),
                 site->PretenureDecisionName(site->pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  site->set_memento_found_count(0);
  site->set_memento_create_count(0);
  return deopt;
}
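// Illustrative numbers (not taken from a real trace): with create_count == 100
// and found_count == 90, ratio == 0.9. Assuming
// AllocationSite::kPretenureRatio is 0.85 (an assumption about a constant
// defined elsewhere), 0.9 >= 0.85 moves the site towards kMaybeTenure, or
// straight to kTenure if the last scavenge ran at maximum semi-space size.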
void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite* site) {
  global_pretenuring_feedback_.erase(site);
}

bool Heap::DeoptMaybeTenuredAllocationSites() {
  return new_space_->IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
}
void Heap::ProcessPretenuringFeedback() {
  bool trigger_deoptimization = false;
  if (FLAG_allocation_site_pretenuring) {
    int tenure_decisions = 0;
    int dont_tenure_decisions = 0;
    int allocation_mementos_found = 0;
    int allocation_sites = 0;
    int active_allocation_sites = 0;

    AllocationSite* site = nullptr;

    // Step 1: Digest feedback for recorded allocation sites.
    bool maximum_size_scavenge = MaximumSizeScavenge();
    for (auto& site_and_count : global_pretenuring_feedback_) {
      allocation_sites++;
      site = site_and_count.first;
      // The count is always accessed through the site.
      DCHECK_EQ(0, site_and_count.second);
      int found_count = site->memento_found_count();
      // An entry in the storage does not imply that the count is > 0 because
      // allocation sites might have been reset due to too many objects dying
      // in old space.
      if (found_count > 0) {
        DCHECK(site->IsAllocationSite());
        active_allocation_sites++;
        allocation_mementos_found += found_count;
        if (DigestPretenuringFeedback(isolate_, site, maximum_size_scavenge)) {
          trigger_deoptimization = true;
        }
        if (site->GetPretenureMode() == TENURED) {
          tenure_decisions++;
        } else {
          dont_tenure_decisions++;
        }
      }
    }

    // Step 2: Deopt maybe-tenured allocation sites if necessary.
    bool deopt_maybe_tenured = DeoptMaybeTenuredAllocationSites();
    if (deopt_maybe_tenured) {
      ForeachAllocationSite(
          allocation_sites_list(),
          [&allocation_sites, &trigger_deoptimization](AllocationSite* site) {
            DCHECK(site->IsAllocationSite());
            allocation_sites++;
            if (site->IsMaybeTenure()) {
              site->set_deopt_dependent_code(true);
              trigger_deoptimization = true;
            }
          });
    }

    if (trigger_deoptimization) {
      isolate_->stack_guard()->RequestDeoptMarkedAllocationSites();
    }

    if (FLAG_trace_pretenuring_statistics &&
        (allocation_mementos_found > 0 || tenure_decisions > 0 ||
         dont_tenure_decisions > 0)) {
      PrintIsolate(isolate(),
                   "pretenuring: deopt_maybe_tenured=%d visited_sites=%d "
                   "active_sites=%d "
                   "mementos=%d tenured=%d not_tenured=%d\n",
                   deopt_maybe_tenured ? 1 : 0, allocation_sites,
                   active_allocation_sites, allocation_mementos_found,
                   tenure_decisions, dont_tenure_decisions);
    }

    global_pretenuring_feedback_.clear();
    global_pretenuring_feedback_.reserve(kInitialFeedbackCapacity);
  }
}
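// The function above works in two steps: step 1 digests the feedback recorded
// for each allocation site, step 2 additionally marks kMaybeTenure sites for
// deopt when a maximum-size scavenge produced no feedback at all, and any
// resulting deopt request is routed through the stack guard.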
void Heap::InvalidateCodeDeoptimizationData(Code code) {
  MemoryChunk* chunk = MemoryChunk::FromAddress(code->ptr());
  CodePageMemoryModificationScope modification_scope(chunk);
  code->set_deoptimization_data(ReadOnlyRoots(this).empty_fixed_array());
}
void Heap::DeoptMarkedAllocationSites() {
  ForeachAllocationSite(allocation_sites_list(), [this](AllocationSite* site) {
    if (site->deopt_dependent_code()) {
      site->dependent_code()->MarkCodeForDeoptimization(
          isolate_, DependentCode::kAllocationSiteTenuringChangedGroup);
      site->set_deopt_dependent_code(false);
    }
  });

  Deoptimizer::DeoptimizeMarkedCode(isolate_);
}
void Heap::GarbageCollectionEpilogue() {
  TRACE_GC(tracer(), GCTracer::Scope::HEAP_EPILOGUE);
  if (Heap::ShouldZapGarbage() || FLAG_clear_free_memory) {
    ZapFromSpace();
  }

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif

  AllowHeapAllocation for_the_rest_of_the_epilogue;

#ifdef DEBUG
  if (FLAG_print_global_handles) isolate_->global_handles()->Print();
  if (FLAG_print_handles) PrintHandles();
  if (FLAG_gc_verbose) Print();
  if (FLAG_code_stats) ReportCodeStatistics("After GC");
  if (FLAG_check_handle_count) CheckHandleCount();
#endif

  UpdateMaximumCommitted();

  isolate_->counters()->alive_after_last_gc()->Set(
      static_cast<int>(SizeOfObjects()));

  isolate_->counters()->string_table_capacity()->Set(
      string_table()->Capacity());
  isolate_->counters()->number_of_symbols()->Set(
      string_table()->NumberOfElements());

  if (CommittedMemory() > 0) {
    isolate_->counters()->external_fragmentation_total()->AddSample(
        static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory()));

    isolate_->counters()->heap_sample_total_committed()->AddSample(
        static_cast<int>(CommittedMemory() / KB));
    isolate_->counters()->heap_sample_total_used()->AddSample(
        static_cast<int>(SizeOfObjects() / KB));
    isolate_->counters()->heap_sample_map_space_committed()->AddSample(
        static_cast<int>(map_space()->CommittedMemory() / KB));
    isolate_->counters()->heap_sample_code_space_committed()->AddSample(
        static_cast<int>(code_space()->CommittedMemory() / KB));

    isolate_->counters()->heap_sample_maximum_committed()->AddSample(
        static_cast<int>(MaximumCommittedMemory() / KB));
  }
#define UPDATE_COUNTERS_FOR_SPACE(space)                \
  isolate_->counters()->space##_bytes_available()->Set( \
      static_cast<int>(space()->Available()));          \
  isolate_->counters()->space##_bytes_committed()->Set( \
      static_cast<int>(space()->CommittedMemory()));    \
  isolate_->counters()->space##_bytes_used()->Set(      \
      static_cast<int>(space()->SizeOfObjects()));
#define UPDATE_FRAGMENTATION_FOR_SPACE(space)                          \
  if (space()->CommittedMemory() > 0) {                                \
    isolate_->counters()->external_fragmentation_##space()->AddSample( \
        static_cast<int>(100 -                                         \
                         (space()->SizeOfObjects() * 100.0) /          \
                             space()->CommittedMemory()));             \
  }
#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space) \
  UPDATE_COUNTERS_FOR_SPACE(space)                         \
  UPDATE_FRAGMENTATION_FOR_SPACE(space)

  UPDATE_COUNTERS_FOR_SPACE(new_space)
  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_space)
  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(lo_space)
#undef UPDATE_COUNTERS_FOR_SPACE
#undef UPDATE_FRAGMENTATION_FOR_SPACE
#undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE

  ReportStatisticsAfterGC();
  last_gc_time_ = MonotonicallyIncreasingTimeInMs();

  {
    TRACE_GC(tracer(), GCTracer::Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE);
    ReduceNewSpaceSize();
  }

  if (FLAG_harmony_weak_refs) {
    HandleScope handle_scope(isolate());
    while (
        !isolate()->heap()->dirty_js_weak_factories()->IsUndefined(isolate())) {
      // Enqueue one microtask per dirty JSWeakFactory.
      Handle<JSWeakFactory> weak_factory(
          JSWeakFactory::cast(isolate()->heap()->dirty_js_weak_factories()),
          isolate());
      isolate()->heap()->set_dirty_js_weak_factories(weak_factory->next());
      weak_factory->set_next(ReadOnlyRoots(isolate()).undefined_value());
      // Use the creation context of the JSWeakFactory for the microtask.
      Handle<Context> context(weak_factory->native_context(), isolate());
      Handle<WeakFactoryCleanupJobTask> task =
          isolate()->factory()->NewWeakFactoryCleanupJobTask(weak_factory);
      isolate()->EnqueueMicrotask(task);
    }
  }
}
// Tracks nesting of GC callback invocations so that embedder callbacks are
// only re-entered at the outermost level. (The surrounding class was lost in
// extraction and is reconstructed here.)
class GCCallbacksScope {
 public:
  explicit GCCallbacksScope(Heap* heap) : heap_(heap) {
    heap_->gc_callbacks_depth_++;
  }
  ~GCCallbacksScope() { heap_->gc_callbacks_depth_--; }

  bool CheckReenter() { return heap_->gc_callbacks_depth_ == 1; }

 private:
  Heap* heap_;
};
void Heap::HandleGCRequest() {
  if (FLAG_stress_scavenge > 0 && stress_scavenge_observer_->HasRequestedGC()) {
    CollectAllGarbage(NEW_SPACE, GarbageCollectionReason::kTesting);
    stress_scavenge_observer_->RequestedGCDone();
  } else if (HighMemoryPressure()) {
    incremental_marking()->reset_request_type();
    CheckMemoryPressure();
  } else if (incremental_marking()->request_type() ==
             IncrementalMarking::COMPLETE_MARKING) {
    incremental_marking()->reset_request_type();
    CollectAllGarbage(current_gc_flags_,
                      GarbageCollectionReason::kFinalizeMarkingViaStackGuard,
                      current_gc_callback_flags_);
  } else if (incremental_marking()->request_type() ==
                 IncrementalMarking::FINALIZATION &&
             incremental_marking()->IsMarking() &&
             !incremental_marking()->finalize_marking_completed()) {
    incremental_marking()->reset_request_type();
    FinalizeIncrementalMarkingIncrementally(
        GarbageCollectionReason::kFinalizeMarkingViaStackGuard);
  }
}
void Heap::ScheduleIdleScavengeIfNeeded(int bytes_allocated) {
  DCHECK(FLAG_idle_time_scavenge);
  DCHECK_NOT_NULL(scavenge_job_);
  scavenge_job_->ScheduleIdleTaskIfNeeded(this, bytes_allocated);
}
TimedHistogram* Heap::GCTypePriorityTimer(GarbageCollector collector) {
  if (IsYoungGenerationCollector(collector)) {
    if (isolate_->IsIsolateInBackground()) {
      return isolate_->counters()->gc_scavenger_background();
    }
    return isolate_->counters()->gc_scavenger_foreground();
  }
  if (!incremental_marking()->IsStopped()) {
    if (ShouldReduceMemory()) {
      if (isolate_->IsIsolateInBackground()) {
        return isolate_->counters()->gc_finalize_reduce_memory_background();
      }
      return isolate_->counters()->gc_finalize_reduce_memory_foreground();
    }
    if (isolate_->IsIsolateInBackground()) {
      return isolate_->counters()->gc_finalize_background();
    }
    return isolate_->counters()->gc_finalize_foreground();
  }
  if (isolate_->IsIsolateInBackground()) {
    return isolate_->counters()->gc_compactor_background();
  }
  return isolate_->counters()->gc_compactor_foreground();
}
TimedHistogram* Heap::GCTypeTimer(GarbageCollector collector) {
  if (IsYoungGenerationCollector(collector)) {
    return isolate_->counters()->gc_scavenger();
  }
  if (!incremental_marking()->IsStopped()) {
    if (ShouldReduceMemory()) {
      return isolate_->counters()->gc_finalize_reduce_memory();
    }
    return isolate_->counters()->gc_finalize();
  }
  return isolate_->counters()->gc_compactor();
}
void Heap::CollectAllGarbage(int flags, GarbageCollectionReason gc_reason,
                             const v8::GCCallbackFlags gc_callback_flags) {
  set_current_gc_flags(flags);
  CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
  set_current_gc_flags(kNoGCFlags);
}
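// Passing OLD_SPACE here makes SelectGarbageCollector() pick MARK_COMPACTOR,
// i.e. this always performs a full GC; the flags are only set for the duration
// of that single collection.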
intptr_t CompareWords(int size, HeapObject* a, HeapObject* b) {
  int words = size / kPointerSize;
  DCHECK_EQ(a->Size(), size);
  DCHECK_EQ(b->Size(), size);
  intptr_t* slot_a = reinterpret_cast<intptr_t*>(a->address());
  intptr_t* slot_b = reinterpret_cast<intptr_t*>(b->address());
  for (int i = 0; i < words; i++) {
    if (*slot_a != *slot_b) {
      return *slot_a - *slot_b;
    }
    slot_a++;
    slot_b++;
  }
  return 0;
}
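// CompareWords() above gives a total order on equally-sized objects by their
// raw word contents; ReportDuplicates() below relies on it to sort objects so
// that byte-identical ones end up adjacent and can be counted as duplicates.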
void ReportDuplicates(int size, std::vector<HeapObject*>& objects) {
  if (objects.size() == 0) return;

  sort(objects.begin(), objects.end(), [size](HeapObject* a, HeapObject* b) {
    intptr_t c = CompareWords(size, a, b);
    if (c != 0) return c < 0;
    return a < b;
  });

  std::vector<std::pair<int, HeapObject*>> duplicates;
  HeapObject* current = objects[0];
  int count = 1;
  for (size_t i = 1; i < objects.size(); i++) {
    if (CompareWords(size, current, objects[i]) == 0) {
      count++;
    } else {
      if (count > 1) {
        duplicates.push_back(std::make_pair(count - 1, current));
      }
      count = 1;
      current = objects[i];
    }
  }
  if (count > 1) {
    duplicates.push_back(std::make_pair(count - 1, current));
  }

  int threshold = FLAG_trace_duplicate_threshold_kb * KB;

  sort(duplicates.begin(), duplicates.end());
  for (auto it = duplicates.rbegin(); it != duplicates.rend(); ++it) {
    int duplicate_bytes = it->first * size;
    if (duplicate_bytes < threshold) break;
    PrintF("%d duplicates of size %d each (%dKB)\n", it->first, size,
           duplicate_bytes / KB);
    PrintF("Sample object: ");
    it->second->Print();
    PrintF("============================\n");
  }
}
void Heap::CollectAllAvailableGarbage(GarbageCollectionReason gc_reason) {
  // A major GC invokes weak handle callbacks but does not collect weakly
  // reachable objects until the next major GC, so collect aggressively and
  // rerun the GC a bounded number of times while callbacks keep firing.
  if (gc_reason == GarbageCollectionReason::kLastResort) {
    InvokeNearHeapLimitCallback();
  }
  RuntimeCallTimerScope runtime_timer(
      isolate(), RuntimeCallCounterId::kGC_Custom_AllAvailableGarbage);

  // The optimizing compiler may be unnecessarily holding on to memory.
  isolate()->AbortConcurrentOptimization(BlockingBehavior::kDontBlock);
  isolate()->ClearSerializerData();
  set_current_gc_flags(kReduceMemoryFootprintMask);
  isolate_->compilation_cache()->Clear();
  const int kMaxNumberOfAttempts = 7;
  const int kMinNumberOfAttempts = 2;
  const v8::GCCallbackFlags callback_flags =
      gc_reason == GarbageCollectionReason::kLowMemoryNotification
          ? v8::kGCCallbackFlagForced
          : v8::kGCCallbackFlagCollectAllAvailableGarbage;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
    if (!CollectGarbage(OLD_SPACE, gc_reason, callback_flags) &&
        attempt + 1 >= kMinNumberOfAttempts) {
      break;
    }
  }

  set_current_gc_flags(kNoGCFlags);
  new_space_->Shrink();
  UncommitFromSpace();
  EagerlyFreeExternalMemory();

  if (FLAG_trace_duplicate_threshold_kb) {
    std::map<int, std::vector<HeapObject*>> objects_by_size;
    PagedSpaces spaces(this);
    for (PagedSpace* space = spaces.next(); space != nullptr;
         space = spaces.next()) {
      HeapObjectIterator it(space);
      for (HeapObject* obj = it.Next(); obj != nullptr; obj = it.Next()) {
        objects_by_size[obj->Size()].push_back(obj);
      }
    }
    {
      LargeObjectIterator it(lo_space());
      for (HeapObject* obj = it.Next(); obj != nullptr; obj = it.Next()) {
        objects_by_size[obj->Size()].push_back(obj);
      }
    }
    for (auto it = objects_by_size.rbegin(); it != objects_by_size.rend();
         ++it) {
      ReportDuplicates(it->first, it->second);
    }
  }
}
void Heap::PreciseCollectAllGarbage(int flags,
                                    GarbageCollectionReason gc_reason,
                                    const GCCallbackFlags gc_callback_flags) {
  if (!incremental_marking()->IsStopped()) {
    FinalizeIncrementalMarkingAtomically(gc_reason);
  }
  CollectAllGarbage(flags, gc_reason, gc_callback_flags);
}
void Heap::ReportExternalMemoryPressure() {
  const GCCallbackFlags kGCCallbackFlagsForExternalMemory =
      static_cast<GCCallbackFlags>(
          kGCCallbackFlagSynchronousPhantomCallbackProcessing |
          kGCCallbackFlagCollectAllExternalMemory);
  if (isolate()->isolate_data()->external_memory_ >
      (isolate()->isolate_data()->external_memory_at_last_mark_compact_ +
       external_memory_hard_limit())) {
    CollectAllGarbage(
        kReduceMemoryFootprintMask,
        GarbageCollectionReason::kExternalMemoryPressure,
        static_cast<GCCallbackFlags>(kGCCallbackFlagCollectAllAvailableGarbage |
                                     kGCCallbackFlagsForExternalMemory));
    return;
  }
  if (incremental_marking()->IsStopped()) {
    if (incremental_marking()->CanBeActivated()) {
      StartIncrementalMarking(GCFlagsForIncrementalMarking(),
                              GarbageCollectionReason::kExternalMemoryPressure,
                              kGCCallbackFlagsForExternalMemory);
    } else {
      CollectAllGarbage(i::Heap::kNoGCFlags,
                        GarbageCollectionReason::kExternalMemoryPressure,
                        kGCCallbackFlagsForExternalMemory);
    }
  } else {
    // Incremental marking is already running; advance it a bounded step.
    const double kMinStepSize = 5;
    const double kMaxStepSize = 10;
    const double ms_step = Min(
        kMaxStepSize,
        Max(kMinStepSize,
            static_cast<double>(isolate()->isolate_data()->external_memory_) /
                isolate()->isolate_data()->external_memory_limit_ *
                kMinStepSize));
    const double deadline = MonotonicallyIncreasingTimeInMs() + ms_step;
    // Extend the gc callback flags with external memory flags.
    current_gc_callback_flags_ = static_cast<GCCallbackFlags>(
        current_gc_callback_flags_ | kGCCallbackFlagsForExternalMemory);
    incremental_marking()->AdvanceIncrementalMarking(
        deadline, IncrementalMarking::GC_VIA_STACK_GUARD, StepOrigin::kV8);
  }
}
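// Rough example of the step computation above (illustrative numbers, and the
// Min/Max nesting is a reconstruction of lines lost in extraction): if
// external_memory_ is twice external_memory_limit_, then
// ms_step = Min(10, Max(5, 2 * 5)) = 10 ms, so incremental marking is advanced
// for at most ~10 ms before control returns to the embedder.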
void Heap::EnsureFillerObjectAtTop() {
  Address to_top = new_space_->top();
  Page* page = Page::FromAddress(to_top - kPointerSize);
  if (page->Contains(to_top)) {
    int remaining_in_page = static_cast<int>(page->area_end() - to_top);
    CreateFillerObjectAt(to_top, remaining_in_page, ClearRecordedSlots::kNo);
  }
}
bool Heap::CollectGarbage(AllocationSpace space,
                          GarbageCollectionReason gc_reason,
                          const v8::GCCallbackFlags gc_callback_flags) {
  const char* collector_reason = nullptr;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);

  if (!CanExpandOldGeneration(new_space()->Capacity())) {
    InvokeNearHeapLimitCallback();
  }

  // Ensure that all pending phantom callbacks are invoked.
  isolate()->global_handles()->InvokeSecondPassPhantomCallbacks();

  // The VM is in the GC state until exiting this function.
  VMState<GC> state(isolate());

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  if (FLAG_random_gc_interval > 0 || FLAG_gc_interval >= 0) {
    allocation_timeout_ = Max(6, NextAllocationTimeout(allocation_timeout_));
  }
#endif

  EnsureFillerObjectAtTop();

  if (IsYoungGenerationCollector(collector) &&
      !incremental_marking()->IsStopped()) {
    if (FLAG_trace_incremental_marking) {
      isolate()->PrintWithTimestamp(
          "[IncrementalMarking] Scavenge during marking.\n");
    }
  }

  bool next_gc_likely_to_collect_more = false;
  size_t committed_memory_before = 0;

  if (collector == MARK_COMPACTOR) {
    committed_memory_before = CommittedOldGenerationMemory();
  }

  {
    tracer()->Start(collector, gc_reason, collector_reason);
    DCHECK(AllowHeapAllocation::IsAllowed());
    DisallowHeapAllocation no_allocation_during_gc;
    GarbageCollectionPrologue();

    {
      TimedHistogram* gc_type_timer = GCTypeTimer(collector);
      TimedHistogramScope histogram_timer_scope(gc_type_timer, isolate_);
      TRACE_EVENT0("v8", gc_type_timer->name());

      TimedHistogram* gc_type_priority_timer = GCTypePriorityTimer(collector);
      OptionalTimedHistogramScopeMode mode =
          isolate_->IsMemorySavingsModeActive()
              ? OptionalTimedHistogramScopeMode::DONT_TAKE_TIME
              : OptionalTimedHistogramScopeMode::TAKE_TIME;
      OptionalTimedHistogramScope histogram_timer_priority_scope(
          gc_type_priority_timer, isolate_, mode);

      next_gc_likely_to_collect_more =
          PerformGarbageCollection(collector, gc_callback_flags);
      if (collector == MARK_COMPACTOR || collector == SCAVENGER) {
        tracer()->RecordGCPhasesHistograms(gc_type_timer);
      }
    }

    GarbageCollectionEpilogue();
    if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
      isolate()->CheckDetachedContextsAfterGC();
    }

    if (collector == MARK_COMPACTOR) {
      size_t committed_memory_after = CommittedOldGenerationMemory();
      size_t used_memory_after = OldGenerationSizeOfObjects();
      MemoryReducer::Event event;
      event.type = MemoryReducer::kMarkCompact;
      event.time_ms = MonotonicallyIncreasingTimeInMs();
      // Trigger one more GC if this GC shrank committed memory, there is high
      // fragmentation, or there are live detached contexts.
      event.next_gc_likely_to_collect_more =
          (committed_memory_before > committed_memory_after + MB) ||
          HasHighFragmentation(used_memory_after, committed_memory_after) ||
          (detached_contexts()->length() > 0);
      event.committed_memory = committed_memory_after;
      if (deserialization_complete_) {
        memory_reducer_->NotifyMarkCompact(event);
      }
    }

    tracer()->Stop(collector);
  }

  if (collector == MARK_COMPACTOR &&
      (gc_callback_flags & (kGCCallbackFlagForced |
                            kGCCallbackFlagCollectAllAvailableGarbage)) != 0) {
    isolate()->CountUsage(v8::Isolate::kForcedGC);
  }

  // Start incremental marking for the next cycle after a young-generation GC.
  if (IsYoungGenerationCollector(collector)) {
    StartIncrementalMarkingIfAllocationLimitIsReached(
        GCFlagsForIncrementalMarking(),
        kGCCallbackScheduleIdleGarbageCollection);
  }

  return next_gc_likely_to_collect_more;
}
int Heap::NotifyContextDisposed(bool dependant_context) {
  if (!dependant_context) {
    tracer()->ResetSurvivalEvents();
    old_generation_size_configured_ = false;
    MemoryReducer::Event event;
    event.type = MemoryReducer::kPossibleGarbage;
    event.time_ms = MonotonicallyIncreasingTimeInMs();
    memory_reducer_->NotifyPossibleGarbage(event);
  }
  isolate()->AbortConcurrentOptimization(BlockingBehavior::kDontBlock);

  number_of_disposed_maps_ = retained_maps()->length();
  tracer()->AddContextDisposalTime(MonotonicallyIncreasingTimeInMs());
  return ++contexts_disposed_;
}
void Heap::StartIncrementalMarking(int gc_flags,
                                   GarbageCollectionReason gc_reason,
                                   GCCallbackFlags gc_callback_flags) {
  DCHECK(incremental_marking()->IsStopped());
  set_current_gc_flags(gc_flags);
  current_gc_callback_flags_ = gc_callback_flags;
  incremental_marking()->Start(gc_reason);
}

void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
    int gc_flags, const GCCallbackFlags gc_callback_flags) {
  if (incremental_marking()->IsStopped()) {
    IncrementalMarkingLimit reached_limit = IncrementalMarkingLimitReached();
    if (reached_limit == IncrementalMarkingLimit::kSoftLimit) {
      incremental_marking()->incremental_marking_job()->ScheduleTask(this);
    } else if (reached_limit == IncrementalMarkingLimit::kHardLimit) {
      StartIncrementalMarking(gc_flags,
                              GarbageCollectionReason::kAllocationLimit,
                              gc_callback_flags);
    }
  }
}

void Heap::StartIdleIncrementalMarking(
    GarbageCollectionReason gc_reason,
    const GCCallbackFlags gc_callback_flags) {
  gc_idle_time_handler_->ResetNoProgressCounter();
  StartIncrementalMarking(kReduceMemoryFootprintMask, gc_reason,
                          gc_callback_flags);
}
void Heap::MoveElements(FixedArray array, int dst_index, int src_index, int len,
                        WriteBarrierMode mode) {
  if (len == 0) return;

  DCHECK(array->map() != ReadOnlyRoots(this).fixed_cow_array_map());
  ObjectSlot dst = array->RawFieldOfElementAt(dst_index);
  ObjectSlot src = array->RawFieldOfElementAt(src_index);
  if (FLAG_concurrent_marking && incremental_marking()->IsMarking()) {
    if (dst < src) {
      for (int i = 0; i < len; i++) {
        dst.Relaxed_Store(src.Relaxed_Load());
        ++dst;
        ++src;
      }
    } else {
      // Copy backwards so that overlapping regions do not clobber each other.
      dst += len - 1;
      src += len - 1;
      for (int i = 0; i < len; i++) {
        dst.Relaxed_Store(src.Relaxed_Load());
        --dst;
        --src;
      }
    }
  } else {
    MemMove(dst.ToVoidPtr(), src.ToVoidPtr(), len * kPointerSize);
  }
  if (mode == SKIP_WRITE_BARRIER) return;
  FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(this, array, dst_index, len);
}
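// While concurrent marking is active the copy above is done slot by slot with
// relaxed atomics so concurrent markers never observe torn pointers; outside
// of marking a plain MemMove suffices. The FIXED_ARRAY_ELEMENTS_WRITE_BARRIER
// at the end keeps the marking/remembered-set barriers up to date unless the
// caller explicitly passed SKIP_WRITE_BARRIER.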
#ifdef VERIFY_HEAP
// Helper class for verifying the string table.
class StringTableVerifier : public ObjectVisitor {
 public:
  explicit StringTableVerifier(Isolate* isolate) : isolate_(isolate) {}

  void VisitPointers(HeapObject* host, ObjectSlot start,
                     ObjectSlot end) override {
    // Visit all HeapObject pointers in [start, end).
    for (ObjectSlot p = start; p < end; ++p) {
      DCHECK(!HasWeakHeapObjectTag(*p));
      if ((*p)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*p);
        // Check that the string is actually internalized.
        CHECK(object->IsTheHole(isolate_) || object->IsUndefined(isolate_) ||
              object->IsInternalizedString());
      }
    }
  }
  void VisitPointers(HeapObject* host, MaybeObjectSlot start,
                     MaybeObjectSlot end) override {
    UNREACHABLE();
  }

 private:
  Isolate* isolate_;
};

static void VerifyStringTable(Isolate* isolate) {
  StringTableVerifier verifier(isolate);
  isolate->heap()->string_table()->IterateElements(&verifier);
}
#endif  // VERIFY_HEAP

bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
  bool gc_performed = true;
  int counter = 0;
  static const int kThreshold = 20;
  while (gc_performed && counter++ < kThreshold) {
    gc_performed = false;
    for (int space = FIRST_SPACE;
         space < SerializerDeserializer::kNumberOfSpaces; space++) {
      Reservation* reservation = &reservations[space];
      DCHECK_LE(1, reservation->size());
      if (reservation->at(0).size == 0) {
        DCHECK_EQ(1, reservation->size());
        continue;
      }
      bool perform_gc = false;
      if (space == MAP_SPACE) {
        // We allocate each map individually to avoid fragmentation.
        DCHECK_LE(reservation->size(), 2);
        int reserved_size = 0;
        for (const Chunk& c : *reservation) reserved_size += c.size;
        DCHECK_EQ(0, reserved_size % Map::kSize);
        int num_maps = reserved_size / Map::kSize;
        for (int i = 0; i < num_maps; i++) {
          AllocationResult allocation = map_space()->AllocateRawUnaligned(
              Map::kSize, PagedSpace::IGNORE_SKIP_LIST);
          HeapObject* free_space = nullptr;
          if (allocation.To(&free_space)) {
            // Mark with a free list node, in case we have a GC before
            // deserializing.
            Address free_space_address = free_space->address();
            CreateFillerObjectAt(free_space_address, Map::kSize,
                                 ClearRecordedSlots::kNo);
            maps->push_back(free_space_address);
          } else {
            perform_gc = true;
            break;
          }
        }
      } else if (space == LO_SPACE) {
        // Just check that we can allocate during deserialization.
        DCHECK_LE(reservation->size(), 2);
        int reserved_size = 0;
        for (const Chunk& c : *reservation) reserved_size += c.size;
        perform_gc = !CanExpandOldGeneration(reserved_size);
      } else {
        for (auto& chunk : *reservation) {
          AllocationResult allocation;
          int size = chunk.size;
          DCHECK_LE(static_cast<size_t>(size),
                    MemoryChunkLayout::AllocatableMemoryInMemoryChunk(
                        static_cast<AllocationSpace>(space)));
          if (space == NEW_SPACE) {
            allocation = new_space()->AllocateRawUnaligned(size);
          } else {
            allocation = paged_space(space)->AllocateRawUnaligned(
                size, PagedSpace::IGNORE_SKIP_LIST);
          }
          HeapObject* free_space = nullptr;
          if (allocation.To(&free_space)) {
            // Mark with a free list node, in case we have a GC before
            // deserializing.
            Address free_space_address = free_space->address();
            CreateFillerObjectAt(free_space_address, size,
                                 ClearRecordedSlots::kNo);
            DCHECK_GT(SerializerDeserializer::kNumberOfPreallocatedSpaces,
                      space);
            chunk.start = free_space_address;
            chunk.end = free_space_address + size;
          } else {
            perform_gc = true;
            break;
          }
        }
      }
      if (perform_gc) {
        // We cannot perform a GC with an uninitialized isolate, e.g. if the
        // max old space size is too small to deserialize the initial heap.
        if (!deserialization_complete_) {
          V8::FatalProcessOutOfMemory(
              isolate(), "insufficient memory to create an Isolate");
        }
        if (space == NEW_SPACE) {
          CollectGarbage(NEW_SPACE, GarbageCollectionReason::kDeserializer);
        } else {
          if (counter > 1) {
            CollectAllGarbage(kReduceMemoryFootprintMask,
                              GarbageCollectionReason::kDeserializer);
          } else {
            CollectAllGarbage(kNoGCFlags,
                              GarbageCollectionReason::kDeserializer);
          }
        }
        gc_performed = true;
        break;  // Abort for-loop over spaces and retry.
      }
    }
  }

  return !gc_performed;
}
void Heap::EnsureFromSpaceIsCommitted() {
  if (new_space_->CommitFromSpaceIfNeeded()) return;

  // Committing memory to from space failed; memory is exhausted.
  FatalProcessOutOfMemory("Committing semi space failed.");
}
void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
  if (start_new_space_size == 0) return;

  promotion_ratio_ = (static_cast<double>(promoted_objects_size_) /
                      static_cast<double>(start_new_space_size) * 100);

  if (previous_semi_space_copied_object_size_ > 0) {
    promotion_rate_ =
        (static_cast<double>(promoted_objects_size_) /
         static_cast<double>(previous_semi_space_copied_object_size_) * 100);
  } else {
    promotion_rate_ = 0;
  }

  semi_space_copied_rate_ =
      (static_cast<double>(semi_space_copied_object_size_) /
       static_cast<double>(start_new_space_size) * 100);

  double survival_rate = promotion_ratio_ + semi_space_copied_rate_;
  tracer()->AddSurvivalRatio(survival_rate);
}
bool Heap::PerformGarbageCollection(
    GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
  int freed_global_handles = 0;

  if (!IsYoungGenerationCollector(collector)) {
    PROFILE(isolate_, CodeMovingGCEvent());
  }

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    VerifyStringTable(this->isolate());
  }
#endif

  GCType gc_type =
      collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;

  {
    GCCallbacksScope scope(this);
    // Temporarily override any embedder stack state as callbacks may create
    // their own state on the stack and recursively trigger GC.
    EmbedderStackStateScope embedder_scope(
        local_embedder_heap_tracer(),
        EmbedderHeapTracer::EmbedderStackState::kUnknown);
    if (scope.CheckReenter()) {
      AllowHeapAllocation allow_allocation;
      TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
      VMState<EXTERNAL> state(isolate_);
      HandleScope handle_scope(isolate_);
      CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
    }
  }

  EnsureFromSpaceIsCommitted();

  size_t start_new_space_size = Heap::new_space()->Size();

  {
    Heap::SkipStoreBufferScope skip_store_buffer_scope(store_buffer_);

    switch (collector) {
      case MARK_COMPACTOR:
        UpdateOldGenerationAllocationCounter();
        // Perform mark-sweep with optional compaction.
        MarkCompact();
        old_generation_size_configured_ = true;
        // This should be updated before PostGarbageCollectionProcessing, which
        // can cause another GC. Take into account the objects promoted during
        // GC.
        old_generation_allocation_counter_at_last_gc_ +=
            static_cast<size_t>(promoted_objects_size_);
        old_generation_size_at_last_gc_ = OldGenerationSizeOfObjects();
        break;
      case MINOR_MARK_COMPACTOR:
        MinorMarkCompact();
        break;
      case SCAVENGER:
        if ((fast_promotion_mode_ &&
             CanExpandOldGeneration(new_space()->Size()))) {
          tracer()->NotifyYoungGenerationHandling(
              YoungGenerationHandling::kFastPromotionDuringScavenge);
          EvacuateYoungGeneration();
        } else {
          tracer()->NotifyYoungGenerationHandling(
              YoungGenerationHandling::kRegularScavenge);
          Scavenge();
        }
        break;
    }

    ProcessPretenuringFeedback();
  }

  UpdateSurvivalStatistics(static_cast<int>(start_new_space_size));
  ConfigureInitialOldGenerationSize();

  if (collector != MARK_COMPACTOR) {
    // Objects that died in the new space might have been accounted
    // as bytes marked ahead of schedule by the incremental marker.
    incremental_marking()->UpdateMarkedBytesAfterScavenge(
        start_new_space_size - SurvivedNewSpaceObjectSize());
  }

  if (!fast_promotion_mode_ || collector == MARK_COMPACTOR) {
    ComputeFastPromotionMode();
  }

  isolate_->counters()->objs_since_last_young()->Set(0);
  {
    TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
    // First-round weak callbacks are not supposed to allocate and trigger
    // nested GCs.
    freed_global_handles =
        isolate_->global_handles()->InvokeFirstPassWeakCallbacks();
  }

  if (collector == MARK_COMPACTOR) {
    TRACE_GC(tracer(), GCTracer::Scope::HEAP_EMBEDDER_TRACING_EPILOGUE);
    local_embedder_heap_tracer()->TraceEpilogue();
  }

  {
    TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
    gc_post_processing_depth_++;
    {
      AllowHeapAllocation allow_allocation;
      freed_global_handles +=
          isolate_->global_handles()->PostGarbageCollectionProcessing(
              collector, gc_callback_flags);
    }
    gc_post_processing_depth_--;
  }

  isolate_->eternal_handles()->PostGarbageCollectionProcessing();

  // Update relocatables.
  Relocatable::PostGarbageCollectionProcessing(isolate_);

  double gc_speed = tracer()->CombinedMarkCompactSpeedInBytesPerMillisecond();
  double mutator_speed =
      tracer()->CurrentOldGenerationAllocationThroughputInBytesPerMillisecond();
  size_t old_gen_size = OldGenerationSizeOfObjects();
  if (collector == MARK_COMPACTOR) {
    // Register the amount of external allocated memory.
    isolate()->isolate_data()->external_memory_at_last_mark_compact_ =
        isolate()->isolate_data()->external_memory_;
    isolate()->isolate_data()->external_memory_limit_ =
        isolate()->isolate_data()->external_memory_ +
        kExternalAllocationSoftLimit;

    double max_factor =
        heap_controller()->MaxGrowingFactor(max_old_generation_size_);
    size_t new_limit = heap_controller()->CalculateAllocationLimit(
        old_gen_size, max_old_generation_size_, max_factor, gc_speed,
        mutator_speed, new_space()->Capacity(), CurrentHeapGrowingMode());
    old_generation_allocation_limit_ = new_limit;

    CheckIneffectiveMarkCompact(
        old_gen_size, tracer()->AverageMarkCompactMutatorUtilization());
  } else if (HasLowYoungGenerationAllocationRate() &&
             old_generation_size_configured_) {
    double max_factor =
        heap_controller()->MaxGrowingFactor(max_old_generation_size_);
    size_t new_limit = heap_controller()->CalculateAllocationLimit(
        old_gen_size, max_old_generation_size_, max_factor, gc_speed,
        mutator_speed, new_space()->Capacity(), CurrentHeapGrowingMode());
    if (new_limit < old_generation_allocation_limit_) {
      old_generation_allocation_limit_ = new_limit;
    }
  }

  {
    GCCallbacksScope scope(this);
    if (scope.CheckReenter()) {
      AllowHeapAllocation allow_allocation;
      TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
      VMState<EXTERNAL> state(isolate_);
      HandleScope handle_scope(isolate_);
      CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
    }
  }

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    VerifyStringTable(this->isolate());
  }
#endif

  return freed_global_handles > 0;
}
void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
  RuntimeCallTimerScope runtime_timer(
      isolate(), RuntimeCallCounterId::kGCPrologueCallback);
  for (const GCCallbackTuple& info : gc_prologue_callbacks_) {
    if (gc_type & info.gc_type) {
      v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
      info.callback(isolate, gc_type, flags, info.data);
    }
  }
}

void Heap::CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags) {
  RuntimeCallTimerScope runtime_timer(
      isolate(), RuntimeCallCounterId::kGCEpilogueCallback);
  for (const GCCallbackTuple& info : gc_epilogue_callbacks_) {
    if (gc_type & info.gc_type) {
      v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
      info.callback(isolate, gc_type, flags, info.data);
    }
  }
}
void Heap::MarkCompact() {
  PauseAllocationObserversScope pause_observers(this);

  SetGCState(MARK_COMPACT);

  LOG(isolate_, ResourceEvent("markcompact", "begin"));

  uint64_t size_of_objects_before_gc = SizeOfObjects();

  CodeSpaceMemoryModificationScope code_modification(this);

  mark_compact_collector()->Prepare();

  MarkCompactPrologue();

  mark_compact_collector()->CollectGarbage();

  LOG(isolate_, ResourceEvent("markcompact", "end"));

  MarkCompactEpilogue();

  if (FLAG_allocation_site_pretenuring) {
    EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
  }
}
void Heap::MinorMarkCompact() {
#ifdef ENABLE_MINOR_MC
  DCHECK(FLAG_minor_mc);

  PauseAllocationObserversScope pause_observers(this);
  SetGCState(MINOR_MARK_COMPACT);
  LOG(isolate_, ResourceEvent("MinorMarkCompact", "begin"));

  TRACE_GC(tracer(), GCTracer::Scope::MINOR_MC);
  AlwaysAllocateScope always_allocate(isolate());
  IncrementalMarking::PauseBlackAllocationScope pause_black_allocation(
      incremental_marking());
  ConcurrentMarking::PauseScope pause_scope(concurrent_marking());

  minor_mark_compact_collector()->CollectGarbage();

  LOG(isolate_, ResourceEvent("MinorMarkCompact", "end"));
  SetGCState(NOT_IN_GC);
#else
  UNREACHABLE();
#endif  // ENABLE_MINOR_MC
}

void Heap::MarkCompactEpilogue() {
  TRACE_GC(tracer(), GCTracer::Scope::MC_EPILOGUE);
  SetGCState(NOT_IN_GC);

  isolate_->counters()->objs_since_last_full()->Set(0);

  incremental_marking()->Epilogue();

  DCHECK(incremental_marking()->IsStopped());
}
void Heap::MarkCompactPrologue() {
  TRACE_GC(tracer(), GCTracer::Scope::MC_PROLOGUE);
  isolate_->descriptor_lookup_cache()->Clear();
  RegExpResultsCache::Clear(string_split_cache());
  RegExpResultsCache::Clear(regexp_multiple_cache());

  isolate_->compilation_cache()->MarkCompactPrologue();

  FlushNumberStringCache();
}
void Heap::CheckNewSpaceExpansionCriteria() {
  if (FLAG_experimental_new_space_growth_heuristic) {
    if (new_space_->TotalCapacity() < new_space_->MaximumCapacity() &&
        survived_last_scavenge_ * 100 / new_space_->TotalCapacity() >= 10) {
      // Grow the size of new space if there is room to grow, and more than 10%
      // have survived the last scavenge.
      new_space_->Grow();
      survived_since_last_expansion_ = 0;
    }
  } else if (new_space_->TotalCapacity() < new_space_->MaximumCapacity() &&
             survived_since_last_expansion_ > new_space_->TotalCapacity()) {
    // Grow the size of new space if there is room to grow, and enough data
    // has survived scavenge since the last expansion.
    new_space_->Grow();
    survived_since_last_expansion_ = 0;
  }
}
void Heap::EvacuateYoungGeneration() {
  TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_FAST_PROMOTE);
  base::MutexGuard guard(relocation_mutex());
  ConcurrentMarking::PauseScope pause_scope(concurrent_marking());
  if (!FLAG_concurrent_marking) {
    DCHECK(fast_promotion_mode_);
    DCHECK(CanExpandOldGeneration(new_space()->Size()));
  }

  mark_compact_collector()->sweeper()->EnsureIterabilityCompleted();

  SetGCState(SCAVENGE);
  LOG(isolate_, ResourceEvent("scavenge", "begin"));

  // Move pages from new->old generation.
  PageRange range(new_space()->first_allocatable_address(), new_space()->top());
  for (auto it = range.begin(); it != range.end();) {
    Page* p = (*++it)->prev_page();
    new_space()->from_space().RemovePage(p);
    Page::ConvertNewToOld(p);
    if (incremental_marking()->IsMarking())
      mark_compact_collector()->RecordLiveSlotsOnPage(p);
  }

  // Reset new space.
  if (!new_space()->Rebalance()) {
    FatalProcessOutOfMemory("NewSpace::Rebalance");
  }
  new_space()->ResetLinearAllocationArea();
  new_space()->set_age_mark(new_space()->top());

  // Fix up special trackers.
  external_string_table_.PromoteAllNewSpaceStrings();

  IncrementYoungSurvivorsCounter(new_space()->Size());
  IncrementPromotedObjectsSize(new_space()->Size());
  IncrementSemiSpaceCopiedObjectSize(0);

  LOG(isolate_, ResourceEvent("scavenge", "end"));
  SetGCState(NOT_IN_GC);
}
void Heap::Scavenge() {
  TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE);
  base::MutexGuard guard(relocation_mutex());
  ConcurrentMarking::PauseScope pause_scope(concurrent_marking());
  AlwaysAllocateScope scope(isolate());

  // Bump-pointer allocations done during scavenge are not real allocations;
  // pause the inline allocation steps.
  PauseAllocationObserversScope pause_observers(this);
  IncrementalMarking::PauseBlackAllocationScope pause_black_allocation(
      incremental_marking());

  mark_compact_collector()->sweeper()->EnsureIterabilityCompleted();

  SetGCState(SCAVENGE);

  // Flip the semispaces. After flipping, to space is empty and from space has
  // the live objects.
  new_space()->Flip();
  new_space()->ResetLinearAllocationArea();

  // The young generation large object space is flipped as well.
  new_lo_space()->Flip();

  LOG(isolate_, ResourceEvent("scavenge", "begin"));

  scavenger_collector_->CollectGarbage();

  LOG(isolate_, ResourceEvent("scavenge", "end"));

  SetGCState(NOT_IN_GC);
}
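// The semispace Flip() above swaps from-space and to-space, so afterwards all
// live young objects sit in from-space and the scavenger copies survivors into
// the now-empty to-space (or promotes them); new_lo_space() is flipped for the
// same reason.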
void Heap::ComputeFastPromotionMode() {
  const size_t survived_in_new_space =
      survived_last_scavenge_ * 100 / new_space_->Capacity();
  fast_promotion_mode_ =
      !FLAG_optimize_for_size && FLAG_fast_promotion_new_space &&
      !ShouldReduceMemory() && new_space_->IsAtMaximumCapacity() &&
      survived_in_new_space >= kMinPromotedPercentForFastPromotionMode;
  if (FLAG_trace_gc_verbose && !FLAG_trace_gc_ignore_scavenger) {
    PrintIsolate(
        isolate(), "Fast promotion mode: %s survival rate: %" PRIuS "%%\n",
        fast_promotion_mode_ ? "true" : "false", survived_in_new_space);
  }
}
void Heap::UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk) {
  if (unprotected_memory_chunks_registry_enabled_) {
    base::MutexGuard guard(&unprotected_memory_chunks_mutex_);
    if (unprotected_memory_chunks_.insert(chunk).second) {
      chunk->SetReadAndWritable();
    }
  }
}

void Heap::UnprotectAndRegisterMemoryChunk(HeapObject* object) {
  UnprotectAndRegisterMemoryChunk(MemoryChunk::FromAddress(object->address()));
}

void Heap::UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk) {
  unprotected_memory_chunks_.erase(chunk);
}

void Heap::ProtectUnprotectedMemoryChunks() {
  DCHECK(unprotected_memory_chunks_registry_enabled_);
  for (auto chunk = unprotected_memory_chunks_.begin();
       chunk != unprotected_memory_chunks_.end(); chunk++) {
    CHECK(memory_allocator()->IsMemoryChunkExecutable(*chunk));
    (*chunk)->SetReadAndExecutable();
  }
  unprotected_memory_chunks_.clear();
}
bool Heap::ExternalStringTable::Contains(String string) {
  for (size_t i = 0; i < new_space_strings_.size(); ++i) {
    if (new_space_strings_[i] == string) return true;
  }
  for (size_t i = 0; i < old_space_strings_.size(); ++i) {
    if (old_space_strings_[i] == string) return true;
  }
  return false;
}
String Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
                                                               ObjectSlot p) {
  MapWord first_word = HeapObject::cast(*p)->map_word();

  if (!first_word.IsForwardingAddress()) {
    // Unreachable external string can be finalized.
    String string = String::cast(*p);
    if (!string->IsExternalString()) {
      // The original external string has been internalized.
      DCHECK(string->IsThinString());
      return String();
    }
    heap->FinalizeExternalString(string);
    return String();
  }

  // The string is still reachable.
  String new_string = String::cast(first_word.ToForwardingAddress());
  if (new_string->IsThinString()) {
    // Filter thin strings out of the external string table.
    return String();
  } else if (new_string->IsExternalString()) {
    MemoryChunk::MoveExternalBackingStoreBytes(
        ExternalBackingStoreType::kExternalString,
        Page::FromAddress(reinterpret_cast<Address>(*p)),
        Page::FromHeapObject(new_string),
        ExternalString::cast(new_string)->ExternalPayloadSize());
  }

  // Internalization can replace external strings with non-external strings.
  return new_string->IsExternalString() ? new_string : String();
}
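// Note on the updater above: during a scavenge, the map word of a moved
// string is overwritten with a forwarding address, which is what
// map_word().IsForwardingAddress() detects. Strings whose map word is not a
// forwarding address did not survive the collection and, if they are still
// external, must have their external resource finalized; surviving external
// strings additionally migrate their external backing-store byte accounting
// to the page they were copied or promoted to.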
void Heap::ExternalStringTable::VerifyNewSpace() {
  std::set<String> visited_map;
  std::map<MemoryChunk*, size_t> size_map;
  ExternalBackingStoreType type = ExternalBackingStoreType::kExternalString;
  for (size_t i = 0; i < new_space_strings_.size(); ++i) {
    String obj = String::cast(new_space_strings_[i]);
    MemoryChunk* mc = MemoryChunk::FromHeapObject(obj);
    DCHECK(mc->InNewSpace());
    DCHECK(heap_->InNewSpace(obj));
    DCHECK(!obj->IsTheHole(heap_->isolate()));
    DCHECK(obj->IsExternalString());
    DCHECK_EQ(0, visited_map.count(obj));
    visited_map.insert(obj);
    size_map[mc] += ExternalString::cast(obj)->ExternalPayloadSize();
  }
  for (std::map<MemoryChunk*, size_t>::iterator it = size_map.begin();
       it != size_map.end(); it++)
    DCHECK_EQ(it->first->ExternalBackingStoreBytes(type), it->second);
}
void Heap::ExternalStringTable::Verify() {
  std::set<String> visited_map;
  std::map<MemoryChunk*, size_t> size_map;
  ExternalBackingStoreType type = ExternalBackingStoreType::kExternalString;

  for (size_t i = 0; i < old_space_strings_.size(); ++i) {
    String obj = String::cast(old_space_strings_[i]);
    MemoryChunk* mc = MemoryChunk::FromHeapObject(obj);
    DCHECK(!mc->InNewSpace());
    DCHECK(!heap_->InNewSpace(obj));
    DCHECK(!obj->IsTheHole(heap_->isolate()));
    DCHECK(obj->IsExternalString());
    DCHECK_EQ(0, visited_map.count(obj));
    visited_map.insert(obj);
    size_map[mc] += ExternalString::cast(obj)->ExternalPayloadSize();
  }
  for (std::map<MemoryChunk*, size_t>::iterator it = size_map.begin();
       it != size_map.end(); it++)
    DCHECK_EQ(it->first->ExternalBackingStoreBytes(type), it->second);
}
void Heap::ExternalStringTable::UpdateNewSpaceReferences(
    Heap::ExternalStringTableUpdaterCallback updater_func) {
  if (new_space_strings_.empty()) return;

  ObjectSlot start(new_space_strings_.data());
  ObjectSlot end(new_space_strings_.data() + new_space_strings_.size());
  ObjectSlot last = start;

  for (ObjectSlot p = start; p < end; ++p) {
    String target = updater_func(heap_, p);

    if (target.is_null()) continue;

    DCHECK(target->IsExternalString());

    if (InNewSpace(target)) {
      // The string is still in new space: keep it in this table.
      last.store(target);
      ++last;
    } else {
      // The string got promoted: move it to the old-space list.
      old_space_strings_.push_back(target);
    }
  }

  DCHECK(last <= end);
  new_space_strings_.resize(last - start);
  if (FLAG_verify_heap) {
    VerifyNewSpace();
  }
}
void Heap::ExternalStringTable::PromoteAllNewSpaceStrings() {
  old_space_strings_.reserve(old_space_strings_.size() +
                             new_space_strings_.size());
  std::move(std::begin(new_space_strings_), std::end(new_space_strings_),
            std::back_inserter(old_space_strings_));
  new_space_strings_.clear();
}

void Heap::ExternalStringTable::IterateNewSpaceStrings(RootVisitor* v) {
  if (!new_space_strings_.empty()) {
    v->VisitRootPointers(
        Root::kExternalStringsTable, nullptr,
        ObjectSlot(new_space_strings_.data()),
        ObjectSlot(new_space_strings_.data() + new_space_strings_.size()));
  }
}

void Heap::ExternalStringTable::IterateAll(RootVisitor* v) {
  IterateNewSpaceStrings(v);
  if (!old_space_strings_.empty()) {
    v->VisitRootPointers(
        Root::kExternalStringsTable, nullptr,
        ObjectSlot(old_space_strings_.data()),
        ObjectSlot(old_space_strings_.data() + old_space_strings_.size()));
  }
}

void Heap::UpdateNewSpaceReferencesInExternalStringTable(
    ExternalStringTableUpdaterCallback updater_func) {
  external_string_table_.UpdateNewSpaceReferences(updater_func);
}

void Heap::ExternalStringTable::UpdateReferences(
    Heap::ExternalStringTableUpdaterCallback updater_func) {
  if (old_space_strings_.size() > 0) {
    ObjectSlot start(old_space_strings_.data());
    ObjectSlot end(old_space_strings_.data() + old_space_strings_.size());
    for (ObjectSlot p = start; p < end; ++p) p.store(updater_func(heap_, p));
  }

  UpdateNewSpaceReferences(updater_func);
}

void Heap::UpdateReferencesInExternalStringTable(
    ExternalStringTableUpdaterCallback updater_func) {
  external_string_table_.UpdateReferences(updater_func);
}
void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
  ProcessNativeContexts(retainer);
  ProcessAllocationSites(retainer);
}

void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
  ProcessNativeContexts(retainer);
}

void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
  Object* head =
      VisitWeakList2<Context>(this, native_contexts_list(), retainer);
  // Update the head of the list of contexts.
  set_native_contexts_list(head);
}

void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
  Object* allocation_site_obj =
      VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
  set_allocation_sites_list(allocation_site_obj);
}

void Heap::ProcessWeakListRoots(WeakObjectRetainer* retainer) {
  set_native_contexts_list(retainer->RetainAs(native_contexts_list()));
  set_allocation_sites_list(retainer->RetainAs(allocation_sites_list()));
}
void Heap::ForeachAllocationSite(
    Object* list, const std::function<void(AllocationSite*)>& visitor) {
  DisallowHeapAllocation disallow_heap_allocation;
  Object* current = list;
  while (current->IsAllocationSite()) {
    AllocationSite* site = AllocationSite::cast(current);
    visitor(site);
    Object* current_nested = site->nested_site();
    while (current_nested->IsAllocationSite()) {
      AllocationSite* nested_site = AllocationSite::cast(current_nested);
      visitor(nested_site);
      current_nested = nested_site->nested_site();
    }
    current = site->weak_next();
  }
}

void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
  DisallowHeapAllocation no_allocation_scope;
  bool marked = false;

  ForeachAllocationSite(allocation_sites_list(),
                        [&marked, flag, this](AllocationSite* site) {
                          if (site->GetPretenureMode() == flag) {
                            site->ResetPretenureDecision();
                            site->set_deopt_dependent_code(true);
                            marked = true;
                            RemoveAllocationSitePretenuringFeedback(site);
                          }
                        });
  if (marked) isolate_->stack_guard()->RequestDeoptMarkedAllocationSites();
}
void Heap::EvaluateOldSpaceLocalPretenuring(
    uint64_t size_of_objects_before_gc) {
  uint64_t size_of_objects_after_gc = SizeOfObjects();
  double old_generation_survival_rate =
      (static_cast<double>(size_of_objects_after_gc) * 100) /
      static_cast<double>(size_of_objects_before_gc);

  if (old_generation_survival_rate < kOldSurvivalRateLowThreshold) {
    // Too many objects died in the old generation; wrong pretenuring
    // decisions may be the cause, so deopt all dependent code and let the
    // allocation sites re-evaluate their decisions.
    ResetAllAllocationSitesDependentCode(TENURED);
    if (FLAG_trace_pretenuring) {
      PrintF(
          "Deopt all allocation sites dependent code due to low survival "
          "rate in the old generation %f\n",
          old_generation_survival_rate);
    }
  }
}
void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
  DisallowHeapAllocation no_allocation;
  // All external strings are listed in the external string table.
  class ExternalStringTableVisitorAdapter : public RootVisitor {
   public:
    explicit ExternalStringTableVisitorAdapter(
        Isolate* isolate, v8::ExternalResourceVisitor* visitor)
        : isolate_(isolate), visitor_(visitor) {}
    void VisitRootPointers(Root root, const char* description,
                           ObjectSlot start, ObjectSlot end) override {
      for (ObjectSlot p = start; p < end; ++p) {
        DCHECK((*p)->IsExternalString());
        visitor_->VisitExternalString(
            Utils::ToLocal(Handle<String>(String::cast(*p), isolate_)));
      }
    }

   private:
    Isolate* isolate_;
    v8::ExternalResourceVisitor* visitor_;
  } external_string_table_visitor(isolate(), visitor);

  external_string_table_.IterateAll(&external_string_table_visitor);
}
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);
STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) == 0);
#ifdef V8_HOST_ARCH_32_BIT
STATIC_ASSERT((HeapNumber::kValueOffset & kDoubleAlignmentMask) != 0);
#endif

int Heap::GetMaximumFillToAlign(AllocationAlignment alignment) {
  switch (alignment) {
    case kWordAligned:
      return 0;
    case kDoubleAligned:
    case kDoubleUnaligned:
      return kDoubleSize - kPointerSize;
    default:
      UNREACHABLE();
  }
  return 0;
}

int Heap::GetFillToAlign(Address address, AllocationAlignment alignment) {
  if (alignment == kDoubleAligned && (address & kDoubleAlignmentMask) != 0)
    return kPointerSize;
  if (alignment == kDoubleUnaligned && (address & kDoubleAlignmentMask) == 0)
    return kDoubleSize - kPointerSize;
  return 0;
}
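// Example (assuming 4-byte pointers and 8-byte doubles, i.e. a 32-bit host):
// an allocation at an address ending in 0x4 that must be kDoubleAligned needs
// a one-word (4 byte) filler in front of it, whereas an allocation already at
// a multiple of 8 needs none. Callers typically reserve
// object_size + GetMaximumFillToAlign(alignment) bytes up front and then let
// AlignWithFiller() below place the actual filler(s).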
HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) {
  CreateFillerObjectAt(object->address(), filler_size, ClearRecordedSlots::kNo);
  return HeapObject::FromAddress(object->address() + filler_size);
}

HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size,
                                  int allocation_size,
                                  AllocationAlignment alignment) {
  int filler_size = allocation_size - object_size;
  DCHECK_LT(0, filler_size);
  int pre_filler = GetFillToAlign(object->address(), alignment);
  if (pre_filler) {
    object = PrecedeWithFiller(object, pre_filler);
    filler_size -= pre_filler;
  }
  if (filler_size) {
    CreateFillerObjectAt(object->address() + object_size, filler_size,
                         ClearRecordedSlots::kNo);
  }
  return object;
}
void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
  ArrayBufferTracker::RegisterNew(this, buffer);
}

void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
  ArrayBufferTracker::Unregister(this, buffer);
}

void Heap::ConfigureInitialOldGenerationSize() {
  if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
    old_generation_allocation_limit_ =
        Max(heap_controller()->MinimumAllocationLimitGrowingStep(
                CurrentHeapGrowingMode()),
            static_cast<size_t>(
                static_cast<double>(old_generation_allocation_limit_) *
                (tracer()->AverageSurvivalRatio() / 100)));
  }
}
void Heap::CreateJSEntryStub() {
  JSEntryStub stub(isolate(), StackFrame::ENTRY);
  set_js_entry_code(*stub.GetCode());
}

void Heap::CreateJSConstructEntryStub() {
  JSEntryStub stub(isolate(), StackFrame::CONSTRUCT_ENTRY);
  set_js_construct_entry_code(*stub.GetCode());
}

void Heap::CreateJSRunMicrotasksEntryStub() {
  JSEntryStub stub(isolate(), JSEntryStub::SpecialTarget::kRunMicrotasks);
  set_js_run_microtasks_entry_code(*stub.GetCode());
}

void Heap::CreateFixedStubs() {
  HandleScope scope(isolate());
  CanonicalHandleScope canonical(isolate());
  Heap::CreateJSEntryStub();
  Heap::CreateJSConstructEntryStub();
  Heap::CreateJSRunMicrotasksEntryStub();
}
void Heap::FlushNumberStringCache() {
  int len = number_string_cache()->length();
  for (int i = 0; i < len; i++) {
    number_string_cache()->set_undefined(i);
  }
}
HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
                                       ClearRecordedSlots clear_slots_mode,
                                       ClearFreedMemoryMode clear_memory_mode) {
  if (size == 0) return nullptr;
  HeapObject* filler = HeapObject::FromAddress(addr);
  if (size == kPointerSize) {
    filler->set_map_after_allocation(
        Map::unchecked_cast(isolate()->root(RootIndex::kOnePointerFillerMap)),
        SKIP_WRITE_BARRIER);
  } else if (size == 2 * kPointerSize) {
    filler->set_map_after_allocation(
        Map::unchecked_cast(isolate()->root(RootIndex::kTwoPointerFillerMap)),
        SKIP_WRITE_BARRIER);
    if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {
      Memory<Address>(addr + kPointerSize) =
          static_cast<Address>(kClearedFreeMemoryValue);
    }
  } else {
    DCHECK_GT(size, 2 * kPointerSize);
    filler->set_map_after_allocation(
        Map::unchecked_cast(isolate()->root(RootIndex::kFreeSpaceMap)),
        SKIP_WRITE_BARRIER);
    FreeSpace::cast(filler)->relaxed_write_size(size);
    if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {
      memset(reinterpret_cast<void*>(addr + 2 * kPointerSize),
             kClearedFreeMemoryValue, size - 2 * kPointerSize);
    }
  }
  if (clear_slots_mode == ClearRecordedSlots::kYes) {
    ClearRecordedSlotRange(addr, addr + size);
  }

  // During deserialization the filler maps may not have been created yet, in
  // which case the map is still null.
  DCHECK((filler->map().is_null() && !deserialization_complete_) ||
         filler->map()->IsMap());
  return filler;
}
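// Note on fillers: the heap must stay iterable, so freed gaps are turned into
// pseudo-objects by the function above. A one-word gap gets the one-pointer
// filler map, a two-word gap the two-pointer filler map, and anything larger
// becomes a FreeSpace object whose size field records the length of the gap.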
bool Heap::CanMoveObjectStart(HeapObject* object) {
  if (!FLAG_move_object_start) return false;

  if (isolate()->heap_profiler()->is_sampling_allocations()) return false;

  Address address = object->address();

  if (IsLargeObject(object)) return false;

  return Page::FromAddress(address)->SweepingDone();
}

bool Heap::IsImmovable(HeapObject* object) {
  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
  return chunk->NeverEvacuate() || IsLargeObject(object);
}

bool Heap::IsLargeObject(HeapObject* object) {
  return lo_space()->Contains(object) || code_lo_space()->Contains(object) ||
         new_lo_space()->Contains(object);
}

bool Heap::IsInYoungGeneration(HeapObject* object) {
  if (MemoryChunk::FromHeapObject(object)->IsInNewLargeObjectSpace()) {
    return !object->map_word().IsForwardingAddress();
  }
  return Heap::InNewSpace(object);
}
#ifdef ENABLE_SLOW_DCHECKS
class LeftTrimmerVerifierRootVisitor : public RootVisitor {
 public:
  explicit LeftTrimmerVerifierRootVisitor(FixedArrayBase to_check)
      : to_check_(to_check) {}

  void VisitRootPointers(Root root, const char* description, ObjectSlot start,
                         ObjectSlot end) override {
    for (ObjectSlot p = start; p < end; ++p) {
      DCHECK_NE(*p, to_check_);
    }
  }

 private:
  FixedArrayBase to_check_;

  DISALLOW_COPY_AND_ASSIGN(LeftTrimmerVerifierRootVisitor);
};
#endif  // ENABLE_SLOW_DCHECKS

bool MayContainRecordedSlots(HeapObject* object) {
  // New-space objects do not have recorded slots.
  if (MemoryChunk::FromHeapObject(object)->InNewSpace()) return false;
  // Whitelist objects that definitely do not contain tagged pointers.
  if (object->IsByteArray() || object->IsFixedDoubleArray()) return false;
  // Conservatively assume anything else may have recorded slots.
  return true;
}
FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
                                        int elements_to_trim) {
  if (elements_to_trim == 0) {
    // This simplifies reasoning in the rest of the function.
    return object;
  }
  CHECK(!object.is_null());
  DCHECK(CanMoveObjectStart(object));
  DCHECK(object->IsFixedArray() || object->IsFixedDoubleArray());
  const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize;
  const int bytes_to_trim = elements_to_trim * element_size;
  Map map = object->map();

  DCHECK(!IsLargeObject(object));
  DCHECK(object->map() != ReadOnlyRoots(this).fixed_cow_array_map());

  STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
  STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);

  const int len = object->length();
  DCHECK(elements_to_trim <= len);

  // Calculate the location of the new array start.
  Address old_start = object->address();
  Address new_start = old_start + bytes_to_trim;

  if (incremental_marking()->IsMarking()) {
    incremental_marking()->NotifyLeftTrimming(
        object, HeapObject::FromAddress(new_start));
  }

  // Turn the trimmed-off prefix into a filler object.
  HeapObject* filler =
      CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes);

  // Initialize the header of the trimmed array (map and new length).
  RELAXED_WRITE_FIELD(object, bytes_to_trim, map);
  RELAXED_WRITE_FIELD(object, bytes_to_trim + kPointerSize,
                      Smi::FromInt(len - elements_to_trim));

  FixedArrayBase new_object =
      FixedArrayBase::cast(HeapObject::FromAddress(new_start));

  // Remove recorded slots for the new map and length offset.
  ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0));
  ClearRecordedSlot(new_object, HeapObject::RawField(
                                    new_object, FixedArrayBase::kLengthOffset));

  // Handle invalidated old-to-old slots.
  if (incremental_marking()->IsCompacting() &&
      MayContainRecordedSlots(new_object)) {
    MemoryChunk::FromHeapObject(new_object)
        ->MoveObjectWithInvalidatedSlots(filler, new_object);
  }

  // Clear the freed-up prefix so that no stale pointers survive in it.
  if (filler->Size() > FreeSpace::kSize) {
    MemsetPointer(HeapObject::RawField(filler, FreeSpace::kSize),
                  ReadOnlyRoots(this).undefined_value(),
                  (filler->Size() - FreeSpace::kSize) / kPointerSize);
  }

  // Notify the heap profiler of the change in object layout.
  OnMoveEvent(new_object, object, new_object->Size());

#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    // Make sure no root still points at the old, now filler-covered, start of
    // the array.
    LeftTrimmerVerifierRootVisitor root_visitor(object);
    ReadOnlyRoots(this).Iterate(&root_visitor);
    IterateRoots(&root_visitor, VISIT_ALL);
  }
#endif  // ENABLE_SLOW_DCHECKS

  return new_object;
}

void Heap::RightTrimFixedArray(FixedArrayBase object, int elements_to_trim) {
  const int len = object->length();
  DCHECK_LE(elements_to_trim, len);
  DCHECK_GE(elements_to_trim, 0);

  int bytes_to_trim;
  DCHECK(!object->IsFixedTypedArrayBase());
  if (object->IsByteArray()) {
    int new_size = ByteArray::SizeFor(len - elements_to_trim);
    bytes_to_trim = ByteArray::SizeFor(len) - new_size;
    DCHECK_GE(bytes_to_trim, 0);
  } else if (object->IsFixedArray()) {
    CHECK_NE(elements_to_trim, len);
    bytes_to_trim = elements_to_trim * kPointerSize;
  } else {
    DCHECK(object->IsFixedDoubleArray());
    CHECK_NE(elements_to_trim, len);
    bytes_to_trim = elements_to_trim * kDoubleSize;
  }

  CreateFillerForArray<FixedArrayBase>(object, elements_to_trim, bytes_to_trim);
}

void Heap::RightTrimWeakFixedArray(WeakFixedArray* object,
                                   int elements_to_trim) {
  // Weak slots are only recorded during mark-compact, so shrinking the array
  // at any other time would invalidate them.
  DCHECK_EQ(gc_state(), MARK_COMPACT);
  CreateFillerForArray<WeakFixedArray*>(object, elements_to_trim,
                                        elements_to_trim * kPointerSize);
}
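// Worked example (assuming 8-byte pointers): right-trimming 3 elements off a
// FixedArray of length 10 computes bytes_to_trim = 3 * kPointerSize = 24,
// creates a 24-byte filler directly behind the new end of the array, and then
// release-stores the new length 7, so concurrent sweepers never observe a
// length that extends into the filler.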
template <typename T>
void Heap::CreateFillerForArray(T object, int elements_to_trim,
                                int bytes_to_trim) {
  DCHECK(object->IsFixedArrayBase() || object->IsByteArray() ||
         object->IsWeakFixedArray());

  DCHECK(object->map() != ReadOnlyRoots(this).fixed_cow_array_map());

  if (bytes_to_trim == 0) {
    DCHECK_EQ(elements_to_trim, 0);
    // No need to create a filler or update live-byte counters.
    return;
  }

  // Calculate the location of the new array end.
  int old_size = object->Size();
  Address old_end = object->address() + old_size;
  Address new_end = old_end - bytes_to_trim;

  // Register the array as an object with invalidated old-to-old slots.
  if (incremental_marking()->IsCompacting() &&
      MayContainRecordedSlots(object)) {
    incremental_marking()->WhiteToGreyAndPush(object);
    MemoryChunk::FromHeapObject(object)->RegisterObjectWithInvalidatedSlots(
        object, old_size);
  }

  // Large objects are not covered by fillers.
  if (!IsLargeObject(object)) {
    HeapObject* filler =
        CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes);
    DCHECK_NOT_NULL(filler);
    // Clear the mark bits of the black area that now belongs to the filler.
    if (incremental_marking()->black_allocation() &&
        incremental_marking()->marking_state()->IsBlackOrGrey(filler)) {
      Page* page = Page::FromAddress(new_end);
      incremental_marking()->marking_state()->bitmap(page)->ClearRange(
          page->AddressToMarkbitIndex(new_end),
          page->AddressToMarkbitIndex(new_end + bytes_to_trim));
    }
  }

  // Store the new length with a release store, after the filler has been
  // created, to avoid races with the concurrent sweeper.
  object->synchronized_set_length(object->length() - elements_to_trim);

  // Notify the allocation trackers of the change in object size.
  for (auto& tracker : allocation_trackers_) {
    tracker->UpdateObjectSizeEvent(object->address(), object->Size());
  }
}
void Heap::MakeHeapIterable() {
  mark_compact_collector()->EnsureSweepingCompleted();
}

static double ComputeMutatorUtilization(double mutator_speed,
                                        double gc_speed) {
  const double kMinMutatorUtilization = 0.0;
  const double kConservativeGcSpeedInBytesPerMillisecond = 200000;
  if (mutator_speed == 0) return kMinMutatorUtilization;
  if (gc_speed == 0) gc_speed = kConservativeGcSpeedInBytesPerMillisecond;
  return gc_speed / (mutator_speed + gc_speed);
}
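// Derivation of the formula above (per byte of allocated memory):
//   mutator_utilization = mutator_time / (mutator_time + gc_time)
//   mutator_time        = 1 / mutator_speed
//   gc_time             = 1 / gc_speed
// Substituting and multiplying through by mutator_speed * gc_speed gives
//   mutator_utilization = gc_speed / (mutator_speed + gc_speed).
// Example: mutator_speed = 800 KB/ms and gc_speed = 200 KB/ms yields a
// utilization of 200 / (800 + 200) = 0.2.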
double Heap::YoungGenerationMutatorUtilization() {
  double mutator_speed = static_cast<double>(
      tracer()->NewSpaceAllocationThroughputInBytesPerMillisecond());
  double gc_speed =
      tracer()->ScavengeSpeedInBytesPerMillisecond(kForSurvivedObjects);
  double result = ComputeMutatorUtilization(mutator_speed, gc_speed);
  if (FLAG_trace_mutator_utilization) {
    isolate()->PrintWithTimestamp(
        "Young generation mutator utilization = %.3f ("
        "mutator_speed=%.f, gc_speed=%.f)\n",
        result, mutator_speed, gc_speed);
  }
  return result;
}

double Heap::OldGenerationMutatorUtilization() {
  double mutator_speed = static_cast<double>(
      tracer()->OldGenerationAllocationThroughputInBytesPerMillisecond());
  double gc_speed = static_cast<double>(
      tracer()->CombinedMarkCompactSpeedInBytesPerMillisecond());
  double result = ComputeMutatorUtilization(mutator_speed, gc_speed);
  if (FLAG_trace_mutator_utilization) {
    isolate()->PrintWithTimestamp(
        "Old generation mutator utilization = %.3f ("
        "mutator_speed=%.f, gc_speed=%.f)\n",
        result, mutator_speed, gc_speed);
  }
  return result;
}

bool Heap::HasLowYoungGenerationAllocationRate() {
  const double high_mutator_utilization = 0.993;
  return YoungGenerationMutatorUtilization() > high_mutator_utilization;
}

bool Heap::HasLowOldGenerationAllocationRate() {
  const double high_mutator_utilization = 0.993;
  return OldGenerationMutatorUtilization() > high_mutator_utilization;
}

bool Heap::HasLowAllocationRate() {
  return HasLowYoungGenerationAllocationRate() &&
         HasLowOldGenerationAllocationRate();
}
bool Heap::IsIneffectiveMarkCompact(size_t old_generation_size,
                                    double mutator_utilization) {
  const double kHighHeapPercentage = 0.8;
  const double kLowMutatorUtilization = 0.4;
  return old_generation_size >=
             kHighHeapPercentage * max_old_generation_size_ &&
         mutator_utilization < kLowMutatorUtilization;
}

void Heap::CheckIneffectiveMarkCompact(size_t old_generation_size,
                                       double mutator_utilization) {
  const int kMaxConsecutiveIneffectiveMarkCompacts = 4;
  if (!FLAG_detect_ineffective_gcs_near_heap_limit) return;
  if (!IsIneffectiveMarkCompact(old_generation_size, mutator_utilization)) {
    consecutive_ineffective_mark_compacts_ = 0;
    return;
  }
  ++consecutive_ineffective_mark_compacts_;
  if (consecutive_ineffective_mark_compacts_ ==
      kMaxConsecutiveIneffectiveMarkCompacts) {
    if (InvokeNearHeapLimitCallback()) {
      // The callback raised the heap limit; start counting again.
      consecutive_ineffective_mark_compacts_ = 0;
      return;
    }
    FatalProcessOutOfMemory("Ineffective mark-compacts near heap limit");
  }
}
bool Heap::HasHighFragmentation() {
  size_t used = OldGenerationSizeOfObjects();
  size_t committed = CommittedOldGenerationMemory();
  return HasHighFragmentation(used, committed);
}

bool Heap::HasHighFragmentation(size_t used, size_t committed) {
  const size_t kSlack = 16 * MB;
  DCHECK_GE(committed, used);
  return committed - used > used + kSlack;
}
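// The check above is "committed > 2 * used + kSlack", rewritten as
// "committed - used > used + kSlack" so that the left-hand side cannot
// overflow (the DCHECK guarantees committed >= used). In other words, the old
// generation counts as fragmented once less than half of the committed memory
// (minus 16 MB of slack) holds live objects.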
bool Heap::ShouldOptimizeForMemoryUsage() {
  const size_t kOldGenerationSlack = max_old_generation_size_ / 8;
  return FLAG_optimize_for_size || isolate()->IsIsolateInBackground() ||
         isolate()->IsMemorySavingsModeActive() || HighMemoryPressure() ||
         !CanExpandOldGeneration(kOldGenerationSlack);
}

void Heap::ActivateMemoryReducerIfNeeded() {
  // Only worthwhile when switching to background, no mark-compact has run yet
  // (ms_count_ == 0), and enough memory is committed that reducing it can pay
  // off.
  const int kMinCommittedMemory = 7 * Page::kPageSize;
  if (ms_count_ == 0 && CommittedMemory() > kMinCommittedMemory &&
      isolate()->IsIsolateInBackground()) {
    MemoryReducer::Event event;
    event.type = MemoryReducer::kPossibleGarbage;
    event.time_ms = MonotonicallyIncreasingTimeInMs();
    memory_reducer_->NotifyPossibleGarbage(event);
  }
}
void Heap::ReduceNewSpaceSize() {
  static const size_t kLowAllocationThroughput = 1000;
  const double allocation_throughput =
      tracer()->CurrentAllocationThroughputInBytesPerMillisecond();

  if (FLAG_predictable) return;

  if (ShouldReduceMemory() ||
      ((allocation_throughput != 0) &&
       (allocation_throughput < kLowAllocationThroughput))) {
    new_space_->Shrink();
    UncommitFromSpace();
  }
}

void Heap::FinalizeIncrementalMarkingIfComplete(
    GarbageCollectionReason gc_reason) {
  if (incremental_marking()->IsMarking() &&
      (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
       (!incremental_marking()->finalize_marking_completed() &&
        mark_compact_collector()->marking_worklist()->IsEmpty() &&
        local_embedder_heap_tracer()->ShouldFinalizeIncrementalMarking()))) {
    FinalizeIncrementalMarkingIncrementally(gc_reason);
  } else if (incremental_marking()->IsComplete() ||
             (mark_compact_collector()->marking_worklist()->IsEmpty() &&
              local_embedder_heap_tracer()
                  ->ShouldFinalizeIncrementalMarking())) {
    CollectAllGarbage(current_gc_flags_, gc_reason, current_gc_callback_flags_);
  }
}

void Heap::FinalizeIncrementalMarkingAtomically(
    GarbageCollectionReason gc_reason) {
  DCHECK(!incremental_marking()->IsStopped());
  CollectAllGarbage(current_gc_flags_, gc_reason, current_gc_callback_flags_);
}
void Heap::FinalizeIncrementalMarkingIncrementally(
    GarbageCollectionReason gc_reason) {
  if (FLAG_trace_incremental_marking) {
    isolate()->PrintWithTimestamp(
        "[IncrementalMarking] (%s).\n",
        Heap::GarbageCollectionReasonToString(gc_reason));
  }

  HistogramTimerScope incremental_marking_scope(
      isolate()->counters()->gc_incremental_marking_finalize());
  TRACE_EVENT0("v8", "V8.GCIncrementalMarkingFinalize");
  TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE);

  {
    GCCallbacksScope scope(this);
    if (scope.CheckReenter()) {
      AllowHeapAllocation allow_allocation;
      TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE);
      VMState<EXTERNAL> state(isolate_);
      HandleScope handle_scope(isolate_);
      CallGCPrologueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags);
    }
  }
  incremental_marking()->FinalizeIncrementally();
  {
    GCCallbacksScope scope(this);
    if (scope.CheckReenter()) {
      AllowHeapAllocation allow_allocation;
      TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE);
      VMState<EXTERNAL> state(isolate_);
      HandleScope handle_scope(isolate_);
      CallGCEpilogueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags);
    }
  }
}
void Heap::RegisterDeserializedObjectsForBlackAllocation(
    Reservation* reservations, const std::vector<HeapObject*>& large_objects,
    const std::vector<Address>& maps) {
  if (!incremental_marking()->black_allocation()) return;

  IncrementalMarking::MarkingState* marking_state =
      incremental_marking()->marking_state();
  for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
    const Heap::Reservation& res = reservations[i];
    for (auto& chunk : res) {
      Address addr = chunk.start;
      while (addr < chunk.end) {
        HeapObject* obj = HeapObject::FromAddress(addr);
        if (marking_state->IsBlack(obj)) {
          incremental_marking()->ProcessBlackAllocatedObject(obj);
        }
        addr += obj->Size();
      }
    }
  }

  // Large objects are not covered by reservations and need separate handling.
  for (HeapObject* object : large_objects) {
    incremental_marking()->ProcessBlackAllocatedObject(object);
  }

  // Maps are not covered by reservations either.
  for (Address addr : maps) {
    incremental_marking()->ProcessBlackAllocatedObject(
        HeapObject::FromAddress(addr));
  }
}
void Heap::NotifyObjectLayoutChange(HeapObject* object, int size,
                                    const DisallowHeapAllocation&) {
  if (incremental_marking()->IsMarking()) {
    incremental_marking()->MarkBlackAndPush(object);
    if (incremental_marking()->IsCompacting() &&
        MayContainRecordedSlots(object)) {
      MemoryChunk::FromHeapObject(object)->RegisterObjectWithInvalidatedSlots(
          object, size);
    }
  }
  if (FLAG_verify_heap) {
    DCHECK_NULL(pending_layout_change_object_);
    pending_layout_change_object_ = object;
  }
}
class SlotCollectingVisitor final : public ObjectVisitor {
 public:
  void VisitPointers(HeapObject* host, ObjectSlot start,
                     ObjectSlot end) override {
    VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
  }
  void VisitPointers(HeapObject* host, MaybeObjectSlot start,
                     MaybeObjectSlot end) final {
    for (MaybeObjectSlot p = start; p < end; ++p) {
      slots_.push_back(p);
    }
  }

  int number_of_slots() { return static_cast<int>(slots_.size()); }

  MaybeObjectSlot slot(int i) { return slots_[i]; }

 private:
  std::vector<MaybeObjectSlot> slots_;
};
void Heap::VerifyObjectLayoutChange(HeapObject* object, Map new_map) {
  if (!FLAG_verify_heap) return;

  // Check that NotifyObjectLayoutChange was called for transitions that are
  // not safe for concurrent marking.
  if (pending_layout_change_object_ == nullptr) {
    if (object->IsJSObject()) {
      DCHECK(!object->map()->TransitionRequiresSynchronizationWithGC(new_map));
    } else {
      // The set of slots before and after the transition must match.
      SlotCollectingVisitor old_visitor;
      object->IterateFast(&old_visitor);
      MapWord old_map_word = object->map_word();
      // Temporarily set the new map to iterate the new set of slots.
      object->set_map_word(MapWord::FromMap(new_map));
      SlotCollectingVisitor new_visitor;
      object->IterateFast(&new_visitor);
      // Restore the original map.
      object->set_map_word(old_map_word);
      DCHECK_EQ(new_visitor.number_of_slots(), old_visitor.number_of_slots());
      for (int i = 0; i < new_visitor.number_of_slots(); i++) {
        DCHECK(new_visitor.slot(i) == old_visitor.slot(i));
      }
    }
  } else {
    DCHECK_EQ(pending_layout_change_object_, object);
    pending_layout_change_object_ = nullptr;
  }
}
GCIdleTimeHeapState Heap::ComputeHeapState() {
  GCIdleTimeHeapState heap_state;
  heap_state.contexts_disposed = contexts_disposed_;
  heap_state.contexts_disposal_rate =
      tracer()->ContextDisposalRateInMilliseconds();
  heap_state.size_of_objects = static_cast<size_t>(SizeOfObjects());
  heap_state.incremental_marking_stopped = incremental_marking()->IsStopped();
  return heap_state;
}
3080 bool Heap::PerformIdleTimeAction(GCIdleTimeAction action,
3081 GCIdleTimeHeapState heap_state,
3082 double deadline_in_ms) {
3083 bool result =
false;
3084 switch (action.type) {
3088 case DO_INCREMENTAL_STEP: {
3089 const double remaining_idle_time_in_ms =
3090 incremental_marking()->AdvanceIncrementalMarking(
3091 deadline_in_ms, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
3093 if (remaining_idle_time_in_ms > 0.0) {
3094 FinalizeIncrementalMarkingIfComplete(
3095 GarbageCollectionReason::kFinalizeMarkingViaTask);
3097 result = incremental_marking()->IsStopped();
3101 DCHECK_LT(0, contexts_disposed_);
3102 HistogramTimerScope scope(isolate_->counters()->gc_context());
3103 TRACE_EVENT0(
"v8",
"V8.GCContext");
3104 CollectAllGarbage(kNoGCFlags, GarbageCollectionReason::kContextDisposal);
3114 void Heap::IdleNotificationEpilogue(GCIdleTimeAction action,
3115 GCIdleTimeHeapState heap_state,
3116 double start_ms,
double deadline_in_ms) {
3117 double idle_time_in_ms = deadline_in_ms - start_ms;
3118 double current_time = MonotonicallyIncreasingTimeInMs();
3119 last_idle_notification_time_ = current_time;
3120 double deadline_difference = deadline_in_ms - current_time;
3122 contexts_disposed_ = 0;
3124 if ((FLAG_trace_idle_notification && action.type > DO_NOTHING) ||
3125 FLAG_trace_idle_notification_verbose) {
3126 isolate_->PrintWithTimestamp(
3127 "Idle notification: requested idle time %.2f ms, used idle time %.2f " 3128 "ms, deadline usage %.2f ms [",
3129 idle_time_in_ms, idle_time_in_ms - deadline_difference,
3130 deadline_difference);
3133 if (FLAG_trace_idle_notification_verbose) {
double Heap::MonotonicallyIncreasingTimeInMs() {
  return V8::GetCurrentPlatform()->MonotonicallyIncreasingTime() *
         static_cast<double>(base::Time::kMillisecondsPerSecond);
}

bool Heap::IdleNotification(int idle_time_in_ms) {
  return IdleNotification(
      V8::GetCurrentPlatform()->MonotonicallyIncreasingTime() +
      (static_cast<double>(idle_time_in_ms) /
       static_cast<double>(base::Time::kMillisecondsPerSecond)));
}

bool Heap::IdleNotification(double deadline_in_seconds) {
  CHECK(HasBeenSetUp());
  double deadline_in_ms =
      deadline_in_seconds *
      static_cast<double>(base::Time::kMillisecondsPerSecond);
  HistogramTimerScope idle_notification_scope(
      isolate_->counters()->gc_idle_notification());
  TRACE_EVENT0("v8", "V8.GCIdleNotification");
  double start_ms = MonotonicallyIncreasingTimeInMs();
  double idle_time_in_ms = deadline_in_ms - start_ms;

  tracer()->SampleAllocation(start_ms, NewSpaceAllocationCounter(),
                             OldGenerationAllocationCounter());

  GCIdleTimeHeapState heap_state = ComputeHeapState();

  GCIdleTimeAction action =
      gc_idle_time_handler_->Compute(idle_time_in_ms, heap_state);

  bool result = PerformIdleTimeAction(action, heap_state, deadline_in_ms);

  IdleNotificationEpilogue(action, heap_state, start_ms, deadline_in_ms);
  return result;
}

bool Heap::RecentIdleNotificationHappened() {
  return (last_idle_notification_time_ +
          GCIdleTimeHandler::kMaxScheduledIdleTime) >
         MonotonicallyIncreasingTimeInMs();
}
3198 void RunInternal()
override { heap_->CheckMemoryPressure(); }
void Heap::CheckMemoryPressure() {
  if (HighMemoryPressure()) {
    // The optimizing compiler may be unnecessarily holding on to memory.
    isolate()->AbortConcurrentOptimization(BlockingBehavior::kDontBlock);
  }
  MemoryPressureLevel memory_pressure_level = memory_pressure_level_;
  // Reset the level first to avoid recursive GCs triggered while handling it.
  memory_pressure_level_ = MemoryPressureLevel::kNone;
  if (memory_pressure_level == MemoryPressureLevel::kCritical) {
    CollectGarbageOnMemoryPressure();
  } else if (memory_pressure_level == MemoryPressureLevel::kModerate) {
    if (FLAG_incremental_marking && incremental_marking()->IsStopped()) {
      StartIncrementalMarking(kReduceMemoryFootprintMask,
                              GarbageCollectionReason::kMemoryPressure);
    }
  }
  if (memory_reducer_) {
    MemoryReducer::Event event;
    event.type = MemoryReducer::kPossibleGarbage;
    event.time_ms = MonotonicallyIncreasingTimeInMs();
    memory_reducer_->NotifyPossibleGarbage(event);
  }
}
void Heap::CollectGarbageOnMemoryPressure() {
  const int kGarbageThresholdInBytes = 8 * MB;
  const double kGarbageThresholdAsFractionOfTotalMemory = 0.1;
  const double kMaxMemoryPressurePauseMs = 100;

  double start = MonotonicallyIncreasingTimeInMs();
  CollectAllGarbage(kReduceMemoryFootprintMask,
                    GarbageCollectionReason::kMemoryPressure,
                    kGCCallbackFlagCollectAllAvailableGarbage);
  EagerlyFreeExternalMemory();
  double end = MonotonicallyIncreasingTimeInMs();

  // Estimate how much garbage is still reclaimable.
  int64_t potential_garbage = (CommittedMemory() - SizeOfObjects()) +
                              isolate()->isolate_data()->external_memory_;
  if (potential_garbage >= kGarbageThresholdInBytes &&
      potential_garbage >=
          CommittedMemory() * kGarbageThresholdAsFractionOfTotalMemory) {
    // If less than half of the pause budget was spent, do another full GC;
    // otherwise fall back to incremental marking.
    if (end - start < kMaxMemoryPressurePauseMs / 2) {
      CollectAllGarbage(kReduceMemoryFootprintMask,
                        GarbageCollectionReason::kMemoryPressure,
                        kGCCallbackFlagCollectAllAvailableGarbage);
    } else {
      if (FLAG_incremental_marking && incremental_marking()->IsStopped()) {
        StartIncrementalMarking(kReduceMemoryFootprintMask,
                                GarbageCollectionReason::kMemoryPressure);
      }
    }
  }
}
void Heap::MemoryPressureNotification(MemoryPressureLevel level,
                                      bool is_isolate_locked) {
  MemoryPressureLevel previous = memory_pressure_level_;
  memory_pressure_level_ = level;
  if ((previous != MemoryPressureLevel::kCritical &&
       level == MemoryPressureLevel::kCritical) ||
      (previous == MemoryPressureLevel::kNone &&
       level == MemoryPressureLevel::kModerate)) {
    if (is_isolate_locked) {
      CheckMemoryPressure();
    } else {
      ExecutionAccess access(isolate());
      isolate()->stack_guard()->RequestGC();
      auto taskrunner = V8::GetCurrentPlatform()->GetForegroundTaskRunner(
          reinterpret_cast<v8::Isolate*>(isolate()));
      taskrunner->PostTask(
          base::make_unique<MemoryPressureInterruptTask>(this));
    }
  }
}
void Heap::EagerlyFreeExternalMemory() {
  for (Page* page : *old_space()) {
    if (!page->SweepingDone()) {
      base::MutexGuard guard(page->mutex());
      if (!page->SweepingDone()) {
        ArrayBufferTracker::FreeDead(
            page, mark_compact_collector()->non_atomic_marking_state());
      }
    }
  }
  memory_allocator()->unmapper()->EnsureUnmappingCompleted();
}
void Heap::AddNearHeapLimitCallback(v8::NearHeapLimitCallback callback,
                                    void* data) {
  const size_t kMaxCallbacks = 100;
  CHECK_LT(near_heap_limit_callbacks_.size(), kMaxCallbacks);
  for (auto callback_data : near_heap_limit_callbacks_) {
    CHECK_NE(callback_data.first, callback);
  }
  near_heap_limit_callbacks_.push_back(std::make_pair(callback, data));
}

void Heap::RemoveNearHeapLimitCallback(v8::NearHeapLimitCallback callback,
                                       size_t heap_limit) {
  for (size_t i = 0; i < near_heap_limit_callbacks_.size(); i++) {
    if (near_heap_limit_callbacks_[i].first == callback) {
      near_heap_limit_callbacks_.erase(near_heap_limit_callbacks_.begin() + i);
      if (heap_limit) {
        RestoreHeapLimit(heap_limit);
      }
      return;
    }
  }
  UNREACHABLE();
}

bool Heap::InvokeNearHeapLimitCallback() {
  if (near_heap_limit_callbacks_.size() > 0) {
    HandleScope scope(isolate());
    v8::NearHeapLimitCallback callback =
        near_heap_limit_callbacks_.back().first;
    void* data = near_heap_limit_callbacks_.back().second;
    size_t heap_limit = callback(data, max_old_generation_size_,
                                 initial_max_old_generation_size_);
    if (heap_limit > max_old_generation_size_) {
      max_old_generation_size_ = heap_limit;
      return true;
    }
  }
  return false;
}
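// Illustrative embedder usage (a hypothetical callback, not part of this
// file): a function registered via AddNearHeapLimitCallback can grow the
// limit to buy itself time to react, e.g.
//
//   size_t NearHeapLimit(void* data, size_t current_heap_limit,
//                        size_t initial_heap_limit) {
//     // Give the embedder 50% more headroom before the next OOM check.
//     return current_heap_limit + current_heap_limit / 2;
//   }
//
// InvokeNearHeapLimitCallback() above only raises max_old_generation_size_
// when the returned value is larger than the current limit.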
3340 void Heap::CollectCodeStatistics() {
3341 TRACE_EVENT0(
"v8",
"Heap::CollectCodeStatistics");
3342 CodeStatistics::ResetCodeAndMetadataStatistics(isolate());
3345 CodeStatistics::CollectCodeStatistics(code_space_, isolate());
3346 CodeStatistics::CollectCodeStatistics(old_space_, isolate());
3347 CodeStatistics::CollectCodeStatistics(code_lo_space_, isolate());
3352 void Heap::Print() {
3353 if (!HasBeenSetUp())
return;
3354 isolate()->PrintStack(stdout);
3356 for (SpaceIterator it(
this); it.has_next();) {
3362 void Heap::ReportCodeStatistics(
const char* title) {
3363 PrintF(
">>>>>> Code Stats (%s) >>>>>>\n", title);
3364 CollectCodeStatistics();
3365 CodeStatistics::ReportCodeStatistics(isolate());
const char* Heap::GarbageCollectionReasonToString(
    GarbageCollectionReason gc_reason) {
  switch (gc_reason) {
    case GarbageCollectionReason::kAllocationFailure:
      return "allocation failure";
    case GarbageCollectionReason::kAllocationLimit:
      return "allocation limit";
    case GarbageCollectionReason::kContextDisposal:
      return "context disposal";
    case GarbageCollectionReason::kCountersExtension:
      return "counters extension";
    case GarbageCollectionReason::kDebugger:
      return "debugger";
    case GarbageCollectionReason::kDeserializer:
      return "deserialize";
    case GarbageCollectionReason::kExternalMemoryPressure:
      return "external memory pressure";
    case GarbageCollectionReason::kFinalizeMarkingViaStackGuard:
      return "finalize incremental marking via stack guard";
    case GarbageCollectionReason::kFinalizeMarkingViaTask:
      return "finalize incremental marking via task";
    case GarbageCollectionReason::kFullHashtable:
      return "full hash-table";
    case GarbageCollectionReason::kHeapProfiler:
      return "heap profiler";
    case GarbageCollectionReason::kIdleTask:
      return "idle task";
    case GarbageCollectionReason::kLastResort:
      return "last resort";
    case GarbageCollectionReason::kLowMemoryNotification:
      return "low memory notification";
    case GarbageCollectionReason::kMakeHeapIterable:
      return "make heap iterable";
    case GarbageCollectionReason::kMemoryPressure:
      return "memory pressure";
    case GarbageCollectionReason::kMemoryReducer:
      return "memory reducer";
    case GarbageCollectionReason::kRuntime:
      return "runtime";
    case GarbageCollectionReason::kSamplingProfiler:
      return "sampling profiler";
    case GarbageCollectionReason::kSnapshotCreator:
      return "snapshot creator";
    case GarbageCollectionReason::kTesting:
      return "testing";
    case GarbageCollectionReason::kExternalFinalize:
      return "external finalize";
    case GarbageCollectionReason::kUnknown:
      return "unknown";
  }
  UNREACHABLE();
}
bool Heap::Contains(HeapObject* value) {
  if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
    return false;
  }
  return HasBeenSetUp() &&
         (new_space_->ToSpaceContains(value) || old_space_->Contains(value) ||
          code_space_->Contains(value) || map_space_->Contains(value) ||
          lo_space_->Contains(value) || read_only_space_->Contains(value) ||
          code_lo_space_->Contains(value) || new_lo_space_->Contains(value));
}
3434 bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
3435 if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
3438 if (!HasBeenSetUp())
return false;
3442 return new_space_->ToSpaceContains(value);
3444 return old_space_->Contains(value);
3446 return code_space_->Contains(value);
3448 return map_space_->Contains(value);
3450 return lo_space_->Contains(value);
3452 return code_lo_space_->Contains(value);
3454 return new_lo_space_->Contains(value);
3456 return read_only_space_->Contains(value);
3461 bool Heap::InSpaceSlow(Address addr, AllocationSpace space) {
3462 if (memory_allocator()->IsOutsideAllocatedSpace(addr)) {
3465 if (!HasBeenSetUp())
return false;
3469 return new_space_->ToSpaceContainsSlow(addr);
3471 return old_space_->ContainsSlow(addr);
3473 return code_space_->ContainsSlow(addr);
3475 return map_space_->ContainsSlow(addr);
3477 return lo_space_->ContainsSlow(addr);
3479 return code_lo_space_->ContainsSlow(addr);
3481 return new_lo_space_->ContainsSlow(addr);
3483 return read_only_space_->ContainsSlow(addr);
3488 bool Heap::IsValidAllocationSpace(AllocationSpace space) {
3505 class VerifyReadOnlyPointersVisitor :
public VerifyPointersVisitor {
3507 explicit VerifyReadOnlyPointersVisitor(Heap* heap)
3508 : VerifyPointersVisitor(heap) {}
3511 void VerifyPointers(HeapObject* host, MaybeObjectSlot start,
3512 MaybeObjectSlot end)
override {
3513 if (host !=
nullptr) {
3514 CHECK(heap_->InReadOnlySpace(host->map()));
3516 VerifyPointersVisitor::VerifyPointers(host, start, end);
3518 for (MaybeObjectSlot current = start; current < end; ++current) {
3520 if ((*current)->GetHeapObject(&
object)) {
3521 CHECK(heap_->InReadOnlySpace(
object));
3527 void Heap::Verify() {
3528 CHECK(HasBeenSetUp());
3529 HandleScope scope(isolate());
3532 mark_compact_collector()->EnsureSweepingCompleted();
3534 VerifyPointersVisitor visitor(
this);
3535 IterateRoots(&visitor, VISIT_ONLY_STRONG);
3537 VerifySmisVisitor smis_visitor;
3538 IterateSmiRoots(&smis_visitor);
3540 new_space_->Verify(isolate());
3542 old_space_->Verify(isolate(), &visitor);
3543 map_space_->Verify(isolate(), &visitor);
3545 VerifyPointersVisitor no_dirty_regions_visitor(
this);
3546 code_space_->Verify(isolate(), &no_dirty_regions_visitor);
3548 lo_space_->Verify(isolate());
3549 code_lo_space_->Verify(isolate());
3550 new_lo_space_->Verify(isolate());
3552 VerifyReadOnlyPointersVisitor read_only_visitor(
this);
3553 read_only_space_->Verify(isolate(), &read_only_visitor);
3556 class SlotVerifyingVisitor :
public ObjectVisitor {
3558 SlotVerifyingVisitor(std::set<Address>* untyped,
3559 std::set<std::pair<SlotType, Address> >* typed)
3560 : untyped_(untyped), typed_(typed) {}
3562 virtual bool ShouldHaveBeenRecorded(HeapObject* host, MaybeObject target) = 0;
3564 bool ShouldHaveBeenRecorded(Code host, MaybeObject target) {
3565 return ShouldHaveBeenRecorded(reinterpret_cast<HeapObject*>(host.ptr()),
3569 void VisitPointers(HeapObject* host, ObjectSlot start,
3570 ObjectSlot end)
override {
3572 for (ObjectSlot slot = start; slot < end; ++slot) {
3573 DCHECK(!HasWeakHeapObjectTag(*slot));
3576 VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
3579 void VisitPointers(HeapObject* host, MaybeObjectSlot start,
3580 MaybeObjectSlot end)
final {
3581 for (MaybeObjectSlot slot = start; slot < end; ++slot) {
3582 if (ShouldHaveBeenRecorded(host, *slot)) {
3583 CHECK_GT(untyped_->count(slot.address()), 0);
3588 void VisitCodeTarget(Code host, RelocInfo* rinfo)
override {
3589 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
3590 if (ShouldHaveBeenRecorded(host, MaybeObject::FromObject(target))) {
3592 InTypedSet(CODE_TARGET_SLOT, rinfo->pc()) ||
3593 (rinfo->IsInConstantPool() &&
3594 InTypedSet(CODE_ENTRY_SLOT, rinfo->constant_pool_entry_address())));
3598 void VisitEmbeddedPointer(Code host, RelocInfo* rinfo)
override {
3599 Object* target = rinfo->target_object();
3600 if (ShouldHaveBeenRecorded(host, MaybeObject::FromObject(target))) {
3601 CHECK(InTypedSet(EMBEDDED_OBJECT_SLOT, rinfo->pc()) ||
3602 (rinfo->IsInConstantPool() &&
3603 InTypedSet(OBJECT_SLOT, rinfo->constant_pool_entry_address())));
3608 bool InTypedSet(SlotType type, Address slot) {
3609 return typed_->count(std::make_pair(type, slot)) > 0;
3611 std::set<Address>* untyped_;
3612 std::set<std::pair<SlotType, Address> >* typed_;
3615 class OldToNewSlotVerifyingVisitor :
public SlotVerifyingVisitor {
3617 OldToNewSlotVerifyingVisitor(std::set<Address>* untyped,
3618 std::set<std::pair<SlotType, Address>>* typed)
3619 : SlotVerifyingVisitor(untyped, typed) {}
3621 bool ShouldHaveBeenRecorded(HeapObject* host, MaybeObject target)
override {
3622 DCHECK_IMPLIES(target->IsStrongOrWeak() && Heap::InNewSpace(target),
3623 Heap::InToSpace(target));
3624 return target->IsStrongOrWeak() && Heap::InNewSpace(target) &&
3625 !Heap::InNewSpace(host);
3629 template <RememberedSetType direction>
3630 void CollectSlots(MemoryChunk* chunk, Address start, Address end,
3631 std::set<Address>* untyped,
3632 std::set<std::pair<SlotType, Address> >* typed) {
3633 RememberedSet<direction>::Iterate(
3635 [start, end, untyped](MaybeObjectSlot slot) {
3636 if (start <= slot.address() && slot.address() < end) {
3637 untyped->insert(slot.address());
3641 SlotSet::PREFREE_EMPTY_BUCKETS);
3642 RememberedSet<direction>::IterateTyped(
3643 chunk, [start, end, typed](SlotType type, Address host, Address slot) {
3644 if (start <= slot && slot < end) {
3645 typed->insert(std::make_pair(type, slot));
3651 void Heap::VerifyRememberedSetFor(HeapObject*
object) {
3652 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
3653 DCHECK_IMPLIES(chunk->mutex() ==
nullptr, InReadOnlySpace(
object));
3655 base::LockGuard<base::Mutex, base::NullBehavior::kIgnoreIfNull> lock_guard(
3657 Address start =
object->address();
3658 Address end = start +
object->Size();
3659 std::set<Address> old_to_new;
3660 std::set<std::pair<SlotType, Address> > typed_old_to_new;
3661 if (!InNewSpace(
object)) {
3662 store_buffer()->MoveAllEntriesToRememberedSet();
3663 CollectSlots<OLD_TO_NEW>(chunk, start, end, &old_to_new, &typed_old_to_new);
3664 OldToNewSlotVerifyingVisitor visitor(&old_to_new, &typed_old_to_new);
3665 object->IterateBody(&visitor);
void Heap::VerifyCountersAfterSweeping() {
  PagedSpaces spaces(this);
  for (PagedSpace* space = spaces.next(); space != nullptr;
       space = spaces.next()) {
    space->VerifyCountersAfterSweeping();
  }
}

void Heap::VerifyCountersBeforeConcurrentSweeping() {
  PagedSpaces spaces(this);
  for (PagedSpace* space = spaces.next(); space != nullptr;
       space = spaces.next()) {
    space->VerifyCountersBeforeConcurrentSweeping();
  }
}
void Heap::ZapFromSpace() {
  if (!new_space_->IsFromSpaceCommitted()) return;
  for (Page* page : PageRange(new_space_->from_space().first_page(), nullptr)) {
    memory_allocator()->ZapBlock(page->area_start(),
                                 page->HighWaterMark() - page->area_start(),
                                 ZapValue());
  }
}

void Heap::ZapCodeObject(Address start_address, int size_in_bytes) {
  DCHECK(IsAligned(start_address, kIntSize));
  for (int i = 0; i < size_in_bytes / kIntSize; i++) {
    Memory<int>(start_address + i * kIntSize) = kCodeZapValue;
  }
}
Code Heap::builtin(int index) {
  DCHECK(Builtins::IsBuiltinId(index));
  return Code::cast(ObjectPtr(isolate()->builtins_table()[index]));
}

Address Heap::builtin_address(int index) {
  DCHECK(Builtins::IsBuiltinId(index) || index == Builtins::builtin_count);
  return reinterpret_cast<Address>(&isolate()->builtins_table()[index]);
}

void Heap::set_builtin(int index, Code builtin) {
  DCHECK(Builtins::IsBuiltinId(index));
  DCHECK(Internals::HasHeapObjectTag(builtin.ptr()));
  isolate()->builtins_table()[index] = builtin.ptr();
}
void Heap::IterateRoots(RootVisitor* v, VisitMode mode) {
  IterateStrongRoots(v, mode);
  IterateWeakRoots(v, mode);
}

void Heap::IterateWeakRoots(RootVisitor* v, VisitMode mode) {
  const bool isMinorGC = mode == VISIT_ALL_IN_SCAVENGE ||
                         mode == VISIT_ALL_IN_MINOR_MC_MARK ||
                         mode == VISIT_ALL_IN_MINOR_MC_UPDATE;
  v->VisitRootPointer(Root::kStringTable, nullptr,
                      ObjectSlot(&roots_table()[RootIndex::kStringTable]));
  v->Synchronize(VisitorSynchronization::kStringTable);
  if (!isMinorGC && mode != VISIT_ALL_IN_SWEEP_NEWSPACE &&
      mode != VISIT_FOR_SERIALIZATION) {
    external_string_table_.IterateAll(v);
  }
  v->Synchronize(VisitorSynchronization::kExternalStringsTable);
}

void Heap::IterateSmiRoots(RootVisitor* v) {
  ExecutionAccess access(isolate());
  v->VisitRootPointers(Root::kSmiRootList, nullptr,
                       roots_table().smi_roots_begin(),
                       roots_table().smi_roots_end());
  v->Synchronize(VisitorSynchronization::kSmiRootList);
}
3767 void VisitRootPointer(Root root,
const char* description,
3772 void VisitRootPointers(Root root,
const char* description,
ObjectSlot start,
3774 for (
ObjectSlot p = start; p < end; ++p) FixHandle(p);
3779 if (!(*p)->IsHeapObject())
return;
3781 const MapWord map_word = current->map_word();
3782 if (!map_word.IsForwardingAddress() && current->IsFiller()) {
3785 while (current->IsFiller()) {
3787 if (current->map() ==
ReadOnlyRoots(heap_).one_pointer_filler_map()) {
3788 next += kPointerSize;
3789 }
else if (current->map() ==
3791 next += 2 * kPointerSize;
3793 next += current->Size();
3795 current =
reinterpret_cast<HeapObject*
>(next);
3797 DCHECK(current->IsFixedArrayBase());
3806 void Heap::IterateStrongRoots(
RootVisitor* v, VisitMode mode) {
3807 const bool isMinorGC = mode == VISIT_ALL_IN_SCAVENGE ||
3808 mode == VISIT_ALL_IN_MINOR_MC_MARK ||
3809 mode == VISIT_ALL_IN_MINOR_MC_UPDATE;
3810 v->VisitRootPointers(Root::kStrongRootList,
nullptr,
3811 roots_table().strong_roots_begin(),
3812 roots_table().strong_roots_end());
3813 v->Synchronize(VisitorSynchronization::kStrongRootList);
3815 isolate_->bootstrapper()->Iterate(v);
3816 v->Synchronize(VisitorSynchronization::kBootstrapper);
3817 isolate_->Iterate(v);
3818 v->Synchronize(VisitorSynchronization::kTop);
3819 Relocatable::Iterate(isolate_, v);
3820 v->Synchronize(VisitorSynchronization::kRelocatable);
3821 isolate_->debug()->Iterate(v);
3822 v->Synchronize(VisitorSynchronization::kDebug);
3824 isolate_->compilation_cache()->Iterate(v);
3825 v->Synchronize(VisitorSynchronization::kCompilationCache);
3829 isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor);
3830 isolate_->handle_scope_implementer()->Iterate(v);
3831 isolate_->IterateDeferredHandles(v);
3832 v->Synchronize(VisitorSynchronization::kHandleScope);
3839 v->Synchronize(VisitorSynchronization::kBuiltins);
3843 if (mode != VISIT_FOR_SERIALIZATION) {
3848 isolate_->interpreter()->IterateDispatchTable(v);
3849 v->Synchronize(VisitorSynchronization::kDispatchTable);
3855 case VISIT_FOR_SERIALIZATION:
3859 case VISIT_ONLY_STRONG:
3860 isolate_->global_handles()->IterateStrongRoots(v);
3862 case VISIT_ALL_IN_SCAVENGE:
3863 isolate_->global_handles()->IterateNewSpaceStrongAndDependentRoots(v);
3865 case VISIT_ALL_IN_MINOR_MC_MARK:
3868 case VISIT_ALL_IN_MINOR_MC_UPDATE:
3871 case VISIT_ALL_IN_SWEEP_NEWSPACE:
3873 isolate_->global_handles()->IterateAllRoots(v);
3876 v->Synchronize(VisitorSynchronization::kGlobalHandles);
3880 if (mode != VISIT_FOR_SERIALIZATION) {
3882 isolate_->eternal_handles()->IterateNewSpaceRoots(v);
3884 isolate_->eternal_handles()->IterateAllRoots(v);
3887 v->Synchronize(VisitorSynchronization::kEternalHandles);
3890 isolate_->thread_manager()->Iterate(v);
3891 v->Synchronize(VisitorSynchronization::kThreadManager);
3894 for (StrongRootsList* list = strong_roots_list_; list; list = list->next) {
3895 v->VisitRootPointers(Root::kStrongRoots,
nullptr, list->start, list->end);
3897 v->Synchronize(VisitorSynchronization::kStrongRoots);
3900 MicrotaskQueue* default_microtask_queue = isolate_->default_microtask_queue();
3901 if (default_microtask_queue) {
3902 MicrotaskQueue* microtask_queue = default_microtask_queue;
3904 microtask_queue->IterateMicrotasks(v);
3905 microtask_queue = microtask_queue->next();
3906 }
while (microtask_queue != default_microtask_queue);
3911 if (mode != VISIT_FOR_SERIALIZATION) {
3912 SerializerDeserializer::Iterate(isolate_, v);
3913 v->Synchronize(VisitorSynchronization::kPartialSnapshotCache);
3917 void Heap::IterateWeakGlobalHandles(RootVisitor* v) {
3918 isolate_->global_handles()->IterateWeakRoots(v);
3921 void Heap::IterateBuiltins(RootVisitor* v) {
3922 for (
int i = 0;
i < Builtins::builtin_count;
i++) {
3923 v->VisitRootPointer(Root::kBuiltins, Builtins::name(
i),
3924 ObjectSlot(builtin_address(
i)));
3931 void Heap::ConfigureHeap(
size_t max_semi_space_size_in_kb,
3932 size_t max_old_generation_size_in_mb,
3933 size_t code_range_size_in_mb) {
3935 if (max_semi_space_size_in_kb != 0) {
3936 max_semi_space_size_ =
3937 RoundUp<Page::kPageSize>(max_semi_space_size_in_kb * KB);
3939 if (max_old_generation_size_in_mb != 0) {
3940 max_old_generation_size_ = max_old_generation_size_in_mb * MB;
3944 if (FLAG_max_semi_space_size > 0) {
3945 max_semi_space_size_ =
static_cast<size_t>(FLAG_max_semi_space_size) * MB;
3947 if (FLAG_max_old_space_size > 0) {
3948 max_old_generation_size_ =
3949 static_cast<size_t>(FLAG_max_old_space_size) * MB;
3952 if (Page::kPageSize > MB) {
3953 max_semi_space_size_ = RoundUp<Page::kPageSize>(max_semi_space_size_);
3954 max_old_generation_size_ =
3955 RoundUp<Page::kPageSize>(max_old_generation_size_);
3958 if (FLAG_stress_compaction) {
3960 max_semi_space_size_ = MB;
3965 max_semi_space_size_ =
static_cast<size_t>(base::bits::RoundUpToPowerOfTwo64(
3966 static_cast<uint64_t>(max_semi_space_size_)));
3968 if (max_semi_space_size_ == kMaxSemiSpaceSizeInKB * KB) {
3970 initial_semispace_size_ =
3971 Max(initial_semispace_size_, static_cast<size_t>(1 * MB));
3974 if (FLAG_min_semi_space_size > 0) {
3975 size_t initial_semispace_size =
3976 static_cast<size_t>(FLAG_min_semi_space_size) * MB;
3977 if (initial_semispace_size > max_semi_space_size_) {
3978 initial_semispace_size_ = max_semi_space_size_;
3979 if (FLAG_trace_gc) {
3980 PrintIsolate(isolate_,
3981 "Min semi-space size cannot be more than the maximum " 3982 "semi-space size of %" PRIuS
" MB\n",
3983 max_semi_space_size_ / MB);
3986 initial_semispace_size_ =
3987 RoundUp<Page::kPageSize>(initial_semispace_size);
3991 initial_semispace_size_ = Min(initial_semispace_size_, max_semi_space_size_);
3993 if (FLAG_semi_space_growth_factor < 2) {
3994 FLAG_semi_space_growth_factor = 2;
3998 int paged_space_count =
3999 LAST_GROWABLE_PAGED_SPACE - FIRST_GROWABLE_PAGED_SPACE + 1;
4000 initial_max_old_generation_size_ = max_old_generation_size_ =
4001 Max(static_cast<size_t>(paged_space_count * Page::kPageSize),
4002 max_old_generation_size_);
4004 if (FLAG_initial_old_space_size > 0) {
4005 initial_old_generation_size_ = FLAG_initial_old_space_size * MB;
4007 initial_old_generation_size_ =
4008 max_old_generation_size_ / kInitalOldGenerationLimitFactor;
4010 old_generation_allocation_limit_ = initial_old_generation_size_;
4013 DCHECK(kMaxRegularHeapObjectSize >=
4015 FixedArray::SizeFor(JSArray::kInitialMaxFastElementArray) +
4016 AllocationMemento::kSize));
4018 code_range_size_ = code_range_size_in_mb * MB;
void Heap::AddToRingBuffer(const char* string) {
  size_t first_part =
      Min(strlen(string), kTraceRingBufferSize - ring_buffer_end_);
  memcpy(trace_ring_buffer_ + ring_buffer_end_, string, first_part);
  ring_buffer_end_ += first_part;
  if (first_part < strlen(string)) {
    ring_buffer_full_ = true;
    size_t second_part = strlen(string) - first_part;
    memcpy(trace_ring_buffer_, string + first_part, second_part);
    ring_buffer_end_ = second_part;
  }
}

void Heap::GetFromRingBuffer(char* buffer) {
  size_t copied = 0;
  if (ring_buffer_full_) {
    copied = kTraceRingBufferSize - ring_buffer_end_;
    memcpy(buffer, trace_ring_buffer_ + ring_buffer_end_, copied);
  }
  memcpy(buffer + copied, trace_ring_buffer_, ring_buffer_end_);
}
void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
  *stats->start_marker = HeapStats::kStartMarker;
  *stats->end_marker = HeapStats::kEndMarker;
  *stats->ro_space_size = read_only_space_->Size();
  *stats->ro_space_capacity = read_only_space_->Capacity();
  *stats->new_space_size = new_space_->Size();
  *stats->new_space_capacity = new_space_->Capacity();
  *stats->old_space_size = old_space_->SizeOfObjects();
  *stats->old_space_capacity = old_space_->Capacity();
  *stats->code_space_size = code_space_->SizeOfObjects();
  *stats->code_space_capacity = code_space_->Capacity();
  *stats->map_space_size = map_space_->SizeOfObjects();
  *stats->map_space_capacity = map_space_->Capacity();
  *stats->lo_space_size = lo_space_->Size();
  *stats->code_lo_space_size = code_lo_space_->Size();
  isolate_->global_handles()->RecordStats(stats);
  *stats->memory_allocator_size = memory_allocator()->Size();
  *stats->memory_allocator_capacity =
      memory_allocator()->Size() + memory_allocator()->Available();
  *stats->os_error = base::OS::GetLastError();
  *stats->malloced_memory = isolate_->allocator()->GetCurrentMemoryUsage();
  *stats->malloced_peak_memory = isolate_->allocator()->GetMaxMemoryUsage();
  if (take_snapshot) {
    HeapIterator iterator(this);
    for (HeapObject* obj = iterator.next(); obj != nullptr;
         obj = iterator.next()) {
      InstanceType type = obj->map()->instance_type();
      DCHECK(0 <= type && type <= LAST_TYPE);
      stats->objects_per_type[type]++;
      stats->size_per_type[type] += obj->Size();
    }
  }
  if (stats->last_few_messages != nullptr)
    GetFromRingBuffer(stats->last_few_messages);
  if (stats->js_stacktrace != nullptr) {
    FixedStringAllocator fixed(stats->js_stacktrace, kStacktraceBufferSize - 1);
    StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
    if (gc_state() == Heap::NOT_IN_GC) {
      isolate()->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
    } else {
      accumulator.Add("Cannot get stack trace in GC.");
    }
  }
}
size_t Heap::OldGenerationSizeOfObjects() {
  PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
  size_t total = 0;
  for (PagedSpace* space = spaces.next(); space != nullptr;
       space = spaces.next()) {
    total += space->SizeOfObjects();
  }
  return total + lo_space_->SizeOfObjects();
}
uint64_t Heap::PromotedExternalMemorySize() {
  IsolateData* isolate_data = isolate()->isolate_data();
  if (isolate_data->external_memory_ <=
      isolate_data->external_memory_at_last_mark_compact_) {
    return 0;
  }
  return static_cast<uint64_t>(
      isolate_data->external_memory_ -
      isolate_data->external_memory_at_last_mark_compact_);
}
bool Heap::ShouldOptimizeForLoadTime() {
  return isolate()->rail_mode() == PERFORMANCE_LOAD &&
         !AllocationLimitOvershotByLargeMargin() &&
         MonotonicallyIncreasingTimeInMs() <
             isolate()->LoadStartTimeMs() + kMaxLoadTimeMs;
}
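
// Decides whether a slow-path allocation may expand the old generation past
// its current limit instead of waiting for a garbage collection. Expansion is
// allowed while always_allocate() is in effect, while optimizing for load
// time, or while incremental marking still needs to finalize, unless the
// allocation limit has already been overshot by a large margin.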
bool Heap::ShouldExpandOldGenerationOnSlowAllocation() {
  if (always_allocate() || OldGenerationSpaceAvailable() > 0) return true;
  // We reached the old generation allocation limit.

  if (ShouldOptimizeForMemoryUsage()) return false;

  if (ShouldOptimizeForLoadTime()) return true;

  if (incremental_marking()->NeedsFinalization()) {
    return !AllocationLimitOvershotByLargeMargin();
  }

  if (incremental_marking()->IsStopped() &&
      IncrementalMarkingLimitReached() == IncrementalMarkingLimit::kNoLimit) {
    // We cannot start incremental marking yet.
    return false;
  }
  return true;
}
Heap::HeapGrowingMode Heap::CurrentHeapGrowingMode() {
  if (ShouldReduceMemory() || FLAG_stress_compaction) {
    return Heap::HeapGrowingMode::kMinimal;
  }

  if (ShouldOptimizeForMemoryUsage()) {
    return Heap::HeapGrowingMode::kConservative;
  }

  if (memory_reducer()->ShouldGrowHeapSlowly()) {
    return Heap::HeapGrowingMode::kSlow;
  }

  return Heap::HeapGrowingMode::kDefault;
}
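
// Returns which incremental marking limit has been reached: kNoLimit while
// plenty of old-generation space remains, kSoftLimit when marking should be
// scheduled soon, and kHardLimit when marking must start immediately (memory
// pressure, stress flags, or an exhausted old-generation budget).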
Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
  if (!incremental_marking()->CanBeActivated() || always_allocate()) {
    // Incremental marking is disabled or it is too early to start.
    return IncrementalMarkingLimit::kNoLimit;
  }
  if (FLAG_stress_incremental_marking) {
    return IncrementalMarkingLimit::kHardLimit;
  }
  if (OldGenerationSizeOfObjects() <=
      IncrementalMarking::kActivationThreshold) {
    // Incremental marking is disabled or it is too early to start.
    return IncrementalMarkingLimit::kNoLimit;
  }
  if ((FLAG_stress_compaction && (gc_count_ & 1) != 0) ||
      HighMemoryPressure()) {
    // If there is high memory pressure or stress testing is enabled, then
    // start marking immediately.
    return IncrementalMarkingLimit::kHardLimit;
  }

  if (FLAG_stress_marking > 0) {
    double gained_since_last_gc =
        PromotedSinceLastGC() +
        (isolate()->isolate_data()->external_memory_ -
         isolate()->isolate_data()->external_memory_at_last_mark_compact_);
    double size_before_gc =
        OldGenerationObjectsAndPromotedExternalMemorySize() -
        gained_since_last_gc;
    double bytes_to_limit = old_generation_allocation_limit_ - size_before_gc;
    if (bytes_to_limit > 0) {
      double current_percent = (gained_since_last_gc / bytes_to_limit) * 100.0;

      if (FLAG_trace_stress_marking) {
        isolate()->PrintWithTimestamp(
            "[IncrementalMarking] %.2lf%% of the memory limit reached\n",
            current_percent);
      }

      if (FLAG_fuzzer_gc_analysis) {
        // Skip values >= 100% since they already trigger marking.
        if (current_percent < 100.0) {
          max_marking_limit_reached_ =
              std::max(max_marking_limit_reached_, current_percent);
        }
      } else if (static_cast<int>(current_percent) >=
                 stress_marking_percentage_) {
        stress_marking_percentage_ = NextStressMarkingLimit();
        return IncrementalMarkingLimit::kHardLimit;
      }
    }
  }

  size_t old_generation_space_available = OldGenerationSpaceAvailable();

  if (old_generation_space_available > new_space_->Capacity()) {
    return IncrementalMarkingLimit::kNoLimit;
  }
  if (ShouldOptimizeForMemoryUsage()) {
    return IncrementalMarkingLimit::kHardLimit;
  }
  if (ShouldOptimizeForLoadTime()) {
    return IncrementalMarkingLimit::kNoLimit;
  }
  if (old_generation_space_available == 0) {
    return IncrementalMarkingLimit::kHardLimit;
  }
  return IncrementalMarkingLimit::kSoftLimit;
}
void Heap::EnableInlineAllocation() {
  if (!inline_allocation_disabled_) return;
  inline_allocation_disabled_ = false;

  // Update inline allocation limit for new space.
  new_space()->UpdateInlineAllocationLimit(0);
}
void Heap::DisableInlineAllocation() {
  if (inline_allocation_disabled_) return;
  inline_allocation_disabled_ = true;

  // Update inline allocation limit for new space.
  new_space()->UpdateInlineAllocationLimit(0);

  // Update inline allocation limit for old spaces.
  PagedSpaces spaces(this);
  CodeSpaceMemoryModificationScope modification_scope(this);
  for (PagedSpace* space = spaces.next(); space != nullptr;
       space = spaces.next()) {
    space->FreeLinearAllocationArea();
  }
}
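
// Ensures that the given code object will not move: the containing page is
// either marked as never-evacuate (first code-space page or while
// serializing) or the object is re-allocated in the code large object space.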
HeapObject* Heap::EnsureImmovableCode(HeapObject* heap_object,
                                      int object_size) {
  DCHECK(heap_object);
  DCHECK(code_space_->Contains(heap_object));
  DCHECK_GE(object_size, 0);
  if (!Heap::IsImmovable(heap_object)) {
    if (isolate()->serializer_enabled() ||
        code_space_->first_page()->Contains(heap_object->address())) {
      MemoryChunk::FromAddress(heap_object->address())->MarkNeverEvacuate();
    } else {
      // Discard the first code allocation, which was on a page where it could
      // be moved, and retry in the code large object space.
      CreateFillerObjectAt(heap_object->address(), object_size,
                           ClearRecordedSlots::kNo);
      heap_object = AllocateRawCodeInLargeObjectSpace(object_size);
      UnprotectAndRegisterMemoryChunk(heap_object);
      ZapCodeObject(heap_object->address(), object_size);
      OnAllocationEvent(heap_object, object_size);
    }
  }
  return heap_object;
}
HeapObject* Heap::AllocateRawWithLightRetry(int size, AllocationSpace space,
                                            AllocationAlignment alignment) {
  HeapObject* result;
  AllocationResult alloc = AllocateRaw(size, space, alignment);
  if (alloc.To(&result)) {
    DCHECK(result != ReadOnlyRoots(this).exception());
    return result;
  }
  // Two GCs before panicking. In new space the retry will almost always
  // succeed.
  for (int i = 0; i < 2; i++) {
    CollectGarbage(alloc.RetrySpace(),
                   GarbageCollectionReason::kAllocationFailure);
    alloc = AllocateRaw(size, space, alignment);
    if (alloc.To(&result)) {
      DCHECK(result != ReadOnlyRoots(this).exception());
      return result;
    }
  }
  return nullptr;
}
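
// Like AllocateRawWithLightRetry, but escalates on failure: after the light
// retries it performs a last-resort GC, retries once under an
// AlwaysAllocateScope, and finally aborts with an out-of-memory error.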
HeapObject* Heap::AllocateRawWithRetryOrFail(int size, AllocationSpace space,
                                             AllocationAlignment alignment) {
  AllocationResult alloc;
  HeapObject* result = AllocateRawWithLightRetry(size, space, alignment);
  if (result) return result;

  isolate()->counters()->gc_last_resort_from_handles()->Increment();
  CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);
  {
    AlwaysAllocateScope scope(isolate());
    alloc = AllocateRaw(size, space, alignment);
  }
  if (alloc.To(&result)) {
    DCHECK(result != ReadOnlyRoots(this).exception());
    return result;
  }
  FatalProcessOutOfMemory("CALL_AND_RETRY_LAST");
  return nullptr;
}
HeapObject* Heap::AllocateRawCodeInLargeObjectSpace(int size) {
  AllocationResult alloc = code_lo_space()->AllocateRaw(size);
  HeapObject* result;
  if (alloc.To(&result)) {
    DCHECK(result != ReadOnlyRoots(this).exception());
    return result;
  }
  // Two GCs before panicking.
  for (int i = 0; i < 2; i++) {
    CollectGarbage(alloc.RetrySpace(),
                   GarbageCollectionReason::kAllocationFailure);
    alloc = code_lo_space()->AllocateRaw(size);
    if (alloc.To(&result)) {
      DCHECK(result != ReadOnlyRoots(this).exception());
      return result;
    }
  }
  isolate()->counters()->gc_last_resort_from_handles()->Increment();
  CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);
  {
    AlwaysAllocateScope scope(isolate());
    alloc = code_lo_space()->AllocateRaw(size);
  }
  if (alloc.To(&result)) {
    DCHECK(result != ReadOnlyRoots(this).exception());
    return result;
  }
  FatalProcessOutOfMemory("CALL_AND_RETRY_LAST");
  return nullptr;
}
void Heap::SetUp() {
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  allocation_timeout_ = NextAllocationTimeout();
#endif

  // If the heap is not yet configured (e.g. through the API), configure it.
  if (!configured_) ConfigureHeapDefault();

  mmap_region_base_ =
      reinterpret_cast<uintptr_t>(v8::internal::GetRandomMmapAddr()) &
      ~kMmapRegionMask;

  // Set up memory allocator.
  memory_allocator_ =
      new MemoryAllocator(isolate_, MaxReserved(), code_range_size_);

  store_buffer_ = new StoreBuffer(this);

  heap_controller_ = new HeapController(this);

  mark_compact_collector_ = new MarkCompactCollector(this);

  scavenger_collector_ = new ScavengerCollector(this);

  incremental_marking_ =
      new IncrementalMarking(this, mark_compact_collector_->marking_worklist(),
                             mark_compact_collector_->weak_objects());

  if (FLAG_concurrent_marking || FLAG_parallel_marking) {
    MarkCompactCollector::MarkingWorklist* marking_worklist =
        mark_compact_collector_->marking_worklist();
    concurrent_marking_ = new ConcurrentMarking(
        this, marking_worklist->shared(), marking_worklist->bailout(),
        marking_worklist->on_hold(), mark_compact_collector_->weak_objects(),
        marking_worklist->embedder());
  } else {
    concurrent_marking_ = new ConcurrentMarking(this, nullptr, nullptr,
                                                nullptr, nullptr, nullptr);
  }

  for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
    space_[i] = nullptr;
  }

  space_[RO_SPACE] = read_only_space_ = new ReadOnlySpace(this);
  space_[NEW_SPACE] = new_space_ =
      new NewSpace(this, memory_allocator_->data_page_allocator(),
                   initial_semispace_size_, max_semi_space_size_);
  space_[OLD_SPACE] = old_space_ = new OldSpace(this);
  space_[CODE_SPACE] = code_space_ = new CodeSpace(this);
  space_[MAP_SPACE] = map_space_ = new MapSpace(this);
  space_[LO_SPACE] = lo_space_ = new LargeObjectSpace(this);
  space_[NEW_LO_SPACE] = new_lo_space_ = new NewLargeObjectSpace(this);
  space_[CODE_LO_SPACE] = code_lo_space_ = new CodeLargeObjectSpace(this);

  for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
       i++) {
    deferred_counters_[i] = 0;
  }

  tracer_ = new GCTracer(this);
#ifdef ENABLE_MINOR_MC
  minor_mark_compact_collector_ = new MinorMarkCompactCollector(this);
#else
  minor_mark_compact_collector_ = nullptr;
#endif  // ENABLE_MINOR_MC
  array_buffer_collector_ = new ArrayBufferCollector(this);
  gc_idle_time_handler_ = new GCIdleTimeHandler();
  memory_reducer_ = new MemoryReducer(this);
  if (V8_UNLIKELY(FLAG_gc_stats)) {
    live_object_stats_ = new ObjectStats(this);
    dead_object_stats_ = new ObjectStats(this);
  }
  local_embedder_heap_tracer_ = new LocalEmbedderHeapTracer(isolate());

  LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
  LOG(isolate_, IntPtrTEvent("heap-available", Available()));

  store_buffer()->SetUp();

  mark_compact_collector()->SetUp();
#ifdef ENABLE_MINOR_MC
  if (minor_mark_compact_collector() != nullptr) {
    minor_mark_compact_collector()->SetUp();
  }
#endif  // ENABLE_MINOR_MC

  if (FLAG_idle_time_scavenge) {
    scavenge_job_ = new ScavengeJob();
    idle_scavenge_observer_ = new IdleScavengeObserver(
        *this, ScavengeJob::kBytesAllocatedBeforeNextIdleTask);
    new_space()->AddAllocationObserver(idle_scavenge_observer_);
  }

  SetGetExternallyAllocatedMemoryInBytesCallback(
      DefaultGetExternallyAllocatedMemoryInBytesCallback);

  if (FLAG_stress_marking > 0) {
    stress_marking_percentage_ = NextStressMarkingLimit();
    stress_marking_observer_ = new StressMarkingObserver(*this);
    AddAllocationObserversToAllSpaces(stress_marking_observer_,
                                      stress_marking_observer_);
  }
  if (FLAG_stress_scavenge > 0) {
    stress_scavenge_observer_ = new StressScavengeObserver(*this);
    new_space()->AddAllocationObserver(stress_scavenge_observer_);
  }

  write_protect_code_memory_ = FLAG_write_protect_code_memory;
}
void Heap::InitializeHashSeed() {
  DCHECK(!deserialization_complete_);
  uint64_t new_hash_seed;
  if (FLAG_hash_seed == 0) {
    int64_t rnd = isolate()->random_number_generator()->NextInt64();
    new_hash_seed = static_cast<uint64_t>(rnd);
  } else {
    new_hash_seed = static_cast<uint64_t>(FLAG_hash_seed);
  }
  ReadOnlyRoots(this).hash_seed()->copy_in(
      0, reinterpret_cast<byte*>(&new_hash_seed), kInt64Size);
}
void Heap::SetStackLimits() {
  DCHECK_NOT_NULL(isolate_);
  DCHECK(isolate_ == isolate());
  // Set up the special root array entries containing the stack limits. These
  // are actually addresses, but the Smi tag makes the GC ignore them.
  roots_table()[RootIndex::kStackLimit] = reinterpret_cast<Object*>(
      (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
  roots_table()[RootIndex::kRealStackLimit] = reinterpret_cast<Object*>(
      (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
}

void Heap::ClearStackLimits() {
  roots_table()[RootIndex::kStackLimit] = Smi::kZero;
  roots_table()[RootIndex::kRealStackLimit] = Smi::kZero;
}
int Heap::NextAllocationTimeout(int current_timeout) {
  if (FLAG_random_gc_interval > 0) {
    // If the current timeout has not reached 0 yet, keep it; otherwise pick a
    // new random interval.
    if (current_timeout <= 0) {
      return isolate()->fuzzer_rng()->NextInt(FLAG_random_gc_interval + 1);
    } else {
      return current_timeout;
    }
  }
  return FLAG_gc_interval;
}
void Heap::PrintAllocationsHash() {
  uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
  PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count(), hash);
}

void Heap::PrintMaxMarkingLimitReached() {
  PrintF("\n### Maximum marking limit reached = %.02lf\n",
         max_marking_limit_reached_);
}

void Heap::PrintMaxNewSpaceSizeReached() {
  PrintF("\n### Maximum new space size reached = %.02lf\n",
         stress_scavenge_observer_->MaxNewSpaceSizeReached());
}

int Heap::NextStressMarkingLimit() {
  return isolate()->fuzzer_rng()->NextInt(FLAG_stress_marking + 1);
}
void Heap::NotifyDeserializationComplete() {
  PagedSpaces spaces(this);
  for (PagedSpace* s = spaces.next(); s != nullptr; s = spaces.next()) {
    if (isolate()->snapshot_available()) s->ShrinkImmortalImmovablePages();
    // All pages right after bootstrapping must be marked as never-evacuate.
    for (Page* p : *s) {
      DCHECK(p->NeverEvacuate());
    }
  }

  read_only_space()->MarkAsReadOnly();
  deserialization_complete_ = true;
}
void Heap::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
  DCHECK_EQ(gc_state_, HeapState::NOT_IN_GC);
  local_embedder_heap_tracer()->SetRemoteTracer(tracer);
}

EmbedderHeapTracer* Heap::GetEmbedderHeapTracer() const {
  return local_embedder_heap_tracer()->remote_tracer();
}
void Heap::RegisterExternallyReferencedObject(Address* location) {
  // The embedder is not aware of whether numbers are materialized as heap
  // objects or just passed around as Smis.
  ObjectPtr object(*location);
  if (!object->IsHeapObject()) return;
  HeapObject* heap_object = HeapObject::cast(object);
  DCHECK(Contains(heap_object));
  if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) {
    incremental_marking()->WhiteToGreyAndPush(heap_object);
  } else {
    DCHECK(mark_compact_collector()->in_use());
    mark_compact_collector()->MarkExternallyReferencedObject(heap_object);
  }
}

void Heap::StartTearDown() { SetGCState(TEAR_DOWN); }
void Heap::TearDown() {
  DCHECK_EQ(gc_state_, TEAR_DOWN);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif

  UpdateMaximumCommitted();

  if (FLAG_verify_predictable || FLAG_fuzzer_gc_analysis) {
    PrintAllocationsHash();
  }

  if (FLAG_fuzzer_gc_analysis) {
    if (FLAG_stress_marking > 0) {
      PrintMaxMarkingLimitReached();
    }
    if (FLAG_stress_scavenge > 0) {
      PrintMaxNewSpaceSizeReached();
    }
  }

  if (FLAG_idle_time_scavenge) {
    new_space()->RemoveAllocationObserver(idle_scavenge_observer_);
    delete idle_scavenge_observer_;
    idle_scavenge_observer_ = nullptr;
    delete scavenge_job_;
    scavenge_job_ = nullptr;
  }

  if (FLAG_stress_marking > 0) {
    RemoveAllocationObserversFromAllSpaces(stress_marking_observer_,
                                           stress_marking_observer_);
    delete stress_marking_observer_;
    stress_marking_observer_ = nullptr;
  }
  if (FLAG_stress_scavenge > 0) {
    new_space()->RemoveAllocationObserver(stress_scavenge_observer_);
    delete stress_scavenge_observer_;
    stress_scavenge_observer_ = nullptr;
  }

  if (heap_controller_ != nullptr) {
    delete heap_controller_;
    heap_controller_ = nullptr;
  }

  if (mark_compact_collector_ != nullptr) {
    mark_compact_collector_->TearDown();
    delete mark_compact_collector_;
    mark_compact_collector_ = nullptr;
  }

#ifdef ENABLE_MINOR_MC
  if (minor_mark_compact_collector_ != nullptr) {
    minor_mark_compact_collector_->TearDown();
    delete minor_mark_compact_collector_;
    minor_mark_compact_collector_ = nullptr;
  }
#endif  // ENABLE_MINOR_MC

  if (scavenger_collector_ != nullptr) {
    delete scavenger_collector_;
    scavenger_collector_ = nullptr;
  }

  if (array_buffer_collector_ != nullptr) {
    delete array_buffer_collector_;
    array_buffer_collector_ = nullptr;
  }

  delete incremental_marking_;
  incremental_marking_ = nullptr;

  delete concurrent_marking_;
  concurrent_marking_ = nullptr;

  delete gc_idle_time_handler_;
  gc_idle_time_handler_ = nullptr;

  if (memory_reducer_ != nullptr) {
    memory_reducer_->TearDown();
    delete memory_reducer_;
    memory_reducer_ = nullptr;
  }

  if (live_object_stats_ != nullptr) {
    delete live_object_stats_;
    live_object_stats_ = nullptr;
  }

  if (dead_object_stats_ != nullptr) {
    delete dead_object_stats_;
    dead_object_stats_ = nullptr;
  }

  delete local_embedder_heap_tracer_;
  local_embedder_heap_tracer_ = nullptr;

  isolate_->global_handles()->TearDown();

  external_string_table_.TearDown();

  // Tear down all ArrayBuffers before tearing down the heap since their
  // byte_length may be a HeapNumber which is required for freeing the backing
  // store.
  ArrayBufferTracker::TearDown(this);

  for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
    delete space_[i];
    space_[i] = nullptr;
  }

  store_buffer()->TearDown();

  memory_allocator()->TearDown();

  StrongRootsList* next = nullptr;
  for (StrongRootsList* list = strong_roots_list_; list; list = next) {
    next = list->next;
    delete list;
  }
  strong_roots_list_ = nullptr;

  delete store_buffer_;
  store_buffer_ = nullptr;

  delete memory_allocator_;
  memory_allocator_ = nullptr;
}
void Heap::AddGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
                                 GCType gc_type, void* data) {
  DCHECK_NOT_NULL(callback);
  DCHECK(gc_prologue_callbacks_.end() ==
         std::find(gc_prologue_callbacks_.begin(), gc_prologue_callbacks_.end(),
                   GCCallbackTuple(callback, gc_type, data)));
  gc_prologue_callbacks_.emplace_back(callback, gc_type, data);
}

void Heap::RemoveGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
                                    void* data) {
  DCHECK_NOT_NULL(callback);
  for (size_t i = 0; i < gc_prologue_callbacks_.size(); i++) {
    if (gc_prologue_callbacks_[i].callback == callback &&
        gc_prologue_callbacks_[i].data == data) {
      gc_prologue_callbacks_[i] = gc_prologue_callbacks_.back();
      gc_prologue_callbacks_.pop_back();
      return;
    }
  }
  UNREACHABLE();
}

void Heap::AddGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
                                 GCType gc_type, void* data) {
  DCHECK_NOT_NULL(callback);
  DCHECK(gc_epilogue_callbacks_.end() ==
         std::find(gc_epilogue_callbacks_.begin(), gc_epilogue_callbacks_.end(),
                   GCCallbackTuple(callback, gc_type, data)));
  gc_epilogue_callbacks_.emplace_back(callback, gc_type, data);
}

void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
                                    void* data) {
  DCHECK_NOT_NULL(callback);
  for (size_t i = 0; i < gc_epilogue_callbacks_.size(); i++) {
    if (gc_epilogue_callbacks_[i].callback == callback &&
        gc_epilogue_callbacks_[i].data == data) {
      gc_epilogue_callbacks_[i] = gc_epilogue_callbacks_.back();
      gc_epilogue_callbacks_.pop_back();
      return;
    }
  }
  UNREACHABLE();
}
Handle<WeakArrayList> CompactWeakArrayList(Heap* heap,
                                           Handle<WeakArrayList> array,
                                           PretenureFlag pretenure) {
  if (array->length() == 0) {
    return array;
  }
  int new_length = array->CountLiveWeakReferences();
  if (new_length == array->length()) {
    return array;
  }

  Handle<WeakArrayList> new_array = WeakArrayList::EnsureSpace(
      heap->isolate(),
      handle(ReadOnlyRoots(heap).empty_weak_array_list(), heap->isolate()),
      new_length, pretenure);
  // Allocation might have caused GC and turned some of the elements into
  // cleared weak heap objects, so skip cleared entries while copying.
  int copy_to = 0;
  for (int i = 0; i < array->length(); i++) {
    MaybeObject element = array->Get(i);
    if (element->IsCleared()) continue;
    new_array->Set(copy_to++, element);
  }
  new_array->set_length(copy_to);
  return new_array;
}
void Heap::CompactWeakArrayLists(PretenureFlag pretenure) {
  // Find known PrototypeUsers and compact them.
  std::vector<Handle<PrototypeInfo>> prototype_infos;
  {
    HeapIterator iterator(this);
    for (HeapObject* o = iterator.next(); o != nullptr; o = iterator.next()) {
      if (o->IsPrototypeInfo()) {
        PrototypeInfo* prototype_info = PrototypeInfo::cast(o);
        if (prototype_info->prototype_users()->IsWeakArrayList()) {
          prototype_infos.emplace_back(handle(prototype_info, isolate()));
        }
      }
    }
  }
  for (auto& prototype_info : prototype_infos) {
    Handle<WeakArrayList> array(
        WeakArrayList::cast(prototype_info->prototype_users()), isolate());
    DCHECK_IMPLIES(pretenure == TENURED,
                   InOldSpace(*array) ||
                       *array == ReadOnlyRoots(this).empty_weak_array_list());
    WeakArrayList* new_array = PrototypeUsers::Compact(
        array, this, JSObject::PrototypeRegistryCompactionCallback, pretenure);
    prototype_info->set_prototype_users(new_array);
  }

  // Find known WeakArrayLists and compact them.
  Handle<WeakArrayList> scripts(script_list(), isolate());
  DCHECK_IMPLIES(pretenure == TENURED, InOldSpace(*scripts));
  scripts = CompactWeakArrayList(this, scripts, pretenure);
  set_script_list(*scripts);

  Handle<WeakArrayList> no_script_list(noscript_shared_function_infos(),
                                       isolate());
  DCHECK_IMPLIES(pretenure == TENURED, InOldSpace(*no_script_list));
  no_script_list = CompactWeakArrayList(this, no_script_list, pretenure);
  set_noscript_shared_function_infos(*no_script_list);
}
void Heap::AddRetainedMap(Handle<Map> map) {
  if (map->is_in_retained_map_list()) {
    return;
  }
  Handle<WeakArrayList> array(retained_maps(), isolate());
  if (array->IsFull()) {
    CompactRetainedMaps(*array);
  }
  array =
      WeakArrayList::AddToEnd(isolate(), array, MaybeObjectHandle::Weak(map));
  array = WeakArrayList::AddToEnd(
      isolate(), array,
      MaybeObjectHandle(Smi::FromInt(FLAG_retain_maps_for_n_gc), isolate()));
  if (*array != retained_maps()) {
    set_retained_maps(*array);
  }
  map->set_is_in_retained_map_list(true);
}
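
// The retained maps list stores (weak map, age) pairs. Compaction removes
// pairs whose map has been cleared, shifts the surviving pairs to the front,
// and overwrites the now-unused tail slots with strong references to the
// undefined value.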
void Heap::CompactRetainedMaps(WeakArrayList* retained_maps) {
  DCHECK_EQ(retained_maps, this->retained_maps());
  int length = retained_maps->length();
  int new_length = 0;
  int new_number_of_disposed_maps = 0;
  // This loop compacts the array by removing cleared weak cells.
  for (int i = 0; i < length; i += 2) {
    MaybeObject maybe_object = retained_maps->Get(i);
    if (maybe_object->IsCleared()) {
      continue;
    }

    DCHECK(maybe_object->IsWeak());

    MaybeObject age = retained_maps->Get(i + 1);
    DCHECK(age->IsSmi());
    if (i != new_length) {
      retained_maps->Set(new_length, maybe_object);
      retained_maps->Set(new_length + 1, age);
    }
    if (i < number_of_disposed_maps_) {
      new_number_of_disposed_maps += 2;
    }
    new_length += 2;
  }
  number_of_disposed_maps_ = new_number_of_disposed_maps;
  HeapObject* undefined = ReadOnlyRoots(this).undefined_value();
  for (int i = new_length; i < length; i++) {
    retained_maps->Set(i, HeapObjectReference::Strong(undefined));
  }
  if (new_length != length) retained_maps->set_length(new_length);
}
void Heap::FatalProcessOutOfMemory(const char* location) {
  v8::internal::V8::FatalProcessOutOfMemory(isolate(), location, true);
}
class PrintHandleVisitor : public RootVisitor {
 public:
  void VisitRootPointers(Root root, const char* description, ObjectSlot start,
                         ObjectSlot end) override {
    for (ObjectSlot p = start; p < end; ++p)
      PrintF("  handle %p to %p\n", p.ToVoidPtr(),
             reinterpret_cast<void*>(*p));
  }
};

void Heap::PrintHandles() {
  PrintF("Handles:\n");
  PrintHandleVisitor v;
  isolate_->handle_scope_implementer()->Iterate(&v);
}
// Counts the handles visited via the root iteration and checks that the total
// stays below the configured threshold.
class CheckHandleCountVisitor : public RootVisitor {
 public:
  CheckHandleCountVisitor() : handle_count_(0) {}
  ~CheckHandleCountVisitor() override {
    CHECK_GT(HandleScope::kCheckHandleThreshold, handle_count_);
  }
  void VisitRootPointers(Root root, const char* description, ObjectSlot start,
                         ObjectSlot end) override {
    handle_count_ += end - start;
  }

 private:
  ptrdiff_t handle_count_;
};

void Heap::CheckHandleCount() {
  CheckHandleCountVisitor v;
  isolate_->handle_scope_implementer()->Iterate(&v);
}
Address* Heap::store_buffer_top_address() {
  return store_buffer()->top_address();
}

// static
intptr_t Heap::store_buffer_mask_constant() {
  return StoreBuffer::kStoreBufferMask;
}

// static
Address Heap::store_buffer_overflow_function_address() {
  return FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow);
}
void Heap::ClearRecordedSlot(HeapObject* object, ObjectSlot slot) {
  Page* page = Page::FromAddress(slot.address());
  if (!page->InNewSpace()) {
    DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
    store_buffer()->DeleteEntry(slot.address());
  }
}

void Heap::VerifyClearedSlot(HeapObject* object, ObjectSlot slot) {
  if (InNewSpace(object)) return;
  Page* page = Page::FromAddress(slot.address());
  DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
  store_buffer()->MoveAllEntriesToRememberedSet();
  CHECK(!RememberedSet<OLD_TO_NEW>::Contains(page, slot.address()));
  // Old-to-old slots are filtered with invalidated slots.
  CHECK_IMPLIES(RememberedSet<OLD_TO_OLD>::Contains(page, slot.address()),
                page->RegisteredObjectWithInvalidatedSlots(object));
}

void Heap::ClearRecordedSlotRange(Address start, Address end) {
  Page* page = Page::FromAddress(start);
  if (!page->InNewSpace()) {
    DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
    store_buffer()->DeleteEntry(start, end);
  }
}
PagedSpace* PagedSpaces::next() {
  switch (counter_++) {
    case RO_SPACE:
      // Skip NEW_SPACE, which is not a paged space.
      counter_++;
      return heap_->read_only_space();
    case OLD_SPACE:
      return heap_->old_space();
    case CODE_SPACE:
      return heap_->code_space();
    case MAP_SPACE:
      return heap_->map_space();
    default:
      return nullptr;
  }
}
SpaceIterator::SpaceIterator(Heap* heap)
    : heap_(heap), current_space_(FIRST_SPACE - 1) {}

SpaceIterator::~SpaceIterator() = default;

bool SpaceIterator::has_next() {
  // Iterate until no more spaces.
  return current_space_ != LAST_SPACE;
}

Space* SpaceIterator::next() {
  return heap_->space(++current_space_);
}
class HeapObjectsFilter {
 public:
  virtual ~HeapObjectsFilter() = default;
  virtual bool SkipObject(HeapObject* object) = 0;
};

class UnreachableObjectsFilter : public HeapObjectsFilter {
 public:
  explicit UnreachableObjectsFilter(Heap* heap) : heap_(heap) {
    MarkReachableObjects();
  }

  ~UnreachableObjectsFilter() override {
    for (auto it : reachable_) {
      delete it.second;
      it.second = nullptr;
    }
  }

  bool SkipObject(HeapObject* object) override {
    if (object->IsFiller()) return true;
    MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
    if (reachable_.count(chunk) == 0) return true;
    return reachable_[chunk]->count(object) == 0;
  }

 private:
  bool MarkAsReachable(HeapObject* object) {
    MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
    if (reachable_.count(chunk) == 0) {
      reachable_[chunk] = new std::unordered_set<HeapObject*>();
    }
    if (reachable_[chunk]->count(object)) return false;
    reachable_[chunk]->insert(object);
    return true;
  }

  class MarkingVisitor : public ObjectVisitor, public RootVisitor {
   public:
    explicit MarkingVisitor(UnreachableObjectsFilter* filter)
        : filter_(filter) {}

    void VisitPointers(HeapObject* host, ObjectSlot start,
                       ObjectSlot end) override {
      MarkPointers(MaybeObjectSlot(start), MaybeObjectSlot(end));
    }

    void VisitPointers(HeapObject* host, MaybeObjectSlot start,
                       MaybeObjectSlot end) final {
      MarkPointers(start, end);
    }

    void VisitRootPointers(Root root, const char* description,
                           ObjectSlot start, ObjectSlot end) override {
      MarkPointers(MaybeObjectSlot(start), MaybeObjectSlot(end));
    }

    void TransitiveClosure() {
      while (!marking_stack_.empty()) {
        HeapObject* obj = marking_stack_.back();
        marking_stack_.pop_back();
        obj->Iterate(this);
      }
    }

   private:
    void MarkPointers(MaybeObjectSlot start, MaybeObjectSlot end) {
      // Treat weak references as strong.
      for (MaybeObjectSlot p = start; p < end; ++p) {
        HeapObject* heap_object;
        if ((*p)->GetHeapObject(&heap_object)) {
          if (filter_->MarkAsReachable(heap_object)) {
            marking_stack_.push_back(heap_object);
          }
        }
      }
    }

    UnreachableObjectsFilter* filter_;
    std::vector<HeapObject*> marking_stack_;
  };

  friend class MarkingVisitor;

  void MarkReachableObjects() {
    MarkingVisitor visitor(this);
    heap_->IterateRoots(&visitor, VISIT_ALL);
    visitor.TransitiveClosure();
  }

  Heap* heap_;
  std::unordered_map<MemoryChunk*, std::unordered_set<HeapObject*>*> reachable_;
};
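
// HeapIterator walks every object in every space, making the heap iterable
// first. With kFilterUnreachable it additionally marks all objects reachable
// from the roots up front and skips the unreachable ones during iteration.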
HeapIterator::HeapIterator(Heap* heap,
                           HeapIterator::HeapObjectsFiltering filtering)
    : heap_(heap),
      filtering_(filtering),
      filter_(nullptr),
      space_iterator_(nullptr),
      object_iterator_(nullptr) {
  heap_->MakeHeapIterable();
  heap_->heap_iterator_start();
  // Start the iteration.
  space_iterator_ = new SpaceIterator(heap_);
  switch (filtering_) {
    case kFilterUnreachable:
      filter_ = new UnreachableObjectsFilter(heap_);
      break;
    default:
      break;
  }
  object_iterator_ = space_iterator_->next()->GetObjectIterator();
}

HeapIterator::~HeapIterator() {
  heap_->heap_iterator_end();
  // In filtering mode we should have iterated through all objects; otherwise
  // the heap would be left in an inconsistent state.
  if (filtering_ != kNoFiltering) {
    DCHECK_NULL(object_iterator_);
  }
  delete space_iterator_;
  delete filter_;
}

HeapObject* HeapIterator::next() {
  if (filter_ == nullptr) return NextObject();

  HeapObject* obj = NextObject();
  while ((obj != nullptr) && (filter_->SkipObject(obj))) obj = NextObject();
  return obj;
}

HeapObject* HeapIterator::NextObject() {
  // No iterator means we are done.
  if (object_iterator_.get() == nullptr) return nullptr;

  if (HeapObject* obj = object_iterator_.get()->Next()) {
    // If the current iterator has more objects we are fine.
    return obj;
  } else {
    // Go through the spaces looking for one that has objects.
    while (space_iterator_->has_next()) {
      object_iterator_ = space_iterator_->next()->GetObjectIterator();
      if (HeapObject* obj = object_iterator_.get()->Next()) {
        return obj;
      }
    }
  }
  // Done with the last space.
  object_iterator_.reset(nullptr);
  return nullptr;
}
void Heap::UpdateTotalGCTime(double duration) {
  if (FLAG_trace_gc_verbose) {
    total_gc_time_ms_ += duration;
  }
}
void Heap::ExternalStringTable::CleanUpNewSpaceStrings() {
  int last = 0;
  Isolate* isolate = heap_->isolate();
  for (size_t i = 0; i < new_space_strings_.size(); ++i) {
    Object* o = new_space_strings_[i];
    if (o->IsTheHole(isolate)) {
      continue;
    }
    // The real external string is already in one of these vectors and was or
    // will be processed; re-processing would add a duplicate.
    if (o->IsThinString()) continue;
    DCHECK(o->IsExternalString());
    if (InNewSpace(o)) {
      new_space_strings_[last++] = o;
    } else {
      old_space_strings_.push_back(o);
    }
  }
  new_space_strings_.resize(last);
}
void Heap::ExternalStringTable::CleanUpAll() {
  CleanUpNewSpaceStrings();
  int last = 0;
  Isolate* isolate = heap_->isolate();
  for (size_t i = 0; i < old_space_strings_.size(); ++i) {
    Object* o = old_space_strings_[i];
    if (o->IsTheHole(isolate)) {
      continue;
    }
    // The real external string is already in one of these vectors and was or
    // will be processed; re-processing would add a duplicate.
    if (o->IsThinString()) continue;
    DCHECK(o->IsExternalString());
    DCHECK(!InNewSpace(o));
    old_space_strings_[last++] = o;
  }
  old_space_strings_.resize(last);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}
void Heap::ExternalStringTable::TearDown() {
  for (size_t i = 0; i < new_space_strings_.size(); ++i) {
    Object* o = new_space_strings_[i];
    // Don't finalize thin strings.
    if (o->IsThinString()) continue;
    heap_->FinalizeExternalString(ExternalString::cast(o));
  }
  new_space_strings_.clear();
  for (size_t i = 0; i < old_space_strings_.size(); ++i) {
    Object* o = old_space_strings_[i];
    // Don't finalize thin strings.
    if (o->IsThinString()) continue;
    heap_->FinalizeExternalString(ExternalString::cast(o));
  }
  old_space_strings_.clear();
}
void Heap::RememberUnmappedPage(Address page, bool compacted) {
  // Tag the page pointer to make it findable in the dump file.
  if (compacted) {
    page ^= 0xC1EAD & (Page::kPageSize - 1);  // Cleared.
  } else {
    page ^= 0x1D1ED & (Page::kPageSize - 1);  // I died.
  }
  remembered_unmapped_pages_[remembered_unmapped_pages_index_] = page;
  remembered_unmapped_pages_index_++;
  remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
}
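
// Strong root ranges registered by embedders and internal clients are kept in
// a singly linked list of StrongRootsList nodes that the GC treats as roots.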
void Heap::RegisterStrongRoots(ObjectSlot start, ObjectSlot end) {
  StrongRootsList* list = new StrongRootsList();
  list->next = strong_roots_list_;
  list->start = start;
  list->end = end;
  strong_roots_list_ = list;
}

void Heap::UnregisterStrongRoots(ObjectSlot start) {
  StrongRootsList* prev = nullptr;
  StrongRootsList* list = strong_roots_list_;
  while (list != nullptr) {
    StrongRootsList* next = list->next;
    if (list->start == start) {
      if (prev) {
        prev->next = next;
      } else {
        strong_roots_list_ = next;
      }
      delete list;
    } else {
      prev = list;
    }
    list = next;
  }
}
void Heap::SetBuiltinsConstantsTable(FixedArray cache) {
  set_builtins_constants_table(cache);
}

void Heap::SetInterpreterEntryTrampolineForProfiling(Code code) {
  DCHECK_EQ(Builtins::kInterpreterEntryTrampoline, code->builtin_index());
  set_interpreter_entry_trampoline_for_profiling(code);
}
void Heap::AddDirtyJSWeakFactory(
    JSWeakFactory* weak_factory,
    std::function<void(HeapObject* object, ObjectSlot slot, Object* target)>
        gc_notify_updated_slot) {
  DCHECK(dirty_js_weak_factories()->IsUndefined(isolate()) ||
         dirty_js_weak_factories()->IsJSWeakFactory());
  DCHECK(weak_factory->next()->IsUndefined(isolate()));
  DCHECK(!weak_factory->scheduled_for_cleanup());
  weak_factory->set_scheduled_for_cleanup(true);
  weak_factory->set_next(dirty_js_weak_factories());
  gc_notify_updated_slot(
      weak_factory,
      HeapObject::RawField(weak_factory, JSWeakFactory::kNextOffset),
      dirty_js_weak_factories());
  set_dirty_js_weak_factories(weak_factory);
  // Roots are rescanned after each GC, so there is no need to record a slot
  // for the root pointing to the first JSWeakFactory.
}
void Heap::AddKeepDuringJobTarget(Handle<JSReceiver> target) {
  DCHECK(FLAG_harmony_weak_refs);
  DCHECK(weak_refs_keep_during_job()->IsUndefined() ||
         weak_refs_keep_during_job()->IsOrderedHashSet());
  Handle<OrderedHashSet> table;
  if (weak_refs_keep_during_job()->IsUndefined(isolate())) {
    table = isolate()->factory()->NewOrderedHashSet();
  } else {
    table =
        handle(OrderedHashSet::cast(weak_refs_keep_during_job()), isolate());
  }
  table = OrderedHashSet::Add(isolate(), table, target);
  set_weak_refs_keep_during_job(*table);
}

void Heap::ClearKeepDuringJobSet() {
  set_weak_refs_keep_during_job(ReadOnlyRoots(isolate()).undefined_value());
}
size_t Heap::NumberOfTrackedHeapObjectTypes() {
  return ObjectStats::OBJECT_STATS_COUNT;
}

size_t Heap::ObjectCountAtLastGC(size_t index) {
  if (live_object_stats_ == nullptr ||
      index >= ObjectStats::OBJECT_STATS_COUNT) {
    return 0;
  }
  return live_object_stats_->object_count_last_gc(index);
}

size_t Heap::ObjectSizeAtLastGC(size_t index) {
  if (live_object_stats_ == nullptr ||
      index >= ObjectStats::OBJECT_STATS_COUNT) {
    return 0;
  }
  return live_object_stats_->object_size_last_gc(index);
}
bool Heap::GetObjectTypeName(size_t index, const char** object_type,
                             const char** object_sub_type) {
  if (index >= ObjectStats::OBJECT_STATS_COUNT) return false;

  switch (static_cast<int>(index)) {
#define COMPARE_AND_RETURN_NAME(name) \
  case name:                          \
    *object_type = #name;             \
    *object_sub_type = "";            \
    return true;
    INSTANCE_TYPE_LIST(COMPARE_AND_RETURN_NAME)
#undef COMPARE_AND_RETURN_NAME
#define COMPARE_AND_RETURN_NAME(name)                       \
  case ObjectStats::FIRST_VIRTUAL_TYPE + ObjectStats::name: \
    *object_type = #name;                                   \
    *object_sub_type = "";                                  \
    return true;
    VIRTUAL_INSTANCE_TYPE_LIST(COMPARE_AND_RETURN_NAME)
#undef COMPARE_AND_RETURN_NAME
  }
  return false;
}

size_t Heap::NumberOfNativeContexts() {
  int result = 0;
  Object* context = native_contexts_list();
  while (!context->IsUndefined(isolate())) {
    ++result;
    Context native_context = Context::cast(context);
    context = native_context->next_context_link();
  }
  return result;
}

size_t Heap::NumberOfDetachedContexts() {
  // The detached_contexts() array has two entries per detached context.
  return detached_contexts()->length() / 2;
}
const char* AllocationSpaceName(AllocationSpace space) {
  switch (space) {
    case NEW_SPACE:
      return "NEW_SPACE";
    case OLD_SPACE:
      return "OLD_SPACE";
    case CODE_SPACE:
      return "CODE_SPACE";
    case MAP_SPACE:
      return "MAP_SPACE";
    case LO_SPACE:
      return "LO_SPACE";
    case NEW_LO_SPACE:
      return "NEW_LO_SPACE";
    case RO_SPACE:
      return "RO_SPACE";
    default:
      UNREACHABLE();
  }
  return nullptr;
}
void VerifyPointersVisitor::VisitPointers(HeapObject* host, ObjectSlot start,
                                          ObjectSlot end) {
  VerifyPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
}

void VerifyPointersVisitor::VisitPointers(HeapObject* host,
                                          MaybeObjectSlot start,
                                          MaybeObjectSlot end) {
  VerifyPointers(host, start, end);
}

void VerifyPointersVisitor::VisitRootPointers(Root root,
                                              const char* description,
                                              ObjectSlot start,
                                              ObjectSlot end) {
  VerifyPointers(nullptr, MaybeObjectSlot(start), MaybeObjectSlot(end));
}

void VerifyPointersVisitor::VerifyPointers(HeapObject* host,
                                           MaybeObjectSlot start,
                                           MaybeObjectSlot end) {
  for (MaybeObjectSlot current = start; current < end; ++current) {
    HeapObject* object;
    if ((*current)->GetHeapObject(&object)) {
      CHECK(heap_->Contains(object));
      CHECK(object->map()->IsMap());
    } else {
      CHECK((*current)->IsSmi() || (*current)->IsCleared());
    }
  }
}

void VerifySmisVisitor::VisitRootPointers(Root root, const char* description,
                                          ObjectSlot start, ObjectSlot end) {
  for (ObjectSlot current = start; current < end; ++current) {
    CHECK((*current)->IsSmi());
  }
}
bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules: objects in new-space
  // can be migrated to old space or stay in new-space, objects in old-space
  // stay in the same space, and one-word fillers can never migrate. Since
  // this function is used for debugging only, everything is checked
  // explicitly.
  if (obj->map() == ReadOnlyRoots(this).one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == NEW_SPACE || dst == OLD_SPACE;
    case OLD_SPACE:
      return dst == OLD_SPACE;
    case CODE_SPACE:
      return dst == CODE_SPACE && type == CODE_TYPE;
    case MAP_SPACE:
    case LO_SPACE:
    case CODE_LO_SPACE:
    case NEW_LO_SPACE:
    case RO_SPACE:
      return false;
  }
  UNREACHABLE();
}
void Heap::CreateObjectStats() {
  if (V8_LIKELY(FLAG_gc_stats == 0)) return;
  if (!live_object_stats_) {
    live_object_stats_ = new ObjectStats(this);
  }
  if (!dead_object_stats_) {
    dead_object_stats_ = new ObjectStats(this);
  }
}
void AllocationObserver::AllocationStep(int bytes_allocated,
                                        Address soon_object, size_t size) {
  DCHECK_GE(bytes_allocated, 0);
  bytes_to_next_step_ -= bytes_allocated;
  if (bytes_to_next_step_ <= 0) {
    Step(static_cast<int>(step_size_ - bytes_to_next_step_), soon_object, size);
    step_size_ = GetNextStepSize();
    bytes_to_next_step_ = step_size_;
  }
  DCHECK_GE(bytes_to_next_step_, 0);
}
Map GcSafeMapOfCodeSpaceObject(HeapObject* object) {
  MapWord map_word = object->map_word();
  return map_word.IsForwardingAddress() ? map_word.ToForwardingAddress()->map()
                                        : map_word.ToMap();
}

int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
  return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
}

Code GcSafeCastToCode(Heap* heap, HeapObject* object, Address inner_pointer) {
  Code code = Code::unchecked_cast(object);
  DCHECK(!code.is_null());
  DCHECK(heap->GcSafeCodeContains(code, inner_pointer));
  return code;
}
bool Heap::GcSafeCodeContains(Code code, Address addr) {
  Map map = GcSafeMapOfCodeSpaceObject(code);
  DCHECK(map == ReadOnlyRoots(this).code_map());
  if (InstructionStream::TryLookupCode(isolate(), addr) == code) return true;
  Address start = code->address();
  Address end = code->address() + code->SizeFromMap(map);
  return start <= addr && addr < end;
}

Code Heap::GcSafeFindCodeForInnerPointer(Address inner_pointer) {
  Code code = InstructionStream::TryLookupCode(isolate(), inner_pointer);
  if (!code.is_null()) return code;

  // Check if the inner pointer points into a large object chunk.
  LargePage* large_page = code_lo_space()->FindPage(inner_pointer);
  if (large_page != nullptr) {
    return GcSafeCastToCode(this, large_page->GetObject(), inner_pointer);
  }

  DCHECK(code_space()->Contains(inner_pointer));

  // Iterate through the page until we reach the end or find an object
  // starting after the inner pointer.
  Page* page = Page::FromAddress(inner_pointer);
  DCHECK_EQ(page->owner(), code_space());
  mark_compact_collector()->sweeper()->EnsurePageIsIterable(page);

  Address addr = page->skip_list()->StartFor(inner_pointer);
  Address top = code_space()->top();
  Address limit = code_space()->limit();

  while (true) {
    // Skip over the unused part of the linear allocation area.
    if (addr == top && addr != limit) {
      addr = limit;
      continue;
    }

    HeapObject* obj = HeapObject::FromAddress(addr);
    int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
    Address next_addr = addr + obj_size;
    if (next_addr > inner_pointer) {
      return GcSafeCastToCode(this, obj, inner_pointer);
    }
    addr = next_addr;
  }
}
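
// Slow paths of the write barriers. The generational barriers record
// old-to-new slots in the store buffer (or in the typed remembered set for
// code relocation entries); the marking barriers forward writes to the
// incremental marker while it is active.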
void Heap::WriteBarrierForCodeSlow(Code code) {
  for (RelocIterator it(code, RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT));
       !it.done(); it.next()) {
    GenerationalBarrierForCode(code, it.rinfo(), it.rinfo()->target_object());
    MarkingBarrierForCode(code, it.rinfo(), it.rinfo()->target_object());
  }
}
void Heap::GenerationalBarrierSlow(HeapObject* object, Address slot,
                                   HeapObject* value) {
  Heap* heap = Heap::FromWritableHeapObject(object);
  heap->store_buffer()->InsertEntry(slot);
}

void Heap::GenerationalBarrierForElementsSlow(Heap* heap, FixedArray array,
                                              int offset, int length) {
  for (int i = 0; i < length; i++) {
    if (!InNewSpace(array->get(offset + i))) continue;
    heap->store_buffer()->InsertEntry(
        array->RawFieldOfElementAt(offset + i).address());
  }
}

void Heap::GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
                                          HeapObject* object) {
  DCHECK(InNewSpace(object));
  Page* source_page = Page::FromAddress(host.ptr());
  RelocInfo::Mode rmode = rinfo->rmode();
  Address addr = rinfo->pc();
  SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
  if (rinfo->IsInConstantPool()) {
    addr = rinfo->constant_pool_entry_address();
    if (RelocInfo::IsCodeTargetMode(rmode)) {
      slot_type = CODE_ENTRY_SLOT;
    } else {
      DCHECK(RelocInfo::IsEmbeddedObject(rmode));
      slot_type = OBJECT_SLOT;
    }
  }
  RememberedSet<OLD_TO_NEW>::InsertTyped(source_page, host.ptr(), slot_type,
                                         addr);
}
void Heap::MarkingBarrierSlow(HeapObject* object, Address slot,
                              HeapObject* value) {
  Heap* heap = Heap::FromWritableHeapObject(object);
  heap->incremental_marking()->RecordWriteSlow(object, HeapObjectSlot(slot),
                                               value);
}

void Heap::MarkingBarrierForElementsSlow(Heap* heap, HeapObject* object) {
  if (FLAG_concurrent_marking ||
      heap->incremental_marking()->marking_state()->IsBlack(object)) {
    heap->incremental_marking()->RevisitObject(object);
  }
}

void Heap::MarkingBarrierForCodeSlow(Code host, RelocInfo* rinfo,
                                     HeapObject* object) {
  Heap* heap = Heap::FromWritableHeapObject(host);
  DCHECK(heap->incremental_marking()->IsMarking());
  heap->incremental_marking()->RecordWriteIntoCode(host, rinfo, object);
}
bool Heap::PageFlagsAreConsistent(HeapObject* object) {
  Heap* heap = Heap::FromWritableHeapObject(object);
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
  heap_internals::MemoryChunk* slim_chunk =
      heap_internals::MemoryChunk::FromHeapObject(object);

  const bool generation_consistency =
      chunk->owner()->identity() != NEW_SPACE ||
      (chunk->InNewSpace() && slim_chunk->InNewSpace());
  const bool marking_consistency =
      !heap->incremental_marking()->IsMarking() ||
      (chunk->IsFlagSet(MemoryChunk::INCREMENTAL_MARKING) &&
       slim_chunk->IsMarking());

  return generation_consistency && marking_consistency;
}

static_assert(MemoryChunk::Flag::INCREMENTAL_MARKING ==
                  heap_internals::MemoryChunk::kMarkingBit,
              "Incremental marking flag inconsistent");
static_assert(MemoryChunk::Flag::IN_FROM_SPACE ==
                  heap_internals::MemoryChunk::kFromSpaceBit,
              "From space flag inconsistent");
static_assert(MemoryChunk::Flag::IN_TO_SPACE ==
                  heap_internals::MemoryChunk::kToSpaceBit,
              "To space flag inconsistent");
static_assert(MemoryChunk::kFlagsOffset ==
                  heap_internals::MemoryChunk::kFlagsOffset,
              "Flag offset inconsistent");
void Heap::SetEmbedderStackStateForNextFinalizaton(
    EmbedderHeapTracer::EmbedderStackState stack_state) {
  local_embedder_heap_tracer()->SetEmbedderStackStateForNextFinalization(
      stack_state);
}

void Heap::IncrementObjectCounters() {
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();