#ifndef V8_HEAP_HEAP_H_
#define V8_HEAP_HEAP_H_

#include <unordered_map>
#include <unordered_set>

#include "include/v8-internal.h"
#include "include/v8.h"
#include "src/accessors.h"
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/base/atomic-utils.h"
#include "src/globals.h"
#include "src/heap-symbols.h"
#include "src/objects.h"
#include "src/objects/fixed-array.h"
#include "src/objects/heap-object.h"
#include "src/objects/smi.h"
#include "src/objects/string-table.h"
#include "src/visitors.h"

typedef void (*OutOfMemoryCallback)(void* data);

class TestMemoryAllocatorScope;

class AllocationMemento;
class ObjectBoilerplateDescription;
class CodeDataContainer;
class DeoptimizationData;
class IncrementalMarking;

using v8::MemoryPressureLevel;

class AllocationObserver;
class ArrayBufferCollector;
class ArrayBufferTracker;
class CodeLargeObjectSpace;
class ConcurrentMarking;
class GCIdleTimeAction;
class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class HeapObjectAllocationTracker;
class HeapObjectsFilter;
class LocalEmbedderHeapTracer;
class MemoryAllocator;
class MinorMarkCompactCollector;
class ScavengerCollector;
class StressScavengeObserver;
class TracePossibleWrapperReporter;
class WeakObjectRetainer;
enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};

enum class ClearRecordedSlots { kYes, kNo };

enum class ClearFreedMemoryMode { kClearFreedMemory, kDontClearFreedMemory };

enum ExternalBackingStoreType { kArrayBuffer, kExternalString, kNumTypes };

enum class FixedArrayVisitationMode { kRegular, kIncremental };

enum class TraceRetainingPathMode { kEnabled, kDisabled };

enum class RetainingPathOption { kDefault, kTrackEphemeronPath };
enum class GarbageCollectionReason {
  kAllocationFailure = 1,
  kAllocationLimit = 2,
  kContextDisposal = 3,
  kCountersExtension = 4,
  kExternalMemoryPressure = 7,
  kFinalizeMarkingViaStackGuard = 8,
  kFinalizeMarkingViaTask = 9,
  kLowMemoryNotification = 14,
  kMakeHeapIterable = 15,
  kMemoryPressure = 16,
  kSamplingProfiler = 19,
  kSnapshotCreator = 20,
  kExternalFinalize = 22
};
enum class YoungGenerationHandling {
  kRegularScavenge = 0,
  kFastPromotionDuringScavenge = 1,
};
// The result of an allocation attempt: either the allocated object, or a
// Smi encoding the allocation space to retry in.
class AllocationResult {
 public:
  // Implicit constructor from Object*.
  AllocationResult(Object* object)  // NOLINT
      : object_(object) {
    // AllocationResults can't return Smis, which are used to represent
    // failure and the space to retry in.
    CHECK(!object->IsSmi());
  }

  // Implicit constructor from ObjectPtr.
  AllocationResult(ObjectPtr object)  // NOLINT
      : object_(object.operator->()) {
    CHECK(!object->IsSmi());
  }

  inline bool IsRetry() { return object_->IsSmi(); }
  inline AllocationSpace RetrySpace();

  template <typename T, typename = typename std::enable_if<
                            std::is_base_of<Object, T>::value>::type>
  bool To(T** obj) {
    if (IsRetry()) return false;
    *obj = T::cast(object_);
    return true;
  }

  template <typename T, typename = typename std::enable_if<
                            std::is_base_of<ObjectPtr, T>::value>::type>
  bool To(T* obj) {
    if (IsRetry()) return false;
    *obj = T::cast(object_);
    return true;
  }

 private:
  explicit AllocationResult(AllocationSpace space)
      : object_(Smi::FromInt(static_cast<int>(space))) {}

  Object* object_;
};
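// A minimal usage sketch (hypothetical call site, not part of this header):
// callers test an AllocationResult with To() before touching the object, and
// take a retry path keyed off RetrySpace() on failure.
//
//   FixedArray array;
//   AllocationResult result = AllocateRaw(size_in_bytes, NEW_SPACE);
//   if (!result.To(&array)) {
//     // Allocation failed; result.RetrySpace() names the space to retry in.
//   }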
struct CommentStatistic {
  static const int kMaxComments = 64;
};
class Heap {
 public:
  enum FindMementoMode { kForRuntime, kForGC };

  using PretenuringFeedbackMap = std::unordered_map<AllocationSite*, size_t>;

  base::Mutex* relocation_mutex() { return &relocation_mutex_; }
  typedef std::vector<Chunk> Reservation;

  static const int kInitalOldGenerationLimitFactor = 2;

#if V8_OS_ANDROID
  static const int kPointerMultiplier = 1;
#else
  static const int kPointerMultiplier = i::kPointerSize / 4;
#endif

  static const size_t kMinSemiSpaceSizeInKB =
      1 * kPointerMultiplier * ((1 << kPageSizeBits) / KB);
  static const size_t kMaxSemiSpaceSizeInKB =
      16 * kPointerMultiplier * ((1 << kPageSizeBits) / KB);

  static const int kTraceRingBufferSize = 512;
  static const int kStacktraceBufferSize = 512;

  static const int kNoGCFlags = 0;
  static const int kReduceMemoryFootprintMask = 1;

  static const int kMinObjectSizeInTaggedWords = 2;

  static const int kMinPromotedPercentForFastPromotionMode = 90;
  STATIC_ASSERT(static_cast<int>(RootIndex::kUndefinedValue) ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(static_cast<int>(RootIndex::kTheHoleValue) ==
                Internals::kTheHoleValueRootIndex);
  STATIC_ASSERT(static_cast<int>(RootIndex::kNullValue) ==
                Internals::kNullValueRootIndex);
  STATIC_ASSERT(static_cast<int>(RootIndex::kTrueValue) ==
                Internals::kTrueValueRootIndex);
  STATIC_ASSERT(static_cast<int>(RootIndex::kFalseValue) ==
                Internals::kFalseValueRootIndex);
  STATIC_ASSERT(static_cast<int>(RootIndex::kempty_string) ==
                Internals::kEmptyStringRootIndex);
  static int GetMaximumFillToAlign(AllocationAlignment alignment);
  static int GetFillToAlign(Address address, AllocationAlignment alignment);

  void FatalProcessOutOfMemory(const char* location);

  static bool IsValidAllocationSpace(AllocationSpace space);

  static inline bool ShouldZapGarbage() {
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
  }

  static uintptr_t ZapValue() {
    return FLAG_clear_free_memory ? kClearedFreeMemoryValue : kZapValue;
  }
  static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
    return collector == SCAVENGER || collector == MINOR_MARK_COMPACTOR;
  }

  static inline GarbageCollector YoungGenerationCollector() {
#if ENABLE_MINOR_MC
    return (FLAG_minor_mc) ? MINOR_MARK_COMPACTOR : SCAVENGER;
#else
    return SCAVENGER;
#endif  // ENABLE_MINOR_MC
  }

  static inline const char* CollectorName(GarbageCollector collector) {
    switch (collector) {
      case SCAVENGER:
        return "Scavenger";
      case MARK_COMPACTOR:
        return "Mark-Compact";
      case MINOR_MARK_COMPACTOR:
        return "Minor Mark-Compact";
    }
    return "Unknown collector";
  }
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  V8_EXPORT_PRIVATE static void WriteBarrierForCodeSlow(Code host);
  V8_EXPORT_PRIVATE static void GenerationalBarrierSlow(HeapObject* object,
                                                        Address slot,
                                                        HeapObject* value);
  V8_EXPORT_PRIVATE static void GenerationalBarrierForElementsSlow(
      Heap* heap, FixedArray array, int offset, int length);
  V8_EXPORT_PRIVATE static void GenerationalBarrierForCodeSlow(
      Code host, RelocInfo* rinfo, HeapObject* value);
  V8_EXPORT_PRIVATE static void MarkingBarrierSlow(HeapObject* object,
                                                   Address slot,
                                                   HeapObject* value);
  V8_EXPORT_PRIVATE static void MarkingBarrierForElementsSlow(
      Heap* heap, HeapObject* object);
  V8_EXPORT_PRIVATE static void MarkingBarrierForCodeSlow(Code host,
                                                          RelocInfo* rinfo,
                                                          HeapObject* object);
  V8_EXPORT_PRIVATE static bool PageFlagsAreConsistent(HeapObject* object);
  void NotifyDeserializationComplete();

  inline Address* NewSpaceAllocationTopAddress();
  inline Address* NewSpaceAllocationLimitAddress();
  inline Address* OldSpaceAllocationTopAddress();
  inline Address* OldSpaceAllocationLimitAddress();

  void MoveElements(FixedArray array, int dst_index, int src_index, int len,
                    WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
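  // CreateFillerObjectAt (below) initializes a filler object at |addr| so the
  // heap stays iterable when a gap is punched within a page. Callers pass
  // ClearRecordedSlots::kYes when slots may have been recorded in the freed
  // range; the freed memory itself is only cleared when
  // ClearFreedMemoryMode::kClearFreedMemory is requested.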
  V8_EXPORT_PRIVATE HeapObject* CreateFillerObjectAt(
      Address addr, int size, ClearRecordedSlots clear_slots_mode,
      ClearFreedMemoryMode clear_memory_mode =
          ClearFreedMemoryMode::kDontClearFreedMemory);

  template <typename T>
  void CreateFillerForArray(T object, int elements_to_trim, int bytes_to_trim);

  bool CanMoveObjectStart(HeapObject* object);

  bool IsImmovable(HeapObject* object);

  bool IsLargeObject(HeapObject* object);
  inline bool IsWithinLargeObject(Address address);

  bool IsInYoungGeneration(HeapObject* object);

  FixedArrayBase LeftTrimFixedArray(FixedArrayBase obj, int elements_to_trim);

  void RightTrimFixedArray(FixedArrayBase obj, int elements_to_trim);
  void RightTrimWeakFixedArray(WeakFixedArray* obj, int elements_to_trim);
  inline Oddball* ToBoolean(bool condition);

  int NotifyContextDisposed(bool dependant_context);

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() const { return native_contexts_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }

  Address allocation_sites_list_address() {
    return reinterpret_cast<Address>(&allocation_sites_list_);
  }

  void ForeachAllocationSite(
      Object* list, const std::function<void(AllocationSite*)>& visitor);

  int ms_count() const { return ms_count_; }

  bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
  void CheckHandleCount();

  uint32_t allocations_count() { return allocations_count_; }

  void PrintShortHeapStatistics();

  bool write_protect_code_memory() const { return write_protect_code_memory_; }

  uintptr_t code_space_memory_modification_scope_depth() {
    return code_space_memory_modification_scope_depth_;
  }

  void increment_code_space_memory_modification_scope_depth() {
    code_space_memory_modification_scope_depth_++;
  }

  void decrement_code_space_memory_modification_scope_depth() {
    code_space_memory_modification_scope_depth_--;
  }

  void UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk);
  void UnprotectAndRegisterMemoryChunk(HeapObject* object);
  void UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk);
  V8_EXPORT_PRIVATE void ProtectUnprotectedMemoryChunks();

  void EnableUnprotectedMemoryChunksRegistry() {
    unprotected_memory_chunks_registry_enabled_ = true;
  }

  void DisableUnprotectedMemoryChunksRegistry() {
    unprotected_memory_chunks_registry_enabled_ = false;
  }

  bool unprotected_memory_chunks_registry_enabled() {
    return unprotected_memory_chunks_registry_enabled_;
  }
  inline HeapState gc_state() { return gc_state_; }
  void SetGCState(HeapState state);
  bool IsTearingDown() const { return gc_state_ == TEAR_DOWN; }

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

  template <FindMementoMode mode>
  inline AllocationMemento* FindAllocationMemento(Map map, HeapObject* object);

  bool ReserveSpace(Reservation* reservations, std::vector<Address>* maps);

  void CreateApiObjects();

  bool IdleNotification(double deadline_in_seconds);
  bool IdleNotification(int idle_time_in_ms);

  void MemoryPressureNotification(MemoryPressureLevel level,
                                  bool is_isolate_locked);
  void CheckMemoryPressure();

  void AddNearHeapLimitCallback(v8::NearHeapLimitCallback, void* data);
  void RemoveNearHeapLimitCallback(v8::NearHeapLimitCallback callback,
                                   size_t heap_limit);
  double MonotonicallyIncreasingTimeInMs();

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  void CheckNewSpaceExpansionCriteria();

  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  inline bool ShouldBePromoted(Address old_address);

  void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);

  inline uint64_t HashSeed();

  inline int NextScriptId();
  inline int NextDebuggingId();
  inline int GetNextTemplateSerialNumber();

  void SetSerializedObjects(FixedArray objects);
  void SetSerializedGlobalProxySizes(FixedArray sizes);

  void RememberUnmappedPage(Address page, bool compacted);

  int64_t external_memory_hard_limit() { return MaxOldGenerationSize() / 2; }
  V8_INLINE int64_t external_memory();
  V8_INLINE void update_external_memory(int64_t delta);
  V8_INLINE void update_external_memory_concurrently_freed(intptr_t freed);
  V8_INLINE void account_external_memory_concurrently_freed();

  size_t backing_store_bytes() const { return backing_store_bytes_; }

  void CompactWeakArrayLists(PretenureFlag pretenure);

  void AddRetainedMap(Handle<Map> map);

  inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);

  inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                          int size_in_bytes);
  inline bool CanAllocateInReadOnlySpace();
  bool deserialization_complete() const { return deserialization_complete_; }

  bool HasLowAllocationRate();
  bool HasHighFragmentation();
  bool HasHighFragmentation(size_t used, size_t committed);

  void ActivateMemoryReducerIfNeeded();

  bool ShouldOptimizeForMemoryUsage();

  bool HighMemoryPressure() {
    return memory_pressure_level_ != MemoryPressureLevel::kNone;
  }

  void RestoreHeapLimit(size_t heap_limit) {
    size_t min_limit = SizeOfObjects() + SizeOfObjects() / 4;
    max_old_generation_size_ =
        Min(max_old_generation_size_, Max(heap_limit, min_limit));
  }
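  // Note on RestoreHeapLimit above: the restored limit is clamped to at
  // least the current live size plus 25% slack (min_limit), so a caller can
  // never restore a limit that the heap has already outgrown.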
  void ConfigureHeap(size_t max_semi_space_size_in_kb,
                     size_t max_old_generation_size_in_mb,
                     size_t code_range_size_in_mb);
  void ConfigureHeapDefault();

  void InitializeHashSeed();

  bool CreateHeapObjects();

  void CreateObjectStats();

  void StartTearDown();
  inline Address NewSpaceTop();

  NewSpace* new_space() { return new_space_; }
  OldSpace* old_space() { return old_space_; }
  CodeSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }
  CodeLargeObjectSpace* code_lo_space() { return code_lo_space_; }
  NewLargeObjectSpace* new_lo_space() { return new_lo_space_; }
  ReadOnlySpace* read_only_space() { return read_only_space_; }

  inline PagedSpace* paged_space(int idx);
  inline Space* space(int idx);

  const char* GetSpaceName(int idx);

  GCTracer* tracer() { return tracer_; }

  MemoryAllocator* memory_allocator() { return memory_allocator_; }
  inline Isolate* isolate();

  MarkCompactCollector* mark_compact_collector() {
    return mark_compact_collector_;
  }

  MinorMarkCompactCollector* minor_mark_compact_collector() {
    return minor_mark_compact_collector_;
  }

  ArrayBufferCollector* array_buffer_collector() {
    return array_buffer_collector_;
  }

  V8_INLINE RootsTable& roots_table();

#define ROOT_ACCESSOR(type, name, CamelName) inline type name();
  MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
  V8_INLINE void SetRootCodeStubs(SimpleNumberDictionary value);
  V8_INLINE void SetRootMaterializedObjects(FixedArray objects);
  V8_INLINE void SetRootScriptList(Object* value);
  V8_INLINE void SetRootStringTable(StringTable value);
  V8_INLINE void SetRootNoScriptSharedFunctionInfos(Object* value);
  V8_INLINE void SetMessageListeners(TemplateList value);

  void SetStackLimits();

  void ClearStackLimits();

  void RegisterStrongRoots(ObjectSlot start, ObjectSlot end);
  void UnregisterStrongRoots(ObjectSlot start);

  void SetBuiltinsConstantsTable(FixedArray cache);

  void SetInterpreterEntryTrampolineForProfiling(Code code);
  void AddDirtyJSWeakFactory(
      JSWeakFactory* weak_factory,
      std::function<void(HeapObject* object, ObjectSlot slot, Object* target)>
          gc_notify_updated_slot);

  void AddKeepDuringJobTarget(Handle<JSReceiver> target);
  void ClearKeepDuringJobSet();

  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  void EnableInlineAllocation();
  void DisableInlineAllocation();
  V8_EXPORT_PRIVATE bool CollectGarbage(
      AllocationSpace space, GarbageCollectionReason gc_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  V8_EXPORT_PRIVATE void CollectAllGarbage(
      int flags, GarbageCollectionReason gc_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason);

  void PreciseCollectAllGarbage(
      int flags, GarbageCollectionReason gc_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  void ReportExternalMemoryPressure();

  typedef v8::Isolate::GetExternallyAllocatedMemoryInBytesCallback
      GetExternallyAllocatedMemoryInBytesCallback;

  void SetGetExternallyAllocatedMemoryInBytesCallback(
      GetExternallyAllocatedMemoryInBytesCallback callback) {
    external_memory_callback_ = callback;
  }

  void HandleGCRequest();
  Code builtin(int index);
  Address builtin_address(int index);
  void set_builtin(int index, Code builtin);

  void IterateRoots(RootVisitor* v, VisitMode mode);
  void IterateStrongRoots(RootVisitor* v, VisitMode mode);
  void IterateSmiRoots(RootVisitor* v);
  void IterateWeakRoots(RootVisitor* v, VisitMode mode);
  void IterateWeakGlobalHandles(RootVisitor* v);
  void IterateBuiltins(RootVisitor* v);

  Address* IsMarkingFlagAddress() {
    return reinterpret_cast<Address*>(&is_marking_flag_);
  }
  void SetIsMarkingFlag(uint8_t flag) { is_marking_flag_ = flag; }

  Address* store_buffer_top_address();
  static intptr_t store_buffer_mask_constant();
  static Address store_buffer_overflow_function_address();

  void ClearRecordedSlot(HeapObject* object, ObjectSlot slot);
  void ClearRecordedSlotRange(Address start, Address end);

  void VerifyClearedSlot(HeapObject* object, ObjectSlot slot);
  int GCFlagsForIncrementalMarking() {
    return ShouldOptimizeForMemoryUsage() ? kReduceMemoryFootprintMask
                                          : kNoGCFlags;
  }

  void StartIdleIncrementalMarking(
      GarbageCollectionReason gc_reason,
      GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);

  void StartIncrementalMarking(
      int gc_flags, GarbageCollectionReason gc_reason,
      GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
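  // A minimal usage sketch (hypothetical call site): incremental marking is
  // typically started with the flags derived above, e.g.
  //
  //   heap->StartIncrementalMarking(heap->GCFlagsForIncrementalMarking(),
  //                                 GarbageCollectionReason::kMemoryPressure);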
  void StartIncrementalMarkingIfAllocationLimitIsReached(
      int gc_flags,
      GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);

  void FinalizeIncrementalMarkingIfComplete(GarbageCollectionReason gc_reason);

  void FinalizeIncrementalMarkingAtomically(GarbageCollectionReason gc_reason);

  void RegisterDeserializedObjectsForBlackAllocation(
      Reservation* reservations, const std::vector<HeapObject*>& large_objects,
      const std::vector<Address>& maps);

  IncrementalMarking* incremental_marking() { return incremental_marking_; }

  ConcurrentMarking* concurrent_marking() { return concurrent_marking_; }
  void NotifyObjectLayoutChange(HeapObject* object, int old_size,
                                const DisallowHeapAllocation&);

  void VerifyObjectLayoutChange(HeapObject* object, Map new_map);

  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
  void SetConstructStubCreateDeoptPCOffset(int pc_offset);
  void SetConstructStubInvokeDeoptPCOffset(int pc_offset);
  void SetInterpreterEntryReturnPCOffset(int pc_offset);

  void InvalidateCodeDeoptimizationData(Code code);

  void DeoptMarkedAllocationSites();

  bool DeoptMaybeTenuredAllocationSites();
  LocalEmbedderHeapTracer* local_embedder_heap_tracer() const {
    return local_embedder_heap_tracer_;
  }

  void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
  EmbedderHeapTracer* GetEmbedderHeapTracer() const;

  void RegisterExternallyReferencedObject(Address* location);
  void SetEmbedderStackStateForNextFinalizaton(
      EmbedderHeapTracer::EmbedderStackState stack_state);

  inline void RegisterExternalString(String string);

  inline void UpdateExternalString(String string, size_t old_payload,
                                   size_t new_payload);

  inline void FinalizeExternalString(String string);

  static String UpdateNewSpaceReferenceInExternalStringTableEntry(
      Heap* heap, ObjectSlot pointer);
  static inline bool InNewSpace(Object* object);
  static inline bool InNewSpace(MaybeObject object);
  static inline bool InNewSpace(HeapObject* heap_object);
  static inline bool InNewSpace(HeapObjectPtr heap_object);
  static inline bool InFromSpace(Object* object);
  static inline bool InFromSpace(MaybeObject object);
  static inline bool InFromSpace(HeapObject* heap_object);
  static inline bool InToSpace(Object* object);
  static inline bool InToSpace(MaybeObject object);
  static inline bool InToSpace(HeapObject* heap_object);
  static inline bool InToSpace(HeapObjectPtr heap_object);

  inline bool InOldSpace(Object* object);

  inline bool InReadOnlySpace(Object* object);

  bool Contains(HeapObject* value);

  bool InSpace(HeapObject* value, AllocationSpace space);

  bool InSpaceSlow(Address addr, AllocationSpace space);

  static inline Heap* FromWritableHeapObject(const HeapObject* obj);
  static inline Heap* FromWritableHeapObject(const HeapObjectPtr* obj);
  size_t NumberOfTrackedHeapObjectTypes();

  size_t ObjectCountAtLastGC(size_t index);
  size_t ObjectSizeAtLastGC(size_t index);

  bool GetObjectTypeName(size_t index, const char** object_type,
                         const char** object_sub_type);

  size_t NumberOfNativeContexts();

  size_t NumberOfDetachedContexts();

  void CollectCodeStatistics();
  size_t MaxReserved();
  size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
  size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
  size_t MaxOldGenerationSize() { return max_old_generation_size_; }

  V8_EXPORT_PRIVATE static size_t ComputeMaxOldGenerationSize(
      uint64_t physical_memory);

  static size_t ComputeMaxSemiSpaceSize(uint64_t physical_memory) {
    const uint64_t min_physical_memory = 512 * MB;
    const uint64_t max_physical_memory = 3 * static_cast<uint64_t>(GB);

    uint64_t capped_physical_memory =
        Max(Min(physical_memory, max_physical_memory), min_physical_memory);
    size_t semi_space_size_in_kb =
        static_cast<size_t>(((capped_physical_memory - min_physical_memory) *
                             (kMaxSemiSpaceSizeInKB - kMinSemiSpaceSizeInKB)) /
                                (max_physical_memory - min_physical_memory) +
                            kMinSemiSpaceSizeInKB);
    return RoundUp(semi_space_size_in_kb, (1 << kPageSizeBits) / KB);
  }
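  // Worked example for ComputeMaxSemiSpaceSize above: physical memory is
  // clamped to [512 MB, 3 GB], and the semi-space size is interpolated
  // linearly between kMinSemiSpaceSizeInKB and kMaxSemiSpaceSizeInKB over
  // that range, then rounded up to a whole page. So a machine whose
  // physical_memory sits exactly halfway between 512 MB and 3 GB gets a
  // semi-space size halfway between the two bounds (before page rounding).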
  size_t OldGenerationCapacity();

  size_t CommittedMemoryOfUnmapper();

  size_t CommittedMemory();

  size_t CommittedOldGenerationMemory();

  size_t CommittedMemoryExecutable();

  size_t CommittedPhysicalMemory();

  size_t MaximumCommittedMemory() { return maximum_committed_; }

  void UpdateMaximumCommitted();

  size_t SizeOfObjects();

  void UpdateSurvivalStatistics(int start_new_space_size);
  inline void IncrementPromotedObjectsSize(size_t object_size) {
    promoted_objects_size_ += object_size;
  }
  inline size_t promoted_objects_size() { return promoted_objects_size_; }

  inline void IncrementSemiSpaceCopiedObjectSize(size_t object_size) {
    semi_space_copied_object_size_ += object_size;
  }
  inline size_t semi_space_copied_object_size() {
    return semi_space_copied_object_size_;
  }

  inline size_t SurvivedNewSpaceObjectSize() {
    return promoted_objects_size_ + semi_space_copied_object_size_;
  }

  inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }

  inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }

  inline void IncrementNodesPromoted() { nodes_promoted_++; }

  inline void IncrementYoungSurvivorsCounter(size_t survived) {
    survived_last_scavenge_ = survived;
    survived_since_last_expansion_ += survived;
  }

  inline uint64_t OldGenerationObjectsAndPromotedExternalMemorySize() {
    return OldGenerationSizeOfObjects() + PromotedExternalMemorySize();
  }

  inline void UpdateNewSpaceAllocationCounter();

  inline size_t NewSpaceAllocationCounter();

  void set_new_space_allocation_counter(size_t new_value) {
    new_space_allocation_counter_ = new_value;
  }

  void UpdateOldGenerationAllocationCounter() {
    old_generation_allocation_counter_at_last_gc_ =
        OldGenerationAllocationCounter();
    old_generation_size_at_last_gc_ = 0;
  }

  size_t OldGenerationAllocationCounter() {
    return old_generation_allocation_counter_at_last_gc_ +
           PromotedSinceLastGC();
  }

  void set_old_generation_allocation_counter_at_last_gc(size_t new_value) {
    old_generation_allocation_counter_at_last_gc_ = new_value;
  }

  size_t PromotedSinceLastGC() {
    size_t old_generation_size = OldGenerationSizeOfObjects();
    DCHECK_GE(old_generation_size, old_generation_size_at_last_gc_);
    return old_generation_size - old_generation_size_at_last_gc_;
  }
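  // Note on PromotedSinceLastGC above: promotion is measured indirectly, as
  // the growth of the old generation since the last GC, i.e.
  //   OldGenerationSizeOfObjects() - old_generation_size_at_last_gc_.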
  void NotifyRefinedOldGenerationSize(size_t decreased_bytes) {
    if (old_generation_size_at_last_gc_ != 0) {
      DCHECK_GE(old_generation_size_at_last_gc_, decreased_bytes);
      old_generation_size_at_last_gc_ -= decreased_bytes;
    }
  }

  int gc_count() const { return gc_count_; }

  size_t OldGenerationSizeOfObjects();

  void AddGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
                             GCType gc_type_filter, void* data);
  void RemoveGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
                                void* data);
  void AddGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
                             GCType gc_type_filter, void* data);
  void RemoveGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
                                void* data);

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
  V8_WARN_UNUSED_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
                                                      int filler_size);

  V8_WARN_UNUSED_RESULT HeapObject* AlignWithFiller(
      HeapObject* object, int object_size, int allocation_size,
      AllocationAlignment alignment);
  void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
  void UnregisterArrayBuffer(JSArrayBuffer* buffer);

  inline void UpdateAllocationSite(
      Map map, HeapObject* object,
      PretenuringFeedbackMap* pretenuring_feedback);

  void MergeAllocationSitePretenuringFeedback(
      const PretenuringFeedbackMap& local_pretenuring_feedback);

  void AddAllocationObserversToAllSpaces(
      AllocationObserver* observer, AllocationObserver* new_space_observer);

  void RemoveAllocationObserversFromAllSpaces(
      AllocationObserver* observer, AllocationObserver* new_space_observer);

  bool allocation_step_in_progress() { return allocation_step_in_progress_; }
  void set_allocation_step_in_progress(bool val) {
    allocation_step_in_progress_ = val;
  }

  void AddHeapObjectAllocationTracker(HeapObjectAllocationTracker* tracker);
  void RemoveHeapObjectAllocationTracker(HeapObjectAllocationTracker* tracker);
  bool has_heap_object_allocation_tracker() const {
    return !allocation_trackers_.empty();
  }
  void AddRetainingPathTarget(Handle<HeapObject> object,
                              RetainingPathOption option);

  Code GcSafeFindCodeForInnerPointer(Address inner_pointer);

  bool GcSafeCodeContains(Code code, Address addr);

  void VerifyRememberedSetFor(HeapObject* object);

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }
#endif
  void VerifyCountersAfterSweeping();
  void VerifyCountersBeforeConcurrentSweeping();

  void PrintHandles();

  void ReportCodeStatistics(const char* title);

  void* GetRandomMmapAddr() {
    void* result = v8::internal::GetRandomMmapAddr();
#if V8_TARGET_ARCH_X64
#if V8_OS_MACOSX
    // Keep the hint within a 32-bit region of the address space; the offset
    // is a fresh random address masked by kMmapRegionMask.
    uintptr_t offset =
        reinterpret_cast<uintptr_t>(v8::internal::GetRandomMmapAddr()) &
        kMmapRegionMask;
    result = reinterpret_cast<void*>(mmap_region_base_ + offset);
#endif  // V8_OS_MACOSX
#endif  // V8_TARGET_ARCH_X64
    return result;
  }

  static const char* GarbageCollectionReasonToString(
      GarbageCollectionReason gc_reason);

  inline int MaxNumberToStringCacheSize() const;
 private:
  class SkipStoreBufferScope;

  typedef String (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                       ObjectSlot pointer);

  class ExternalStringTable {
   public:
    explicit ExternalStringTable(Heap* heap) : heap_(heap) {}

    inline void AddString(String string);
    bool Contains(String string);

    void IterateAll(RootVisitor* v);
    void IterateNewSpaceStrings(RootVisitor* v);
    void PromoteAllNewSpaceStrings();

    void CleanUpNewSpaceStrings();

    void UpdateNewSpaceReferences(
        Heap::ExternalStringTableUpdaterCallback updater_func);
    void UpdateReferences(
        Heap::ExternalStringTableUpdaterCallback updater_func);

    void VerifyNewSpace();

   private:
    Heap* const heap_;

    std::vector<Object*> new_space_strings_;
    std::vector<Object*> old_space_strings_;

    DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
  };
  struct StrongRootsList;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootIndex index;
  };

  struct GCCallbackTuple {
    GCCallbackTuple(v8::Isolate::GCCallbackWithData callback, GCType gc_type,
                    void* data)
        : callback(callback), gc_type(gc_type), data(data) {}

    bool operator==(const GCCallbackTuple& other) const;
    GCCallbackTuple& operator=(const GCCallbackTuple& other);

    v8::Isolate::GCCallbackWithData callback;
    GCType gc_type;
    void* data;
  };
  static const int kInitialStringTableSize = StringTable::kMinCapacity;
  static const int kInitialEvalCacheSize = 64;
  static const int kInitialNumberStringCacheSize = 256;

  static const int kRememberedUnmappedPages = 128;

  static const StringTypeTable string_type_table[];
  static const ConstantStringTable constant_string_table[];
  static const StructTable struct_table[];

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;
  static const int kOldSurvivalRateLowThreshold = 10;

  static const int kMaxMarkCompactsInIdleRound = 7;
  static const int kIdleScavengeThreshold = 5;

  static const int kInitialFeedbackCapacity = 256;

  static AllocationSpace SelectSpace(PretenureFlag pretenure) {
    switch (pretenure) {
      case TENURED_READ_ONLY:
        return RO_SPACE;
      case TENURED:
        return OLD_SPACE;
      case NOT_TENURED:
        return NEW_SPACE;
      default:
        UNREACHABLE();
    }
  }

  static size_t DefaultGetExternallyAllocatedMemoryInBytesCallback() {
    return 0;
  }
#define ROOT_ACCESSOR(type, name, CamelName) inline void set_##name(type value);
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  StoreBuffer* store_buffer() { return store_buffer_; }

  void set_current_gc_flags(int flags) { current_gc_flags_ = flags; }

  inline bool ShouldReduceMemory() const {
    return (current_gc_flags_ & kReduceMemoryFootprintMask) != 0;
  }

  int NumberOfScavengeTasks();

  GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                          const char** reason);
  void EnsureFillerObjectAtTop();

  void MakeHeapIterable();

  bool PerformGarbageCollection(
      GarbageCollector collector,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  inline void UpdateOldSpaceLimits();

  bool CreateInitialMaps();
  void CreateInternalAccessorInfoObjects();
  void CreateInitialObjects();

  V8_NOINLINE void CreateJSEntryStub();
  V8_NOINLINE void CreateJSConstructEntryStub();
  V8_NOINLINE void CreateJSRunMicrotasksEntryStub();

  void CreateFixedStubs();

  void EnsureFromSpaceIsCommitted();

  bool UncommitFromSpace();

  void ZapFromSpace();

  void ZapCodeObject(Address start_address, int size_in_bytes);
  void ResetAllAllocationSitesDependentCode(PretenureFlag flag);

  void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);

  void ReportStatisticsAfterGC();

  void FlushNumberStringCache();

  void ConfigureInitialOldGenerationSize();

  bool HasLowYoungGenerationAllocationRate();
  bool HasLowOldGenerationAllocationRate();
  double YoungGenerationMutatorUtilization();
  double OldGenerationMutatorUtilization();

  void ReduceNewSpaceSize();

  GCIdleTimeHeapState ComputeHeapState();

  bool PerformIdleTimeAction(GCIdleTimeAction action,
                             GCIdleTimeHeapState heap_state,
                             double deadline_in_ms);

  void IdleNotificationEpilogue(GCIdleTimeAction action,
                                GCIdleTimeHeapState heap_state, double start_ms,
                                double deadline_in_ms);

  int NextAllocationTimeout(int current_timeout = 0);
  inline void UpdateAllocationsHash(HeapObject* object);
  inline void UpdateAllocationsHash(uint32_t value);
  void PrintAllocationsHash();

  void PrintMaxMarkingLimitReached();
  void PrintMaxNewSpaceSizeReached();

  int NextStressMarkingLimit();

  void AddToRingBuffer(const char* string);
  void GetFromRingBuffer(char* buffer);

  void CompactRetainedMaps(WeakArrayList* retained_maps);

  void CollectGarbageOnMemoryPressure();

  void EagerlyFreeExternalMemory();

  bool InvokeNearHeapLimitCallback();

  void ComputeFastPromotionMode();

  void FinalizeIncrementalMarkingIncrementally(
      GarbageCollectionReason gc_reason);

  TimedHistogram* GCTypeTimer(GarbageCollector collector);
  TimedHistogram* GCTypePriorityTimer(GarbageCollector collector);

  void ProcessPretenuringFeedback();

  void RemoveAllocationSitePretenuringFeedback(AllocationSite* site);
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  void MinorMarkCompact();

  void MarkCompactPrologue();
  void MarkCompactEpilogue();

  void EvacuateYoungGeneration();

  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void UpdateReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
  void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
  void ProcessNativeContexts(WeakObjectRetainer* retainer);
  void ProcessAllocationSites(WeakObjectRetainer* retainer);
  void ProcessWeakListRoots(WeakObjectRetainer* retainer);
  inline size_t OldGenerationSpaceAvailable() {
    if (old_generation_allocation_limit_ <=
        OldGenerationObjectsAndPromotedExternalMemorySize())
      return 0;
    return old_generation_allocation_limit_ -
           static_cast<size_t>(
               OldGenerationObjectsAndPromotedExternalMemorySize());
  }

  inline bool AllocationLimitOvershotByLargeMargin() {
    // This guards against too-eager finalization in small heaps.
    size_t kMarginForSmallHeaps = 32u * MB;
    if (old_generation_allocation_limit_ >=
        OldGenerationObjectsAndPromotedExternalMemorySize())
      return false;
    uint64_t overshoot = OldGenerationObjectsAndPromotedExternalMemorySize() -
                         old_generation_allocation_limit_;
    uint64_t margin =
        Min(Max(old_generation_allocation_limit_ / 2, kMarginForSmallHeaps),
            (max_old_generation_size_ - old_generation_allocation_limit_) / 2);
    return overshoot >= margin;
  }
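  // Worked example for the margin above: with a 128 MB allocation limit and
  // a 512 MB max old generation, margin = Min(Max(64 MB, 32 MB), 192 MB)
  // = 64 MB, so only an overshoot of at least 64 MB counts as "large".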
  void UpdateTotalGCTime(double duration);

  bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }

  bool IsIneffectiveMarkCompact(size_t old_generation_size,
                                double mutator_utilization);
  void CheckIneffectiveMarkCompact(size_t old_generation_size,
                                   double mutator_utilization);

  inline void IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                                 size_t amount);

  inline void DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                                 size_t amount);
  HeapController* heap_controller() { return heap_controller_; }
  MemoryReducer* memory_reducer() { return memory_reducer_; }

  static const int kMaxLoadTimeMs = 7000;

  bool ShouldOptimizeForLoadTime();

  size_t old_generation_allocation_limit() const {
    return old_generation_allocation_limit_;
  }

  bool always_allocate() { return always_allocate_scope_count_ != 0; }

  bool CanExpandOldGeneration(size_t size);

  bool ShouldExpandOldGenerationOnSlowAllocation();

  enum class HeapGrowingMode { kSlow, kConservative, kMinimal, kDefault };

  HeapGrowingMode CurrentHeapGrowingMode();

  enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit };
  IncrementalMarkingLimit IncrementalMarkingLimitReached();

  bool RecentIdleNotificationHappened();
  void ScheduleIdleScavengeIfNeeded(int bytes_allocated);

  void heap_iterator_start() { heap_iterator_depth_++; }

  void heap_iterator_end() { heap_iterator_depth_--; }

  bool in_heap_iterator() { return heap_iterator_depth_ > 0; }
  V8_WARN_UNUSED_RESULT AllocationResult
  AllocateMap(InstanceType instance_type, int instance_size,
              ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND,
              int inobject_properties = 0);

  V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRaw(
      int size_in_bytes, AllocationSpace space,
      AllocationAlignment alignment = kWordAligned);

  HeapObject* AllocateRawWithLightRetry(
      int size, AllocationSpace space,
      AllocationAlignment alignment = kWordAligned);

  HeapObject* AllocateRawWithRetryOrFail(
      int size, AllocationSpace space,
      AllocationAlignment alignment = kWordAligned);
  HeapObject* AllocateRawCodeInLargeObjectSpace(int size);

  V8_WARN_UNUSED_RESULT AllocationResult Allocate(Map map,
                                                  AllocationSpace space);

  HeapObject* EnsureImmovableCode(HeapObject* heap_object, int object_size);

  V8_WARN_UNUSED_RESULT AllocationResult
  AllocatePartialMap(InstanceType instance_type, int instance_size);

  void FinalizePartialMap(Map map);

  V8_WARN_UNUSED_RESULT AllocationResult
  AllocateEmptyFixedTypedArray(ExternalArrayType array_type);

  void set_force_oom(bool value) { force_oom_ = value; }
  void AddRetainer(HeapObject* retainer, HeapObject* object);
  void AddEphemeronRetainer(HeapObject* retainer, HeapObject* object);
  void AddRetainingRoot(Root root, HeapObject* object);

  bool IsRetainingPathTarget(HeapObject* object, RetainingPathOption* option);
  void PrintRetainingPath(HeapObject* object, RetainingPathOption option);

  void IncrementObjectCounters();
  std::atomic<intptr_t> external_memory_concurrently_freed_{0};

  Isolate* isolate_ = nullptr;

  size_t code_range_size_ = 0;
  size_t max_semi_space_size_ = 8 * (kPointerSize / 4) * MB;
  size_t initial_semispace_size_ = kMinSemiSpaceSizeInKB * KB;
  size_t max_old_generation_size_ = 700ul * (kPointerSize / 4) * MB;
  size_t initial_max_old_generation_size_;
  size_t initial_old_generation_size_;
  bool old_generation_size_configured_ = false;
  size_t maximum_committed_ = 0;

  std::atomic<size_t> backing_store_bytes_{0};

  size_t survived_since_last_expansion_ = 0;

  size_t survived_last_scavenge_ = 0;

  std::atomic<size_t> always_allocate_scope_count_{0};

  std::atomic<MemoryPressureLevel> memory_pressure_level_;

  std::vector<std::pair<v8::NearHeapLimitCallback, void*> >
      near_heap_limit_callbacks_;

  int contexts_disposed_ = 0;

  int number_of_disposed_maps_ = 0;

  NewSpace* new_space_ = nullptr;
  OldSpace* old_space_ = nullptr;
  CodeSpace* code_space_ = nullptr;
  MapSpace* map_space_ = nullptr;
  LargeObjectSpace* lo_space_ = nullptr;
  CodeLargeObjectSpace* code_lo_space_ = nullptr;
  NewLargeObjectSpace* new_lo_space_ = nullptr;
  ReadOnlySpace* read_only_space_ = nullptr;

  Space* space_[LAST_SPACE + 1];

  bool write_protect_code_memory_ = false;

  uintptr_t code_space_memory_modification_scope_depth_ = 0;

  HeapState gc_state_ = NOT_IN_GC;

  int gc_post_processing_depth_ = 0;

  uint64_t PromotedExternalMemorySize();

  uint32_t raw_allocations_hash_ = 0;

  int stress_marking_percentage_ = 0;

  AllocationObserver* stress_marking_observer_ = nullptr;

  StressScavengeObserver* stress_scavenge_observer_ = nullptr;

  bool allocation_step_in_progress_ = false;

  double max_marking_limit_reached_ = 0.0;

  unsigned int ms_count_ = 0;

  unsigned int gc_count_ = 0;

  int consecutive_ineffective_mark_compacts_ = 0;

  static const uintptr_t kMmapRegionMask = 0xFFFFFFFFu;

  int remembered_unmapped_pages_index_ = 0;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

  size_t old_generation_allocation_limit_;

  bool inline_allocation_disabled_ = false;

  Object* native_contexts_list_;
  Object* allocation_sites_list_;

  std::vector<GCCallbackTuple> gc_epilogue_callbacks_;
  std::vector<GCCallbackTuple> gc_prologue_callbacks_;

  GetExternallyAllocatedMemoryInBytesCallback external_memory_callback_;

  int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];

  size_t promoted_objects_size_ = 0;
  double promotion_ratio_ = 0.0;
  double promotion_rate_ = 0.0;
  size_t semi_space_copied_object_size_ = 0;
  size_t previous_semi_space_copied_object_size_ = 0;
  double semi_space_copied_rate_ = 0.0;
  int nodes_died_in_new_space_ = 0;
  int nodes_copied_in_new_space_ = 0;
  int nodes_promoted_ = 0;

  unsigned int maximum_size_scavenges_ = 0;

  double total_gc_time_ms_;

  double last_idle_notification_time_ = 0.0;

  double last_gc_time_ = 0.0;

  GCTracer* tracer_ = nullptr;
  MarkCompactCollector* mark_compact_collector_ = nullptr;
  MinorMarkCompactCollector* minor_mark_compact_collector_ = nullptr;
  ScavengerCollector* scavenger_collector_ = nullptr;
  ArrayBufferCollector* array_buffer_collector_ = nullptr;
  MemoryAllocator* memory_allocator_ = nullptr;
  StoreBuffer* store_buffer_ = nullptr;
  HeapController* heap_controller_ = nullptr;
  IncrementalMarking* incremental_marking_ = nullptr;
  ConcurrentMarking* concurrent_marking_ = nullptr;
  GCIdleTimeHandler* gc_idle_time_handler_ = nullptr;
  MemoryReducer* memory_reducer_ = nullptr;
  ObjectStats* live_object_stats_ = nullptr;
  ObjectStats* dead_object_stats_ = nullptr;
  ScavengeJob* scavenge_job_ = nullptr;
  AllocationObserver* idle_scavenge_observer_ = nullptr;
  LocalEmbedderHeapTracer* local_embedder_heap_tracer_ = nullptr;
  StrongRootsList* strong_roots_list_ = nullptr;

  size_t new_space_allocation_counter_ = 0;

  size_t old_generation_allocation_counter_at_last_gc_ = 0;

  size_t old_generation_size_at_last_gc_ = 0;

  PretenuringFeedbackMap global_pretenuring_feedback_;

  char trace_ring_buffer_[kTraceRingBufferSize];

  uint8_t is_marking_flag_ = 0;

  bool ring_buffer_full_ = false;
  size_t ring_buffer_end_ = 0;

  bool configured_ = false;

  int current_gc_flags_ = Heap::kNoGCFlags;

  GCCallbackFlags current_gc_callback_flags_;

  ExternalStringTable external_string_table_;

  base::Mutex relocation_mutex_;

  int gc_callbacks_depth_ = 0;

  bool deserialization_complete_ = false;

  int heap_iterator_depth_ = 0;

  bool fast_promotion_mode_ = false;

  bool force_oom_ = false;
  bool delay_sweeper_tasks_for_testing_ = false;

  HeapObject* pending_layout_change_object_ = nullptr;

  base::Mutex unprotected_memory_chunks_mutex_;
  std::unordered_set<MemoryChunk*> unprotected_memory_chunks_;
  bool unprotected_memory_chunks_registry_enabled_ = false;

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  int allocation_timeout_ = 0;
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT

  std::map<HeapObject*, HeapObject*> retainer_;
  std::map<HeapObject*, Root> retaining_root_;
  std::map<HeapObject*, HeapObject*> ephemeron_retainer_;
  std::map<int, RetainingPathOption> retaining_path_target_option_;

  std::vector<HeapObjectAllocationTracker*> allocation_trackers_;
  friend class AlwaysAllocateScope;
  friend class ArrayBufferCollector;
  friend class ConcurrentMarking;
  friend class EphemeronHashTableMarkingTask;
  friend class GCCallbacksScope;
  friend class GCTracer;
  friend class MemoryController;
  friend class HeapIterator;
  friend class IdleScavengeObserver;
  friend class IncrementalMarking;
  friend class IncrementalMarkingJob;
  friend class LargeObjectSpace;
  template <FixedArrayVisitationMode fixed_array_mode,
            TraceRetainingPathMode retaining_path_mode,
            typename MarkingState>
  friend class MarkingVisitor;
  friend class MarkCompactCollector;
  friend class MarkCompactCollectorBase;
  friend class MinorMarkCompactCollector;
  friend class NewSpace;
  friend class ObjectStatsCollector;
  friend class PagedSpace;
  friend class ReadOnlyRoots;
  friend class Scavenger;
  friend class ScavengerCollector;
  friend class StoreBuffer;
  friend class Sweeper;
  friend class heap::TestMemoryAllocatorScope;

  friend class Factory;

  friend class Isolate;

  friend class heap::HeapTester;

  FRIEND_TEST(HeapControllerTest, OldGenerationAllocationLimit);
  FRIEND_TEST(HeapTest, ExternalLimitDefault);
  FRIEND_TEST(HeapTest, ExternalLimitStaysAboveDefaultForExplicitHandling);
  DISALLOW_COPY_AND_ASSIGN(Heap);
};
class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  intptr_t* start_marker;
  size_t* ro_space_size;
  size_t* ro_space_capacity;
  size_t* new_space_size;
  size_t* new_space_capacity;
  size_t* old_space_size;
  size_t* old_space_capacity;
  size_t* code_space_size;
  size_t* code_space_capacity;
  size_t* map_space_size;
  size_t* map_space_capacity;
  size_t* lo_space_size;
  size_t* code_lo_space_size;
  size_t* global_handle_count;
  size_t* weak_global_handle_count;
  size_t* pending_global_handle_count;
  size_t* near_death_global_handle_count;
  size_t* free_global_handle_count;
  size_t* memory_allocator_size;
  size_t* memory_allocator_capacity;
  size_t* malloced_memory;
  size_t* malloced_peak_memory;
  size_t* objects_per_type;
  size_t* size_per_type;
  char* last_few_messages;
  char* js_stacktrace;
  intptr_t* end_marker;
};
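// Note on HeapStats above: kStartMarker and kEndMarker are sentinel values
// written through start_marker and end_marker when the record is filled in
// (see Heap::RecordStats), so a consumer of the dump can sanity-check that
// the block was populated consistently.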
  DISALLOW_HEAP_ALLOCATION(no_heap_allocation_);
  void VisitRootPointers(Root root, const char* description, ObjectSlot start,
                         ObjectSlot end) override;

  void VisitRootPointers(Root root, const char* description, ObjectSlot start,
                         ObjectSlot end) override;
  enum class SpacesSpecifier { kSweepablePagedSpaces, kAllPagedSpaces };

  explicit PagedSpaces(Heap* heap, SpacesSpecifier specifier =
                                       SpacesSpecifier::kSweepablePagedSpaces)
      : heap_(heap),
        counter_(specifier == SpacesSpecifier::kAllPagedSpaces ? RO_SPACE
                                                               : OLD_SPACE) {}
  enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };

  explicit HeapIterator(Heap* heap,
                        HeapObjectsFiltering filtering = kNoFiltering);

  DISALLOW_HEAP_ALLOCATION(no_heap_allocation_);

  HeapObjectsFiltering filtering_;

  std::unique_ptr<ObjectIterator> object_iterator_;
class AllocationObserver {
 public:
  explicit AllocationObserver(intptr_t step_size)
      : step_size_(step_size), bytes_to_next_step_(step_size) {
    DCHECK_LE(kPointerSize, step_size);
  }

  void AllocationStep(int bytes_allocated, Address soon_object, size_t size);

 protected:
  intptr_t step_size() const { return step_size_; }
  intptr_t bytes_to_next_step() const { return bytes_to_next_step_; }

  virtual void Step(int bytes_allocated, Address soon_object,
                    size_t size) = 0;

  virtual intptr_t GetNextStepSize() { return step_size_; }

  intptr_t step_size_;
  intptr_t bytes_to_next_step_;
};
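// A minimal sketch of a concrete observer (hypothetical, not part of V8):
//
//   class SamplingObserver : public AllocationObserver {
//    public:
//     explicit SamplingObserver(intptr_t rate) : AllocationObserver(rate) {}
//
//    protected:
//     void Step(int bytes_allocated, Address soon_object,
//               size_t size) override {
//       // Invoked once at least |rate| bytes have been allocated since the
//       // previous step; |soon_object| is the about-to-be-initialized
//       // object of |size| bytes.
//     }
//   };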
V8_EXPORT_PRIVATE const char* AllocationSpaceName(AllocationSpace space);

class HeapObjectAllocationTracker {
 public:
  virtual void AllocationEvent(Address addr, int size) = 0;
  virtual void UpdateObjectSizeEvent(Address addr, int size) {}
  virtual ~HeapObjectAllocationTracker() = default;
};
#endif  // V8_HEAP_HEAP_H_