5 #include "src/isolate-allocator.h" 6 #include "src/base/bounded-page-allocator.h" 7 #include "src/isolate.h" 8 #include "src/ptr-compr.h" 14 IsolateAllocator::IsolateAllocator(IsolateAllocationMode mode) {
15 #if V8_TARGET_ARCH_64_BIT 16 if (mode == IsolateAllocationMode::kInV8Heap) {
17 Address heap_base = InitReservation();
18 CommitPagesForIsolate(heap_base);
21 #endif // V8_TARGET_ARCH_64_BIT 24 CHECK_EQ(mode, IsolateAllocationMode::kInCppHeap);
25 page_allocator_ = GetPlatformPageAllocator();
26 isolate_memory_ = ::operator
new(
sizeof(Isolate));
27 DCHECK(!reservation_.IsReserved());
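// Usage sketch (illustrative, not part of the original file): a caller such
// as Isolate::New is expected to construct the allocator first and then
// placement-new the Isolate into the memory it provides, roughly:
//
//   IsolateAllocator* allocator = new IsolateAllocator(mode);
//   Isolate* isolate = new (allocator->isolate_memory()) Isolate(...);
//
// where isolate_memory() is assumed to be the accessor declared in
// src/isolate-allocator.h that exposes |isolate_memory_|.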
IsolateAllocator::~IsolateAllocator() {
  if (reservation_.IsReserved()) {
    // The memory is freed when |reservation_| goes out of scope.
    return;
  }

  // The memory was allocated in the C++ heap.
  ::operator delete(isolate_memory_);
}
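// Pointer-compression layout assumed by the two helpers below: the Isolate
// lives inside a kPtrComprHeapReservationSize virtual memory reservation
// (nominally 4 GB), and the compression base (the "isolate root") must sit
// kPtrComprIsolateRootBias bytes into that reservation, aligned to
// kPtrComprIsolateRootAlignment. InitReservation() obtains such a region;
// CommitPagesForIsolate() commits only the pages backing the Isolate object.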
#if V8_TARGET_ARCH_64_BIT
Address IsolateAllocator::InitReservation() {
  v8::PageAllocator* platform_page_allocator = GetPlatformPageAllocator();

  // Reserve a region such that the address at kPtrComprIsolateRootBias is
  // |base_alignment|-aligned. The VirtualMemory API does not support such a
  // constraint directly, so it is implemented manually below.
  size_t reservation_size = kPtrComprHeapReservationSize;
  size_t base_alignment = kPtrComprIsolateRootAlignment;

  const int kMaxAttempts = 3;
  for (int attempt = 0; attempt < kMaxAttempts; ++attempt) {
    Address hint = RoundDown(reinterpret_cast<Address>(
                                 platform_page_allocator->GetRandomMmapAddr()),
                             base_alignment) +
                   kPtrComprIsolateRootBias;

    // Reserve a padded region, which is guaranteed to contain a sub-region
    // of |reservation_size| bytes with the required alignment.
    VirtualMemory padded_reservation(platform_page_allocator,
                                     reservation_size * 2,
                                     reinterpret_cast<void*>(hint));
    if (!padded_reservation.IsReserved()) break;

    // Find such a sub-region inside the padded reservation.
    Address address =
        RoundUp(padded_reservation.address() + kPtrComprIsolateRootBias,
                base_alignment) -
        kPtrComprIsolateRootBias;
    CHECK(padded_reservation.InVM(address, reservation_size));

    // Free the padded reservation and immediately try to reserve an exact
    // region at the aligned address. This dance is necessary because not all
    // operating systems support freeing parts of a reserved address space
    // region.
    padded_reservation.Free();

    VirtualMemory reservation(platform_page_allocator, reservation_size,
                              reinterpret_cast<void*>(address));
    if (!reservation.IsReserved()) break;

    // The new reservation could have landed somewhere else; accept it only
    // if it still has the required alignment.
    Address aligned_address =
        RoundUp(reservation.address() + kPtrComprIsolateRootBias,
                base_alignment) -
        kPtrComprIsolateRootBias;

    if (reservation.address() == aligned_address) {
      reservation_ = std::move(reservation);
      break;
    }
  }
  if (!reservation_.IsReserved()) {
    V8::FatalProcessOutOfMemory(nullptr,
                                "Failed to reserve memory for new V8 Isolate");
  }

  CHECK_EQ(reservation_.size(), reservation_size);

  Address heap_base = reservation_.address() + kPtrComprIsolateRootBias;
  CHECK(IsAligned(heap_base, base_alignment));
  return heap_base;
}
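// Worked example of the alignment arithmetic in InitReservation()
// (illustrative addresses, assuming the nominal constants
// kPtrComprIsolateRootBias = 0x8000'0000 and base_alignment = 0x1'0000'0000):
// for a padded reservation starting at 0x7fff'1234'0000,
//   address   = RoundUp(0x7fff'1234'0000 + 0x8000'0000, 0x1'0000'0000)
//                   - 0x8000'0000
//             = 0x8000'0000'0000 - 0x8000'0000 = 0x7fff'8000'0000
//   heap_base = address + 0x8000'0000 = 0x8000'0000'0000,
// i.e. heap_base ends up 4 GB-aligned while the 4 GB region around it still
// fits inside the padded reservation.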
void IsolateAllocator::CommitPagesForIsolate(Address heap_base) {
  v8::PageAllocator* platform_page_allocator = GetPlatformPageAllocator();

  // Configure the BoundedPageAllocator to use the same page size as the heap
  // will use.
  size_t page_size = RoundUp(size_t{1} << kPageSizeBits,
                             platform_page_allocator->AllocatePageSize());

  page_allocator_instance_ = base::make_unique<base::BoundedPageAllocator>(
      platform_page_allocator, reservation_.address(), reservation_.size(),
      page_size);
  page_allocator_ = page_allocator_instance_.get();

  Address isolate_address = heap_base - Isolate::isolate_root_bias();
  Address isolate_end = isolate_address + sizeof(Isolate);

  // Inform the bounded page allocator about the pages reserved for the
  // Isolate object itself.
  {
    Address reserved_region_address = RoundDown(isolate_address, page_size);
    size_t reserved_region_size =
        RoundUp(isolate_end, page_size) - reserved_region_address;

    CHECK(page_allocator_instance_->AllocatePagesAt(
        reserved_region_address, reserved_region_size,
        PageAllocator::Permission::kNoAccess));
  }

  // Commit the pages where the Isolate will be stored.
  {
    size_t commit_page_size = platform_page_allocator->CommitPageSize();
    Address committed_region_address =
        RoundDown(isolate_address, commit_page_size);
    size_t committed_region_size =
        RoundUp(isolate_end, commit_page_size) - committed_region_address;

    // Use |reservation_| directly here because |page_allocator_| has a
    // bigger commit page size than actually needed.
    CHECK(reservation_.SetPermissions(committed_region_address,
                                      committed_region_size,
                                      PageAllocator::kReadWrite));

    if (Heap::ShouldZapGarbage()) {
      // Note: the loop bound is the end address of the committed region,
      // not its size.
      for (Address address = committed_region_address;
           address < committed_region_address + committed_region_size;
           address += kPointerSize) {
        Memory<Address>(address) = static_cast<Address>(kZapValue);
      }
    }
  }
  isolate_memory_ = reinterpret_cast<void*>(isolate_address);
}
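// Illustrative numbers for the two roundings above (hypothetical values, not
// V8 constants): with a 256 KB BoundedPageAllocator page size, a 4 KB OS
// commit page size, and sizeof(Isolate) around 32 KB, the kNoAccess
// "reserved" region claims the whole 256 KB page covering the Isolate so the
// bounded allocator never hands it out again, while the kReadWrite
// "committed" region only touches the handful of 4 KB pages that actually
// back the Isolate object.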
#endif  // V8_TARGET_ARCH_64_BIT

}  // namespace internal
}  // namespace v8