7 #include "src/counters.h" 8 #include "src/heap/heap-inl.h" 9 #include "src/objects-inl.h" 10 #include "src/objects/js-array-buffer-inl.h" 11 #include "src/wasm/wasm-engine.h" 12 #include "src/wasm/wasm-limits.h" 13 #include "src/wasm/wasm-memory.h" 14 #include "src/wasm/wasm-module.h" 22 constexpr
// Number of bytes reserved *before* the start of a wasm memory when full
// guard regions are used (see TryAllocateBackingStore, which adds this to
// kWasmMaxHeapOffset when sizing the reservation and then offsets the usable
// memory start by the same amount). 1u << 31 == 2 GiB.
size_t kNegativeGuardSize = 1u << 31;
// Records the outcome of a wasm-memory allocation attempt as a sample in the
// isolate's wasm_memory_allocation_result histogram counter.
24 void AddAllocationStatusSample(Isolate* isolate,
25 WasmMemoryTracker::AllocationStatus status) {
26 isolate->counters()->wasm_memory_allocation_result()->AddSample(
27 static_cast<int>(status));
// Tries to allocate the backing store for a wasm memory of |size| bytes.
// On success, fills *allocation_base / *allocation_length with the actual
// reservation (which may be larger than |size| to accommodate guard regions)
// and registers the allocation with |memory_tracker|; the returned pointer is
// the usable memory start, which may be offset into the reservation.
// NOTE(review): several original lines are elided in this chunk, so the
// comments below describe only the logic that is visible here.
30 void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, Heap* heap,
31 size_t size,
void** allocation_base,
32 size_t* allocation_length) {
33 using AllocationStatus = WasmMemoryTracker::AllocationStatus;
// Full guard regions are only requested on 64-bit targets.
34 #if V8_TARGET_ARCH_64_BIT 35 bool require_full_guard_regions =
true;
37 bool require_full_guard_regions =
false;
// After a failure, retry up to kAllocationRetries more times, triggering a
// critical memory-pressure notification (GC) between attempts.
44 static constexpr
int kAllocationRetries = 2;
45 bool did_retry =
false;
// First loop: reserve address space (accounting only) with the tracker.
46 for (
int trial = 0;; ++trial) {
// With full guard regions the reservation covers kWasmMaxHeapOffset plus
// the 2 GiB negative guard area; otherwise it is sized from the (rounded)
// requested size. NOTE(review): the rest of this expression is elided here.
55 require_full_guard_regions
56 ? RoundUp(kWasmMaxHeapOffset + kNegativeGuardSize, CommitPageSize())
57 : RoundUp(base::bits::RoundUpToPowerOfTwo32(
60 DCHECK_GE(*allocation_length, size);
61 DCHECK_GE(*allocation_length, kWasmPageSize);
// Guard-region reservations are checked against the softer limit, so that
// giving up guard regions (below) can still succeed under the hard limit.
63 auto limit = require_full_guard_regions ? WasmMemoryTracker::kSoftLimit
64 : WasmMemoryTracker::kHardLimit;
65 if (memory_tracker->ReserveAddressSpace(*allocation_length, limit))
break;
69 if (trial == kAllocationRetries) {
// Out of retries: if allowed, fall back to bounds checks without full
// guard regions and try again.
72 if (require_full_guard_regions && FLAG_wasm_trap_handler_fallback) {
73 require_full_guard_regions =
false;
84 if (FLAG_abort_on_stack_or_string_length_overflow) {
85 FATAL(
"could not allocate wasm memory");
87 AddAllocationStatusSample(
88 heap->isolate(), AllocationStatus::kAddressSpaceLimitReachedFailure);
// Ask the heap to free memory before the next trial.
92 heap->MemoryPressureNotification(MemoryPressureLevel::kCritical,
true);
96 DCHECK_NULL(*allocation_base);
// Second loop: actually map the pages (initially inaccessible).
97 for (
int trial = 0;; ++trial) {
99 AllocatePages(GetPlatformPageAllocator(),
nullptr, *allocation_length,
100 kWasmPageSize, PageAllocator::kNoAccess);
101 if (*allocation_base !=
nullptr)
break;
102 if (trial == kAllocationRetries) {
// Mapping failed for good: undo the address-space accounting.
103 memory_tracker->ReleaseReservation(*allocation_length);
104 AddAllocationStatusSample(heap->isolate(),
105 AllocationStatus::kOtherFailure);
108 heap->MemoryPressureNotification(MemoryPressureLevel::kCritical,
true);
// The usable memory starts after the negative guard area (if any).
110 byte* memory =
reinterpret_cast<byte*
>(*allocation_base);
111 if (require_full_guard_regions) {
112 memory += kNegativeGuardSize;
// Make only the requested portion read-write; the remainder of the
// reservation keeps kNoAccess and acts as guard pages.
118 SetPermissions(GetPlatformPageAllocator(), memory,
119 RoundUp(size, kWasmPageSize), PageAllocator::kReadWrite);
123 V8::FatalProcessOutOfMemory(
nullptr,
"TryAllocateBackingStore");
127 memory_tracker->RegisterAllocation(heap->isolate(), *allocation_base,
128 *allocation_length, memory, size);
129 AddAllocationStatusSample(heap->isolate(),
130 did_retry ? AllocationStatus::kSuccessAfterRetry
131 : AllocationStatus::kSuccess);
// Per-platform caps on the total address space reserved for wasm memories.
// The soft limit applies while full guard regions are still requested; the
// hard limit applies once guard regions have been given up (see the limit
// selection in TryAllocateBackingStore / ReserveAddressSpace).
// MIPS64 has a smaller usable virtual address space, hence tighter limits.
135 #if V8_TARGET_ARCH_MIPS64 138 constexpr
size_t kAddressSpaceSoftLimit = 0x2100000000L;
139 constexpr
size_t kAddressSpaceHardLimit = 0x4000000000L;
140 #elif V8_TARGET_ARCH_64_BIT 141 constexpr
size_t kAddressSpaceSoftLimit = 0x6000000000L;
142 constexpr
size_t kAddressSpaceHardLimit = 0x10100000000L;
// 32-bit targets: limits fit in a 32-bit size_t.
144 constexpr
size_t kAddressSpaceSoftLimit = 0x90000000;
145 constexpr
size_t kAddressSpaceHardLimit = 0xC0000000;
// All wasm memories must have been freed (and their reservations released)
// before the tracker is destroyed; both counters must be back to zero.
150 WasmMemoryTracker::~WasmMemoryTracker() {
153 DCHECK_EQ(reserved_address_space_, 0u);
154 DCHECK_EQ(allocated_address_space_, 0u);
// Lock-free accounting of address-space reservations: atomically adds
// |num_bytes| to reserved_address_space_ if doing so stays under the chosen
// limit. Returns false when the limit would be exceeded.
// NOTE(review): compare_exchange_weak can fail spuriously; the retry path
// (if any) is not visible in this chunk — confirm against the full file.
157 bool WasmMemoryTracker::ReserveAddressSpace(
size_t num_bytes,
158 ReservationLimit limit) {
159 size_t reservation_limit =
160 limit == kSoftLimit ? kAddressSpaceSoftLimit : kAddressSpaceHardLimit;
162 size_t old_count = reserved_address_space_.load();
// Written as two checks to avoid overflow in reservation_limit arithmetic.
163 if (old_count > reservation_limit)
return false;
164 if (reservation_limit - old_count < num_bytes)
return false;
165 if (reserved_address_space_.compare_exchange_weak(old_count,
166 old_count + num_bytes)) {
// Returns |num_bytes| of previously reserved address space to the pool.
// Debug-checks that we never release more than was reserved.
172 void WasmMemoryTracker::ReleaseReservation(
size_t num_bytes) {
173 size_t const old_reserved = reserved_address_space_.fetch_sub(num_bytes);
175 DCHECK_LE(num_bytes, old_reserved);
// Records a successful backing-store allocation under the tracker's mutex:
// bumps allocated_address_space_, samples the new usage, and stores the
// AllocationData keyed by the buffer's (usable) start address.
178 void WasmMemoryTracker::RegisterAllocation(Isolate* isolate,
179 void* allocation_base,
180 size_t allocation_length,
182 size_t buffer_length) {
183 base::MutexGuard scope_lock(&mutex_);
185 allocated_address_space_ += allocation_length;
186 AddAddressSpaceSample(isolate);
188 allocations_.emplace(buffer_start,
189 AllocationData{allocation_base, allocation_length,
190 buffer_start, buffer_length});
// Removes the tracking entry for |buffer_start| and returns a copy of its
// AllocationData, decrementing both the reserved and the allocated
// address-space counters. CHECK-fails if the allocation is unknown.
// |isolate| may be null, in which case no usage sample is recorded.
193 WasmMemoryTracker::AllocationData WasmMemoryTracker::ReleaseAllocation(
194 Isolate* isolate,
const void* buffer_start) {
195 base::MutexGuard scope_lock(&mutex_);
197 auto find_result = allocations_.find(buffer_start);
198 CHECK_NE(find_result, allocations_.end());
200 if (find_result != allocations_.end()) {
201 size_t num_bytes = find_result->second.allocation_length;
202 DCHECK_LE(num_bytes, reserved_address_space_);
203 DCHECK_LE(num_bytes, allocated_address_space_);
204 reserved_address_space_ -= num_bytes;
205 allocated_address_space_ -= num_bytes;
209 if (isolate) AddAddressSpaceSample(isolate);
// Copy out the data before erasing the map entry that owns it.
211 AllocationData allocation_data = find_result->second;
212 allocations_.erase(find_result);
213 return allocation_data;
// Looks up the AllocationData for |buffer_start|. Returns a pointer into the
// tracker's map when found; presumably returns nullptr otherwise — the
// not-found path is elided in this chunk, confirm against the full file.
// The returned pointer is only valid while the allocation stays registered.
218 const WasmMemoryTracker::AllocationData* WasmMemoryTracker::FindAllocationData(
219 const void* buffer_start) {
220 base::MutexGuard scope_lock(&mutex_);
221 const auto& result = allocations_.find(buffer_start);
222 if (result != allocations_.end()) {
223 return &result->second;
// Returns true iff |buffer_start| is the start of a tracked wasm memory.
228 bool WasmMemoryTracker::IsWasmMemory(
const void* buffer_start) {
229 base::MutexGuard scope_lock(&mutex_);
230 return allocations_.find(buffer_start) != allocations_.end();
// Returns whether the tracked allocation starting at |buffer_start| was made
// with full guard regions, i.e. whether the reservation extends at least
// kWasmMaxHeapOffset bytes beyond the usable memory start.
233 bool WasmMemoryTracker::HasFullGuardRegions(
const void* buffer_start) {
234 base::MutexGuard scope_lock(&mutex_);
235 const auto allocation = allocations_.find(buffer_start);
// Unknown buffers cannot have guard regions.
237 if (allocation == allocations_.end()) {
241 Address start =
reinterpret_cast<Address
>(buffer_start);
// End of the whole reservation (base + length), not of the usable memory.
243 reinterpret_cast<Address
>(allocation->second.allocation_base) +
244 allocation->second.allocation_length;
245 return start + kWasmMaxHeapOffset < limit;
// If |buffer_start| is a tracked wasm memory, releases its tracking entry
// and unmaps the entire reservation. CHECK-fails if FreePages fails.
248 bool WasmMemoryTracker::FreeMemoryIfIsWasmMemory(Isolate* isolate,
249 const void* buffer_start) {
250 if (IsWasmMemory(buffer_start)) {
251 const AllocationData allocation = ReleaseAllocation(isolate, buffer_start);
252 CHECK(FreePages(GetPlatformPageAllocator(), allocation.allocation_base,
253 allocation.allocation_length));
// Samples the current allocated address space (in MiB, hence the >> 20) into
// the isolate's wasm_address_space_usage_mb histogram.
259 void WasmMemoryTracker::AddAddressSpaceSample(Isolate* isolate) {
262 isolate->counters()->wasm_address_space_usage_mb()->AddSample(
263 static_cast<int>(allocated_address_space_ >> 20));
// Wraps an existing wasm backing store in a new (tenured) JSArrayBuffer.
// Wasm memory buffers are flagged is_wasm_memory, are not neuterable
// (detachable) through the normal path, and are growable (memory.grow).
266 Handle<JSArrayBuffer> SetupArrayBuffer(Isolate* isolate,
void* backing_store,
267 size_t size,
bool is_external,
269 Handle<JSArrayBuffer> buffer =
270 isolate->factory()->NewJSArrayBuffer(shared, TENURED);
271 constexpr
bool is_wasm_memory =
true;
272 JSArrayBuffer::Setup(buffer, isolate, is_external, backing_store, size,
273 shared, is_wasm_memory);
274 buffer->set_is_neuterable(
false);
275 buffer->set_is_growable(
true);
// Allocates a backing store for a wasm memory of |size| bytes and wraps it
// in a JSArrayBuffer. Returns an empty MaybeHandle when |size| exceeds the
// wasm maximum or the allocation fails.
279 MaybeHandle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate,
size_t size,
282 if (size > max_mem_bytes())
return {};
284 WasmMemoryTracker* memory_tracker = isolate->wasm_engine()->memory_tracker();
// Filled in by TryAllocateBackingStore with the actual reservation.
287 void* allocation_base =
nullptr;
288 size_t allocation_length = 0;
290 void* memory = TryAllocateBackingStore(memory_tracker, isolate->heap(), size,
291 &allocation_base, &allocation_length);
292 if (memory ==
nullptr)
return {};
// Debug-only: freshly mapped wasm memory must be zero-initialized.
296 const byte* bytes =
reinterpret_cast<const byte*
>(memory);
297 for (
size_t i = 0;
i < size; ++
i) {
298 DCHECK_EQ(0, bytes[
i]);
// Tell the embedder about the externally held memory so GC heuristics can
// account for it.
302 reinterpret_cast<v8::Isolate*
>(isolate)
303 ->AdjustAmountOfExternalAllocatedMemory(size);
// The ArrayBuffer does not own the store; the memory tracker manages it.
305 constexpr
bool is_external =
false;
306 return SetupArrayBuffer(isolate, memory, size, is_external, shared);
// Detaches |buffer| from its backing store (e.g. after memory.grow replaces
// it): unregisters it from the heap, frees the store, and marks the buffer
// external, non-wasm and neuterable. Shared buffers are never detached.
// NOTE(review): the tail of this function is elided in this chunk.
309 void DetachMemoryBuffer(Isolate* isolate, Handle<JSArrayBuffer> buffer,
311 if (buffer->is_shared())
return;
312 DCHECK(!buffer->is_neuterable());
314 const bool is_external = buffer->is_external();
315 DCHECK(!buffer->is_neuterable());
// Mark external first so the heap no longer considers the store GC-managed.
317 buffer->set_is_external(
true);
318 isolate->heap()->UnregisterArrayBuffer(*buffer);
325 buffer->FreeBackingStoreFromMainThread();
329 DCHECK(buffer->is_external());
330 buffer->set_is_wasm_memory(
false);
// Now that the store is gone, the buffer may be neutered (detached).
331 buffer->set_is_neuterable(
true);