5 #include "src/wasm/wasm-code-manager.h" 9 #include "src/assembler-inl.h" 10 #include "src/base/adapters.h" 11 #include "src/base/macros.h" 12 #include "src/base/platform/platform.h" 13 #include "src/codegen.h" 14 #include "src/disassembler.h" 15 #include "src/globals.h" 16 #include "src/macro-assembler-inl.h" 17 #include "src/macro-assembler.h" 18 #include "src/objects-inl.h" 19 #include "src/wasm/compilation-environment.h" 20 #include "src/wasm/function-compiler.h" 21 #include "src/wasm/jump-table-assembler.h" 22 #include "src/wasm/wasm-import-wrapper-cache-inl.h" 23 #include "src/wasm/wasm-module.h" 24 #include "src/wasm/wasm-objects-inl.h" 25 #include "src/wasm/wasm-objects.h" 27 #define TRACE_HEAP(...) \ 29 if (FLAG_trace_wasm_native_heap) PrintF(__VA_ARGS__); \ 40 struct WasmCodeUniquePtrComparator {
41 bool operator()(Address pc,
const std::unique_ptr<WasmCode>& code)
const {
42 DCHECK_NE(kNullAddress, pc);
43 DCHECK_NOT_NULL(code);
44 return pc < code->instruction_start();
50 void DisjointAllocationPool::Merge(base::AddressRegion region) {
51 auto dest_it = regions_.begin();
52 auto dest_end = regions_.end();
55 while (dest_it != dest_end && dest_it->end() < region.begin()) ++dest_it;
58 if (dest_it == dest_end) {
59 regions_.push_back(region);
64 if (dest_it->begin() == region.end()) {
65 base::AddressRegion merged_region{region.begin(),
66 region.size() + dest_it->size()};
67 DCHECK_EQ(merged_region.end(), dest_it->end());
68 *dest_it = merged_region;
73 if (dest_it->begin() > region.end()) {
74 regions_.insert(dest_it, region);
80 DCHECK_EQ(dest_it->end(), region.begin());
81 dest_it->set_size(dest_it->size() + region.size());
82 DCHECK_EQ(dest_it->end(), region.end());
83 auto next_dest = dest_it;
85 if (next_dest != dest_end && dest_it->end() == next_dest->begin()) {
86 dest_it->set_size(dest_it->size() + next_dest->size());
87 DCHECK_EQ(dest_it->end(), next_dest->end());
88 regions_.erase(next_dest);
92 base::AddressRegion DisjointAllocationPool::Allocate(
size_t size) {
93 for (
auto it = regions_.begin(), end = regions_.end(); it != end; ++it) {
94 if (size > it->size())
continue;
95 base::AddressRegion ret{it->begin(), size};
96 if (size == it->size()) {
99 *it = base::AddressRegion{it->begin() + size, it->size() - size};
106 Address WasmCode::constant_pool()
const {
107 if (FLAG_enable_embedded_constant_pool) {
108 if (constant_pool_offset_ < instructions().size()) {
109 return instruction_start() + constant_pool_offset_;
115 size_t WasmCode::trap_handler_index()
const {
116 CHECK(HasTrapHandlerIndex());
117 return static_cast<size_t>(trap_handler_index_);
120 void WasmCode::set_trap_handler_index(
size_t value) {
121 trap_handler_index_ = value;
124 void WasmCode::RegisterTrapHandlerData() {
125 DCHECK(!HasTrapHandlerIndex());
126 if (kind() != WasmCode::kFunction)
return;
128 Address base = instruction_start();
130 size_t size = instructions().size();
132 RegisterHandlerData(base, size, protected_instructions().size(),
133 protected_instructions().start());
137 set_trap_handler_index(static_cast<size_t>(index));
140 bool WasmCode::HasTrapHandlerIndex()
const {
return trap_handler_index_ >= 0; }
142 bool WasmCode::ShouldBeLogged(Isolate* isolate) {
143 return isolate->logger()->is_listening_to_code_events() ||
144 isolate->is_profiling();
// Reports this code object to code-event listeners / the profiler. Looks up
// the function name in the module wire bytes and falls back to a generated
// "wasm-function[index]" name when none is found.
// NOTE(review): this snippet has lost several lines (the declarations of
// `name`, `name_length` and `cname`, the PROFILE(...) invocation heads, the
// `else` branch marker and closing braces) — reconstruct from upstream
// before compiling; code below is left byte-identical, comments only.
147 void WasmCode::LogCode(Isolate* isolate)
const {
148 DCHECK(ShouldBeLogged(isolate));
// Anonymous code (stubs, jump tables) has no function to attribute to.
149 if (IsAnonymous())
return;
151 ModuleWireBytes wire_bytes(native_module()->wire_bytes());
// Look up the (optional) debug name of this function in the name section.
153 WireBytesRef name_ref =
154 native_module()->module()->LookupFunctionName(wire_bytes, index());
155 WasmName name_vec = wire_bytes.GetNameOrNull(name_ref);
156 if (!name_vec.is_empty()) {
157 HandleScope scope(isolate);
// The raw name bytes are round-tripped through a heap String for logging.
158 MaybeHandle<String> maybe_name = isolate->factory()->NewStringFromUtf8(
159 Vector<const char>::cast(name_vec));
161 if (!maybe_name.ToHandle(&name)) {
// String allocation failed (e.g. name exceeds the maximum length).
162 name = isolate->factory()->NewStringFromAsciiChecked(
"<name too long>");
166 name->ToCString(AllowNullsFlag::DISALLOW_NULLS,
167 RobustnessFlag::ROBUST_STRING_TRAVERSAL, &name_length);
169 CodeCreateEvent(CodeEventListener::FUNCTION_TAG,
this,
170 {cname.get(),
static_cast<size_t>(name_length)}));
// No name in the module: synthesize "wasm-function[<index>]".
172 EmbeddedVector<char, 32> generated_name;
173 int length = SNPrintF(generated_name,
"wasm-function[%d]", index());
174 generated_name.Truncate(length);
175 PROFILE(isolate, CodeCreateEvent(CodeEventListener::FUNCTION_TAG,
this,
// Also forward source position info, if present, for stack traces.
179 if (!source_positions().is_empty()) {
180 LOG_CODE_EVENT(isolate, CodeLinePosInfoRecordEvent(instruction_start(),
181 source_positions()));
// Maps this runtime-stub code object back to the stub's name by scanning the
// native module's runtime stub table via WASM_RUNTIME_STUB_LIST.
// NOTE(review): the macro bodies below are truncated (the `return #Name;`
// line, `#undef RETURN_NAME` and the fallback return are missing) — code
// left byte-identical, comments only.
185 const char* WasmCode::GetRuntimeStubName()
const {
186 DCHECK_EQ(WasmCode::kRuntimeStub, kind());
187 #define RETURN_NAME(Name) \ 188 if (native_module_->runtime_stub_table_[WasmCode::k##Name] == this) { \ 191 #define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name) 192 WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
// Validate() checks debug-mode invariants on the relocation info of this
// code object. NOTE(review): the `#ifdef DEBUG`, `switch (mode) {`, `break;`
// statements, `default:` label and closing braces were lost in this snippet.
193 #undef RETURN_NAME_TRAP 198 void WasmCode::Validate()
const {
// Iterate all relocation entries (no mode mask: every mode is checked).
203 for (RelocIterator it(instructions(), reloc_info(), constant_pool());
204 !it.done(); it.next()) {
205 RelocInfo::Mode mode = it.rinfo()->rmode();
// Direct wasm calls must target a slot inside the module's jump table.
207 case RelocInfo::WASM_CALL: {
208 Address target = it.rinfo()->wasm_call_address();
209 WasmCode* code = native_module_->Lookup(target);
210 CHECK_NOT_NULL(code);
211 CHECK_EQ(WasmCode::kJumpTable, code->kind());
212 CHECK(code->contains(target));
// Stub calls must target the first instruction of a runtime stub.
215 case RelocInfo::WASM_STUB_CALL: {
216 Address target = it.rinfo()->wasm_stub_call_address();
217 WasmCode* code = native_module_->Lookup(target);
218 CHECK_NOT_NULL(code);
219 CHECK_EQ(WasmCode::kRuntimeStub, code->kind());
220 CHECK_EQ(target, code->instruction_start());
// Internal references must stay within this code object.
223 case RelocInfo::INTERNAL_REFERENCE:
224 case RelocInfo::INTERNAL_REFERENCE_ENCODED: {
225 Address target = it.rinfo()->target_internal_reference();
226 CHECK(contains(target));
// These modes are allowed to appear and need no further checking.
229 case RelocInfo::EXTERNAL_REFERENCE:
230 case RelocInfo::COMMENT:
231 case RelocInfo::CONST_POOL:
232 case RelocInfo::VENEER_POOL:
// Any other relocation mode is unexpected in wasm code.
236 FATAL(
"Unexpected mode: %d", mode);
242 void WasmCode::Print(
const char* name)
const {
244 os <<
"--- WebAssembly code ---\n";
245 Disassemble(name, os);
246 os <<
"--- End code ---\n";
// Writes a human-readable dump of this code object to {os}: header fields
// (name, index, kind, compiler tier), the disassembled instructions, the
// exception handler table, protected instruction offsets, source positions
// and relocation info. {current_pc} marks the current position in the
// disassembly listing.
// NOTE(review): this snippet lost closing braces, the `it.Advance()` loop
// increment, an `os << "):\n"` continuation and the trailing `#endif` — the
// `#endif` itself appears fused into the start of the next definition.
249 void WasmCode::Disassemble(
const char* name, std::ostream& os,
250 Address current_pc)
const {
251 if (name) os <<
"name: " << name <<
"\n";
252 if (!IsAnonymous()) os <<
"index: " << index() <<
"\n";
253 os <<
"kind: " << GetWasmCodeKindAsString(kind_) <<
"\n";
254 os <<
"compiler: " << (is_liftoff() ?
"Liftoff" :
"TurboFan") <<
"\n";
255 size_t body_size = instructions().size();
256 os <<
"Body (size = " << body_size <<
")\n";
// The instruction stream proper ends at the first of the (optional)
// constant pool / safepoint table / handler table; a zero offset means the
// respective table is absent.
258 #ifdef ENABLE_DISASSEMBLER 259 size_t instruction_size = body_size;
260 if (constant_pool_offset_ && constant_pool_offset_ < instruction_size) {
261 instruction_size = constant_pool_offset_;
263 if (safepoint_table_offset_ && safepoint_table_offset_ < instruction_size) {
264 instruction_size = safepoint_table_offset_;
266 if (handler_table_offset_ && handler_table_offset_ < instruction_size) {
267 instruction_size = handler_table_offset_;
269 DCHECK_LT(0, instruction_size);
270 os <<
"Instructions (size = " << instruction_size <<
")\n";
271 Disassembler::Decode(
nullptr, &os, instructions().start(),
272 instructions().start() + instruction_size,
273 CodeReference(
this), current_pc);
// Dump the exception handler table, if present.
276 if (handler_table_offset_ > 0) {
277 HandlerTable table(instruction_start(), handler_table_offset_);
278 os <<
"Exception Handler Table (size = " << table.NumberOfReturnEntries()
280 table.HandlerTableReturnPrint(os);
// Dump the memory-access offsets protected by the trap handler and their
// landing pads.
284 if (!protected_instructions_.is_empty()) {
285 os <<
"Protected instructions:\n pc offset land pad\n";
286 for (
auto& data : protected_instructions()) {
287 os << std::setw(10) << std::hex << data.instr_offset << std::setw(10)
288 << std::hex << data.landing_offset <<
"\n";
// Dump the source position table (pc offset -> script offset).
293 if (!source_positions().is_empty()) {
294 os <<
"Source positions:\n pc offset position\n";
295 for (SourcePositionTableIterator it(source_positions()); !it.done();
297 os << std::setw(10) << std::hex << it.code_offset() << std::dec
298 << std::setw(10) << it.source_position().ScriptOffset()
299 << (it.is_statement() ?
" statement" :
"") <<
"\n";
// Finally dump every relocation entry.
304 os <<
"RelocInfo (size = " << reloc_info_.size() <<
")\n";
305 for (RelocIterator it(instructions(), reloc_info(), constant_pool());
306 !it.done(); it.next()) {
307 it.rinfo()->Print(
nullptr, os);
310 #endif // ENABLE_DISASSEMBLER 313 const char* GetWasmCodeKindAsString(WasmCode::Kind kind) {
315 case WasmCode::kFunction:
316 return "wasm function";
317 case WasmCode::kWasmToJsWrapper:
319 case WasmCode::kLazyStub:
320 return "lazy-compile";
321 case WasmCode::kRuntimeStub:
322 return "runtime-stub";
323 case WasmCode::kInterpreterEntry:
324 return "interpreter entry";
325 case WasmCode::kJumpTable:
328 return "unknown kind";
331 WasmCode::~WasmCode() {
332 if (HasTrapHandlerIndex()) {
333 CHECK_LT(trap_handler_index(),
334 static_cast<size_t>(std::numeric_limits<int>::max()));
335 trap_handler::ReleaseHandlerData(static_cast<int>(trap_handler_index()));
// Constructs a NativeModule owning {code_space} as its initial code region.
// Sets up the compilation state, import wrapper cache, free-space pool, and
// (if the module declares functions) the code table and jump table.
// NOTE(review): the initializer list is truncated in this snippet — the
// `: kNoTrapHandler` alternative of the ternary and the closing `{` of the
// body, as well as several closing braces, are missing; code left
// byte-identical, comments only.
339 NativeModule::NativeModule(Isolate* isolate,
const WasmFeatures& enabled,
340 bool can_request_more, VirtualMemory code_space,
341 WasmCodeManager* code_manager,
342 std::shared_ptr<const WasmModule> module)
343 : enabled_features_(enabled),
344 module_(
std::move(module)),
345 compilation_state_(CompilationState::New(isolate, this)),
346 import_wrapper_cache_(
std::unique_ptr<WasmImportWrapperCache>(
347 new WasmImportWrapperCache(this))),
348 free_code_space_(code_space.region()),
349 code_manager_(code_manager),
350 can_request_more_memory_(can_request_more),
351 use_trap_handler_(trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
353 DCHECK_NOT_NULL(module_);
// Take ownership of the initial virtual memory reservation.
354 owned_code_space_.emplace_back(std::move(code_space));
355 owned_code_.reserve(num_functions());
// Allocate and zero the code table, one slot per declared (non-imported)
// function, then create the jump table that call sites indirect through.
357 uint32_t num_wasm_functions = module_->num_declared_functions;
358 if (num_wasm_functions > 0) {
359 code_table_.reset(
new WasmCode*[num_wasm_functions]);
360 memset(code_table_.get(), 0, num_wasm_functions *
sizeof(WasmCode*));
362 jump_table_ = CreateEmptyJumpTable(num_wasm_functions);
366 void NativeModule::ReserveCodeTableForTesting(
uint32_t max_functions) {
367 DCHECK_LE(num_functions(), max_functions);
368 WasmCode** new_table =
new WasmCode*[max_functions];
369 memset(new_table, 0, max_functions *
sizeof(*new_table));
370 memcpy(new_table, code_table_.get(),
371 module_->num_declared_functions *
sizeof(*new_table));
372 code_table_.reset(new_table);
375 jump_table_ = CreateEmptyJumpTable(max_functions);
378 void NativeModule::LogWasmCodes(Isolate* isolate) {
379 if (!WasmCode::ShouldBeLogged(isolate))
return;
383 for (WasmCode* code : code_table()) {
384 if (code !=
nullptr) code->LogCode(isolate);
388 CompilationEnv NativeModule::CreateCompilationEnv()
const {
389 return {module(), use_trap_handler_, kRuntimeExceptionSupport};
// Allocates executable memory for {instructions}, creates a WasmCode object
// describing it, inserts it into the sorted {owned_code_} vector, and copies
// the instruction bytes into place. Holds {allocation_mutex_} throughout.
// NOTE(review): this snippet lost the first parameters of the signature
// (index/instructions/stack_slots), the `WasmCode* code;` declaration, the
// trailing constructor arguments (kind/tier), the `} else {` between the
// fast append path and the sorted-insert path, and the `return code;` —
// code left byte-identical, comments only.
392 WasmCode* NativeModule::AddOwnedCode(
394 size_t safepoint_table_offset,
size_t handler_table_offset,
395 size_t constant_pool_offset,
396 OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
397 OwnedVector<const byte> reloc_info,
398 OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
399 WasmCode::Tier tier) {
404 base::MutexGuard lock(&allocation_mutex_);
// Carve out executable space for the instructions.
405 Vector<byte> executable_buffer = AllocateForCode(instructions.size());
407 code =
new WasmCode(
this, index, executable_buffer, stack_slots,
408 safepoint_table_offset, handler_table_offset,
409 constant_pool_offset, std::move(protected_instructions),
410 std::move(reloc_info), std::move(source_position_table),
// Common case: code is allocated at increasing addresses, so appending
// keeps {owned_code_} sorted by instruction start.
413 if (owned_code_.empty() ||
414 code->instruction_start() > owned_code_.back()->instruction_start()) {
416 owned_code_.emplace_back(code);
// Otherwise insert at the right position to keep the vector sorted; this
// ordering is what makes {Lookup} (binary search by pc) work.
422 auto insert_before = std::upper_bound(
423 owned_code_.begin(), owned_code_.end(), code->instruction_start(),
424 WasmCodeUniquePtrComparator{});
425 owned_code_.emplace(insert_before, code);
// Copy the instruction bytes into the executable buffer.
428 memcpy(reinterpret_cast<void*>(code->instruction_start()),
429 instructions.start(), instructions.size());
434 WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
435 WasmCode* ret = AddAnonymousCode(code, WasmCode::kFunction);
439 void NativeModule::SetLazyBuiltin(Handle<Code> code) {
440 uint32_t num_wasm_functions = module_->num_declared_functions;
441 if (num_wasm_functions == 0)
return;
442 WasmCode* lazy_builtin = AddAnonymousCode(code, WasmCode::kLazyStub);
444 Address lazy_compile_target = lazy_builtin->instruction_start();
445 for (
uint32_t i = 0;
i < num_wasm_functions; ++
i) {
446 JumpTableAssembler::EmitLazyCompileJumpSlot(
447 jump_table_->instruction_start(),
i,
448 i + module_->num_imported_functions, lazy_compile_target,
449 WasmCode::kNoFlushICache);
451 Assembler::FlushICache(jump_table_->instructions().start(),
452 jump_table_->instructions().size());
// SetRuntimeStubs copies every wasm runtime stub builtin out of the isolate
// into this native module so calls to them are isolate-independent.
// NOTE(review): the COPY_BUILTIN macro body is truncated and the trailing
// `#undef COPY_BUILTIN` plus closing brace are fused/missing in this
// snippet; code left byte-identical, comments only.
455 void NativeModule::SetRuntimeStubs(Isolate* isolate) {
456 HandleScope scope(isolate);
// Must only be called once (table starts out null).
457 DCHECK_NULL(runtime_stub_table_[0]);
458 #define COPY_BUILTIN(Name) \ 459 runtime_stub_table_[WasmCode::k##Name] = \ 460 AddAnonymousCode(isolate->builtins()->builtin_handle(Builtins::k##Name), \ 461 WasmCode::kRuntimeStub, #Name); 462 #define COPY_BUILTIN_TRAP(Name) COPY_BUILTIN(ThrowWasm##Name) 463 WASM_RUNTIME_STUB_LIST(COPY_BUILTIN, COPY_BUILTIN_TRAP)
// AddAnonymousCode copies an on-heap Code object (builtin/stub) into this
// module's code space, relocating stub calls and internal references.
// NOTE(review): the `const char* name` parameter, the `WasmCode* ret =`
// capture, the FlushICache/return tail braces and several other lines were
// lost below.
464 #undef COPY_BUILTIN_TRAP 468 WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code, WasmCode::Kind kind,
// Off-heap trampolines have no relocation info to copy.
473 const size_t relocation_size =
474 code->is_off_heap_trampoline() ? 0 : code->relocation_size();
475 OwnedVector<byte> reloc_info = OwnedVector<byte>::New(relocation_size);
476 memcpy(reloc_info.start(), code->relocation_start(), relocation_size);
// Copy the source position table out of the heap object.
477 Handle<ByteArray> source_pos_table(code->SourcePositionTable(),
479 OwnedVector<byte> source_pos =
480 OwnedVector<byte>::New(source_pos_table->length());
481 source_pos_table->copy_out(0, source_pos.start(), source_pos_table->length());
482 Vector<const byte> instructions(
483 reinterpret_cast<byte*>(code->InstructionStart()),
484 static_cast<size_t>(code->InstructionSize()));
485 int stack_slots = code->has_safepoint_info() ? code->stack_slots() : 0;
486 int safepoint_table_offset =
487 code->has_safepoint_info() ? code->safepoint_table_offset() : 0;
// Register the copied code under the anonymous function index.
489 AddOwnedCode(WasmCode::kAnonymousFuncIndex,
492 safepoint_table_offset,
493 code->handler_table_offset(),
494 code->constant_pool_offset(),
496 std::move(reloc_info),
497 std::move(source_pos),
// Apply the relocation delta between the original heap code and the copy.
502 intptr_t delta = ret->instruction_start() - code->InstructionStart();
503 int mode_mask = RelocInfo::kApplyMask |
504 RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
// Iterate original and copied reloc info in lockstep: stub-call tags are
// only readable from the original.
505 RelocIterator orig_it(*code, mode_mask);
506 for (RelocIterator it(ret->instructions(), ret->reloc_info(),
507 ret->constant_pool(), mode_mask);
508 !it.done(); it.next(), orig_it.next()) {
509 RelocInfo::Mode mode = it.rinfo()->rmode();
510 if (RelocInfo::IsWasmStubCall(mode)) {
511 uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
512 DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
// Redirect the stub call to this module's own copy of the stub.
514 runtime_stub(static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
515 it.rinfo()->set_wasm_stub_call_address(code->instruction_start(),
518 it.rinfo()->apply(delta);
// Flush the icache over the whole copied region, then optionally print.
524 Assembler::FlushICache(ret->instructions().start(),
525 ret->instructions().size());
526 if (FLAG_print_code || FLAG_print_wasm_code) ret->Print(name);
// Adds freshly compiled code (described by a CodeDesc) to this module:
// copies reloc info, registers the code via AddOwnedCode, patches wasm
// call / stub call relocations to their targets inside this module, applies
// the relocation delta, and flushes the instruction cache.
// NOTE(review): this snippet lost the leading parameters (index, desc,
// stack_slots), the `WasmCode* ret =` capture, the `WasmCode* code =`
// declaration for the stub lookup, trailing call arguments and closing
// braces, and the final `return ret;` — code left byte-identical, comments
// only.
531 WasmCode* NativeModule::AddCode(
533 size_t safepoint_table_offset,
size_t handler_table_offset,
534 OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
535 OwnedVector<const byte> source_pos_table, WasmCode::Kind kind,
536 WasmCode::Tier tier) {
// Relocation info lives at the end of the assembler buffer.
537 OwnedVector<byte> reloc_info = OwnedVector<byte>::New(desc.reloc_size);
538 memcpy(reloc_info.start(), desc.buffer + desc.buffer_size - desc.reloc_size,
541 AddOwnedCode(index, {desc.buffer,
static_cast<size_t>(desc.instr_size)},
542 stack_slots, safepoint_table_offset, handler_table_offset,
543 desc.instr_size - desc.constant_pool_size,
544 std::move(protected_instructions), std::move(reloc_info),
545 std::move(source_pos_table), kind, tier);
// Patch relocations against the final (copied) location of the code.
548 intptr_t delta = ret->instructions().start() - desc.buffer;
549 int mode_mask = RelocInfo::kApplyMask |
550 RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
551 RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
552 for (RelocIterator it(ret->instructions(), ret->reloc_info(),
553 ret->constant_pool(), mode_mask);
554 !it.done(); it.next()) {
555 RelocInfo::Mode mode = it.rinfo()->rmode();
// Direct wasm calls are routed through the module's jump table so that
// later tier-up can retarget them atomically.
556 if (RelocInfo::IsWasmCall(mode)) {
557 uint32_t call_tag = it.rinfo()->wasm_call_tag();
558 Address target = GetCallTargetForFunction(call_tag);
559 it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
560 }
// Stub calls target this module's local copies of the runtime stubs.
else if (RelocInfo::IsWasmStubCall(mode)) {
561 uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
562 DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
564 runtime_stub(static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
565 it.rinfo()->set_wasm_stub_call_address(code->instruction_start(),
568 it.rinfo()->apply(delta);
// One icache flush for the whole region (individual patches above used
// SKIP_ICACHE_FLUSH).
574 Assembler::FlushICache(ret->instructions().start(),
575 ret->instructions().size());
576 if (FLAG_print_code || FLAG_print_wasm_code) ret->Print();
// Re-creates a code object from serialized (cached) module data. Unlike
// AddCode, relocations were already resolved at serialization time; only
// trap handler data must be re-registered.
// NOTE(review): the leading parameters (index, instructions, stack_slots),
// the `WasmCode* code =` capture, and the tail of the function (publishing
// under the lock, `return code;`, closing braces) were lost in this snippet
// — code left byte-identical, comments only.
581 WasmCode* NativeModule::AddDeserializedCode(
583 size_t safepoint_table_offset,
size_t handler_table_offset,
584 size_t constant_pool_offset,
585 OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
586 OwnedVector<const byte> reloc_info,
587 OwnedVector<const byte> source_position_table, WasmCode::Tier tier) {
589 AddOwnedCode(index, instructions, stack_slots, safepoint_table_offset,
590 handler_table_offset, constant_pool_offset,
591 std::move(protected_instructions), std::move(reloc_info),
592 std::move(source_position_table), WasmCode::kFunction, tier);
// Register out-of-bounds trap handling for the deserialized code.
594 if (!code->protected_instructions_.is_empty()) {
595 code->RegisterTrapHandlerData();
// Publication of the code requires the allocation mutex.
597 base::MutexGuard lock(&allocation_mutex_);
// Publishes {code}: installs it in the code table / jump table (via
// InstallCode, whose call was lost from this snippet) unless the function
// was redirected to the interpreter, and registers trap handler data.
// NOTE(review): the InstallCode call and closing braces are missing here —
// code left byte-identical, comments only.
604 void NativeModule::PublishCode(WasmCode* code) {
605 base::MutexGuard lock(&allocation_mutex_);
// Skip publishing code if there is an active redirection to the interpreter
// for this function index.
608 if (has_interpreter_redirection(code->index()))
return;
610 if (!code->protected_instructions_.is_empty()) {
611 code->RegisterTrapHandlerData();
// Publishes an interpreter entry stub for {func_index}: assigns the index to
// the code object, installs it, and records the redirection so regular code
// for this function is no longer published.
// NOTE(review): the `uint32_t func_index` parameter, the InstallCode call
// and the closing brace were lost in this snippet — code left
// byte-identical, comments only.
616 void NativeModule::PublishInterpreterEntry(WasmCode* code,
618 code->index_ = func_index;
619 base::MutexGuard lock(&allocation_mutex_);
621 SetInterpreterRedirection(func_index);
624 std::vector<WasmCode*> NativeModule::SnapshotCodeTable()
const {
625 base::MutexGuard lock(&allocation_mutex_);
626 std::vector<WasmCode*> result;
627 result.reserve(code_table().size());
628 for (WasmCode* code : code_table()) result.push_back(code);
// Allocates a zero-filled jump table sized for {num_wasm_functions} slots
// and registers it as an anonymous code object of kind kJumpTable.
// NOTE(review): most of the AddOwnedCode argument list (offsets, empty
// vectors, tier, closing parenthesis) between lines "639" and "647" was lost
// in this snippet — code left byte-identical, comments only.
632 WasmCode* NativeModule::CreateEmptyJumpTable(
uint32_t num_wasm_functions) {
// Only called if there are wasm functions at all.
634 DCHECK_LT(0, num_wasm_functions);
635 OwnedVector<byte> instructions = OwnedVector<byte>::New(
636 JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
637 memset(instructions.start(), 0, instructions.size());
638 return AddOwnedCode(WasmCode::kAnonymousFuncIndex,
639 instructions.as_vector(),
647 WasmCode::kJumpTable,
651 void NativeModule::InstallCode(WasmCode* code) {
652 DCHECK_LT(code->index(), num_functions());
653 DCHECK_LE(module_->num_imported_functions, code->index());
656 if (code->kind() != WasmCode::kInterpreterEntry) {
657 code_table_[code->index() - module_->num_imported_functions] = code;
661 uint32_t slot_idx = code->index() - module_->num_imported_functions;
662 JumpTableAssembler::PatchJumpTableSlot(jump_table_->instruction_start(),
663 slot_idx, code->instruction_start(),
664 WasmCode::kFlushICache);
// Allocates {size} bytes (rounded up to kCodeAlignment) of committed,
// writable code space for a new code object. Grows the reservation by
// requesting more virtual memory when allowed, and commits pages lazily.
// Caller must hold {allocation_mutex_}.
// NOTE(review): this snippet lost the `page_size` definition, the
// `#if V8_OS_WIN` pre-commit handling, the `else` joining the
// multi-reservation commit loop with the single-range commit, and various
// closing braces — code left byte-identical, comments only.
667 Vector<byte> NativeModule::AllocateForCode(
size_t size) {
// Code objects must start at an aligned address.
671 size = RoundUp(size, kCodeAlignment);
672 base::AddressRegion code_space = free_code_space_.Allocate(size);
673 if (code_space.is_empty()) {
// Out of space and growth disallowed: fatal.
674 if (!can_request_more_memory_) {
675 V8::FatalProcessOutOfMemory(
nullptr,
676 "NativeModule::AllocateForCode reservation");
// Hint the new reservation directly after the current last one, so the
// regions can be merged in the free-space pool.
680 Address hint = owned_code_space_.empty() ? kNullAddress
681 : owned_code_space_.back().end();
683 VirtualMemory new_mem =
684 code_manager_->TryAllocate(size, reinterpret_cast<void*>(hint));
685 if (!new_mem.IsReserved()) {
686 V8::FatalProcessOutOfMemory(
nullptr,
687 "NativeModule::AllocateForCode reservation");
// Register the new range with the manager for pc-based lookup.
690 code_manager_->AssignRanges(new_mem.address(), new_mem.end(),
this);
692 free_code_space_.Merge(new_mem.region());
693 owned_code_space_.emplace_back(std::move(new_mem));
694 code_space = free_code_space_.Allocate(size);
695 DCHECK(!code_space.is_empty());
// Commit the page range covering the allocation that is not yet committed.
698 Address commit_start = RoundUp(code_space.begin(), page_size);
699 Address commit_end = RoundUp(code_space.end(), page_size);
708 if (commit_start < commit_end) {
709 committed_code_space_.fetch_add(commit_end - commit_start);
// {committed_code_space_} is only used for stats; accuracy is best-effort.
711 DCHECK_LE(committed_code_space_.load(), kMaxWasmCodeMemory);
// The commit range may span multiple reservations; walk them back-to-front
// and commit the intersection with each.
718 for (
auto& vmem : base::Reversed(owned_code_space_)) {
719 if (commit_end <= vmem.address() || vmem.end() <= commit_start)
continue;
720 Address start = std::max(commit_start, vmem.address());
721 Address end = std::min(commit_end, vmem.end());
722 size_t commit_size =
static_cast<size_t>(end - start);
723 if (!code_manager_->Commit(start, commit_size)) {
724 V8::FatalProcessOutOfMemory(
nullptr,
725 "NativeModule::AllocateForCode commit");
// Shrink the remaining commit range by what was just committed.
730 if (commit_start == start) commit_start = end;
731 if (commit_end == end) commit_end = start;
732 if (commit_start >= commit_end)
break;
// Single-reservation fast path: commit the whole range at once.
735 if (!code_manager_->Commit(commit_start, commit_end - commit_start)) {
736 V8::FatalProcessOutOfMemory(
nullptr,
737 "NativeModule::AllocateForCode commit");
742 DCHECK(IsAligned(code_space.begin(), kCodeAlignment));
// Track the allocation so permissions can be managed per region later.
743 allocated_code_space_.Merge(code_space);
744 TRACE_HEAP(
"Code alloc for %p: %" PRIxPTR
",+%zu\n",
this, code_space.begin(),
746 return {
reinterpret_cast<byte*
>(code_space.begin()), code_space.size()};
750 class NativeModuleWireBytesStorage final :
public WireBytesStorage {
752 explicit NativeModuleWireBytesStorage(NativeModule* native_module)
753 : native_module_(native_module) {}
755 Vector<const uint8_t> GetCode(WireBytesRef ref)
const final {
756 return native_module_->wire_bytes().SubVector(ref.offset(),
761 NativeModule*
const native_module_;
765 void NativeModule::SetWireBytes(OwnedVector<const byte> wire_bytes) {
766 wire_bytes_ = std::move(wire_bytes);
767 if (!wire_bytes.is_empty()) {
768 compilation_state_->SetWireBytesStorage(
769 std::make_shared<NativeModuleWireBytesStorage>(
this));
773 WasmCode* NativeModule::Lookup(Address pc)
const {
774 base::MutexGuard lock(&allocation_mutex_);
775 if (owned_code_.empty())
return nullptr;
776 auto iter = std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
777 WasmCodeUniquePtrComparator());
778 if (iter == owned_code_.begin())
return nullptr;
780 WasmCode* candidate = iter->get();
781 DCHECK_NOT_NULL(candidate);
782 return candidate->contains(pc) ? candidate :
nullptr;
785 Address NativeModule::GetCallTargetForFunction(
uint32_t func_index)
const {
791 DCHECK_NOT_NULL(jump_table_);
792 uint32_t slot_idx = func_index - module_->num_imported_functions;
793 uint32_t slot_offset = JumpTableAssembler::SlotIndexToOffset(slot_idx);
794 DCHECK_LT(slot_offset, jump_table_->instructions().size());
795 return jump_table_->instruction_start() + slot_offset;
798 uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
799 Address slot_address)
const {
800 DCHECK(is_jump_table_slot(slot_address));
802 static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
803 uint32_t slot_idx = JumpTableAssembler::SlotOffsetToIndex(slot_offset);
804 DCHECK_LT(slot_idx, module_->num_declared_functions);
805 return module_->num_imported_functions + slot_idx;
808 void NativeModule::DisableTrapHandler() {
810 DCHECK(use_trap_handler_);
811 use_trap_handler_ = kNoTrapHandler;
815 uint32_t num_wasm_functions = module_->num_declared_functions;
816 memset(code_table_.get(), 0, num_wasm_functions *
sizeof(WasmCode*));
822 NativeModule::~NativeModule() {
823 TRACE_HEAP(
"Deleting native module: %p\n", reinterpret_cast<void*>(
this));
826 compilation_state_->CancelAndWait();
827 code_manager_->FreeNativeModule(
this);
830 WasmCodeManager::WasmCodeManager(WasmMemoryTracker* memory_tracker,
831 size_t max_committed)
832 : memory_tracker_(memory_tracker),
833 remaining_uncommitted_code_space_(max_committed) {
834 DCHECK_LE(max_committed, kMaxWasmCodeMemory);
837 bool WasmCodeManager::Commit(Address start,
size_t size) {
839 if (FLAG_perf_prof)
return true;
840 DCHECK(IsAligned(start, AllocatePageSize()));
841 DCHECK(IsAligned(size, AllocatePageSize()));
846 size_t old_value = remaining_uncommitted_code_space_.load();
847 if (old_value < size)
return false;
848 if (remaining_uncommitted_code_space_.compare_exchange_weak(
849 old_value, old_value - size)) {
854 ? PageAllocator::kReadWrite
855 : PageAllocator::kReadWriteExecute;
858 SetPermissions(GetPlatformPageAllocator(), start, size, permission);
859 TRACE_HEAP(
"Setting rw permissions for %p:%p\n",
860 reinterpret_cast<void*>(start),
861 reinterpret_cast<void*>(start + size));
865 remaining_uncommitted_code_space_.fetch_add(size);
871 void WasmCodeManager::AssignRanges(Address start, Address end,
872 NativeModule* native_module) {
873 base::MutexGuard lock(&native_modules_mutex_);
874 lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
877 void WasmCodeManager::AssignRangesAndAddModule(Address start, Address end,
878 NativeModule* native_module) {
879 base::MutexGuard lock(&native_modules_mutex_);
880 lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
881 native_modules_.emplace(native_module);
// Tries to reserve {size} bytes of virtual address space for wasm code,
// near {hint} if given. Returns an unreserved VirtualMemory on failure.
// NOTE(review): this snippet lost the `page_allocator` setup, the size
// round-up, the early `return {}` on both failure paths, the eager-commit
// comment and the final `return mem;` — code left byte-identical, comments
// only.
884 VirtualMemory WasmCodeManager::TryAllocate(
size_t size,
void* hint) {
// Charge the process-wide address-space budget first.
888 if (!memory_tracker_->ReserveAddressSpace(size,
889 WasmMemoryTracker::kHardLimit)) {
894 VirtualMemory mem(page_allocator, size, hint,
// Reservation failed: return the address-space budget.
896 if (!mem.IsReserved()) {
897 memory_tracker_->ReleaseReservation(size);
900 TRACE_HEAP(
"VMem alloc: %p:%p (%zu)\n",
901 reinterpret_cast<void*>(mem.address()),
902 reinterpret_cast<void*>(mem.end()), mem.size());
// perf needs the region mapped rwx eagerly to attribute samples.
905 if (FLAG_perf_prof) {
906 SetPermissions(GetPlatformPageAllocator(), mem.address(), mem.size(),
907 PageAllocator::kReadWriteExecute);
912 void WasmCodeManager::SampleModuleSizes(Isolate* isolate)
const {
913 base::MutexGuard lock(&native_modules_mutex_);
914 for (NativeModule* native_module : native_modules_) {
916 static_cast<int>(native_module->committed_code_space_.load() / MB);
917 isolate->counters()->wasm_module_code_size_mb()->AddSample(code_size);
921 void WasmCodeManager::SetMaxCommittedMemoryForTesting(
size_t limit) {
922 remaining_uncommitted_code_space_.store(limit);
927 void ModuleSamplingCallback(v8::Isolate* v8_isolate, v8::GCType type,
928 v8::GCCallbackFlags flags,
void* data) {
929 Isolate* isolate =
reinterpret_cast<Isolate*
>(v8_isolate);
930 isolate->wasm_engine()->code_manager()->SampleModuleSizes(isolate);
936 void WasmCodeManager::InstallSamplingGCCallback(Isolate* isolate) {
937 isolate->heap()->AddGCEpilogueCallback(ModuleSamplingCallback,
938 v8::kGCTypeMarkSweepCompact,
nullptr);
942 size_t WasmCodeManager::EstimateNativeModuleCodeSize(
const WasmModule* module) {
943 constexpr
size_t kCodeSizeMultiplier = 4;
944 constexpr
size_t kCodeOverhead = 32;
945 constexpr
size_t kStaticCodeSize = 512;
946 constexpr
size_t kImportSize = 64 * kPointerSize;
948 size_t estimate = kStaticCodeSize;
949 for (
auto&
function : module->functions) {
950 estimate += kCodeOverhead + kCodeSizeMultiplier *
function.code.length();
953 JumpTableAssembler::SizeForNumberOfSlots(module->num_declared_functions);
954 estimate += kImportSize * module->num_imported_functions;
960 size_t WasmCodeManager::EstimateNativeModuleNonCodeSize(
961 const WasmModule* module) {
962 size_t wasm_module_estimate = EstimateStoredSize(module);
964 uint32_t num_wasm_functions = module->num_declared_functions;
967 size_t native_module_estimate =
968 sizeof(NativeModule) +
969 (
sizeof(WasmCode*) * num_wasm_functions) +
970 (
sizeof(WasmCode) * num_wasm_functions);
972 return wasm_module_estimate + native_module_estimate;
975 bool WasmCodeManager::ShouldForceCriticalMemoryPressureNotification() {
976 base::MutexGuard lock(&native_modules_mutex_);
981 constexpr
size_t kCriticalThreshold = 32 * 1024 * 1024;
982 return native_modules_.size() > 1 &&
983 remaining_uncommitted_code_space_.load() < kCriticalThreshold;
// Creates a new NativeModule: possibly triggers a memory-pressure GC first,
// reserves code space (retrying after GCs on failure), constructs the
// module, registers its address range, and returns it.
// NOTE(review): this snippet lost the closing braces of the retry loop, the
// `true` argument of the retry MemoryPressureNotification call, the
// TRACE_HEAP size argument and the final `return ret;` — code left
// byte-identical, comments only.
986 std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
987 Isolate* isolate,
const WasmFeatures& enabled,
size_t code_size_estimate,
988 bool can_request_more, std::shared_ptr<const WasmModule> module) {
989 DCHECK_EQ(
this, isolate->wasm_engine()->code_manager());
// Low on code space: ask the embedder/GC to free dead modules first.
990 if (ShouldForceCriticalMemoryPressureNotification()) {
991 (
reinterpret_cast<v8::Isolate*
>(isolate))
992 ->MemoryPressureNotification(MemoryPressureLevel::kCritical);
// With a required code range, reserve the maximum up front; otherwise size
// the reservation by the estimate.
996 size_t code_vmem_size =
997 kRequiresCodeRange ? kMaxWasmCodeMemory : code_size_estimate;
// Try up to three times; between tries, trigger a GC via memory pressure.
1001 static constexpr
int kAllocationRetries = 2;
1002 VirtualMemory code_space;
1003 for (
int retries = 0;; ++retries) {
1004 code_space = TryAllocate(code_vmem_size);
1005 if (code_space.IsReserved())
break;
1006 if (retries == kAllocationRetries) {
1007 V8::FatalProcessOutOfMemory(isolate,
"WasmCodeManager::NewNativeModule");
1011 isolate->heap()->MemoryPressureNotification(MemoryPressureLevel::kCritical,
// Capture the range before {code_space} is moved into the module.
1015 Address start = code_space.address();
1016 size_t size = code_space.size();
1017 Address end = code_space.end();
1018 std::unique_ptr<NativeModule> ret(
new NativeModule(
1019 isolate, enabled, can_request_more, std::move(code_space),
1020 isolate->wasm_engine()->code_manager(), std::move(module)));
1021 TRACE_HEAP(
"New NativeModule %p: Mem: %" PRIuPTR
",+%zu\n", ret.get(), start,
1023 AssignRangesAndAddModule(start, end, ret.get());
// Flips the page permissions of all code owned by this module between
// writable (for patching/compilation) and executable. Returns true on
// success. No-op when write protection is disabled or the state already
// matches.
// NOTE(review): this snippet lost the `page_allocator` setup, the
// `PageAllocator::Permission permission =` declaration, the `return false;`
// failure paths, the `#if V8_OS_WIN` whole-reservation branch comment, the
// region-size round-up expression and the final `return true;` — code left
// byte-identical, comments only.
1027 bool NativeModule::SetExecutable(
bool executable) {
1028 if (is_executable_ == executable)
return true;
1029 TRACE_HEAP(
"Setting module %p as executable: %d.\n",
this, executable);
1033 if (FLAG_wasm_write_protect_code_memory) {
1035 executable ? PageAllocator::kReadExecute : PageAllocator::kReadWrite;
// When growth is allowed, flip whole reservations (Windows requires the
// permissions of a placeholder-backed reservation to change as a unit).
1045 if (can_request_more_memory_) {
1046 for (
auto& vmem : owned_code_space_) {
1047 if (!SetPermissions(page_allocator, vmem.address(), vmem.size(),
1051 TRACE_HEAP(
"Set %p:%p to executable:%d\n", vmem.address(), vmem.end(),
1054 is_executable_ = executable;
// Otherwise flip only the actually-allocated regions, page by page.
1058 for (
auto& region : allocated_code_space_.regions()) {
1061 size_t region_size =
1063 if (!SetPermissions(page_allocator, region.begin(), region_size,
1067 TRACE_HEAP(
"Set %p:%p to executable:%d\n",
1068 reinterpret_cast<void*>(region.begin()),
1069 reinterpret_cast<void*>(region.end()), executable);
1072 is_executable_ = executable;
// Unregisters {native_module} and releases all of its code reservations:
// removes it from the module set and lookup map, frees each VirtualMemory,
// returns the address-space budget to the memory tracker, and returns the
// committed-space budget to this manager.
// NOTE(review): the `code_space.Free();` call between the DCHECKs and the
// loop's closing brace were lost in this snippet — code left byte-identical,
// comments only.
1076 void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
1077 base::MutexGuard lock(&native_modules_mutex_);
1078 DCHECK_EQ(1, native_modules_.count(native_module));
1079 native_modules_.erase(native_module);
1080 TRACE_HEAP(
"Freeing NativeModule %p\n", native_module);
1081 for (
auto& code_space : native_module->owned_code_space_) {
1082 DCHECK(code_space.IsReserved());
1083 TRACE_HEAP(
"VMem Release: %" PRIxPTR
":%" PRIxPTR
" (%zu)\n",
1084 code_space.address(), code_space.end(), code_space.size());
// Remove the range from pc-based lookup before freeing it.
1085 lookup_map_.erase(code_space.address());
1086 memory_tracker_->ReleaseReservation(code_space.size());
// After Free() the reservation must be gone.
1088 DCHECK(!code_space.IsReserved());
1090 native_module->owned_code_space_.clear();
// Return the module's committed bytes to the global budget.
1092 size_t code_size = native_module->committed_code_space_.load();
1093 DCHECK(IsAligned(code_size, AllocatePageSize()));
1094 remaining_uncommitted_code_space_.fetch_add(code_size);
1096 DCHECK_LE(remaining_uncommitted_code_space_.load(), kMaxWasmCodeMemory);
1099 NativeModule* WasmCodeManager::LookupNativeModule(Address pc)
const {
1100 base::MutexGuard lock(&native_modules_mutex_);
1101 if (lookup_map_.empty())
return nullptr;
1103 auto iter = lookup_map_.upper_bound(pc);
1104 if (iter == lookup_map_.begin())
return nullptr;
1106 Address region_start = iter->first;
1107 Address region_end = iter->second.first;
1108 NativeModule* candidate = iter->second.second;
1110 DCHECK_NOT_NULL(candidate);
1111 return region_start <= pc && pc < region_end ? candidate :
nullptr;
1114 WasmCode* WasmCodeManager::LookupCode(Address pc)
const {
1115 NativeModule* candidate = LookupNativeModule(pc);
1116 return candidate ? candidate->Lookup(pc) :
nullptr;
1119 size_t WasmCodeManager::remaining_uncommitted_code_space()
const {
1120 return remaining_uncommitted_code_space_.load();
1125 NativeModuleModificationScope::NativeModuleModificationScope(
1126 NativeModule* native_module)
1127 : native_module_(native_module) {
1128 if (FLAG_wasm_write_protect_code_memory && native_module_ &&
1129 (native_module_->modification_scope_depth_++) == 0) {
1130 bool success = native_module_->SetExecutable(
false);
1135 NativeModuleModificationScope::~NativeModuleModificationScope() {
1136 if (FLAG_wasm_write_protect_code_memory && native_module_ &&
1137 (native_module_->modification_scope_depth_--) == 1) {
1138 bool success = native_module_->SetExecutable(
true);
// NOTE(review): the following two lines are stray fragments of the
// v8::PageAllocator interface (include/v8-platform.h) that do not belong in
// this file; commented out to keep the translation unit well-formed.
// virtual void* GetRandomMmapAddr() = 0;
// virtual size_t AllocatePageSize() = 0;