// NOTE(review): this chunk is a mangled extraction — upstream file line
// numbers are fused into the text and several source lines were dropped.
// Code below is kept byte-identical; only comments are added.
//
// PcIsOffHeap: returns whether |pc| falls inside the isolate's embedded
// builtins blob [blob_start, blob_start + blob_size). Only the
// FLAG_embedded_builtins branch is visible here; the disabled-flag
// fallthrough (presumably `return false;`) and closing braces are missing
// from this extraction — TODO confirm against upstream.
5 #include "src/snapshot/embedded-data.h" 7 #include "src/assembler-inl.h" 8 #include "src/callable.h" 9 #include "src/macro-assembler.h" 10 #include "src/objects-inl.h" 11 #include "src/snapshot/snapshot.h" 17 bool InstructionStream::PcIsOffHeap(Isolate* isolate, Address pc) {
18 if (FLAG_embedded_builtins) {
// Blob base address, reinterpreted from the stored byte pointer.
19 const Address start =
reinterpret_cast<Address
>(isolate->embedded_blob());
20 return IsInRange(pc, start, start + isolate->embedded_blob_size());
// TryLookupCode: maps an off-heap |address| back to the builtin Code object
// whose embedded instructions contain it. Returns an empty Code() when the
// address is not off-heap at all, or lies before the first builtin.
// The lookup is a binary search over [0, builtin_count) by instruction
// range; using the PADDED size means addresses inside inter-builtin padding
// are attributed to the preceding builtin.
// NOTE(review): the loop header (e.g. `while (l < r)`), the `r = mid;` /
// `l = mid + 1;` updates and closing braces are missing from this
// extraction — verify against upstream before editing.
27 Code InstructionStream::TryLookupCode(Isolate* isolate, Address address) {
28 if (!PcIsOffHeap(isolate, address))
return Code();
30 EmbeddedData d = EmbeddedData::FromBlob();
31 if (address < d.InstructionStartOfBuiltin(0))
return Code();
// Binary-search bounds over the builtin table.
37 int l = 0, r = Builtins::builtin_count;
39 const int mid = (l + r) / 2;
40 Address start = d.InstructionStartOfBuiltin(mid);
// Padded size: padding bytes count as part of builtin |mid|.
41 Address end = start + d.PaddedInstructionSizeOfBuiltin(mid);
43 if (address < start) {
45 }
else if (address >= end) {
// In range: found the containing builtin.
48 return isolate->builtins()->builtin(mid);
// CreateOffHeapInstructionStream: serializes the isolate's builtins into a
// freshly page-allocated region and makes it executable. Sequence visible
// here: build the blob, allocate page-aligned RW memory at a randomized
// address, memcpy the blob in, then flip permissions to read+execute.
// NOTE(review): the remaining parameters (presumably uint8_t** data /
// uint32_t* size out-params), the page_allocator / page_size definitions,
// and the trailing `*size = ...` are missing from this extraction —
// TODO confirm against upstream.
56 void InstructionStream::CreateOffHeapInstructionStream(Isolate* isolate,
59 EmbeddedData d = EmbeddedData::FromIsolate(isolate);
// Round the blob up to whole pages for the allocator.
64 const uint32_t allocated_size = RoundUp(d.size(), page_size);
66 uint8_t* allocated_bytes =
static_cast<uint8_t*
>(
67 AllocatePages(page_allocator, isolate->heap()->GetRandomMmapAddr(),
68 allocated_size, page_size, PageAllocator::kReadWrite));
69 CHECK_NOT_NULL(allocated_bytes);
// Copy while still writable, then seal as read+execute (W^X).
71 std::memcpy(allocated_bytes, d.data(), d.size());
72 CHECK(SetPermissions(page_allocator, allocated_bytes, allocated_size,
73 PageAllocator::kReadExecute));
// Hand ownership of the mapping back to the caller via out-param.
75 *data = allocated_bytes;
// FreeOffHeapInstructionStream: releases a mapping previously produced by
// CreateOffHeapInstructionStream. The freed length is rounded up to whole
// pages to mirror the rounded-up allocation.
// NOTE(review): the `uint32_t size` parameter and the page_allocator /
// page_size setup lines are missing from this extraction — TODO confirm.
82 void InstructionStream::FreeOffHeapInstructionStream(uint8_t* data,
87 CHECK(FreePages(page_allocator, data, RoundUp(size, page_size)));
// BuiltinAliasesOffHeapTrampolineRegister: returns true when the builtin's
// call interface uses kOffHeapTrampolineRegister — either as the context
// register or as any register parameter — which would clash with the
// off-heap trampoline that clobbers that register.
// NOTE(review): the switch cases under Builtins::KindOf and the early-exit
// branch body at line 115 are missing from this extraction — TODO confirm
// against upstream.
92 bool BuiltinAliasesOffHeapTrampolineRegister(Isolate* isolate, Code code) {
93 DCHECK(Builtins::IsIsolateIndependent(code->builtin_index()));
94 switch (Builtins::KindOf(code->builtin_index())) {
// Look up the builtin's calling convention descriptor.
111 Callable callable = Builtins::CallableFor(
112 isolate, static_cast<Builtins::Name>(code->builtin_index()));
113 CallInterfaceDescriptor descriptor = callable.descriptor();
115 if (descriptor.ContextRegister() == kOffHeapTrampolineRegister) {
// Scan every register parameter for a clash with the trampoline register.
119 for (
int i = 0;
i < descriptor.GetRegisterParameterCount();
i++) {
120 Register reg = descriptor.GetRegisterParameter(
i);
121 if (reg == kOffHeapTrampolineRegister)
return true;
// FinalizeEmbeddedCodeTargets: walks every isolate-independent builtin and
// rewrites code-target relocations in the off-heap copy so they point at
// the corresponding instruction start inside the blob (instead of at
// on-heap Code objects). Two RelocIterators are advanced in lockstep: one
// over the on-heap Code, one over the blob copy. Only performed on the
// architectures listed in the #if, where such targets can be expressed.
// NOTE(review): loop-advance statements, the set_target_address trailing
// arguments, the #else branch, and closing braces are missing from this
// extraction — verify against upstream before editing.
127 void FinalizeEmbeddedCodeTargets(Isolate* isolate, EmbeddedData* blob) {
// Only plain and pc-relative code targets need rewriting.
128 static const int kRelocMask =
129 RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
130 RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET);
132 for (
int i = 0;
i < Builtins::builtin_count;
i++) {
133 if (!Builtins::IsIsolateIndependent(
i))
continue;
135 Code code = isolate->builtins()->builtin(
i);
// Iterate relocations of the on-heap original and the blob copy in lockstep.
136 RelocIterator on_heap_it(code, kRelocMask);
137 RelocIterator off_heap_it(blob, code, kRelocMask);
139 #if defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_ARM64) || \ 140 defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS) || \ 141 defined(V8_TARGET_ARCH_IA32) 146 while (!on_heap_it.done()) {
147 DCHECK(!off_heap_it.done());
149 RelocInfo* rinfo = on_heap_it.rinfo();
150 DCHECK_EQ(rinfo->rmode(), off_heap_it.rinfo()->rmode());
151 Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
// Embedded code may only call other embedded (isolate-independent) builtins.
152 CHECK(Builtins::IsIsolateIndependentBuiltin(target));
// Redirect the off-heap relocation to the target's blob address.
155 off_heap_it.rinfo()->set_target_address(
156 blob->InstructionStartOfBuiltin(target->builtin_index()),
162 DCHECK(off_heap_it.done());
// Both iterators must be exhausted together.
167 CHECK(on_heap_it.done());
168 CHECK(off_heap_it.done());
// EmbeddedData::FromIsolate: builds the embedded blob for |isolate|.
// Visible phases: (1) lay out a per-builtin metadata table (offset/length
// into the instruction area), flagging builtins that are not actually
// isolate-independent, wasm stubs that still need relocation, or builtins
// that alias the off-heap trampoline register; (2) allocate the blob,
// copy in metadata and each builtin's raw instructions; (3) rewrite code
// targets via FinalizeEmbeddedCodeTargets; (4) compute and store the blob
// hash at offset 0; (5) optionally print statistics.
// NOTE(review): loop-interior lines (e.g. `int length = ...`,
// `raw_data_size` initialization, the CHECK_WITH_MSG around the
// "One or more builtins..." message, fprintf argument tails, and many
// closing braces) are missing from this extraction — verify against
// upstream before editing.
169 #endif // defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_ARM64) 176 EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
177 Builtins* builtins = isolate->builtins();
// One metadata slot per table entry; filled in the layout loop below.
180 std::vector<struct Metadata> metadata(kTableSize);
182 bool saw_unsafe_builtin =
false;
184 for (
int i = 0;
i < Builtins::builtin_count;
i++) {
185 Code code = builtins->builtin(
i);
187 if (Builtins::IsIsolateIndependent(
i)) {
// Sanity checks: anything embedded must truly be isolate-independent.
190 if (!code->IsIsolateIndependent(isolate)) {
191 saw_unsafe_builtin =
true;
192 fprintf(stderr,
"%s is not isolate-independent.\n", Builtins::name(
i));
194 if (Builtins::IsWasmRuntimeStub(
i) &&
195 RelocInfo::RequiresRelocation(code)) {
200 saw_unsafe_builtin =
true;
201 fprintf(stderr,
"%s is a wasm runtime stub but needs relocation.\n",
204 if (BuiltinAliasesOffHeapTrampolineRegister(isolate, code)) {
205 saw_unsafe_builtin =
true;
206 fprintf(stderr,
"%s aliases the off-heap trampoline register.\n",
// Record this builtin's slice of the instruction area (aligned).
212 DCHECK_EQ(0, raw_data_size % kCodeAlignment);
213 metadata[
i].instructions_offset = raw_data_size;
214 metadata[
i].instructions_length = length;
217 raw_data_size += PadAndAlign(length);
// Non-embedded builtins still get an offset (current end of data).
219 metadata[
i].instructions_offset = raw_data_size;
224 "One or more builtins marked as isolate-independent either contains " 225 "isolate-dependent code or aliases the off-heap trampoline register. " 226 "If in doubt, ask jgruber@");
// Allocate the blob: fixed header + metadata, then raw instructions.
228 const uint32_t blob_size = RawDataOffset() + raw_data_size;
229 uint8_t*
const blob =
new uint8_t[blob_size];
230 uint8_t*
const raw_data_start = blob + RawDataOffset();
// Pre-fill with zap values so uninitialized gaps are recognizable.
234 ZapCode(reinterpret_cast<Address>(blob), blob_size);
237 DCHECK_EQ(MetadataSize(),
sizeof(metadata[0]) * metadata.size());
238 std::memcpy(blob + MetadataOffset(), metadata.data(), MetadataSize());
// Second pass: copy each embedded builtin's instructions into place.
241 for (
int i = 0;
i < Builtins::builtin_count;
i++) {
242 if (!Builtins::IsIsolateIndependent(
i))
continue;
243 Code code = builtins->builtin(
i);
244 uint32_t offset = metadata[
i].instructions_offset;
245 uint8_t* dst = raw_data_start + offset;
246 DCHECK_LE(RawDataOffset() + offset + code->raw_instruction_size(),
248 std::memcpy(dst, reinterpret_cast<uint8_t*>(code->raw_instruction_start()),
249 code->raw_instruction_size());
252 EmbeddedData d(blob, blob_size);
// Patch code targets in the blob copy to point into the blob itself.
255 FinalizeEmbeddedCodeTargets(isolate, &d);
// Hash everything after the hash field and store it at offset 0.
258 STATIC_ASSERT(HashSize() == kSizetSize);
259 const size_t hash = d.CreateHash();
260 std::memcpy(blob + HashOffset(), &hash, HashSize());
262 DCHECK_EQ(hash, d.CreateHash());
263 DCHECK_EQ(hash, d.Hash());
265 if (FLAG_serialization_statistics) d.PrintStatistics();
270 Address EmbeddedData::InstructionStartOfBuiltin(
int i)
const {
271 DCHECK(Builtins::IsBuiltinId(
i));
272 const struct Metadata* metadata = Metadata();
273 const uint8_t* result = RawData() + metadata[
i].instructions_offset;
274 DCHECK_LE(result, data_ + size_);
275 DCHECK_IMPLIES(result == data_ + size_, InstructionSizeOfBuiltin(
i) == 0);
276 return reinterpret_cast<Address
>(result);
279 uint32_t EmbeddedData::InstructionSizeOfBuiltin(
int i)
const {
280 DCHECK(Builtins::IsBuiltinId(
i));
281 const struct Metadata* metadata = Metadata();
282 return metadata[
i].instructions_length;
285 size_t EmbeddedData::CreateHash()
const {
286 STATIC_ASSERT(HashOffset() == 0);
287 STATIC_ASSERT(HashSize() == kSizetSize);
288 return base::hash_range(data_ + HashSize(), data_ + size_);
// PrintStatistics: dumps embedded-blob size statistics when
// --serialization-statistics is set: total/metadata/instruction/padding
// sizes, embedded builtin count, and instruction-size percentiles over the
// sorted per-builtin sizes.
// NOTE(review): the declaration of the `sizes` array and the
// `embedded_count++` increment are missing from this extraction —
// TODO confirm against upstream.
291 void EmbeddedData::PrintStatistics()
const {
292 DCHECK(FLAG_serialization_statistics);
294 constexpr
int kCount = Builtins::builtin_count;
296 int embedded_count = 0;
297 int instruction_size = 0;
// Collect the instruction size of every embedded builtin.
299 for (
int i = 0;
i < kCount;
i++) {
300 if (!Builtins::IsIsolateIndependent(
i))
continue;
301 const int size = InstructionSizeOfBuiltin(
i);
302 instruction_size += size;
303 sizes[embedded_count] = size;
// Sort collected sizes so percentile indices can be taken directly.
308 std::sort(&sizes[0], &sizes[embedded_count]);
// Percentile indices via truncating double->int conversion.
310 const int k50th = embedded_count * 0.5;
311 const int k75th = embedded_count * 0.75;
312 const int k90th = embedded_count * 0.90;
313 const int k99th = embedded_count * 0.99;
315 const int metadata_size =
static_cast<int>(HashSize() + MetadataSize());
317 PrintF(
"EmbeddedData:\n");
318 PrintF(
" Total size: %d\n",
319 static_cast<int>(size()));
320 PrintF(
" Metadata size: %d\n", metadata_size);
321 PrintF(
" Instruction size: %d\n", instruction_size);
// Padding is whatever the other categories don't account for.
322 PrintF(
" Padding: %d\n",
323 static_cast<int>(size() - metadata_size - instruction_size));
324 PrintF(
" Embedded builtin count: %d\n", embedded_count);
325 PrintF(
" Instruction size (50th percentile): %d\n", sizes[k50th]);
326 PrintF(
" Instruction size (75th percentile): %d\n", sizes[k75th]);
327 PrintF(
" Instruction size (90th percentile): %d\n", sizes[k90th]);
328 PrintF(
" Instruction size (99th percentile): %d\n", sizes[k99th]);
virtual size_t AllocatePageSize()=0