V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
wasm-code-manager.h
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_WASM_CODE_MANAGER_H_
#define V8_WASM_WASM_CODE_MANAGER_H_

#include <functional>
#include <list>
#include <map>
#include <unordered_map>
#include <unordered_set>

#include "src/base/macros.h"
#include "src/builtins/builtins-definitions.h"
#include "src/handles.h"
#include "src/trap-handler/trap-handler.h"
#include "src/vector.h"
#include "src/wasm/compilation-environment.h"
#include "src/wasm/wasm-features.h"
#include "src/wasm/wasm-limits.h"

namespace v8 {
namespace internal {

struct CodeDesc;
class Code;

namespace wasm {

class NativeModule;
class WasmCodeManager;
class WasmEngine;
class WasmMemoryTracker;
class WasmImportWrapperCache;
struct WasmModule;

// Sorted, disjoint and non-overlapping memory regions. A region is of the
// form [start, end). So there's no [start, end), [end, other_end),
// because that should have been reduced to [start, other_end).
class V8_EXPORT_PRIVATE DisjointAllocationPool final {
 public:
  DisjointAllocationPool() = default;

  explicit DisjointAllocationPool(base::AddressRegion region)
      : regions_({region}) {}

  DisjointAllocationPool(DisjointAllocationPool&& other) = default;
  DisjointAllocationPool& operator=(DisjointAllocationPool&& other) = default;

  // Merge the parameter region into this object while preserving ordering of
  // the regions. The assumption is that the passed parameter is not
  // intersecting this object - for example, it was obtained from a previous
  // Allocate.
  void Merge(base::AddressRegion);

  // Allocate a contiguous region of size {size}. Return an empty region on
  // failure.
  base::AddressRegion Allocate(size_t size);

  bool IsEmpty() const { return regions_.empty(); }
  const std::list<base::AddressRegion>& regions() const { return regions_; }

 private:
  std::list<base::AddressRegion> regions_;

  DISALLOW_COPY_AND_ASSIGN(DisjointAllocationPool);
};
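
// ---------------------------------------------------------------------------
// Usage sketch (editorial illustration, not part of the V8 header). A minimal
// example of the Allocate/Merge cycle above, assuming a pool seeded with a
// single region; the address and sizes are invented for the example.
namespace example {
inline void UseDisjointAllocationPool() {
  DisjointAllocationPool pool(base::AddressRegion{0x10000, 0x8000});
  base::AddressRegion region = pool.Allocate(0x1000);  // carve out 4 KB
  if (!region.is_empty()) {
    // ... hand {region} out for freshly generated code ...
    pool.Merge(region);  // give it back; adjacent regions are coalesced
  }
}
}  // namespace example
// ---------------------------------------------------------------------------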

class V8_EXPORT_PRIVATE WasmCode final {
 public:
  enum Kind {
    kFunction,
    kWasmToJsWrapper,
    kLazyStub,
    kRuntimeStub,
    kInterpreterEntry,
    kJumpTable
  };

  // Each runtime stub is identified by an id. This id is used to reference the
  // stub via {RelocInfo::WASM_STUB_CALL} and gets resolved during relocation.
  enum RuntimeStubId {
#define DEF_ENUM(Name) k##Name,
#define DEF_ENUM_TRAP(Name) kThrowWasm##Name,
    WASM_RUNTIME_STUB_LIST(DEF_ENUM, DEF_ENUM_TRAP)
#undef DEF_ENUM_TRAP
#undef DEF_ENUM
    kRuntimeStubCount
  };

  // kOther is used if we have WasmCode that is neither
  // liftoff- nor turbofan-compiled, i.e. if Kind is
  // not kFunction.
  enum Tier : int8_t { kLiftoff, kTurbofan, kOther };

  Vector<byte> instructions() const { return instructions_; }
  Address instruction_start() const {
    return reinterpret_cast<Address>(instructions_.start());
  }
  Vector<const byte> reloc_info() const { return reloc_info_.as_vector(); }
  Vector<const byte> source_positions() const {
    return source_position_table_.as_vector();
  }

  uint32_t index() const {
    DCHECK(!IsAnonymous());
    return index_;
  }
  // Anonymous functions are functions that don't carry an index.
  bool IsAnonymous() const { return index_ == kAnonymousFuncIndex; }
  Kind kind() const { return kind_; }
  NativeModule* native_module() const { return native_module_; }
  Tier tier() const { return tier_; }
  Address constant_pool() const;
  size_t constant_pool_offset() const { return constant_pool_offset_; }
  size_t safepoint_table_offset() const { return safepoint_table_offset_; }
  size_t handler_table_offset() const { return handler_table_offset_; }
  uint32_t stack_slots() const { return stack_slots_; }
  bool is_liftoff() const { return tier_ == kLiftoff; }
  bool contains(Address pc) const {
    return reinterpret_cast<Address>(instructions_.start()) <= pc &&
           pc < reinterpret_cast<Address>(instructions_.end());
  }

  Vector<trap_handler::ProtectedInstructionData> protected_instructions()
      const {
    return protected_instructions_.as_vector();
  }

  const char* GetRuntimeStubName() const;

  void Validate() const;
  void Print(const char* name = nullptr) const;
  void Disassemble(const char* name, std::ostream& os,
                   Address current_pc = kNullAddress) const;

  static bool ShouldBeLogged(Isolate* isolate);
  void LogCode(Isolate* isolate) const;

  ~WasmCode();

  enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };

  static constexpr uint32_t kAnonymousFuncIndex = 0xffffffff;
  STATIC_ASSERT(kAnonymousFuncIndex > kV8MaxWasmFunctions);

 private:
  friend class NativeModule;

  WasmCode(NativeModule* native_module, uint32_t index,
           Vector<byte> instructions, uint32_t stack_slots,
           size_t safepoint_table_offset, size_t handler_table_offset,
           size_t constant_pool_offset,
           OwnedVector<trap_handler::ProtectedInstructionData>
               protected_instructions,
           OwnedVector<const byte> reloc_info,
           OwnedVector<const byte> source_position_table, Kind kind, Tier tier)
      : instructions_(instructions),
        reloc_info_(std::move(reloc_info)),
        source_position_table_(std::move(source_position_table)),
        native_module_(native_module),
        index_(index),
        kind_(kind),
        constant_pool_offset_(constant_pool_offset),
        stack_slots_(stack_slots),
        safepoint_table_offset_(safepoint_table_offset),
        handler_table_offset_(handler_table_offset),
        protected_instructions_(std::move(protected_instructions)),
        tier_(tier) {
    DCHECK_LE(safepoint_table_offset, instructions.size());
    DCHECK_LE(constant_pool_offset, instructions.size());
    DCHECK_LE(handler_table_offset, instructions.size());
  }

  // Code objects that have been registered with the global trap handler
  // within this process will have a {trap_handler_index} associated with
  // them.
  size_t trap_handler_index() const;
  void set_trap_handler_index(size_t);
  bool HasTrapHandlerIndex() const;

  // Register protected instruction information with the trap handler. Sets
  // trap_handler_index.
  void RegisterTrapHandlerData();

  Vector<byte> instructions_;
  OwnedVector<const byte> reloc_info_;
  OwnedVector<const byte> source_position_table_;
  NativeModule* native_module_ = nullptr;
  uint32_t index_;
  Kind kind_;
  size_t constant_pool_offset_ = 0;
  uint32_t stack_slots_ = 0;
  // We care about safepoint data for wasm-to-js functions, since there may
  // be stack/register tagged values for large number conversions.
  size_t safepoint_table_offset_ = 0;
  size_t handler_table_offset_ = 0;
  intptr_t trap_handler_index_ = -1;
  OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions_;
  Tier tier_;

  DISALLOW_COPY_AND_ASSIGN(WasmCode);
};
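
// ---------------------------------------------------------------------------
// Usage sketch (editorial illustration, not part of the V8 header). Given a
// sampled or faulting {pc}, find the WasmCode object covering it via
// {contains}; {codes} is a hypothetical collection for the example.
namespace example {
inline WasmCode* FindCodeContaining(const std::vector<WasmCode*>& codes,
                                    Address pc) {
  for (WasmCode* code : codes) {
    if (code->contains(pc)) return code;  // pc lies in [start, end)
  }
  return nullptr;
}
}  // namespace example
// ---------------------------------------------------------------------------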

// Return a textual description of the kind.
const char* GetWasmCodeKindAsString(WasmCode::Kind);

class V8_EXPORT_PRIVATE NativeModule final {
 public:
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
  static constexpr bool kCanAllocateMoreMemory = false;
#else
  static constexpr bool kCanAllocateMoreMemory = true;
#endif

  // {AddCode} is thread safe w.r.t. other calls to {AddCode} or methods adding
  // code below, i.e. it can be called concurrently from background threads.
  WasmCode* AddCode(uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
                    size_t safepoint_table_offset, size_t handler_table_offset,
                    OwnedVector<trap_handler::ProtectedInstructionData>
                        protected_instructions,
                    OwnedVector<const byte> source_position_table,
                    WasmCode::Kind kind, WasmCode::Tier tier);

  WasmCode* AddDeserializedCode(
      uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
      size_t safepoint_table_offset, size_t handler_table_offset,
      size_t constant_pool_offset,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> reloc_info,
      OwnedVector<const byte> source_position_table, WasmCode::Tier tier);

  // Adds anonymous code for testing purposes.
  WasmCode* AddCodeForTesting(Handle<Code> code);

  // When starting lazy compilation, provide the WasmLazyCompile builtin by
  // calling SetLazyBuiltin. It will be copied into this NativeModule and the
  // jump table will be populated with that copy.
  void SetLazyBuiltin(Handle<Code> code);

  // Initializes all runtime stubs by copying them over from the JS-allocated
  // heap into this native module. It must be called exactly once per native
  // module before adding other WasmCode so that runtime stub ids can be
  // resolved during relocation.
  void SetRuntimeStubs(Isolate* isolate);

  // Makes the code available to the system (by entering it into the code table
  // and patching the jump table). Callers have to take care not to race with
  // threads executing the old code.
  void PublishCode(WasmCode* code);

  // Switch a function to an interpreter entry wrapper. When adding interpreter
  // wrappers, we do not insert them in the code_table; however, we let them
  // self-identify as the {index} function.
  void PublishInterpreterEntry(WasmCode* code, uint32_t index);

  // Creates a snapshot of the current state of the code table. This is useful
  // to get a consistent view of the table (e.g. used by the serializer).
  std::vector<WasmCode*> SnapshotCodeTable() const;

  WasmCode* code(uint32_t index) const {
    DCHECK_LT(index, num_functions());
    DCHECK_LE(module_->num_imported_functions, index);
    return code_table_[index - module_->num_imported_functions];
  }

  bool has_code(uint32_t index) const { return code(index) != nullptr; }

  WasmCode* runtime_stub(WasmCode::RuntimeStubId index) const {
    DCHECK_LT(index, WasmCode::kRuntimeStubCount);
    WasmCode* code = runtime_stub_table_[index];
    DCHECK_NOT_NULL(code);
    return code;
  }

  Address jump_table_start() const {
    return jump_table_ ? jump_table_->instruction_start() : kNullAddress;
  }

  ptrdiff_t jump_table_offset(uint32_t func_index) const {
    DCHECK_GE(func_index, num_imported_functions());
    return GetCallTargetForFunction(func_index) - jump_table_start();
  }

  bool is_jump_table_slot(Address address) const {
    return jump_table_->contains(address);
  }

  // Transition this module from code relying on trap handlers (i.e. without
  // explicit memory bounds checks) to code that does not require trap handlers
  // (i.e. code with explicit bounds checks).
  // This method must only be called if {use_trap_handler()} is true (it will
  // be false afterwards). All code in this {NativeModule} needs to be
  // re-added after calling this method.
  void DisableTrapHandler();

  // Returns the target to call for the given function (returns a jump table
  // slot within {jump_table_}).
  Address GetCallTargetForFunction(uint32_t func_index) const;

  // Reverse lookup from a given call target (i.e. a jump table slot as the
  // above {GetCallTargetForFunction} returns) to a function index.
  uint32_t GetFunctionIndexFromJumpTableSlot(Address slot_address) const;

  bool SetExecutable(bool executable);

  // For cctests, where we build both WasmModule and the runtime objects
  // on the fly, and bypass the instance builder pipeline.
  void ReserveCodeTableForTesting(uint32_t max_functions);

  void LogWasmCodes(Isolate* isolate);

  CompilationState* compilation_state() { return compilation_state_.get(); }

  // Create a {CompilationEnv} object for compilation. Only valid as long as
  // this {NativeModule} is alive.
  CompilationEnv CreateCompilationEnv() const;

  uint32_t num_functions() const {
    return module_->num_declared_functions + module_->num_imported_functions;
  }
  uint32_t num_imported_functions() const {
    return module_->num_imported_functions;
  }
  UseTrapHandler use_trap_handler() const { return use_trap_handler_; }
  void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; }
  bool lazy_compile_frozen() const { return lazy_compile_frozen_; }
  Vector<const byte> wire_bytes() const { return wire_bytes_.as_vector(); }
  const WasmModule* module() const { return module_.get(); }
  size_t committed_code_space() const { return committed_code_space_.load(); }

  void SetWireBytes(OwnedVector<const byte> wire_bytes);

  WasmCode* Lookup(Address) const;

  WasmImportWrapperCache* import_wrapper_cache() const {
    return import_wrapper_cache_.get();
  }

  ~NativeModule();

  const WasmFeatures& enabled_features() const { return enabled_features_; }

 private:
  friend class WasmCode;
  friend class WasmCodeManager;
  friend class NativeModuleModificationScope;

  NativeModule(Isolate* isolate, const WasmFeatures& enabled_features,
               bool can_request_more, VirtualMemory code_space,
               WasmCodeManager* code_manager,
               std::shared_ptr<const WasmModule> module);

  WasmCode* AddAnonymousCode(Handle<Code>, WasmCode::Kind kind,
                             const char* name = nullptr);
  // Allocate code space. Returns a valid buffer or fails with OOM (crash).
  Vector<byte> AllocateForCode(size_t size);

  // Primitive for adding code to the native module. All code added to a native
  // module is owned by that module. Various callers get to decide on how the
  // code is obtained (CodeDesc vs, as a point in time, Code), the kind,
  // whether it has an index or is anonymous, etc.
  WasmCode* AddOwnedCode(uint32_t index, Vector<const byte> instructions,
                         uint32_t stack_slots, size_t safepoint_table_offset,
                         size_t handler_table_offset,
                         size_t constant_pool_offset,
                         OwnedVector<trap_handler::ProtectedInstructionData>
                             protected_instructions,
                         OwnedVector<const byte> reloc_info,
                         OwnedVector<const byte> source_position_table,
                         WasmCode::Kind, WasmCode::Tier);

  WasmCode* CreateEmptyJumpTable(uint32_t num_wasm_functions);

  // Hold the {allocation_mutex_} when calling this method.
  void InstallCode(WasmCode* code);

  Vector<WasmCode*> code_table() const {
    return {code_table_.get(), module_->num_declared_functions};
  }

  // Hold the {allocation_mutex_} when calling this method.
  bool has_interpreter_redirection(uint32_t func_index) {
    DCHECK_LT(func_index, num_functions());
    DCHECK_LE(module_->num_imported_functions, func_index);
    if (!interpreter_redirections_) return false;
    uint32_t bitset_idx = func_index - module_->num_imported_functions;
    uint8_t byte = interpreter_redirections_[bitset_idx / kBitsPerByte];
    return byte & (1 << (bitset_idx % kBitsPerByte));
  }

  // Hold the {allocation_mutex_} when calling this method.
  void SetInterpreterRedirection(uint32_t func_index) {
    DCHECK_LT(func_index, num_functions());
    DCHECK_LE(module_->num_imported_functions, func_index);
    if (!interpreter_redirections_) {
      interpreter_redirections_.reset(
          new uint8_t[RoundUp<kBitsPerByte>(module_->num_declared_functions) /
                      kBitsPerByte]);
    }
    uint32_t bitset_idx = func_index - module_->num_imported_functions;
    uint8_t& byte = interpreter_redirections_[bitset_idx / kBitsPerByte];
    byte |= 1 << (bitset_idx % kBitsPerByte);
  }

  // Features enabled for this module. We keep a copy of the features that
  // were enabled at the time of the creation of this native module,
  // to be consistent across asynchronous compilations later.
  const WasmFeatures enabled_features_;

  // TODO(clemensh): Make this a unique_ptr (requires refactoring
  // AsyncCompileJob).
  std::shared_ptr<const WasmModule> module_;

  OwnedVector<const byte> wire_bytes_;

  WasmCode* runtime_stub_table_[WasmCode::kRuntimeStubCount] = {nullptr};

  // Jump table used to easily redirect wasm function calls.
  WasmCode* jump_table_ = nullptr;

  // The compilation state keeps track of compilation tasks for this module.
  // Note that its destructor blocks until all tasks are finished/aborted and
  // hence needs to be destructed first when this native module dies.
  std::unique_ptr<CompilationState> compilation_state_;

  // A cache of the import wrappers, keyed on the kind and signature.
  std::unique_ptr<WasmImportWrapperCache> import_wrapper_cache_;

  // This mutex protects concurrent calls to {AddCode} and friends.
  mutable base::Mutex allocation_mutex_;

  //////////////////////////////////////////////////////////////////////////
  // Protected by {allocation_mutex_}:

  // Holds all allocated code objects. Maintained in ascending order of the
  // code's instruction start address to allow lookups.
  std::vector<std::unique_ptr<WasmCode>> owned_code_;

  std::unique_ptr<WasmCode*[]> code_table_;

  // Null if no redirections exist, otherwise a bitset over all functions in
  // this module marking those functions that have been redirected.
  std::unique_ptr<uint8_t[]> interpreter_redirections_;

  DisjointAllocationPool free_code_space_;
  DisjointAllocationPool allocated_code_space_;
  std::list<VirtualMemory> owned_code_space_;

  // End of fields protected by {allocation_mutex_}.
  //////////////////////////////////////////////////////////////////////////

  WasmCodeManager* const code_manager_;
  std::atomic<size_t> committed_code_space_{0};
  int modification_scope_depth_ = 0;
  bool can_request_more_memory_;
  UseTrapHandler use_trap_handler_ = kNoTrapHandler;
  bool is_executable_ = false;
  bool lazy_compile_frozen_ = false;

  DISALLOW_COPY_AND_ASSIGN(NativeModule);
};
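
// ---------------------------------------------------------------------------
// Usage sketch (editorial illustration, not part of the V8 header). Callers
// always call through the per-function jump table slot, so republishing a
// function (e.g. replacing Liftoff code with TurboFan code) only patches the
// slot and never the call sites.
namespace example {
inline Address ResolveWasmCallee(NativeModule* native_module,
                                 uint32_t func_index) {
  Address target = native_module->GetCallTargetForFunction(func_index);
  DCHECK(native_module->is_jump_table_slot(target));  // stable slot address
  return target;
}
}  // namespace example
// ---------------------------------------------------------------------------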

class V8_EXPORT_PRIVATE WasmCodeManager final {
 public:
  explicit WasmCodeManager(WasmMemoryTracker* memory_tracker,
                           size_t max_committed);

  // Create a new NativeModule. The caller is responsible for its
  // lifetime. The native module will be given some memory for code,
  // which will be page size aligned. The size of the initial memory
  // is determined with a heuristic based on the total size of wasm
  // code. The native module may later request more memory.
  // TODO(titzer): isolate is only required here for CompilationState.
  std::unique_ptr<NativeModule> NewNativeModule(
      Isolate* isolate, const WasmFeatures& enabled_features,
      size_t code_size_estimate, bool can_request_more,
      std::shared_ptr<const WasmModule> module);

  NativeModule* LookupNativeModule(Address pc) const;
  WasmCode* LookupCode(Address pc) const;
  size_t remaining_uncommitted_code_space() const;

  // Add a sample of all module sizes.
  void SampleModuleSizes(Isolate* isolate) const;

  void SetMaxCommittedMemoryForTesting(size_t limit);

  // TODO(v8:7424): For now we sample module sizes in a GC callback. This will
  // bias samples towards apps with high memory pressure. We should switch to
  // using sampling based on regular intervals independent of the GC.
  static void InstallSamplingGCCallback(Isolate* isolate);

  static size_t EstimateNativeModuleCodeSize(const WasmModule* module);
  static size_t EstimateNativeModuleNonCodeSize(const WasmModule* module);

 private:
  friend class NativeModule;

  V8_WARN_UNUSED_RESULT VirtualMemory TryAllocate(size_t size,
                                                  void* hint = nullptr);
  bool Commit(Address, size_t);
  // Currently, we uncommit a whole module, so all we need is to account
  // for the freed memory size. We do that in FreeNativeModule.
  // There's no separate Uncommit.

  void FreeNativeModule(NativeModule*);
  void AssignRanges(Address start, Address end, NativeModule*);
  void AssignRangesAndAddModule(Address start, Address end, NativeModule*);
  bool ShouldForceCriticalMemoryPressureNotification();

  WasmMemoryTracker* const memory_tracker_;
  std::atomic<size_t> remaining_uncommitted_code_space_;
  mutable base::Mutex native_modules_mutex_;

  //////////////////////////////////////////////////////////////////////////
  // Protected by {native_modules_mutex_}:

  std::map<Address, std::pair<Address, NativeModule*>> lookup_map_;
  std::unordered_set<NativeModule*> native_modules_;

  // End of fields protected by {native_modules_mutex_}.
  //////////////////////////////////////////////////////////////////////////

  DISALLOW_COPY_AND_ASSIGN(WasmCodeManager);
};
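
// ---------------------------------------------------------------------------
// Usage sketch (editorial illustration, not part of the V8 header). Shows how
// the code-size estimate feeds {NewNativeModule}; all arguments besides the
// manager are assumed to be available at the call site.
namespace example {
inline std::unique_ptr<NativeModule> MakeNativeModule(
    WasmCodeManager* manager, Isolate* isolate,
    const WasmFeatures& enabled_features,
    std::shared_ptr<const WasmModule> module) {
  size_t estimate =
      WasmCodeManager::EstimateNativeModuleCodeSize(module.get());
  return manager->NewNativeModule(isolate, enabled_features, estimate,
                                  NativeModule::kCanAllocateMoreMemory,
                                  std::move(module));
}
}  // namespace example
// ---------------------------------------------------------------------------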

// Within the scope, the native_module is writable and not executable.
// At the scope's destruction, the native_module is executable and not
// writable. The states inside the scope and at its destruction do not
// depend on the native_module's state when entering the scope.
// We currently mark the entire module's memory W^X:
//  - for AOT, that's as efficient as it can be.
//  - for Lazy, we don't have a heuristic for functions that may need
//    patching, and even if we did, the resulting set of pages may be
//    fragmented. Currently, we try to keep the number of syscalls low.
//  - similar argument for debug time.
class V8_EXPORT_PRIVATE NativeModuleModificationScope final {
 public:
  explicit NativeModuleModificationScope(NativeModule* native_module);
  ~NativeModuleModificationScope();

 private:
  NativeModule* native_module_;
};
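
// ---------------------------------------------------------------------------
// Usage sketch (editorial illustration, not part of the V8 header). Typical
// RAII use: code memory is writable while the scope lives and is switched
// back to executable when the scope is destroyed.
namespace example {
inline void PatchCode(NativeModule* native_module) {
  NativeModuleModificationScope scope(native_module);
  // ... write or patch code inside {native_module} here ...
}  // {scope} ends: memory becomes executable, not writable
}  // namespace example
// ---------------------------------------------------------------------------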

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_WASM_CODE_MANAGER_H_