V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
wasm-objects.cc
1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/wasm/wasm-objects.h"
6 #include "src/utils.h"
7 
8 #include "src/assembler-inl.h"
9 #include "src/base/iterator.h"
10 #include "src/code-factory.h"
11 #include "src/compiler/wasm-compiler.h"
12 #include "src/counters.h"
13 #include "src/debug/debug-interface.h"
14 #include "src/objects-inl.h"
15 #include "src/objects/debug-objects-inl.h"
16 #include "src/objects/shared-function-info.h"
17 #include "src/trap-handler/trap-handler.h"
18 #include "src/wasm/jump-table-assembler.h"
19 #include "src/wasm/module-compiler.h"
20 #include "src/wasm/module-decoder.h"
21 #include "src/wasm/wasm-code-manager.h"
22 #include "src/wasm/wasm-engine.h"
23 #include "src/wasm/wasm-limits.h"
24 #include "src/wasm/wasm-memory.h"
25 #include "src/wasm/wasm-module.h"
26 #include "src/wasm/wasm-objects-inl.h"
27 #include "src/wasm/wasm-text.h"
28 
29 #define TRACE(...) \
30  do { \
31  if (FLAG_trace_wasm_instances) PrintF(__VA_ARGS__); \
32  } while (false)
33 
34 #define TRACE_IFT(...) \
35  do { \
36  if (false) PrintF(__VA_ARGS__); \
37  } while (false)
38 
39 namespace v8 {
40 namespace internal {
41 
42 // Import a few often used types from the wasm namespace.
43 using WasmFunction = wasm::WasmFunction;
44 using WasmModule = wasm::WasmModule;
45 
46 namespace {
47 
48 // Manages the natively-allocated memory for a WasmInstanceObject. Since
49 // an instance finalizer is not guaranteed to run upon isolate shutdown,
50 // we must use a Managed<WasmInstanceNativeAllocations> to guarantee
51 // it is freed.
52 // Native allocations are the signature ids and targets for indirect call
53 // targets, as well as the call targets for imported functions.
54 class WasmInstanceNativeAllocations {
55  public:
56 // Helper macro to set an internal field and the corresponding field
57 // on an instance.
58 #define SET(instance, field, value) \
59  { \
60  auto v = value; \
61  this->field##_ = v; \
62  instance->set_##field(v); \
63  }
64 
65  // Allocates initial native storage for a given instance.
66  WasmInstanceNativeAllocations(Handle<WasmInstanceObject> instance,
67  size_t num_imported_functions,
68  size_t num_imported_mutable_globals) {
69  SET(instance, imported_function_targets,
70  reinterpret_cast<Address*>(
71  calloc(num_imported_functions, sizeof(Address))));
72  SET(instance, imported_mutable_globals,
73  reinterpret_cast<Address*>(
74  calloc(num_imported_mutable_globals, sizeof(Address))));
75  }
76  ~WasmInstanceNativeAllocations() {
77  ::free(indirect_function_table_sig_ids_);
78  indirect_function_table_sig_ids_ = nullptr;
79  ::free(indirect_function_table_targets_);
80  indirect_function_table_targets_ = nullptr;
81  ::free(imported_function_targets_);
82  imported_function_targets_ = nullptr;
83  ::free(imported_mutable_globals_);
84  imported_mutable_globals_ = nullptr;
85  }
86  // Resizes the indirect function table.
87  void resize_indirect_function_table(Isolate* isolate,
88  Handle<WasmInstanceObject> instance,
89  uint32_t new_size) {
90  uint32_t old_size = instance->indirect_function_table_size();
91  void* new_sig_ids = nullptr;
92  void* new_targets = nullptr;
93  Handle<FixedArray> new_refs;
94  if (indirect_function_table_sig_ids_) {
95  // Reallocate the old storage.
96  new_sig_ids = realloc(indirect_function_table_sig_ids_,
97  new_size * sizeof(uint32_t));
98  new_targets =
99  realloc(indirect_function_table_targets_, new_size * sizeof(Address));
100 
101  Handle<FixedArray> old(instance->indirect_function_table_refs(), isolate);
102  new_refs = isolate->factory()->CopyFixedArrayAndGrow(
103  old, static_cast<int>(new_size - old_size));
104  } else {
105  // Allocate new storage.
106  new_sig_ids = malloc(new_size * sizeof(uint32_t));
107  new_targets = malloc(new_size * sizeof(Address));
108  new_refs = isolate->factory()->NewFixedArray(static_cast<int>(new_size));
109  }
110  // Initialize new entries.
111  instance->set_indirect_function_table_size(new_size);
112  SET(instance, indirect_function_table_sig_ids,
113  reinterpret_cast<uint32_t*>(new_sig_ids));
114  SET(instance, indirect_function_table_targets,
115  reinterpret_cast<Address*>(new_targets));
116 
117  instance->set_indirect_function_table_refs(*new_refs);
118  for (uint32_t j = old_size; j < new_size; j++) {
119  IndirectFunctionTableEntry(instance, static_cast<int>(j)).clear();
120  }
121  }
122  uint32_t* indirect_function_table_sig_ids_ = nullptr;
123  Address* indirect_function_table_targets_ = nullptr;
124  Address* imported_function_targets_ = nullptr;
125  Address* imported_mutable_globals_ = nullptr;
126 #undef SET
127 };
128 
129 size_t EstimateNativeAllocationsSize(const WasmModule* module) {
130  size_t estimate = sizeof(WasmInstanceNativeAllocations) +
131  (1 * kPointerSize * module->num_imported_mutable_globals) +
132  (2 * kPointerSize * module->num_imported_functions);
133  for (auto& table : module->tables) {
134  estimate += 3 * kPointerSize * table.initial_size;
135  }
136  return estimate;
137 }
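// Worked example for the estimate above (illustrative, assuming a 64-bit
// target where kPointerSize is 8): a module with 3 imported functions,
// 2 imported mutable globals and one table of initial_size 10 yields
//   sizeof(WasmInstanceNativeAllocations) + 1*8*2 + 2*8*3 + 3*8*10
// i.e. the struct size plus 304 bytes of estimated off-heap storage.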
138 
139 WasmInstanceNativeAllocations* GetNativeAllocations(
140  WasmInstanceObject* instance) {
141  return reinterpret_cast<Managed<WasmInstanceNativeAllocations>*>(
142  instance->managed_native_allocations())
143  ->raw();
144 }
145 
146 #ifdef DEBUG
147 bool IsBreakablePosition(wasm::NativeModule* native_module, int func_index,
148  int offset_in_func) {
149  AccountingAllocator alloc;
150  Zone tmp(&alloc, ZONE_NAME);
151  wasm::BodyLocalDecls locals(&tmp);
152  const byte* module_start = native_module->wire_bytes().start();
153  const WasmFunction& func = native_module->module()->functions[func_index];
154  wasm::BytecodeIterator iterator(module_start + func.code.offset(),
155  module_start + func.code.end_offset(),
156  &locals);
157  DCHECK_LT(0, locals.encoded_size);
158  for (uint32_t offset : iterator.offsets()) {
159  if (offset > static_cast<uint32_t>(offset_in_func)) break;
160  if (offset == static_cast<uint32_t>(offset_in_func)) return true;
161  }
162  return false;
163 }
164 #endif // DEBUG
165 
166 enum DispatchTableElements : int {
167  kDispatchTableInstanceOffset,
168  kDispatchTableIndexOffset,
169  kDispatchTableFunctionTableOffset,
170  // Marker:
171  kDispatchTableNumElements
172 };
173 
174 } // namespace
175 
176 // static
177 Handle<WasmModuleObject> WasmModuleObject::New(
178  Isolate* isolate, const wasm::WasmFeatures& enabled,
179  std::shared_ptr<const wasm::WasmModule> shared_module,
180  OwnedVector<const uint8_t> wire_bytes, Handle<Script> script,
181  Handle<ByteArray> asm_js_offset_table) {
182  // Create a new {NativeModule} first.
183  size_t code_size_estimate =
184  wasm::WasmCodeManager::EstimateNativeModuleCodeSize(shared_module.get());
185  auto native_module = isolate->wasm_engine()->code_manager()->NewNativeModule(
186  isolate, enabled, code_size_estimate,
187  wasm::NativeModule::kCanAllocateMoreMemory, std::move(shared_module));
188  native_module->SetWireBytes(std::move(wire_bytes));
189  native_module->SetRuntimeStubs(isolate);
190 
191  // Delegate to the shared {WasmModuleObject::New} allocator.
192  Handle<WasmModuleObject> module_object =
193  New(isolate, std::move(native_module), script, code_size_estimate);
194  if (!asm_js_offset_table.is_null()) {
195  module_object->set_asm_js_offset_table(*asm_js_offset_table);
196  }
197  return module_object;
198 }
199 
200 // static
201 Handle<WasmModuleObject> WasmModuleObject::New(
202  Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module,
203  Handle<Script> script, size_t code_size_estimate) {
204  const WasmModule* module = native_module->module();
205  int export_wrapper_size = static_cast<int>(module->num_exported_functions);
206  Handle<FixedArray> export_wrappers =
207  isolate->factory()->NewFixedArray(export_wrapper_size, TENURED);
208  return New(isolate, std::move(native_module), script, export_wrappers,
209  code_size_estimate);
210 }
211 
212 // static
213 Handle<WasmModuleObject> WasmModuleObject::New(
214  Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module,
215  Handle<Script> script, Handle<FixedArray> export_wrappers,
216  size_t code_size_estimate) {
217  const WasmModule* module = native_module->module();
218 
219  // Use the given shared {NativeModule}, but increase its reference count by
220  // allocating a new {Managed<T>} that the {WasmModuleObject} references.
221  size_t memory_estimate =
222  code_size_estimate +
223  wasm::WasmCodeManager::EstimateNativeModuleNonCodeSize(module);
224  Handle<Managed<wasm::NativeModule>> managed_native_module =
225  Managed<wasm::NativeModule>::FromSharedPtr(isolate, memory_estimate,
226  std::move(native_module));
227 
228  Handle<WasmModuleObject> module_object = Handle<WasmModuleObject>::cast(
229  isolate->factory()->NewJSObject(isolate->wasm_module_constructor()));
230  module_object->set_export_wrappers(*export_wrappers);
231  if (script->type() == Script::TYPE_WASM) {
232  script->set_wasm_module_object(*module_object);
233  }
234  module_object->set_script(*script);
235  module_object->set_weak_instance_list(
236  ReadOnlyRoots(isolate).empty_weak_array_list());
237  module_object->set_managed_native_module(*managed_native_module);
238  return module_object;
239 }
240 
241 bool WasmModuleObject::SetBreakPoint(Handle<WasmModuleObject> module_object,
242  int* position,
243  Handle<BreakPoint> break_point) {
244  Isolate* isolate = module_object->GetIsolate();
245 
246  // Find the function for this breakpoint.
247  int func_index = module_object->GetContainingFunction(*position);
248  if (func_index < 0) return false;
249  const WasmFunction& func = module_object->module()->functions[func_index];
250  int offset_in_func = *position - func.code.offset();
251 
252  // According to the current design, we should only be called with valid
253  // breakable positions.
254  DCHECK(IsBreakablePosition(module_object->native_module(), func_index,
255  offset_in_func));
256 
257  // Insert new break point into break_positions of module object.
258  WasmModuleObject::AddBreakpoint(module_object, *position, break_point);
259 
260  // Iterate over all instances of this module and tell them to set this new
261  // breakpoint. We do this using the weak list of all instances.
262  Handle<WeakArrayList> weak_instance_list(module_object->weak_instance_list(),
263  isolate);
264  for (int i = 0; i < weak_instance_list->length(); ++i) {
265  MaybeObject maybe_instance = weak_instance_list->Get(i);
266  if (maybe_instance->IsWeak()) {
267  Handle<WasmInstanceObject> instance(
268  WasmInstanceObject::cast(maybe_instance->GetHeapObjectAssumeWeak()),
269  isolate);
270  Handle<WasmDebugInfo> debug_info =
271  WasmInstanceObject::GetOrCreateDebugInfo(instance);
272  WasmDebugInfo::SetBreakpoint(debug_info, func_index, offset_in_func);
273  }
274  }
275 
276  return true;
277 }
278 
279 namespace {
280 
281 int GetBreakpointPos(Isolate* isolate, Object* break_point_info_or_undef) {
282  if (break_point_info_or_undef->IsUndefined(isolate)) return kMaxInt;
283  return BreakPointInfo::cast(break_point_info_or_undef)->source_position();
284 }
285 
286 int FindBreakpointInfoInsertPos(Isolate* isolate,
287  Handle<FixedArray> breakpoint_infos,
288  int position) {
289  // Find insert location via binary search, taking care of undefined values on
290  // the right. Position is always greater than zero.
291  DCHECK_LT(0, position);
292 
293  int left = 0; // inclusive
294  int right = breakpoint_infos->length(); // exclusive
295  while (right - left > 1) {
296  int mid = left + (right - left) / 2;
297  Object* mid_obj = breakpoint_infos->get(mid);
298  if (GetBreakpointPos(isolate, mid_obj) <= position) {
299  left = mid;
300  } else {
301  right = mid;
302  }
303  }
304 
305  int left_pos = GetBreakpointPos(isolate, breakpoint_infos->get(left));
306  return left_pos < position ? left + 1 : left;
307 }
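// Illustrative behaviour of the search above: for breakpoint_infos holding
// source positions {3, 7, 12, undefined} (undefined entries sort as kMaxInt),
//   position 7  -> returns 1 (slot of the existing BreakPointInfo)
//   position 9  -> returns 2 (insert before the entry at position 12)
//   position 1  -> returns 0 (insert at the front)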
308 
309 } // namespace
310 
311 void WasmModuleObject::AddBreakpoint(Handle<WasmModuleObject> module_object,
312  int position,
313  Handle<BreakPoint> break_point) {
314  Isolate* isolate = module_object->GetIsolate();
315  Handle<FixedArray> breakpoint_infos;
316  if (module_object->has_breakpoint_infos()) {
317  breakpoint_infos = handle(module_object->breakpoint_infos(), isolate);
318  } else {
319  breakpoint_infos = isolate->factory()->NewFixedArray(4, TENURED);
320  module_object->set_breakpoint_infos(*breakpoint_infos);
321  }
322 
323  int insert_pos =
324  FindBreakpointInfoInsertPos(isolate, breakpoint_infos, position);
325 
326  // If a BreakPointInfo object already exists for this position, add the new
327  // breakpoint object and return.
328  if (insert_pos < breakpoint_infos->length() &&
329  GetBreakpointPos(isolate, breakpoint_infos->get(insert_pos)) ==
330  position) {
331  Handle<BreakPointInfo> old_info(
332  BreakPointInfo::cast(breakpoint_infos->get(insert_pos)), isolate);
333  BreakPointInfo::SetBreakPoint(isolate, old_info, break_point);
334  return;
335  }
336 
337  // Enlarge break positions array if necessary.
338  bool need_realloc = !breakpoint_infos->get(breakpoint_infos->length() - 1)
339  ->IsUndefined(isolate);
340  Handle<FixedArray> new_breakpoint_infos = breakpoint_infos;
341  if (need_realloc) {
342  new_breakpoint_infos = isolate->factory()->NewFixedArray(
343  2 * breakpoint_infos->length(), TENURED);
344  module_object->set_breakpoint_infos(*new_breakpoint_infos);
345  // Copy over the entries [0, insert_pos).
346  for (int i = 0; i < insert_pos; ++i)
347  new_breakpoint_infos->set(i, breakpoint_infos->get(i));
348  }
349 
350  // Move elements [insert_pos, ...] up by one.
351  for (int i = breakpoint_infos->length() - 1; i >= insert_pos; --i) {
352  Object* entry = breakpoint_infos->get(i);
353  if (entry->IsUndefined(isolate)) continue;
354  new_breakpoint_infos->set(i + 1, entry);
355  }
356 
357  // Generate new BreakpointInfo.
358  Handle<BreakPointInfo> breakpoint_info =
359  isolate->factory()->NewBreakPointInfo(position);
360  BreakPointInfo::SetBreakPoint(isolate, breakpoint_info, break_point);
361 
362  // Now insert new position at insert_pos.
363  new_breakpoint_infos->set(insert_pos, *breakpoint_info);
364 }
365 
366 void WasmModuleObject::SetBreakpointsOnNewInstance(
367  Handle<WasmModuleObject> module_object,
368  Handle<WasmInstanceObject> instance) {
369  if (!module_object->has_breakpoint_infos()) return;
370  Isolate* isolate = module_object->GetIsolate();
371  Handle<WasmDebugInfo> debug_info =
372  WasmInstanceObject::GetOrCreateDebugInfo(instance);
373 
374  Handle<FixedArray> breakpoint_infos(module_object->breakpoint_infos(),
375  isolate);
376  // If the array exists, it should not be empty.
377  DCHECK_LT(0, breakpoint_infos->length());
378 
379  for (int i = 0, e = breakpoint_infos->length(); i < e; ++i) {
380  Handle<Object> obj(breakpoint_infos->get(i), isolate);
381  if (obj->IsUndefined(isolate)) {
382  for (; i < e; ++i) {
383  DCHECK(breakpoint_infos->get(i)->IsUndefined(isolate));
384  }
385  break;
386  }
387  Handle<BreakPointInfo> breakpoint_info = Handle<BreakPointInfo>::cast(obj);
388  int position = breakpoint_info->source_position();
389 
390  // Find the function for this breakpoint, and set the breakpoint.
391  int func_index = module_object->GetContainingFunction(position);
392  DCHECK_LE(0, func_index);
393  const WasmFunction& func = module_object->module()->functions[func_index];
394  int offset_in_func = position - func.code.offset();
395  WasmDebugInfo::SetBreakpoint(debug_info, func_index, offset_in_func);
396  }
397 }
398 
399 namespace {
400 
401 enum AsmJsOffsetTableEntryLayout {
402  kOTEByteOffset,
403  kOTECallPosition,
404  kOTENumberConvPosition,
405  kOTESize
406 };
407 
408 Handle<ByteArray> GetDecodedAsmJsOffsetTable(
409  Handle<WasmModuleObject> module_object, Isolate* isolate) {
410  DCHECK(module_object->is_asm_js());
411  Handle<ByteArray> offset_table(module_object->asm_js_offset_table(), isolate);
412 
413  // The last byte in the asm_js_offset_table ByteArray tells whether it is
414  // still encoded (0) or decoded (1).
415  enum AsmJsTableType : int { Encoded = 0, Decoded = 1 };
416  int table_type = offset_table->get(offset_table->length() - 1);
417  DCHECK(table_type == Encoded || table_type == Decoded);
418  if (table_type == Decoded) return offset_table;
419 
420  wasm::AsmJsOffsets asm_offsets;
421  {
422  DisallowHeapAllocation no_gc;
423  byte* bytes_start = offset_table->GetDataStartAddress();
424  byte* bytes_end = reinterpret_cast<byte*>(
425  reinterpret_cast<Address>(bytes_start) + offset_table->length() - 1);
426  asm_offsets = wasm::DecodeAsmJsOffsets(bytes_start, bytes_end).value();
427  }
428  // The wasm bytes must be valid and must contain an asm.js offset table.
429  DCHECK_GE(kMaxInt, asm_offsets.size());
430  int num_functions = static_cast<int>(asm_offsets.size());
431  int num_imported_functions =
432  static_cast<int>(module_object->module()->num_imported_functions);
433  DCHECK_EQ(module_object->module()->functions.size(),
434  static_cast<size_t>(num_functions) + num_imported_functions);
435  int num_entries = 0;
436  for (int func = 0; func < num_functions; ++func) {
437  size_t new_size = asm_offsets[func].size();
438  DCHECK_LE(new_size, static_cast<size_t>(kMaxInt) - num_entries);
439  num_entries += static_cast<int>(new_size);
440  }
441  // One byte to encode that this is a decoded table.
442  DCHECK_GE(kMaxInt,
443  1 + static_cast<uint64_t>(num_entries) * kOTESize * kIntSize);
444  int total_size = 1 + num_entries * kOTESize * kIntSize;
445  Handle<ByteArray> decoded_table =
446  isolate->factory()->NewByteArray(total_size, TENURED);
447  decoded_table->set(total_size - 1, AsmJsTableType::Decoded);
448  module_object->set_asm_js_offset_table(*decoded_table);
449 
450  int idx = 0;
451  const std::vector<WasmFunction>& wasm_funs =
452  module_object->module()->functions;
453  for (int func = 0; func < num_functions; ++func) {
454  std::vector<wasm::AsmJsOffsetEntry>& func_asm_offsets = asm_offsets[func];
455  if (func_asm_offsets.empty()) continue;
456  int func_offset = wasm_funs[num_imported_functions + func].code.offset();
457  for (wasm::AsmJsOffsetEntry& e : func_asm_offsets) {
458  // Byte offsets must be strictly monotonically increasing:
459  DCHECK_IMPLIES(idx > 0, func_offset + e.byte_offset >
460  decoded_table->get_int(idx - kOTESize));
461  decoded_table->set_int(idx + kOTEByteOffset, func_offset + e.byte_offset);
462  decoded_table->set_int(idx + kOTECallPosition, e.source_position_call);
463  decoded_table->set_int(idx + kOTENumberConvPosition,
464  e.source_position_number_conversion);
465  idx += kOTESize;
466  }
467  }
468  DCHECK_EQ(total_size, idx * kIntSize + 1);
469  return decoded_table;
470 }
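// Sketch of the decoded table layout produced above: num_entries int triples
//   [byte_offset, call_position, number_conversion_position]
// followed by a single trailing marker byte (AsmJsTableType::Decoded). The
// byte offsets are module-relative and strictly increasing, so the binary
// search in GetSourcePosition below can index whole triples directly.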
471 
472 } // namespace
473 
474 int WasmModuleObject::GetSourcePosition(Handle<WasmModuleObject> module_object,
475  uint32_t func_index,
476  uint32_t byte_offset,
477  bool is_at_number_conversion) {
478  Isolate* isolate = module_object->GetIsolate();
479  const WasmModule* module = module_object->module();
480 
481  if (module->origin != wasm::kAsmJsOrigin) {
482  // For non-asm.js modules, we just add the function's start offset
483  // to make a module-relative position.
484  return byte_offset + module_object->GetFunctionOffset(func_index);
485  }
486 
487  // asm.js modules have an additional offset table that must be searched.
488  Handle<ByteArray> offset_table =
489  GetDecodedAsmJsOffsetTable(module_object, isolate);
490 
491  DCHECK_LT(func_index, module->functions.size());
492  uint32_t func_code_offset = module->functions[func_index].code.offset();
493  uint32_t total_offset = func_code_offset + byte_offset;
494 
495  // Binary search for the total byte offset.
496  int left = 0; // inclusive
497  int right = offset_table->length() / kIntSize / kOTESize; // exclusive
498  DCHECK_LT(left, right);
499  while (right - left > 1) {
500  int mid = left + (right - left) / 2;
501  int mid_entry = offset_table->get_int(kOTESize * mid);
502  DCHECK_GE(kMaxInt, mid_entry);
503  if (static_cast<uint32_t>(mid_entry) <= total_offset) {
504  left = mid;
505  } else {
506  right = mid;
507  }
508  }
509  // There should be an entry for each position that could show up on the stack
510  // trace:
511  DCHECK_EQ(total_offset, offset_table->get_int(kOTESize * left));
512  int idx = is_at_number_conversion ? kOTENumberConvPosition : kOTECallPosition;
513  return offset_table->get_int(kOTESize * left + idx);
514 }
515 
516 v8::debug::WasmDisassembly WasmModuleObject::DisassembleFunction(
517  int func_index) {
518  DisallowHeapAllocation no_gc;
519 
520  if (func_index < 0 ||
521  static_cast<uint32_t>(func_index) >= module()->functions.size())
522  return {};
523 
524  Vector<const byte> wire_bytes = native_module()->wire_bytes();
525 
526  std::ostringstream disassembly_os;
527  v8::debug::WasmDisassembly::OffsetTable offset_table;
528 
529  PrintWasmText(module(), wire_bytes, static_cast<uint32_t>(func_index),
530  disassembly_os, &offset_table);
531 
532  return {disassembly_os.str(), std::move(offset_table)};
533 }
534 
535 bool WasmModuleObject::GetPossibleBreakpoints(
536  const v8::debug::Location& start, const v8::debug::Location& end,
537  std::vector<v8::debug::BreakLocation>* locations) {
538  DisallowHeapAllocation no_gc;
539 
540  const std::vector<WasmFunction>& functions = module()->functions;
541  if (start.GetLineNumber() < 0 || start.GetColumnNumber() < 0 ||
542  (!end.IsEmpty() &&
543  (end.GetLineNumber() < 0 || end.GetColumnNumber() < 0)))
544  return false;
545 
546  // start_func_index, start_offset and end_func_index are inclusive.
547  // end_offset is exclusive.
548  // start_offset and end_offset are module-relative byte offsets.
549  uint32_t start_func_index = start.GetLineNumber();
550  if (start_func_index >= functions.size()) return false;
551  int start_func_len = functions[start_func_index].code.length();
552  if (start.GetColumnNumber() > start_func_len) return false;
553  uint32_t start_offset =
554  functions[start_func_index].code.offset() + start.GetColumnNumber();
555  uint32_t end_func_index;
556  uint32_t end_offset;
557  if (end.IsEmpty()) {
558  // Default: everything till the end of the Script.
559  end_func_index = static_cast<uint32_t>(functions.size() - 1);
560  end_offset = functions[end_func_index].code.end_offset();
561  } else {
562  // If end is specified: Use it and check for valid input.
563  end_func_index = static_cast<uint32_t>(end.GetLineNumber());
564 
565 // Special case: the end position points at the very start of a function.
566 // Stop at the end of the previous function instead, so that we do not
567 // disassemble the next function as well.
568  if (end.GetColumnNumber() == 0 && end_func_index > 0) {
569  --end_func_index;
570  end_offset = functions[end_func_index].code.end_offset();
571  } else {
572  if (end_func_index >= functions.size()) return false;
573  end_offset =
574  functions[end_func_index].code.offset() + end.GetColumnNumber();
575  if (end_offset > functions[end_func_index].code.end_offset())
576  return false;
577  }
578  }
579 
580  AccountingAllocator alloc;
581  Zone tmp(&alloc, ZONE_NAME);
582  const byte* module_start = native_module()->wire_bytes().start();
583 
584  for (uint32_t func_idx = start_func_index; func_idx <= end_func_index;
585  ++func_idx) {
586  const WasmFunction& func = functions[func_idx];
587  if (func.code.length() == 0) continue;
588 
589  wasm::BodyLocalDecls locals(&tmp);
590  wasm::BytecodeIterator iterator(module_start + func.code.offset(),
591  module_start + func.code.end_offset(),
592  &locals);
593  DCHECK_LT(0u, locals.encoded_size);
594  for (uint32_t offset : iterator.offsets()) {
595  uint32_t total_offset = func.code.offset() + offset;
596  if (total_offset >= end_offset) {
597  DCHECK_EQ(end_func_index, func_idx);
598  break;
599  }
600  if (total_offset < start_offset) continue;
601  locations->emplace_back(func_idx, offset, debug::kCommonBreakLocation);
602  }
603  }
604  return true;
605 }
606 
607 MaybeHandle<FixedArray> WasmModuleObject::CheckBreakPoints(
608  Isolate* isolate, Handle<WasmModuleObject> module_object, int position) {
609  if (!module_object->has_breakpoint_infos()) return {};
610 
611  Handle<FixedArray> breakpoint_infos(module_object->breakpoint_infos(),
612  isolate);
613  int insert_pos =
614  FindBreakpointInfoInsertPos(isolate, breakpoint_infos, position);
615  if (insert_pos >= breakpoint_infos->length()) return {};
616 
617  Handle<Object> maybe_breakpoint_info(breakpoint_infos->get(insert_pos),
618  isolate);
619  if (maybe_breakpoint_info->IsUndefined(isolate)) return {};
620  Handle<BreakPointInfo> breakpoint_info =
621  Handle<BreakPointInfo>::cast(maybe_breakpoint_info);
622  if (breakpoint_info->source_position() != position) return {};
623 
624  // There is no support for conditional break points. Just assume that every
625  // break point always hits.
626  Handle<Object> break_points(breakpoint_info->break_points(), isolate);
627  if (break_points->IsFixedArray()) {
628  return Handle<FixedArray>::cast(break_points);
629  }
630  Handle<FixedArray> break_points_hit = isolate->factory()->NewFixedArray(1);
631  break_points_hit->set(0, *break_points);
632  return break_points_hit;
633 }
634 
635 MaybeHandle<String> WasmModuleObject::ExtractUtf8StringFromModuleBytes(
636  Isolate* isolate, Handle<WasmModuleObject> module_object,
637  wasm::WireBytesRef ref) {
638  // TODO(wasm): cache strings from modules if it's a performance win.
639  Vector<const uint8_t> wire_bytes =
640  module_object->native_module()->wire_bytes();
641  return ExtractUtf8StringFromModuleBytes(isolate, wire_bytes, ref);
642 }
643 
644 MaybeHandle<String> WasmModuleObject::ExtractUtf8StringFromModuleBytes(
645  Isolate* isolate, Vector<const uint8_t> wire_bytes,
646  wasm::WireBytesRef ref) {
647  Vector<const uint8_t> name_vec = wire_bytes + ref.offset();
648  name_vec.Truncate(ref.length());
649  // UTF8 validation happens at decode time.
650  DCHECK(unibrow::Utf8::ValidateEncoding(name_vec.start(), name_vec.length()));
651  return isolate->factory()->NewStringFromUtf8(
652  Vector<const char>::cast(name_vec));
653 }
654 
655 MaybeHandle<String> WasmModuleObject::GetModuleNameOrNull(
656  Isolate* isolate, Handle<WasmModuleObject> module_object) {
657  const WasmModule* module = module_object->module();
658  if (!module->name.is_set()) return {};
659  return ExtractUtf8StringFromModuleBytes(isolate, module_object, module->name);
660 }
661 
662 MaybeHandle<String> WasmModuleObject::GetFunctionNameOrNull(
663  Isolate* isolate, Handle<WasmModuleObject> module_object,
664  uint32_t func_index) {
665  DCHECK_LT(func_index, module_object->module()->functions.size());
666  wasm::WireBytesRef name = module_object->module()->LookupFunctionName(
667  wasm::ModuleWireBytes(module_object->native_module()->wire_bytes()),
668  func_index);
669  if (!name.is_set()) return {};
670  return ExtractUtf8StringFromModuleBytes(isolate, module_object, name);
671 }
672 
673 Handle<String> WasmModuleObject::GetFunctionName(
674  Isolate* isolate, Handle<WasmModuleObject> module_object,
675  uint32_t func_index) {
676  MaybeHandle<String> name =
677  GetFunctionNameOrNull(isolate, module_object, func_index);
678  if (!name.is_null()) return name.ToHandleChecked();
679  EmbeddedVector<char, 32> buffer;
680  int length = SNPrintF(buffer, "wasm-function[%u]", func_index);
681  return isolate->factory()
682  ->NewStringFromOneByte(Vector<uint8_t>::cast(buffer.SubVector(0, length)))
683  .ToHandleChecked();
684 }
685 
686 Vector<const uint8_t> WasmModuleObject::GetRawFunctionName(
687  uint32_t func_index) {
688  DCHECK_GT(module()->functions.size(), func_index);
689  wasm::ModuleWireBytes wire_bytes(native_module()->wire_bytes());
690  wasm::WireBytesRef name_ref =
691  module()->LookupFunctionName(wire_bytes, func_index);
692  wasm::WasmName name = wire_bytes.GetNameOrNull(name_ref);
693  return Vector<const uint8_t>::cast(name);
694 }
695 
696 int WasmModuleObject::GetFunctionOffset(uint32_t func_index) {
697  const std::vector<WasmFunction>& functions = module()->functions;
698  if (static_cast<uint32_t>(func_index) >= functions.size()) return -1;
699  DCHECK_GE(kMaxInt, functions[func_index].code.offset());
700  return static_cast<int>(functions[func_index].code.offset());
701 }
702 
703 int WasmModuleObject::GetContainingFunction(uint32_t byte_offset) {
704  const std::vector<WasmFunction>& functions = module()->functions;
705 
706  // Binary search for a function containing the given position.
707  int left = 0; // inclusive
708  int right = static_cast<int>(functions.size()); // exclusive
709  if (right == 0) return -1;  // No functions: nothing can contain the offset.
710  while (right - left > 1) {
711  int mid = left + (right - left) / 2;
712  if (functions[mid].code.offset() <= byte_offset) {
713  left = mid;
714  } else {
715  right = mid;
716  }
717  }
718  // If the found function does not contain the given position, return -1.
719  const WasmFunction& func = functions[left];
720  if (byte_offset < func.code.offset() ||
721  byte_offset >= func.code.end_offset()) {
722  return -1;
723  }
724 
725  return left;
726 }
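// Example of the lookup above (illustrative byte ranges): with two functions
// whose bodies span the module-relative ranges [10, 20) and [20, 50),
//   GetContainingFunction(25) == 1
//   GetContainingFunction(5)  == -1   (before the first function body)
//   GetContainingFunction(50) == -1   (past the last function body)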
727 
728 bool WasmModuleObject::GetPositionInfo(uint32_t position,
729  Script::PositionInfo* info) {
730  if (script()->source_mapping_url()->IsString()) {
731  if (module()->functions.size() == 0) return false;
732  info->line = 0;
733  info->column = position;
734  info->line_start = module()->functions[0].code.offset();
735  info->line_end = module()->functions.back().code.end_offset();
736  return true;
737  }
738  int func_index = GetContainingFunction(position);
739  if (func_index < 0) return false;
740 
741  const WasmFunction& function = module()->functions[func_index];
742 
743  info->line = func_index;
744  info->column = position - function.code.offset();
745  info->line_start = function.code.offset();
746  info->line_end = function.code.end_offset();
747  return true;
748 }
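// Position convention used above: without a source map, the "line" reported
// for a wasm frame is the function index and the "column" is the byte offset
// within that function's body (e.g. {line: 2, column: 7} means byte 7 inside
// function 2); with a source map attached, everything is reported on line 0
// with module-relative byte offsets as columns.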
749 
750 Handle<WasmTableObject> WasmTableObject::New(Isolate* isolate, uint32_t initial,
751  int64_t maximum,
752  Handle<FixedArray>* js_functions) {
753  Handle<JSFunction> table_ctor(
754  isolate->native_context()->wasm_table_constructor(), isolate);
755  auto table_obj = Handle<WasmTableObject>::cast(
756  isolate->factory()->NewJSObject(table_ctor));
757 
758  *js_functions = isolate->factory()->NewFixedArray(initial);
759  Object* null = ReadOnlyRoots(isolate).null_value();
760  for (int i = 0; i < static_cast<int>(initial); ++i) {
761  (*js_functions)->set(i, null);
762  }
763  table_obj->set_functions(**js_functions);
764  DCHECK_EQ(maximum, static_cast<int>(maximum));
765  Handle<Object> max = isolate->factory()->NewNumber(maximum);
766  table_obj->set_maximum_length(*max);
767 
768  table_obj->set_dispatch_tables(ReadOnlyRoots(isolate).empty_fixed_array());
769  return Handle<WasmTableObject>::cast(table_obj);
770 }
771 
772 void WasmTableObject::AddDispatchTable(Isolate* isolate,
773  Handle<WasmTableObject> table_obj,
774  Handle<WasmInstanceObject> instance,
775  int table_index) {
776  Handle<FixedArray> dispatch_tables(table_obj->dispatch_tables(), isolate);
777  int old_length = dispatch_tables->length();
778  DCHECK_EQ(0, old_length % kDispatchTableNumElements);
779 
780  if (instance.is_null()) return;
781  // TODO(titzer): use weak cells here to avoid leaking instances.
782 
783  // Grow the dispatch table and add a new entry at the end.
784  Handle<FixedArray> new_dispatch_tables =
785  isolate->factory()->CopyFixedArrayAndGrow(dispatch_tables,
786  kDispatchTableNumElements);
787 
788  new_dispatch_tables->set(old_length + kDispatchTableInstanceOffset,
789  *instance);
790  new_dispatch_tables->set(old_length + kDispatchTableIndexOffset,
791  Smi::FromInt(table_index));
792 
793  table_obj->set_dispatch_tables(*new_dispatch_tables);
794 }
795 
796 void WasmTableObject::Grow(Isolate* isolate, uint32_t count) {
797  if (count == 0) return; // Degenerate case: nothing to do.
798 
799  Handle<FixedArray> dispatch_tables(this->dispatch_tables(), isolate);
800  DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements);
801  uint32_t old_size = functions()->length();
802 
803  // Tables are stored in the instance object, so no code patching is
804  // necessary. We simply have to grow the raw tables in each instance
805  // that has imported this table.
806 
807  // TODO(titzer): replace the dispatch table with a weak list of all
808  // the instances that import a given table.
809  for (int i = 0; i < dispatch_tables->length();
810  i += kDispatchTableNumElements) {
811  Handle<WasmInstanceObject> instance(
812  WasmInstanceObject::cast(dispatch_tables->get(i)), isolate);
813  DCHECK_EQ(old_size, instance->indirect_function_table_size());
814  uint32_t new_size = old_size + count;
815  WasmInstanceObject::EnsureIndirectFunctionTableWithMinimumSize(instance,
816  new_size);
817  }
818 }
819 
820 void WasmTableObject::Set(Isolate* isolate, Handle<WasmTableObject> table,
821  int32_t table_index, Handle<JSFunction> function) {
822  Handle<FixedArray> array(table->functions(), isolate);
823  if (function.is_null()) {
824  ClearDispatchTables(isolate, table, table_index); // Degenerate case.
825  array->set(table_index, ReadOnlyRoots(isolate).null_value());
826  return;
827  }
828 
829  // TODO(titzer): Change this to MaybeHandle<WasmExportedFunction>
830  DCHECK(WasmExportedFunction::IsWasmExportedFunction(*function));
831  auto exported_function = Handle<WasmExportedFunction>::cast(function);
832  Handle<WasmInstanceObject> target_instance(exported_function->instance(),
833  isolate);
834  int func_index = exported_function->function_index();
835  auto* wasm_function = &target_instance->module()->functions[func_index];
836  DCHECK_NOT_NULL(wasm_function);
837  DCHECK_NOT_NULL(wasm_function->sig);
838  UpdateDispatchTables(isolate, table, table_index, wasm_function->sig,
839  handle(exported_function->instance(), isolate),
840  func_index);
841  array->set(table_index, *function);
842 }
843 
844 void WasmTableObject::UpdateDispatchTables(
845  Isolate* isolate, Handle<WasmTableObject> table, int table_index,
846  wasm::FunctionSig* sig, Handle<WasmInstanceObject> target_instance,
847  int target_func_index) {
848  // We simply need to update the IFTs for each instance that imports
849  // this table.
850  Handle<FixedArray> dispatch_tables(table->dispatch_tables(), isolate);
851  DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements);
852 
853  for (int i = 0; i < dispatch_tables->length();
854  i += kDispatchTableNumElements) {
855  Handle<WasmInstanceObject> instance(
856  WasmInstanceObject::cast(
857  dispatch_tables->get(i + kDispatchTableInstanceOffset)),
858  isolate);
859  // Note that {SignatureMap::Find} may return {-1} if the signature is
860  // not found; it will simply never match any check.
861  auto sig_id = instance->module()->signature_map.Find(*sig);
862  IndirectFunctionTableEntry(instance, table_index)
863  .Set(sig_id, target_instance, target_func_index);
864  }
865 }
866 
867 void WasmTableObject::ClearDispatchTables(Isolate* isolate,
868  Handle<WasmTableObject> table,
869  int index) {
870  Handle<FixedArray> dispatch_tables(table->dispatch_tables(), isolate);
871  DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements);
872  for (int i = 0; i < dispatch_tables->length();
873  i += kDispatchTableNumElements) {
874  Handle<WasmInstanceObject> target_instance(
875  WasmInstanceObject::cast(
876  dispatch_tables->get(i + kDispatchTableInstanceOffset)),
877  isolate);
878  DCHECK_LT(index, target_instance->indirect_function_table_size());
879  IndirectFunctionTableEntry(target_instance, index).clear();
880  }
881 }
882 
883 namespace {
884 MaybeHandle<JSArrayBuffer> MemoryGrowBuffer(Isolate* isolate,
885  Handle<JSArrayBuffer> old_buffer,
886  uint32_t pages,
887  uint32_t maximum_pages) {
888  CHECK_GE(wasm::max_mem_pages(), maximum_pages);
889  if (!old_buffer->is_growable()) return {};
890  void* old_mem_start = old_buffer->backing_store();
891  size_t old_size = old_buffer->byte_length();
892  CHECK_EQ(0, old_size % wasm::kWasmPageSize);
893  size_t old_pages = old_size / wasm::kWasmPageSize;
894  CHECK_GE(wasm::max_mem_pages(), old_pages);
895 
896  if ((pages > maximum_pages - old_pages) || // exceeds remaining
897  (pages > wasm::max_mem_pages() - old_pages)) { // exceeds limit
898  return {};
899  }
900  size_t new_size =
901  static_cast<size_t>(old_pages + pages) * wasm::kWasmPageSize;
902 
903  // Reusing the backing store from externalized buffers causes problems with
904  // Blink's array buffers. The connection between the two is lost, which can
905  // lead to Blink not knowing about the other reference to the buffer and
906  // freeing it too early.
907  if (!old_buffer->is_external() &&
908  ((new_size < old_buffer->allocation_length()) || old_size == new_size)) {
909  if (old_size != new_size) {
910  DCHECK_NOT_NULL(old_buffer->backing_store());
911  // If adjusting permissions fails, propagate error back to return
912  // failure to grow.
913  if (!i::SetPermissions(GetPlatformPageAllocator(), old_mem_start,
914  new_size, PageAllocator::kReadWrite)) {
915  return {};
916  }
917  reinterpret_cast<v8::Isolate*>(isolate)
918  ->AdjustAmountOfExternalAllocatedMemory(pages * wasm::kWasmPageSize);
919  }
920  // NOTE: We must allocate a new array buffer here because the spec
921  // assumes that ArrayBuffers do not change size.
922  void* backing_store = old_buffer->backing_store();
923  bool is_external = old_buffer->is_external();
924  // Disconnect buffer early so GC won't free it.
925  i::wasm::DetachMemoryBuffer(isolate, old_buffer, false);
926  Handle<JSArrayBuffer> new_buffer =
927  wasm::SetupArrayBuffer(isolate, backing_store, new_size, is_external);
928  return new_buffer;
929  } else {
930  // We couldn't reuse the old backing store, so create a new one and copy the
931  // old contents in.
932  Handle<JSArrayBuffer> new_buffer;
933  if (!wasm::NewArrayBuffer(isolate, new_size).ToHandle(&new_buffer)) {
934  return {};
935  }
936  wasm::WasmMemoryTracker* const memory_tracker =
937  isolate->wasm_engine()->memory_tracker();
938  // If the old buffer had full guard regions, we can only safely use the new
939  // buffer if it also has full guard regions. Otherwise, we'd have to
940  // recompile all the instances using this memory to insert bounds checks.
941  if (memory_tracker->HasFullGuardRegions(old_mem_start) &&
942  !memory_tracker->HasFullGuardRegions(new_buffer->backing_store())) {
943  return {};
944  }
945  if (old_size == 0) return new_buffer;
946  memcpy(new_buffer->backing_store(), old_mem_start, old_size);
947  DCHECK(old_buffer.is_null() || !old_buffer->is_shared());
948  constexpr bool free_memory = true;
949  i::wasm::DetachMemoryBuffer(isolate, old_buffer, free_memory);
950  return new_buffer;
951  }
952 }
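// Bounds-check example for the grow path above (illustrative numbers): with
// old_pages == 1 and maximum_pages == 2, a request of pages == 2 fails the
// (pages > maximum_pages - old_pages) check (2 > 1) and returns an empty
// MaybeHandle, while pages == 1 succeeds and yields a 2-page (128 KiB)
// buffer.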
953 
954 // May GC, because SetSpecializationMemInfoFrom may GC
955 void SetInstanceMemory(Handle<WasmInstanceObject> instance,
956  Handle<JSArrayBuffer> buffer) {
957  instance->SetRawMemory(reinterpret_cast<byte*>(buffer->backing_store()),
958  buffer->byte_length());
959 #if DEBUG
960  if (!FLAG_mock_arraybuffer_allocator) {
961  // To flush out bugs earlier, in DEBUG mode, check that all pages of the
962  // memory are accessible by reading and writing one byte on each page.
963  // Don't do this if the mock ArrayBuffer allocator is enabled.
964  byte* mem_start = instance->memory_start();
965  size_t mem_size = instance->memory_size();
966  for (size_t offset = 0; offset < mem_size; offset += wasm::kWasmPageSize) {
967  byte val = mem_start[offset];
968  USE(val);
969  mem_start[offset] = val;
970  }
971  }
972 #endif
973 }
974 
975 } // namespace
976 
977 Handle<WasmMemoryObject> WasmMemoryObject::New(
978  Isolate* isolate, MaybeHandle<JSArrayBuffer> maybe_buffer,
979  int32_t maximum) {
980  // TODO(kschimpf): Do we need to add an argument that defines the
981  // style of memory the user prefers (with/without trap handling), so
982  // that the memory will match the style of the compiled wasm module.
983  // See issue v8:7143
984  Handle<JSFunction> memory_ctor(
985  isolate->native_context()->wasm_memory_constructor(), isolate);
986  auto memory_obj = Handle<WasmMemoryObject>::cast(
987  isolate->factory()->NewJSObject(memory_ctor, TENURED));
988 
989  Handle<JSArrayBuffer> buffer;
990  if (!maybe_buffer.ToHandle(&buffer)) {
991  // If no buffer was provided, create a 0-length one.
992  buffer = wasm::SetupArrayBuffer(isolate, nullptr, 0, false);
993  }
994  memory_obj->set_array_buffer(*buffer);
995  memory_obj->set_maximum_pages(maximum);
996 
997  return memory_obj;
998 }
999 
1000 uint32_t WasmMemoryObject::current_pages() {
1001  return static_cast<uint32_t>(array_buffer()->byte_length() /
1002  wasm::kWasmPageSize);
1003 }
1004 
1005 bool WasmMemoryObject::has_full_guard_region(Isolate* isolate) {
1006  const wasm::WasmMemoryTracker::AllocationData* allocation =
1007  isolate->wasm_engine()->memory_tracker()->FindAllocationData(
1008  array_buffer()->backing_store());
1009  CHECK_NOT_NULL(allocation);
1010 
1011  Address allocation_base =
1012  reinterpret_cast<Address>(allocation->allocation_base);
1013  Address buffer_start = reinterpret_cast<Address>(allocation->buffer_start);
1014 
1015  // Return whether the allocation covers every possible Wasm heap index.
1016  //
1017  // We always have the following relationship:
1018  // allocation_base <= buffer_start <= buffer_start + memory_size <=
1019  // allocation_base + allocation_length
1020  // (in other words, the buffer fits within the allocation)
1021  //
1022  // The space between buffer_start + memory_size and allocation_base +
1023  // allocation_length is the guard region. Here we make sure the guard region
1024  // is large enough for any Wasm heap offset.
1025  return buffer_start + wasm::kWasmMaxHeapOffset <=
1026  allocation_base + allocation->allocation_length;
1027 }
1028 
1029 void WasmMemoryObject::AddInstance(Isolate* isolate,
1030  Handle<WasmMemoryObject> memory,
1031  Handle<WasmInstanceObject> instance) {
1032  Handle<WeakArrayList> old_instances =
1033  memory->has_instances()
1034  ? Handle<WeakArrayList>(memory->instances(), isolate)
1035  : handle(ReadOnlyRoots(isolate->heap()).empty_weak_array_list(),
1036  isolate);
1037  Handle<WeakArrayList> new_instances = WeakArrayList::AddToEnd(
1038  isolate, old_instances, MaybeObjectHandle::Weak(instance));
1039  memory->set_instances(*new_instances);
1040  Handle<JSArrayBuffer> buffer(memory->array_buffer(), isolate);
1041  SetInstanceMemory(instance, buffer);
1042 }
1043 
1044 void WasmMemoryObject::RemoveInstance(Handle<WasmMemoryObject> memory,
1045  Handle<WasmInstanceObject> instance) {
1046  if (memory->has_instances()) {
1047  memory->instances()->RemoveOne(MaybeObjectHandle::Weak(instance));
1048  }
1049 }
1050 
1051 // static
1052 int32_t WasmMemoryObject::Grow(Isolate* isolate,
1053  Handle<WasmMemoryObject> memory_object,
1054  uint32_t pages) {
1055  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.wasm"), "GrowMemory");
1056  Handle<JSArrayBuffer> old_buffer(memory_object->array_buffer(), isolate);
1057  if (!old_buffer->is_growable()) return -1;
1058  size_t old_size = old_buffer->byte_length();
1059  DCHECK_EQ(0, old_size % wasm::kWasmPageSize);
1060  Handle<JSArrayBuffer> new_buffer;
1061 
1062  uint32_t maximum_pages = wasm::max_mem_pages();
1063  if (memory_object->has_maximum_pages()) {
1064  maximum_pages = std::min(
1065  maximum_pages, static_cast<uint32_t>(memory_object->maximum_pages()));
1066  }
1067  if (!MemoryGrowBuffer(isolate, old_buffer, pages, maximum_pages)
1068  .ToHandle(&new_buffer)) {
1069  return -1;
1070  }
1071 
1072  if (memory_object->has_instances()) {
1073  Handle<WeakArrayList> instances(memory_object->instances(), isolate);
1074  for (int i = 0; i < instances->length(); i++) {
1075  MaybeObject elem = instances->Get(i);
1076  HeapObject* heap_object;
1077  if (elem->GetHeapObjectIfWeak(&heap_object)) {
1078  Handle<WasmInstanceObject> instance(
1079  WasmInstanceObject::cast(heap_object), isolate);
1080  SetInstanceMemory(instance, new_buffer);
1081  } else {
1082  DCHECK(elem->IsCleared());
1083  }
1084  }
1085  }
1086  memory_object->set_array_buffer(*new_buffer);
1087  return static_cast<uint32_t>(old_size / wasm::kWasmPageSize);
1088 }
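// Return-value convention of Grow above: the previous size in wasm pages
// (64 KiB each) on success, or -1 on failure. Callers such as the
// memory.grow runtime path surface this value directly, and the JS-level
// WebAssembly.Memory.prototype.grow turns the -1 into an exception; e.g.
// growing a 1-page memory by 2 pages returns 1 and leaves a 3-page buffer.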
1089 
1090 // static
1091 MaybeHandle<WasmGlobalObject> WasmGlobalObject::New(
1092  Isolate* isolate, MaybeHandle<JSArrayBuffer> maybe_buffer,
1093  wasm::ValueType type, int32_t offset, bool is_mutable) {
1094  Handle<JSFunction> global_ctor(
1095  isolate->native_context()->wasm_global_constructor(), isolate);
1096  auto global_obj = Handle<WasmGlobalObject>::cast(
1097  isolate->factory()->NewJSObject(global_ctor));
1098 
1099  uint32_t type_size = wasm::ValueTypes::ElementSizeInBytes(type);
1100 
1101  Handle<JSArrayBuffer> buffer;
1102  if (!maybe_buffer.ToHandle(&buffer)) {
1103  // If no buffer was provided, create one long enough for the given type.
1104  buffer =
1105  isolate->factory()->NewJSArrayBuffer(SharedFlag::kNotShared, TENURED);
1106 
1107  const bool initialize = true;
1108  if (!JSArrayBuffer::SetupAllocatingData(buffer, isolate, type_size,
1109  initialize)) {
1110  return {};
1111  }
1112  }
1113 
1114  // Check that the offset is in bounds.
1115  CHECK_LE(offset + type_size, buffer->byte_length());
1116 
1117  global_obj->set_array_buffer(*buffer);
1118  global_obj->set_flags(0);
1119  global_obj->set_type(type);
1120  global_obj->set_offset(offset);
1121  global_obj->set_is_mutable(is_mutable);
1122 
1123  return global_obj;
1124 }
1125 
1126 void IndirectFunctionTableEntry::clear() {
1127  instance_->indirect_function_table_sig_ids()[index_] = -1;
1128  instance_->indirect_function_table_targets()[index_] = 0;
1129  instance_->indirect_function_table_refs()->set(
1130  index_, ReadOnlyRoots(instance_->GetIsolate()).undefined_value());
1131 }
1132 
1133 void IndirectFunctionTableEntry::Set(int sig_id,
1134  Handle<WasmInstanceObject> target_instance,
1135  int target_func_index) {
1136  TRACE_IFT(
1137  "IFT entry %p[%d] = {sig_id=%d, target_instance=%p, "
1138  "target_func_index=%d}\n",
1139  *instance_, index_, sig_id, *target_instance, target_func_index);
1140 
1141  Object* ref = nullptr;
1142  Address call_target = 0;
1143  if (target_func_index <
1144  static_cast<int>(target_instance->module()->num_imported_functions)) {
1145  // The function in the target instance was imported. Use its imports table,
1146  // which contains a tuple needed by the import wrapper.
1147  ImportedFunctionEntry entry(target_instance, target_func_index);
1148  ref = entry.object_ref();
1149  call_target = entry.target();
1150  } else {
1151  // The function in the target instance was not imported.
1152  ref = *target_instance;
1153  call_target = target_instance->GetCallTarget(target_func_index);
1154  }
1155 
1156  // Set the signature id, the target, and the receiver ref.
1157  instance_->indirect_function_table_sig_ids()[index_] = sig_id;
1158  instance_->indirect_function_table_targets()[index_] = call_target;
1159  instance_->indirect_function_table_refs()->set(index_, ref);
1160 }
1161 
1162 Object* IndirectFunctionTableEntry::object_ref() {
1163  return instance_->indirect_function_table_refs()->get(index_);
1164 }
1165 
1166 int IndirectFunctionTableEntry::sig_id() {
1167  return instance_->indirect_function_table_sig_ids()[index_];
1168 }
1169 
1170 Address IndirectFunctionTableEntry::target() {
1171  return instance_->indirect_function_table_targets()[index_];
1172 }
1173 
1174 void ImportedFunctionEntry::SetWasmToJs(
1175  Isolate* isolate, Handle<JSReceiver> callable,
1176  const wasm::WasmCode* wasm_to_js_wrapper) {
1177  TRACE_IFT("Import callable %p[%d] = {callable=%p, target=%p}\n", *instance_,
1178  index_, *callable, wasm_to_js_wrapper->instructions().start());
1179  DCHECK_EQ(wasm::WasmCode::kWasmToJsWrapper, wasm_to_js_wrapper->kind());
1180  Handle<Tuple2> tuple =
1181  isolate->factory()->NewTuple2(instance_, callable, TENURED);
1182  instance_->imported_function_refs()->set(index_, *tuple);
1183  instance_->imported_function_targets()[index_] =
1184  wasm_to_js_wrapper->instruction_start();
1185 }
1186 
1187 void ImportedFunctionEntry::SetWasmToWasm(WasmInstanceObject* instance,
1188  Address call_target) {
1189  TRACE_IFT("Import WASM %p[%d] = {instance=%p, target=%" PRIuPTR "}\n",
1190  *instance_, index_, instance, call_target);
1191  instance_->imported_function_refs()->set(index_, instance);
1192  instance_->imported_function_targets()[index_] = call_target;
1193 }
1194 
1195 WasmInstanceObject* ImportedFunctionEntry::instance() {
1196  // The imported reference entry is either a target instance or a tuple
1197  // of this instance and the target callable.
1198  Object* value = instance_->imported_function_refs()->get(index_);
1199  if (value->IsWasmInstanceObject()) {
1200  return WasmInstanceObject::cast(value);
1201  }
1202  Tuple2* tuple = Tuple2::cast(value);
1203  return WasmInstanceObject::cast(tuple->value1());
1204 }
1205 
1206 JSReceiver* ImportedFunctionEntry::callable() {
1207  return JSReceiver::cast(Tuple2::cast(object_ref())->value2());
1208 }
1209 
1210 Object* ImportedFunctionEntry::object_ref() {
1211  return instance_->imported_function_refs()->get(index_);
1212 }
1213 
1214 Address ImportedFunctionEntry::target() {
1215  return instance_->imported_function_targets()[index_];
1216 }
1217 
1218 bool WasmInstanceObject::EnsureIndirectFunctionTableWithMinimumSize(
1219  Handle<WasmInstanceObject> instance, uint32_t minimum_size) {
1220  uint32_t old_size = instance->indirect_function_table_size();
1221  if (old_size >= minimum_size) return false; // Nothing to do.
1222 
1223  Isolate* isolate = instance->GetIsolate();
1224  HandleScope scope(isolate);
1225  auto native_allocations = GetNativeAllocations(*instance);
1226  native_allocations->resize_indirect_function_table(isolate, instance,
1227  minimum_size);
1228  return true;
1229 }
1230 
1231 void WasmInstanceObject::SetRawMemory(byte* mem_start, size_t mem_size) {
1232  CHECK_LE(mem_size, wasm::max_mem_bytes());
1233 #if V8_HOST_ARCH_64_BIT
1234  uint64_t mem_mask64 = base::bits::RoundUpToPowerOfTwo64(mem_size) - 1;
1235  set_memory_start(mem_start);
1236  set_memory_size(mem_size);
1237  set_memory_mask(mem_mask64);
1238 #else
1239  // Must handle memory > 2GiB specially.
1240  CHECK_LE(mem_size, size_t{kMaxUInt32});
1241  uint32_t mem_mask32 =
1242  (mem_size > 2 * size_t{GB})
1243  ? 0xFFFFFFFFu
1244  : base::bits::RoundUpToPowerOfTwo32(static_cast<uint32_t>(mem_size)) -
1245  1;
1246  set_memory_start(mem_start);
1247  set_memory_size(mem_size);
1248  set_memory_mask(mem_mask32);
1249 #endif
1250 }
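// Mask example for SetRawMemory above: the mask is one less than the memory
// size rounded up to a power of two, so a 64 KiB (one page) memory gets
// memory_mask 0xFFFF and a 192 KiB memory gets 0x3FFFF; it is intended for
// cheap index masking in generated code, alongside explicit bounds checks.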
1251 
1252 const WasmModule* WasmInstanceObject::module() {
1253  return module_object()->module();
1254 }
1255 
1256 Handle<WasmDebugInfo> WasmInstanceObject::GetOrCreateDebugInfo(
1257  Handle<WasmInstanceObject> instance) {
1258  if (instance->has_debug_info()) {
1259  return handle(instance->debug_info(), instance->GetIsolate());
1260  }
1261  Handle<WasmDebugInfo> new_info = WasmDebugInfo::New(instance);
1262  DCHECK(instance->has_debug_info());
1263  return new_info;
1264 }
1265 
1266 Handle<WasmInstanceObject> WasmInstanceObject::New(
1267  Isolate* isolate, Handle<WasmModuleObject> module_object) {
1268  Handle<JSFunction> instance_cons(
1269  isolate->native_context()->wasm_instance_constructor(), isolate);
1270  Handle<JSObject> instance_object =
1271  isolate->factory()->NewJSObject(instance_cons, TENURED);
1272 
1273  Handle<WasmInstanceObject> instance(
1274  reinterpret_cast<WasmInstanceObject*>(*instance_object), isolate);
1275 
1276  // Initialize the imported function arrays.
1277  auto module = module_object->module();
1278  auto num_imported_functions = module->num_imported_functions;
1279  auto num_imported_mutable_globals = module->num_imported_mutable_globals;
1280  size_t native_allocations_size = EstimateNativeAllocationsSize(module);
1281  auto native_allocations = Managed<WasmInstanceNativeAllocations>::Allocate(
1282  isolate, native_allocations_size, instance, num_imported_functions,
1283  num_imported_mutable_globals);
1284  instance->set_managed_native_allocations(*native_allocations);
1285 
1286  Handle<FixedArray> imported_function_refs =
1287  isolate->factory()->NewFixedArray(num_imported_functions);
1288  instance->set_imported_function_refs(*imported_function_refs);
1289 
1290  Handle<Code> centry_stub = CodeFactory::CEntry(isolate);
1291  instance->set_centry_stub(*centry_stub);
1292 
1293  instance->SetRawMemory(nullptr, 0);
1294  instance->set_isolate_root(isolate->isolate_root());
1295  instance->set_stack_limit_address(
1296  isolate->stack_guard()->address_of_jslimit());
1297  instance->set_real_stack_limit_address(
1298  isolate->stack_guard()->address_of_real_jslimit());
1299  instance->set_globals_start(nullptr);
1300  instance->set_indirect_function_table_size(0);
1301  instance->set_indirect_function_table_sig_ids(nullptr);
1302  instance->set_indirect_function_table_targets(nullptr);
1303  instance->set_native_context(*isolate->native_context());
1304  instance->set_module_object(*module_object);
1305  instance->set_undefined_value(ReadOnlyRoots(isolate).undefined_value());
1306  instance->set_null_value(ReadOnlyRoots(isolate).null_value());
1307  instance->set_jump_table_start(
1308  module_object->native_module()->jump_table_start());
1309 
1310  // Insert the new instance into the module's weak list of instances.
1311  // TODO(mstarzinger): Allow to reuse holes in the {WeakArrayList} below.
1312  Handle<WeakArrayList> weak_instance_list(module_object->weak_instance_list(),
1313  isolate);
1314  weak_instance_list = WeakArrayList::AddToEnd(
1315  isolate, weak_instance_list, MaybeObjectHandle::Weak(instance));
1316  module_object->set_weak_instance_list(*weak_instance_list);
1317 
1318  return instance;
1319 }
1320 
1321 Address WasmInstanceObject::GetCallTarget(uint32_t func_index) {
1322  wasm::NativeModule* native_module = module_object()->native_module();
1323  if (func_index < native_module->num_imported_functions()) {
1324  return imported_function_targets()[func_index];
1325  }
1326  return native_module->GetCallTargetForFunction(func_index);
1327 }
1328 
1329 // static
1330 Handle<WasmExceptionObject> WasmExceptionObject::New(
1331  Isolate* isolate, const wasm::FunctionSig* sig,
1332  Handle<HeapObject> exception_tag) {
1333  Handle<JSFunction> exception_cons(
1334  isolate->native_context()->wasm_exception_constructor(), isolate);
1335  Handle<JSObject> exception_object =
1336  isolate->factory()->NewJSObject(exception_cons, TENURED);
1337  Handle<WasmExceptionObject> exception =
1338  Handle<WasmExceptionObject>::cast(exception_object);
1339 
1340  // Serialize the signature.
1341  DCHECK_EQ(0, sig->return_count());
1342  DCHECK_LE(sig->parameter_count(), std::numeric_limits<int>::max());
1343  int sig_size = static_cast<int>(sig->parameter_count());
1344  Handle<PodArray<wasm::ValueType>> serialized_sig =
1345  PodArray<wasm::ValueType>::New(isolate, sig_size, TENURED);
1346  int index = 0; // Index into the {PodArray} above.
1347  for (wasm::ValueType param : sig->parameters()) {
1348  serialized_sig->set(index++, param);
1349  }
1350  exception->set_serialized_signature(*serialized_sig);
1351  exception->set_exception_tag(*exception_tag);
1352 
1353  return exception;
1354 }
1355 
1356 bool WasmExceptionObject::IsSignatureEqual(const wasm::FunctionSig* sig) {
1357  DCHECK_EQ(0, sig->return_count());
1358  DCHECK_LE(sig->parameter_count(), std::numeric_limits<int>::max());
1359  int sig_size = static_cast<int>(sig->parameter_count());
1360  if (sig_size != serialized_signature()->length()) return false;
1361  for (int index = 0; index < sig_size; ++index) {
1362  if (sig->GetParam(index) != serialized_signature()->get(index)) {
1363  return false;
1364  }
1365  }
1366  return true;
1367 }
1368 
1369 bool WasmExportedFunction::IsWasmExportedFunction(Object* object) {
1370  if (!object->IsJSFunction()) return false;
1371  JSFunction* js_function = JSFunction::cast(object);
1372  if (Code::JS_TO_WASM_FUNCTION != js_function->code()->kind()) return false;
1373  DCHECK(js_function->shared()->HasWasmExportedFunctionData());
1374  return true;
1375 }
1376 
1377 WasmExportedFunction* WasmExportedFunction::cast(Object* object) {
1378  DCHECK(IsWasmExportedFunction(object));
1379  return reinterpret_cast<WasmExportedFunction*>(object);
1380 }
1381 
1382 WasmInstanceObject* WasmExportedFunction::instance() {
1383  return shared()->wasm_exported_function_data()->instance();
1384 }
1385 
1386 int WasmExportedFunction::function_index() {
1387  return shared()->wasm_exported_function_data()->function_index();
1388 }
1389 
1390 Handle<WasmExportedFunction> WasmExportedFunction::New(
1391  Isolate* isolate, Handle<WasmInstanceObject> instance,
1392  MaybeHandle<String> maybe_name, int func_index, int arity,
1393  Handle<Code> export_wrapper) {
1394  DCHECK_EQ(Code::JS_TO_WASM_FUNCTION, export_wrapper->kind());
1395  int num_imported_functions = instance->module()->num_imported_functions;
1396  int jump_table_offset = -1;
1397  if (func_index >= num_imported_functions) {
1398  ptrdiff_t jump_table_diff =
1399  instance->module_object()->native_module()->jump_table_offset(
1400  func_index);
1401  DCHECK(jump_table_diff >= 0 && jump_table_diff <= INT_MAX);
1402  jump_table_offset = static_cast<int>(jump_table_diff);
1403  }
1404  Handle<WasmExportedFunctionData> function_data =
1405  Handle<WasmExportedFunctionData>::cast(isolate->factory()->NewStruct(
1406  WASM_EXPORTED_FUNCTION_DATA_TYPE, TENURED));
1407  function_data->set_wrapper_code(*export_wrapper);
1408  function_data->set_instance(*instance);
1409  function_data->set_jump_table_offset(jump_table_offset);
1410  function_data->set_function_index(func_index);
1411  Handle<String> name;
1412  if (!maybe_name.ToHandle(&name)) {
1413  EmbeddedVector<char, 16> buffer;
1414  int length = SNPrintF(buffer, "%d", func_index);
1415  name = isolate->factory()
1416  ->NewStringFromOneByte(
1417  Vector<uint8_t>::cast(buffer.SubVector(0, length)))
1418  .ToHandleChecked();
1419  }
1420  NewFunctionArgs args = NewFunctionArgs::ForWasm(
1421  name, function_data, isolate->sloppy_function_without_prototype_map());
1422  Handle<JSFunction> js_function = isolate->factory()->NewFunction(args);
1423  // According to the spec, exported functions should not have a [[Construct]]
1424  // method.
1425  DCHECK(!js_function->IsConstructor());
1426  js_function->shared()->set_length(arity);
1427  js_function->shared()->set_internal_formal_parameter_count(arity);
1428  return Handle<WasmExportedFunction>::cast(js_function);
1429 }
1430 
1431 Address WasmExportedFunction::GetWasmCallTarget() {
1432  return instance()->GetCallTarget(function_index());
1433 }
1434 
1435 wasm::FunctionSig* WasmExportedFunction::sig() {
1436  return instance()->module()->functions[function_index()].sig;
1437 }
1438 
1439 Handle<AsmWasmData> AsmWasmData::New(
1440  Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module,
1441  Handle<FixedArray> export_wrappers, Handle<ByteArray> asm_js_offset_table,
1442  Handle<HeapNumber> uses_bitset) {
1443  const WasmModule* module = native_module->module();
1444  size_t memory_estimate =
1445  wasm::WasmCodeManager::EstimateNativeModuleCodeSize(module) +
1446  wasm::WasmCodeManager::EstimateNativeModuleNonCodeSize(module);
1447  Handle<Managed<wasm::NativeModule>> managed_native_module =
1448  Managed<wasm::NativeModule>::FromSharedPtr(isolate, memory_estimate,
1449  std::move(native_module));
1450  Handle<AsmWasmData> result = Handle<AsmWasmData>::cast(
1451  isolate->factory()->NewStruct(ASM_WASM_DATA_TYPE, TENURED));
1452  result->set_managed_native_module(*managed_native_module);
1453  result->set_export_wrappers(*export_wrappers);
1454  result->set_asm_js_offset_table(*asm_js_offset_table);
1455  result->set_uses_bitset(*uses_bitset);
1456  return result;
1457 }
1458 
1459 #undef TRACE
1460 #undef TRACE_IFT
1461 } // namespace internal
1462 } // namespace v8