V8 API Reference, version 7.2.502.16 (the V8 build bundled with Deno 0.2.4)
isolate.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/isolate.h"
6 
7 #include <stdlib.h>
8 
9 #include <atomic>
10 #include <fstream> // NOLINT(readability/streams)
11 #include <memory>
12 #include <sstream>
13 #include <unordered_map>
14 
15 #include "src/api-inl.h"
16 #include "src/assembler-inl.h"
17 #include "src/ast/ast-value-factory.h"
18 #include "src/ast/scopes.h"
19 #include "src/base/adapters.h"
20 #include "src/base/hashmap.h"
21 #include "src/base/platform/platform.h"
22 #include "src/base/sys-info.h"
23 #include "src/base/utils/random-number-generator.h"
24 #include "src/bootstrapper.h"
25 #include "src/builtins/builtins-promise-gen.h"
26 #include "src/builtins/constants-table-builder.h"
27 #include "src/cancelable-task.h"
28 #include "src/code-stubs.h"
29 #include "src/compilation-cache.h"
30 #include "src/compilation-statistics.h"
31 #include "src/compiler-dispatcher/compiler-dispatcher.h"
32 #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
33 #include "src/date.h"
34 #include "src/debug/debug-frames.h"
35 #include "src/debug/debug.h"
36 #include "src/deoptimizer.h"
37 #include "src/elements.h"
38 #include "src/frames-inl.h"
39 #include "src/ic/stub-cache.h"
40 #include "src/interpreter/interpreter.h"
41 #include "src/isolate-inl.h"
42 #include "src/libsampler/sampler.h"
43 #include "src/log.h"
44 #include "src/messages.h"
45 #include "src/microtask-queue.h"
46 #include "src/objects/frame-array-inl.h"
47 #include "src/objects/hash-table-inl.h"
48 #include "src/objects/js-array-inl.h"
49 #include "src/objects/js-generator-inl.h"
50 #include "src/objects/module-inl.h"
51 #include "src/objects/promise-inl.h"
52 #include "src/objects/slots.h"
53 #include "src/objects/smi.h"
54 #include "src/objects/stack-frame-info-inl.h"
55 #include "src/profiler/tracing-cpu-profiler.h"
56 #include "src/prototype.h"
57 #include "src/ptr-compr.h"
58 #include "src/regexp/regexp-stack.h"
59 #include "src/runtime-profiler.h"
60 #include "src/setup-isolate.h"
61 #include "src/simulator.h"
62 #include "src/snapshot/embedded-data.h"
63 #include "src/snapshot/startup-deserializer.h"
64 #include "src/tracing/tracing-category-observer.h"
65 #include "src/trap-handler/trap-handler.h"
66 #include "src/unicode-cache.h"
67 #include "src/v8.h"
68 #include "src/version.h"
69 #include "src/visitors.h"
70 #include "src/vm-state-inl.h"
71 #include "src/wasm/wasm-code-manager.h"
72 #include "src/wasm/wasm-engine.h"
73 #include "src/wasm/wasm-objects.h"
74 #include "src/zone/accounting-allocator.h"
75 #ifdef V8_INTL_SUPPORT
76 #include "unicode/uobject.h"
77 #endif // V8_INTL_SUPPORT
78 
79 #if defined(V8_USE_ADDRESS_SANITIZER)
80 #include <sanitizer/asan_interface.h>
81 #endif
82 
83 extern "C" const uint8_t* v8_Default_embedded_blob_;
84 extern "C" uint32_t v8_Default_embedded_blob_size_;
85 
86 namespace v8 {
87 namespace internal {
88 
#ifdef DEBUG
// Logs an isolate lifecycle event (tagged with the isolate's address and id)
// when --trace-isolates is set; expands to nothing in release builds.
#define TRACE_ISOLATE(tag)                                                 \
  do {                                                                     \
    if (FLAG_trace_isolates) {                                             \
      PrintF("Isolate %p (id %d)" #tag "\n", reinterpret_cast<void*>(this), \
             id());                                                        \
    }                                                                      \
  } while (false)
#else
#define TRACE_ISOLATE(tag)
#endif
100 
// Accessors for the default embedded blob that is compiled into the binary
// (see the extern "C" declarations above).
const uint8_t* DefaultEmbeddedBlob() { return v8_Default_embedded_blob_; }
uint32_t DefaultEmbeddedBlobSize() { return v8_Default_embedded_blob_size_; }
103 
#ifdef V8_MULTI_SNAPSHOTS
extern "C" const uint8_t* v8_Trusted_embedded_blob_;
extern "C" uint32_t v8_Trusted_embedded_blob_size_;

// Accessors for the alternative "trusted" embedded blob, available only in
// multi-snapshot builds.
const uint8_t* TrustedEmbeddedBlob() { return v8_Trusted_embedded_blob_; }
uint32_t TrustedEmbeddedBlobSize() { return v8_Trusted_embedded_blob_size_; }
#endif
111 
namespace {
// These variables provide access to the current embedded blob without requiring
// an isolate instance. This is needed e.g. by Code::InstructionStart, which may
// not have access to an isolate but still needs to access the embedded blob.
// The variables are initialized by each isolate in Init(). Writes and reads are
// relaxed since we can guarantee that the current thread has initialized these
// variables before accessing them. Different threads may race, but this is fine
// since they all attempt to set the same values of the blob pointer and size.

std::atomic<const uint8_t*> current_embedded_blob_(nullptr);
std::atomic<uint32_t> current_embedded_blob_size_(0);

// The various workflows around embedded snapshots are fairly complex. We need
// to support plain old snapshot builds, nosnap builds, and the requirements of
// subtly different serialization tests. There's two related knobs to twiddle:
//
// - The default embedded blob may be overridden by setting the sticky embedded
//   blob. This is set automatically whenever we create a new embedded blob.
//
// - Lifecycle management can be either manual or set to refcounting.
//
// A few situations to demonstrate their use:
//
// - A plain old snapshot build neither overrides the default blob nor
//   refcounts.
//
// - mksnapshot sets the sticky blob and manually frees the embedded
//   blob once done.
//
// - Most serializer tests do the same.
//
// - Nosnapshot builds set the sticky blob and enable refcounting.

// This mutex protects access to the following variables:
// - sticky_embedded_blob_
// - sticky_embedded_blob_size_
// - enable_embedded_blob_refcounting_
// - current_embedded_blob_refs_
base::LazyMutex current_embedded_blob_refcount_mutex_ = LAZY_MUTEX_INITIALIZER;

const uint8_t* sticky_embedded_blob_ = nullptr;
uint32_t sticky_embedded_blob_size_ = 0;

bool enable_embedded_blob_refcounting_ = true;
int current_embedded_blob_refs_ = 0;

// Accessors/mutator for the sticky blob. Per the mutex comment above, callers
// are expected to hold current_embedded_blob_refcount_mutex_.
const uint8_t* StickyEmbeddedBlob() { return sticky_embedded_blob_; }
uint32_t StickyEmbeddedBlobSize() { return sticky_embedded_blob_size_; }

void SetStickyEmbeddedBlob(const uint8_t* blob, uint32_t blob_size) {
  sticky_embedded_blob_ = blob;
  sticky_embedded_blob_size_ = blob_size;
}

}  // namespace
167 
// Switches embedded-blob lifecycle management from refcounting to manual;
// afterwards the blob must be released via FreeCurrentEmbeddedBlob().
void DisableEmbeddedBlobRefcounting() {
  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
  enable_embedded_blob_refcounting_ = false;
}
172 
// Manually frees the currently published embedded blob. Only legal after
// DisableEmbeddedBlobRefcounting(); a no-op if no sticky blob was ever set.
void FreeCurrentEmbeddedBlob() {
  CHECK(!enable_embedded_blob_refcounting_);
  // NOTE(review): the flag above is read before taking the mutex that guards
  // it; presumably DisableEmbeddedBlobRefcounting() is guaranteed to have
  // happened-before on every path reaching here -- confirm.
  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());

  if (StickyEmbeddedBlob() == nullptr) return;

  CHECK_EQ(StickyEmbeddedBlob(), Isolate::CurrentEmbeddedBlob());

  // Release the off-heap instruction stream backing the blob, then clear both
  // the process-wide atomics and the sticky pointers.
  InstructionStream::FreeOffHeapInstructionStream(
      const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlob()),
      Isolate::CurrentEmbeddedBlobSize());

  current_embedded_blob_.store(nullptr, std::memory_order_relaxed);
  current_embedded_blob_size_.store(0, std::memory_order_relaxed);
  sticky_embedded_blob_ = nullptr;
  sticky_embedded_blob_size_ = 0;
}
190 
// Installs {blob} as this isolate's embedded blob and publishes it through the
// process-wide current_embedded_blob_ atomics (see the namespace above).
void Isolate::SetEmbeddedBlob(const uint8_t* blob, uint32_t blob_size) {
  CHECK_NOT_NULL(blob);

  embedded_blob_ = blob;
  embedded_blob_size_ = blob_size;
  current_embedded_blob_.store(blob, std::memory_order_relaxed);
  current_embedded_blob_size_.store(blob_size, std::memory_order_relaxed);

#ifdef DEBUG
  // Verify that the contents of the embedded blob are unchanged from
  // serialization-time, just to ensure the compiler isn't messing with us.
  EmbeddedData d = EmbeddedData::FromBlob();
  CHECK_EQ(d.Hash(), d.CreateHash());
#endif  // DEBUG
}
206 
// Detaches this isolate from the embedded blob and clears the process-wide
// and sticky pointers. Only legal while refcounting is enabled and this
// isolate's blob is the one currently published.
void Isolate::ClearEmbeddedBlob() {
  CHECK(enable_embedded_blob_refcounting_);
  CHECK_EQ(embedded_blob_, CurrentEmbeddedBlob());
  CHECK_EQ(embedded_blob_, StickyEmbeddedBlob());

  embedded_blob_ = nullptr;
  embedded_blob_size_ = 0;
  current_embedded_blob_.store(nullptr, std::memory_order_relaxed);
  current_embedded_blob_size_.store(0, std::memory_order_relaxed);
  sticky_embedded_blob_ = nullptr;
  sticky_embedded_blob_size_ = 0;
}
219 
// Accessors for this isolate's embedded blob (set via SetEmbeddedBlob()).
const uint8_t* Isolate::embedded_blob() const { return embedded_blob_; }
uint32_t Isolate::embedded_blob_size() const { return embedded_blob_size_; }
222 
223 // static
224 const uint8_t* Isolate::CurrentEmbeddedBlob() {
225  return current_embedded_blob_.load(std::memory_order::memory_order_relaxed);
226 }
227 
228 // static
229 uint32_t Isolate::CurrentEmbeddedBlobSize() {
230  return current_embedded_blob_size_.load(
231  std::memory_order::memory_order_relaxed);
232 }
233 
// Resets this per-thread state to defaults and records the owning isolate,
// current thread id, and (under the simulator) the thread's simulator.
void ThreadLocalTop::Initialize(Isolate* isolate) {
  *this = ThreadLocalTop();  // value-reset every field first
  isolate_ = isolate;
#ifdef USE_SIMULATOR
  simulator_ = Simulator::current(isolate);
#endif
  thread_id_ = ThreadId::Current();
  // Cache the address of the trap handler's thread-in-wasm flag so it can be
  // reached without a TLS lookup.
  thread_in_wasm_flag_address_ = reinterpret_cast<Address>(
      trap_handler::GetThreadInWasmThreadLocalAddress());
}
244 
// Pops any promises still on the promise stack (i.e. pushes that were never
// matched by a PopPromise -- the original comment says "PopPromise" but the
// loop pops leftovers, so it presumably means unmatched pushes).
void ThreadLocalTop::Free() {
  // Match unmatched PopPromise calls.
  while (promise_on_stack_) isolate_->PopPromise();
}
249 
250 
// Process-wide static state shared by all isolates. The two TLS keys are
// created in InitializeOncePerProcess().
base::Thread::LocalStorageKey Isolate::isolate_key_;
base::Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
base::Atomic32 Isolate::isolate_counter_ = 0;
#if DEBUG
// Set once isolate_key_ has been created (checked elsewhere via Relaxed loads).
base::Atomic32 Isolate::isolate_key_created_ = 0;
#endif
257 
// Returns the calling thread's PerIsolateThreadData, creating and inserting a
// new entry if none exists yet. Thread-safe: lookup-or-insert happens under
// the table mutex so racing threads cannot both allocate an entry.
Isolate::PerIsolateThreadData*
    Isolate::FindOrAllocatePerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  PerIsolateThreadData* per_thread = nullptr;
  {
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread == nullptr) {
      per_thread = new PerIsolateThreadData(this, thread_id);
      thread_data_table_.Insert(per_thread);
    }
    DCHECK(thread_data_table_.Lookup(thread_id) == per_thread);
  }
  return per_thread;
}
273 
274 
// Removes the calling thread's entry from the thread-data table, if one
// exists. A no-op when the thread has no valid id (never touched V8).
void Isolate::DiscardPerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::TryGetCurrent();
  if (thread_id.IsValid()) {
    DCHECK(!thread_manager_->mutex_owner_.Equals(thread_id));
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    PerIsolateThreadData* per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread) {
      // The entry must not still be associated with an archived thread state.
      DCHECK(!per_thread->thread_state_);
      thread_data_table_.Remove(per_thread);
    }
  }
}
287 
288 
289 Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
290  ThreadId thread_id = ThreadId::Current();
291  return FindPerThreadDataForThread(thread_id);
292 }
293 
294 
295 Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
296  ThreadId thread_id) {
297  PerIsolateThreadData* per_thread = nullptr;
298  {
299  base::MutexGuard lock_guard(&thread_data_table_mutex_);
300  per_thread = thread_data_table_.Lookup(thread_id);
301  }
302  return per_thread;
303 }
304 
305 
// One-time process setup: creates the TLS keys used to find the current
// isolate / per-thread data, and initializes the memcopy function pointers.
void Isolate::InitializeOncePerProcess() {
  isolate_key_ = base::Thread::CreateThreadLocalKey();
#if DEBUG
  // Record that the key now exists so debug checks elsewhere can assert on it.
  base::Relaxed_Store(&isolate_key_created_, 1);
#endif
  per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey();
  init_memcopy_functions();
}
314 
// Returns the address registered for {id} in the isolate_addresses_ table.
Address Isolate::get_address_from_id(IsolateAddressId id) {
  return isolate_addresses_[id];
}
318 
// Visits the roots of an archived thread state stored in {thread_storage}.
// Returns the pointer just past the consumed ThreadLocalTop bytes, so callers
// can walk a packed sequence of archived states.
char* Isolate::Iterate(RootVisitor* v, char* thread_storage) {
  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
  Iterate(v, thread);
  return thread_storage + sizeof(ThreadLocalTop);
}
324 
325 
// Lets {v} visit the archived ThreadLocalTop stored in buffer {t}.
void Isolate::IterateThread(ThreadVisitor* v, char* t) {
  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
  v->VisitThread(this, thread);
}
330 
// Visits all GC roots held in {thread}'s top-of-stack state: pending/scheduled
// exceptions, the pending message, the current context, the object slots of
// every active TryCatch block, and finally all pointers on the native stack.
void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) {
  // Visit the roots from the top for a given thread.
  v->VisitRootPointer(Root::kTop, nullptr,
                      ObjectSlot(&thread->pending_exception_));
  v->VisitRootPointer(Root::kTop, nullptr,
                      ObjectSlot(&thread->pending_message_obj_));
  v->VisitRootPointer(Root::kTop, nullptr, ObjectSlot(&thread->context_));
  v->VisitRootPointer(Root::kTop, nullptr,
                      ObjectSlot(&thread->scheduled_exception_));

  for (v8::TryCatch* block = thread->try_catch_handler(); block != nullptr;
       block = block->next_) {
    // TODO(3770): Make TryCatch::exception_ an Address (and message_obj_ too).
    v->VisitRootPointer(
        Root::kTop, nullptr,
        ObjectSlot(reinterpret_cast<Address>(&(block->exception_))));
    v->VisitRootPointer(
        Root::kTop, nullptr,
        ObjectSlot(reinterpret_cast<Address>(&(block->message_obj_))));
  }

  // Iterate over pointers on native execution stack.
  for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
    it.frame()->Iterate(v);
  }
}
357 
358 void Isolate::Iterate(RootVisitor* v) {
359  ThreadLocalTop* current_t = thread_local_top();
360  Iterate(v, current_t);
361 }
362 
363 void Isolate::IterateDeferredHandles(RootVisitor* visitor) {
364  for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
365  deferred = deferred->next_) {
366  deferred->Iterate(visitor);
367  }
368 }
369 
370 
#ifdef DEBUG
// Debug-only check: returns true iff {handle} lies inside one of the handle
// blocks owned by any DeferredHandles instance on this isolate's list.
bool Isolate::IsDeferredHandle(Address* handle) {
  // Comparing unrelated pointers (not from the same array) is undefined
  // behavior, so cast to Address before making arbitrary comparisons.
  Address handle_as_address = reinterpret_cast<Address>(handle);
  // Each DeferredHandles instance keeps the handles to one job in the
  // concurrent recompilation queue, containing a list of blocks. Each block
  // contains kHandleBlockSize handles except for the first block, which may
  // not be fully filled.
  // We iterate through all the blocks to see whether the argument handle
  // belongs to one of the blocks. If so, it is deferred.
  for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
       deferred = deferred->next_) {
    std::vector<Address*>* blocks = &deferred->blocks_;
    for (size_t i = 0; i < blocks->size(); i++) {
      // Block 0 may be partially filled, so use its recorded limit instead of
      // assuming a full kHandleBlockSize span.
      Address* block_limit = (i == 0) ? deferred->first_block_limit_
                                      : blocks->at(i) + kHandleBlockSize;
      if (reinterpret_cast<Address>(blocks->at(i)) <= handle_as_address &&
          handle_as_address < reinterpret_cast<Address>(block_limit)) {
        return true;
      }
    }
  }
  return false;
}
#endif  // DEBUG
397 
398 
// Makes {that} the current thread's innermost TryCatch handler.
void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
  thread_local_top()->set_try_catch_handler(that);
}
402 
403 
// Unlinks {that} -- which must be the innermost handler -- restoring its
// enclosing TryCatch (if any) as the current handler.
void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
  DCHECK(thread_local_top()->try_catch_handler() == that);
  thread_local_top()->set_try_catch_handler(that->next_);
}
408 
409 
// Builds a string of the current stack. Reentrancy-guarded by
// stack_trace_nesting_level_: a fault while already printing (level 1) dumps
// the partial message instead, and any deeper nesting aborts the process.
Handle<String> Isolate::StackTraceString() {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    HeapStringAllocator allocator;
    StringStream::ClearMentionedObjectCache(this);
    StringStream accumulator(&allocator);
    // Publish the accumulator so the reentrant branch below can dump it.
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator);
    Handle<String> stack_trace = accumulator.ToString(this);
    incomplete_message_ = nullptr;
    stack_trace_nesting_level_ = 0;
    return stack_trace;
  } else if (stack_trace_nesting_level_ == 1) {
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToStdOut();
    return factory()->empty_string();
  } else {
    base::OS::Abort();
    // Unreachable
    return factory()->empty_string();
  }
}
436 
// Prints a stack trace annotated with up to four caller-supplied debug
// pointers, then aborts the process.
void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3,
                                   void* ptr4) {
  StackTraceFailureMessage message(this, ptr1, ptr2, ptr3, ptr4);
  message.Print();
  base::OS::Abort();
}
443 
void StackTraceFailureMessage::Print() volatile {
  // Print the details of this failure message object, including its own address
  // to force stack allocation.
  base::OS::PrintError(
      "Stacktrace:\n ptr1=%p\n ptr2=%p\n ptr3=%p\n ptr4=%p\n "
      "failure_message_object=%p\n%s",
      ptr1_, ptr2_, ptr3_, ptr4_, this, &js_stack_trace_[0]);
}
452 
// Captures a JS stack trace plus the last few code objects into this object's
// fixed buffers, so they appear in crash minidumps alongside the debug ptrs.
StackTraceFailureMessage::StackTraceFailureMessage(Isolate* isolate, void* ptr1,
                                                   void* ptr2, void* ptr3,
                                                   void* ptr4) {
  isolate_ = isolate;
  ptr1_ = ptr1;
  ptr2_ = ptr2;
  ptr3_ = ptr3;
  ptr4_ = ptr4;
  // Write a stacktrace into the {js_stack_trace_} buffer.
  const size_t buffer_length = arraysize(js_stack_trace_);
  memset(&js_stack_trace_, 0, buffer_length);
  // buffer_length - 1 keeps the zeroed final byte as a NUL terminator.
  FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1);
  StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
  isolate->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
  // Keeping a reference to the last code objects to increase likelihood that
  // they get included in the minidump.
  const size_t code_objects_length = arraysize(code_objects_);
  size_t i = 0;
  StackFrameIterator it(isolate);
  for (; !it.done() && i < code_objects_length; it.Advance()) {
    if (it.frame()->type() == StackFrame::INTERNAL) continue;
    code_objects_[i++] = it.frame()->unchecked_code();
  }
}
477 
478 namespace {
479 
// Accumulates a FrameArray for error stack traces, appending at most {limit}
// frames while applying the frame-skipping {mode} and per-frame visibility
// filtering (builtins, security context, strict-mode poisoning).
class FrameArrayBuilder {
 public:
  FrameArrayBuilder(Isolate* isolate, FrameSkipMode mode, int limit,
                    Handle<Object> caller)
      : isolate_(isolate), mode_(mode), limit_(limit), caller_(caller) {
    switch (mode_) {
      case SKIP_FIRST:
        skip_next_frame_ = true;
        break;
      case SKIP_UNTIL_SEEN:
        DCHECK(caller_->IsJSFunction());
        skip_next_frame_ = true;
        break;
      case SKIP_NONE:
        skip_next_frame_ = false;
        break;
    }

    // Start small; the array grows on append and is shrunk in GetElements().
    elements_ = isolate->factory()->NewFrameArray(Min(limit, 10));
  }

  // Appends a frame for a suspended async function/generator, reconstructed
  // from the generator object instead of a live stack frame.
  void AppendAsyncFrame(Handle<JSGeneratorObject> generator_object) {
    if (full()) return;
    Handle<JSFunction> function(generator_object->function(), isolate_);
    if (!IsVisibleInStackTrace(function)) return;
    int flags = FrameArray::kIsAsync;
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;

    Handle<Object> receiver(generator_object->receiver(), isolate_);
    Handle<AbstractCode> code(
        AbstractCode::cast(function->shared()->GetBytecodeArray()), isolate_);
    int offset = Smi::ToInt(generator_object->input_or_debug_pos());
    // The stored bytecode offset is relative to a different base than what
    // is used in the source position table, hence the subtraction.
    offset -= BytecodeArray::kHeaderSize - kHeapObjectTag;
    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function, code,
                                          offset, flags);
  }

  // Appends a synthetic frame representing a pending Promise.all() join.
  void AppendPromiseAllFrame(Handle<Context> context, int offset) {
    if (full()) return;
    int flags = FrameArray::kIsAsync | FrameArray::kIsPromiseAll;

    Handle<Context> native_context(context->native_context(), isolate_);
    Handle<JSFunction> function(native_context->promise_all(), isolate_);
    if (!IsVisibleInStackTrace(function)) return;

    Handle<Object> receiver(native_context->promise_function(), isolate_);
    Handle<AbstractCode> code(AbstractCode::cast(function->code()), isolate_);
    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function, code,
                                          offset, flags);
  }

  // Appends a regular JavaScript frame from a FrameSummary.
  void AppendJavaScriptFrame(
      FrameSummary::JavaScriptFrameSummary const& summary) {
    // Filter out internal frames that we do not want to show.
    if (!IsVisibleInStackTrace(summary.function())) return;

    Handle<AbstractCode> abstract_code = summary.abstract_code();
    const int offset = summary.code_offset();

    bool is_constructor = summary.is_constructor();
    // Help CallSite::IsConstructor correctly detect hand-written
    // construct stubs.
    if (abstract_code->IsCode() &&
        Code::cast(*abstract_code)->is_construct_stub()) {
      is_constructor = true;
    }

    int flags = 0;
    Handle<JSFunction> function = summary.function();
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
    if (is_constructor) flags |= FrameArray::kIsConstructor;

    elements_ = FrameArray::AppendJSFrame(
        elements_, TheHoleToUndefined(isolate_, summary.receiver()), function,
        abstract_code, offset, flags);
  }

  // Appends a compiled WASM (or asm.js-translated) frame.
  void AppendWasmCompiledFrame(
      FrameSummary::WasmCompiledFrameSummary const& summary) {
    if (summary.code()->kind() != wasm::WasmCode::kFunction) return;
    Handle<WasmInstanceObject> instance = summary.wasm_instance();
    int flags = 0;
    if (instance->module_object()->is_asm_js()) {
      flags |= FrameArray::kIsAsmJsWasmFrame;
      if (summary.at_to_number_conversion()) {
        flags |= FrameArray::kAsmJsAtNumberConversion;
      }
    } else {
      flags |= FrameArray::kIsWasmFrame;
    }

    elements_ = FrameArray::AppendWasmFrame(
        elements_, instance, summary.function_index(), summary.code(),
        summary.code_offset(), flags);
  }

  // Appends a frame running in the WASM interpreter (no compiled code object).
  void AppendWasmInterpretedFrame(
      FrameSummary::WasmInterpretedFrameSummary const& summary) {
    Handle<WasmInstanceObject> instance = summary.wasm_instance();
    int flags = FrameArray::kIsWasmInterpretedFrame;
    DCHECK(!instance->module_object()->is_asm_js());
    elements_ = FrameArray::AppendWasmFrame(elements_, instance,
                                            summary.function_index(), {},
                                            summary.byte_offset(), flags);
  }

  // Appends the single JS frame represented by a builtin-exit frame.
  void AppendBuiltinExitFrame(BuiltinExitFrame* exit_frame) {
    Handle<JSFunction> function = handle(exit_frame->function(), isolate_);

    // Filter out internal frames that we do not want to show.
    if (!IsVisibleInStackTrace(function)) return;

    Handle<Object> receiver(exit_frame->receiver(), isolate_);
    Handle<Code> code(exit_frame->LookupCode(), isolate_);
    const int offset =
        static_cast<int>(exit_frame->pc() - code->InstructionStart());

    int flags = 0;
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
    if (exit_frame->IsConstructor()) flags |= FrameArray::kIsConstructor;

    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function,
                                          Handle<AbstractCode>::cast(code),
                                          offset, flags);
  }

  // True once the frame limit is reached; Append* methods then early-return.
  bool full() { return elements_->FrameCount() >= limit_; }

  // Trims unused capacity and returns the accumulated frames.
  Handle<FrameArray> GetElements() {
    elements_->ShrinkToFit(isolate_);
    return elements_;
  }

 private:
  // Poison stack frames below the first strict mode frame.
  // The stack trace API should not expose receivers and function
  // objects on frames deeper than the top-most one with a strict mode
  // function.
  bool IsStrictFrame(Handle<JSFunction> function) {
    if (!encountered_strict_function_) {
      encountered_strict_function_ =
          is_strict(function->shared()->language_mode());
    }
    return encountered_strict_function_;
  }

  // Determines whether the given stack frame should be displayed in a stack
  // trace.
  bool IsVisibleInStackTrace(Handle<JSFunction> function) {
    return ShouldIncludeFrame(function) && IsNotHidden(function) &&
           IsInSameSecurityContext(function);
  }

  // This mechanism excludes a number of uninteresting frames from the stack
  // trace. This can be the first frame (which will be a builtin-exit frame
  // for the error constructor builtin) or every frame until encountering a
  // user-specified function.
  bool ShouldIncludeFrame(Handle<JSFunction> function) {
    switch (mode_) {
      case SKIP_NONE:
        return true;
      case SKIP_FIRST:
        if (!skip_next_frame_) return true;
        skip_next_frame_ = false;
        return false;
      case SKIP_UNTIL_SEEN:
        if (skip_next_frame_ && (*function == *caller_)) {
          skip_next_frame_ = false;
          return false;
        }
        return !skip_next_frame_;
    }
    UNREACHABLE();
  }

  bool IsNotHidden(Handle<JSFunction> function) {
    // Functions defined not in user scripts are not visible unless directly
    // exposed, in which case the native flag is set.
    // The --builtins-in-stack-traces command line flag allows including
    // internal call sites in the stack trace for debugging purposes.
    if (!FLAG_builtins_in_stack_traces &&
        !function->shared()->IsUserJavaScript()) {
      return function->shared()->native();
    }
    return true;
  }

  bool IsInSameSecurityContext(Handle<JSFunction> function) {
    return isolate_->context()->HasSameSecurityTokenAs(function->context());
  }

  // TODO(jgruber): Fix all cases in which frames give us a hole value (e.g.
  // the receiver in RegExp constructor frames).
  Handle<Object> TheHoleToUndefined(Isolate* isolate, Handle<Object> in) {
    return (in->IsTheHole(isolate))
               ? Handle<Object>::cast(isolate->factory()->undefined_value())
               : in;
  }

  Isolate* isolate_;
  const FrameSkipMode mode_;
  int limit_;
  const Handle<Object> caller_;
  bool skip_next_frame_ = true;
  bool encountered_strict_function_ = false;
  Handle<FrameArray> elements_;
};
689 
// Reads Error.stackTraceLimit into *result, clamped to be non-negative.
// Returns false (meaning: capture no stack trace) if the property is not a
// number. Counts a use-counter event when the limit differs from the default.
bool GetStackTraceLimit(Isolate* isolate, int* result) {
  Handle<JSObject> error = isolate->error_function();

  Handle<String> key = isolate->factory()->stackTraceLimit_string();
  Handle<Object> stack_trace_limit = JSReceiver::GetDataProperty(error, key);
  if (!stack_trace_limit->IsNumber()) return false;

  // Ensure that limit is not negative.
  *result = Max(FastD2IChecked(stack_trace_limit->Number()), 0);

  if (*result != FLAG_stack_trace_limit) {
    isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit);
  }

  return true;
}
706 
// Extension callback that unconditionally returns false ("no extension").
bool NoExtension(const v8::FunctionCallbackInfo<v8::Value>&) { return false; }
708 
709 bool IsBuiltinFunction(Isolate* isolate, HeapObject* object,
710  Builtins::Name builtin_index) {
711  if (!object->IsJSFunction()) return false;
712  JSFunction* const function = JSFunction::cast(object);
713  return function->code() == isolate->builtins()->builtin(builtin_index);
714 }
715 
// Walks an async promise-reaction chain starting at {promise}, appending
// synthetic async frames to {builder} until the builder is full, the chain
// ends, or an untraversable (settled / multi-reaction / non-native) link is
// encountered.
void CaptureAsyncStackTrace(Isolate* isolate, Handle<JSPromise> promise,
                            FrameArrayBuilder* builder) {
  while (!builder->full()) {
    // Check that the {promise} is not settled.
    if (promise->status() != Promise::kPending) return;

    // Check that we have exactly one PromiseReaction on the {promise}.
    if (!promise->reactions()->IsPromiseReaction()) return;
    Handle<PromiseReaction> reaction(
        PromiseReaction::cast(promise->reactions()), isolate);
    if (!reaction->next()->IsSmi()) return;

    // Check if the {reaction} has one of the known async function or
    // async generator continuations as its fulfill handler.
    if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncFunctionAwaitResolveClosure) ||
        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncGeneratorAwaitResolveClosure) ||
        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncGeneratorYieldResolveClosure)) {
      // Now peek into the handlers' AwaitContext to get to
      // the JSGeneratorObject for the async function.
      Handle<Context> context(
          JSFunction::cast(reaction->fulfill_handler())->context(), isolate);
      Handle<JSGeneratorObject> generator_object(
          JSGeneratorObject::cast(context->extension()), isolate);
      CHECK(generator_object->is_suspended());

      // Append async frame corresponding to the {generator_object}.
      builder->AppendAsyncFrame(generator_object);

      // Try to continue from here.
      if (generator_object->IsJSAsyncFunctionObject()) {
        Handle<JSAsyncFunctionObject> async_function_object =
            Handle<JSAsyncFunctionObject>::cast(generator_object);
        promise = handle(async_function_object->promise(), isolate);
      } else {
        Handle<JSAsyncGeneratorObject> async_generator_object =
            Handle<JSAsyncGeneratorObject>::cast(generator_object);
        // An empty request queue means there is nothing further to follow.
        if (async_generator_object->queue()->IsUndefined(isolate)) return;
        Handle<AsyncGeneratorRequest> async_generator_request(
            AsyncGeneratorRequest::cast(async_generator_object->queue()),
            isolate);
        promise = handle(JSPromise::cast(async_generator_request->promise()),
                         isolate);
      }
    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                                 Builtins::kPromiseAllResolveElementClosure)) {
      Handle<JSFunction> function(JSFunction::cast(reaction->fulfill_handler()),
                                  isolate);
      Handle<Context> context(function->context(), isolate);

      // We store the offset of the promise into the {function}'s
      // hash field for promise resolve element callbacks.
      int const offset = Smi::ToInt(Smi::cast(function->GetIdentityHash())) - 1;
      builder->AppendPromiseAllFrame(context, offset);

      // Now peek into the Promise.all() resolve element context to
      // find the promise capability that's being resolved when all
      // the concurrent promises resolve.
      int const index =
          PromiseBuiltinsAssembler::kPromiseAllResolveElementCapabilitySlot;
      Handle<PromiseCapability> capability(
          PromiseCapability::cast(context->get(index)), isolate);
      if (!capability->promise()->IsJSPromise()) return;
      promise = handle(JSPromise::cast(capability->promise()), isolate);
    } else {
      // We have some generic promise chain here, so try to
      // continue with the chained promise on the reaction
      // (only works for native promise chains).
      Handle<HeapObject> promise_or_capability(
          reaction->promise_or_capability(), isolate);
      if (promise_or_capability->IsJSPromise()) {
        promise = Handle<JSPromise>::cast(promise_or_capability);
      } else if (promise_or_capability->IsPromiseCapability()) {
        Handle<PromiseCapability> capability =
            Handle<PromiseCapability>::cast(promise_or_capability);
        if (!capability->promise()->IsJSPromise()) return;
        promise = handle(JSPromise::cast(capability->promise()), isolate);
      } else {
        // Otherwise the {promise_or_capability} must be undefined here.
        CHECK(promise_or_capability->IsUndefined(isolate));
        return;
      }
    }
  }
}
803 
804 } // namespace
805 
// Captures a "simple" (Error.stack-style) stack trace as a structured
// FrameArray wrapped in a JSArray. Returns undefined when the stack trace
// limit is zero/unset. {mode} and {caller} control which top frames are
// skipped (e.g. for Error.captureStackTrace).
Handle<Object> Isolate::CaptureSimpleStackTrace(Handle<JSReceiver> error_object,
                                                FrameSkipMode mode,
                                                Handle<Object> caller) {
  // Capturing must not re-enter JS; it only inspects existing frames.
  DisallowJavascriptExecution no_js(this);

  int limit;
  if (!GetStackTraceLimit(this, &limit)) return factory()->undefined_value();

  FrameArrayBuilder builder(this, mode, limit, caller);

  // Build the regular stack trace, and remember the last relevant
  // frame ID and inlined index (for the async stack trace handling
  // below, which starts from this last frame).
  for (StackFrameIterator it(this); !it.done() && !builder.full();
       it.Advance()) {
    StackFrame* const frame = it.frame();
    switch (frame->type()) {
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION:
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BUILTIN:
      case StackFrame::WASM_COMPILED:
      case StackFrame::WASM_INTERPRETER_ENTRY: {
        // A standard frame may include many summarized frames (due to
        // inlining).
        std::vector<FrameSummary> frames;
        StandardFrame::cast(frame)->Summarize(&frames);
        // Iterate in reverse: summaries are ordered bottom-to-top, but the
        // builder wants frames top-of-stack first.
        for (size_t i = frames.size(); i-- != 0 && !builder.full();) {
          const auto& summary = frames[i];
          if (summary.IsJavaScript()) {
            //=========================================================
            // Handle a JavaScript frame.
            //=========================================================
            auto const& java_script = summary.AsJavaScript();
            builder.AppendJavaScriptFrame(java_script);
          } else if (summary.IsWasmCompiled()) {
            //=========================================================
            // Handle a WASM compiled frame.
            //=========================================================
            auto const& wasm_compiled = summary.AsWasmCompiled();
            builder.AppendWasmCompiledFrame(wasm_compiled);
          } else if (summary.IsWasmInterpreted()) {
            //=========================================================
            // Handle a WASM interpreted frame.
            //=========================================================
            auto const& wasm_interpreted = summary.AsWasmInterpreted();
            builder.AppendWasmInterpretedFrame(wasm_interpreted);
          }
        }
        break;
      }

      case StackFrame::BUILTIN_EXIT:
        // BuiltinExitFrames are not standard frames, so they do not have
        // Summarize(). However, they may have one JS frame worth showing.
        builder.AppendBuiltinExitFrame(BuiltinExitFrame::cast(frame));
        break;

      default:
        break;
    }
  }

  // If --async-stack-traces are enabled and the "current microtask" is a
  // PromiseReactionJobTask, we try to enrich the stack trace with async
  // frames.
  if (FLAG_async_stack_traces) {
    Handle<Object> current_microtask = factory()->current_microtask();
    if (current_microtask->IsPromiseReactionJobTask()) {
      Handle<PromiseReactionJobTask> promise_reaction_job_task =
          Handle<PromiseReactionJobTask>::cast(current_microtask);
      // Check if the {reaction} has one of the known async function or
      // async generator continuations as its fulfill handler.
      if (IsBuiltinFunction(this, promise_reaction_job_task->handler(),
                            Builtins::kAsyncFunctionAwaitResolveClosure) ||
          IsBuiltinFunction(this, promise_reaction_job_task->handler(),
                            Builtins::kAsyncGeneratorAwaitResolveClosure) ||
          IsBuiltinFunction(this, promise_reaction_job_task->handler(),
                            Builtins::kAsyncGeneratorYieldResolveClosure)) {
        // Now peak into the handlers' AwaitContext to get to
        // the JSGeneratorObject for the async function.
        Handle<Context> context(
            JSFunction::cast(promise_reaction_job_task->handler())->context(),
            this);
        Handle<JSGeneratorObject> generator_object(
            JSGeneratorObject::cast(context->extension()), this);
        if (generator_object->is_executing()) {
          if (generator_object->IsJSAsyncFunctionObject()) {
            // Async function: the outer promise hangs directly off the
            // async function object.
            Handle<JSAsyncFunctionObject> async_function_object =
                Handle<JSAsyncFunctionObject>::cast(generator_object);
            Handle<JSPromise> promise(async_function_object->promise(), this);
            CaptureAsyncStackTrace(this, promise, &builder);
          } else {
            // Async generator: follow the head of the request queue to the
            // promise for the current request.
            Handle<JSAsyncGeneratorObject> async_generator_object =
                Handle<JSAsyncGeneratorObject>::cast(generator_object);
            Handle<AsyncGeneratorRequest> async_generator_request(
                AsyncGeneratorRequest::cast(async_generator_object->queue()),
                this);
            Handle<JSPromise> promise(
                JSPromise::cast(async_generator_request->promise()), this);
            CaptureAsyncStackTrace(this, promise, &builder);
          }
        }
      } else {
        // The {promise_reaction_job_task} doesn't belong to an await (or
        // yield inside an async generator), but we might still be able to
        // find an async frame if we follow along the chain of promises on
        // the {promise_reaction_job_task}.
        Handle<HeapObject> promise_or_capability(
            promise_reaction_job_task->promise_or_capability(), this);
        if (promise_or_capability->IsJSPromise()) {
          Handle<JSPromise> promise =
              Handle<JSPromise>::cast(promise_or_capability);
          CaptureAsyncStackTrace(this, promise, &builder);
        }
      }
    }
  }

  // TODO(yangguo): Queue this structured stack trace for preprocessing on GC.
  // TODO(3770): Drop explicit cast.
  return factory()->NewJSArrayWithElements(
      Handle<FixedArray>(builder.GetElements().location()));
}
931 
// If the embedder enabled stack-trace capture for uncaught exceptions,
// captures a detailed stack trace and stores it on {error_object} under the
// detailed-stack-trace symbol. Propagates any exception thrown while setting
// the property; otherwise returns {error_object}.
MaybeHandle<JSReceiver> Isolate::CaptureAndSetDetailedStackTrace(
    Handle<JSReceiver> error_object) {
  if (capture_stack_trace_for_uncaught_exceptions_) {
    // Capture stack trace for a detailed exception message.
    Handle<Name> key = factory()->detailed_stack_trace_symbol();
    Handle<FixedArray> stack_trace = CaptureCurrentStackTrace(
        stack_trace_for_uncaught_exceptions_frame_limit_,
        stack_trace_for_uncaught_exceptions_options_);
    RETURN_ON_EXCEPTION(
        this,
        JSReceiver::SetProperty(this, error_object, key, stack_trace,
                                LanguageMode::kStrict),
        JSReceiver);
  }
  return error_object;
}
948 
// Captures a simple stack trace and stores it on {error_object} under the
// stack-trace symbol (used later for Error.stack string formatting).
// Propagates any exception thrown while setting the property; otherwise
// returns {error_object}.
MaybeHandle<JSReceiver> Isolate::CaptureAndSetSimpleStackTrace(
    Handle<JSReceiver> error_object, FrameSkipMode mode,
    Handle<Object> caller) {
  // Capture stack trace for simple stack trace string formatting.
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      CaptureSimpleStackTrace(error_object, mode, caller);
  RETURN_ON_EXCEPTION(
      this,
      JSReceiver::SetProperty(this, error_object, key, stack_trace,
                              LanguageMode::kStrict),
      JSReceiver);
  return error_object;
}
963 
964 Handle<FixedArray> Isolate::GetDetailedStackTrace(
965  Handle<JSObject> error_object) {
966  Handle<Name> key_detailed = factory()->detailed_stack_trace_symbol();
967  Handle<Object> stack_trace =
968  JSReceiver::GetDataProperty(error_object, key_detailed);
969  if (stack_trace->IsFixedArray()) return Handle<FixedArray>::cast(stack_trace);
970  return Handle<FixedArray>();
971 }
972 
// Returns an "abstract PC" for the topmost JavaScript frame and fills in
// {line}/{column} (1-based when a Script is available). For interpreted
// frames the returned address points into the bytecode array rather than at
// machine code. Returns kNullAddress with line/column set to -1 when there
// is no JavaScript frame.
Address Isolate::GetAbstractPC(int* line, int* column) {
  JavaScriptFrameIterator it(this);

  if (it.done()) {
    *line = -1;
    *column = -1;
    return kNullAddress;
  }
  JavaScriptFrame* frame = it.frame();
  DCHECK(!frame->is_builtin());
  int position = frame->position();

  Object* maybe_script = frame->function()->shared()->script();
  if (maybe_script->IsScript()) {
    Handle<Script> script(Script::cast(maybe_script), this);
    Script::PositionInfo info;
    Script::GetPositionInfo(script, position, &info, Script::WITH_OFFSET);
    // PositionInfo is 0-based; report 1-based numbers to the caller.
    *line = info.line + 1;
    *column = info.column + 1;
  } else {
    // No script: fall back to reporting the raw source position as "line".
    *line = position;
    *column = -1;
  }

  if (frame->is_interpreted()) {
    // For interpreted code, compute the address of the current bytecode
    // instead of the machine pc.
    InterpretedFrame* iframe = static_cast<InterpretedFrame*>(frame);
    Address bytecode_start =
        iframe->GetBytecodeArray()->GetFirstBytecodeAddress();
    return bytecode_start + iframe->GetBytecodeOffset();
  }

  return frame->pc();
}
1006 
 public:
  // Helper that converts FrameSummary objects into StackFrameInfo objects
  // for CaptureCurrentStackTrace. Does not own the isolate.
  explicit CaptureStackTraceHelper(Isolate* isolate) : isolate_(isolate) {}

  // Dispatches to the JavaScript- or Wasm-specific overload; any summary
  // reaching here is expected to be one of the two.
  Handle<StackFrameInfo> NewStackFrameObject(FrameSummary& summ) {
    if (summ.IsJavaScript()) return NewStackFrameObject(summ.AsJavaScript());
    if (summ.IsWasm()) return NewStackFrameObject(summ.AsWasm());
    UNREACHABLE();
  }
1016 
  // Builds (or fetches from the per-code stack-frame cache) a StackFrameInfo
  // for a JavaScript frame summary.
  // NOTE(review): this excerpt appears to be missing the parameter line
  // (presumably `const FrameSummary::JavaScriptFrameSummary& summ) {`) and
  // the declaration of {cache} — TODO confirm against upstream before use.
  Handle<StackFrameInfo> NewStackFrameObject(
    int code_offset;
    Handle<ByteArray> source_position_table;
    Handle<Object> maybe_cache;
    // Unless optimizing for size, consult the per-AbstractCode cache keyed
    // by code offset so repeated captures reuse the same StackFrameInfo.
    if (!FLAG_optimize_for_size) {
      code_offset = summ.code_offset();
      source_position_table =
          handle(summ.abstract_code()->source_position_table(), isolate_);
      maybe_cache = handle(summ.abstract_code()->stack_frame_cache(), isolate_);
      if (maybe_cache->IsSimpleNumberDictionary()) {
        cache = Handle<SimpleNumberDictionary>::cast(maybe_cache);
      } else {
        cache = SimpleNumberDictionary::New(isolate_, 1);
      }
      int entry = cache->FindEntry(isolate_, code_offset);
      if (entry != NumberDictionary::kNotFound) {
        // Cache hit: return the previously created frame info.
        Handle<StackFrameInfo> frame(
            StackFrameInfo::cast(cache->ValueAt(entry)), isolate_);
        return frame;
      }
    }

    // Cache miss (or caching disabled): build a fresh StackFrameInfo.
    Handle<StackFrameInfo> frame = factory()->NewStackFrameInfo();
    Handle<Script> script = Handle<Script>::cast(summ.script());
    Script::PositionInfo info;
    bool valid_pos = Script::GetPositionInfo(script, summ.SourcePosition(),
                                             &info, Script::WITH_OFFSET);
    if (valid_pos) {
      // Report 1-based line/column numbers.
      frame->set_line_number(info.line + 1);
      frame->set_column_number(info.column + 1);
    }
    frame->set_script_id(script->id());
    frame->set_script_name(script->name());
    frame->set_script_name_or_source_url(script->GetNameOrSourceURL());
    frame->set_is_eval(script->compilation_type() ==
                       Script::COMPILATION_TYPE_EVAL);
    Handle<String> function_name = summ.FunctionName();
    frame->set_function_name(*function_name);
    frame->set_is_constructor(summ.is_constructor());
    frame->set_is_wasm(false);
    if (!FLAG_optimize_for_size) {
      // Store the new frame info back; only write the dictionary to the
      // code object if it grew (reallocated) or wasn't installed before.
      auto new_cache =
          SimpleNumberDictionary::Set(isolate_, cache, code_offset, frame);
      if (*new_cache != *cache || !maybe_cache->IsNumberDictionary()) {
        AbstractCode::SetStackFrameCache(summ.abstract_code(), new_cache);
      }
    }
    frame->set_id(next_id());
    return frame;
  }
1069 
1070  Handle<StackFrameInfo> NewStackFrameObject(
1071  const FrameSummary::WasmFrameSummary& summ) {
1072  Handle<StackFrameInfo> info = factory()->NewStackFrameInfo();
1073 
1074  Handle<WasmModuleObject> module_object(
1075  summ.wasm_instance()->module_object(), isolate_);
1076  Handle<String> name = WasmModuleObject::GetFunctionName(
1077  isolate_, module_object, summ.function_index());
1078  info->set_function_name(*name);
1079  // Encode the function index as line number (1-based).
1080  info->set_line_number(summ.function_index() + 1);
1081  // Encode the byte offset as column (1-based).
1082  int position = summ.byte_offset();
1083  // Make position 1-based.
1084  if (position >= 0) ++position;
1085  info->set_column_number(position);
1086  info->set_script_id(summ.script()->id());
1087  info->set_is_wasm(true);
1088  info->set_id(next_id());
1089  return info;
1090  }
1091 
 private:
  // Convenience accessor for the isolate's factory.
  inline Factory* factory() { return isolate_->factory(); }

  // Hands out monotonically increasing frame-info ids, persisted on the
  // isolate so ids stay unique across helper instances.
  int next_id() const {
    int id = isolate_->last_stack_frame_info_id() + 1;
    isolate_->set_last_stack_frame_info_id(id);
    return id;
  }

  Isolate* isolate_;  // Not owned.
};
1103 
// Captures up to {frame_limit} StackFrameInfo objects for the current stack
// (detailed stack trace, as exposed via v8::StackTrace). Frames from other
// security contexts are filtered out unless {options} allows them. The
// result array is shrunk to the number of frames actually collected.
Handle<FixedArray> Isolate::CaptureCurrentStackTrace(
    int frame_limit, StackTrace::StackTraceOptions options) {
  // Capturing must not re-enter JS.
  DisallowJavascriptExecution no_js(this);
  CaptureStackTraceHelper helper(this);

  // Ensure no negative values.
  int limit = Max(frame_limit, 0);
  Handle<FixedArray> stack_trace_elems = factory()->NewFixedArray(limit);

  int frames_seen = 0;
  for (StackTraceFrameIterator it(this); !it.done() && (frames_seen < limit);
       it.Advance()) {
    StandardFrame* frame = it.frame();
    // Set initial size to the maximum inlining level + 1 for the outermost
    // function.
    std::vector<FrameSummary> frames;
    frame->Summarize(&frames);
    // Walk summaries top-of-stack first (reverse order).
    for (size_t i = frames.size(); i != 0 && frames_seen < limit; i--) {
      FrameSummary& frame = frames[i - 1];
      if (!frame.is_subject_to_debugging()) continue;
      // Filter frames from other security contexts.
      if (!(options & StackTrace::kExposeFramesAcrossSecurityOrigins) &&
          !this->context()->HasSameSecurityTokenAs(*frame.native_context()))
        continue;
      Handle<StackFrameInfo> new_frame_obj = helper.NewStackFrameObject(frame);
      stack_trace_elems->set(frames_seen, *new_frame_obj);
      frames_seen++;
    }
  }
  return FixedArray::ShrinkOrEmpty(this, stack_trace_elems, frames_seen);
}
1135 
1136 
// Prints the current stack to {out}. Guarded by a nesting counter so that a
// crash while printing (double fault) emits whatever partial output was
// accumulated instead of recursing forever.
void Isolate::PrintStack(FILE* out, PrintStackMode mode) {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    StringStream::ClearMentionedObjectCache(this);
    HeapStringAllocator allocator;
    StringStream accumulator(&allocator);
    // Keep the accumulator reachable so a nested fault can flush it.
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator, mode);
    accumulator.OutputToFile(out);
    InitializeLoggingAndCounters();
    accumulator.Log(this);
    incomplete_message_ = nullptr;
    stack_trace_nesting_level_ = 0;
  } else if (stack_trace_nesting_level_ == 1) {
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    // Flush whatever the outer invocation managed to accumulate.
    incomplete_message_->OutputToFile(out);
  }
}
1159 
1160 
1161 static void PrintFrames(Isolate* isolate,
1162  StringStream* accumulator,
1163  StackFrame::PrintMode mode) {
1164  StackFrameIterator it(isolate);
1165  for (int i = 0; !it.done(); it.Advance()) {
1166  it.frame()->Print(accumulator, mode, i++);
1167  }
1168 }
1169 
// Prints the JS stack trace (and, in verbose mode, per-frame details plus
// the mentioned-object cache) into {accumulator}. No-op when no JS frames
// have been entered yet.
void Isolate::PrintStack(StringStream* accumulator, PrintStackMode mode) {
  // The MentionedObjectCache is not GC-proof at the moment.
  DisallowHeapAllocation no_gc;
  HandleScope scope(this);
  DCHECK(accumulator->IsMentionedObjectCacheClear(this));

  // Avoid printing anything if there are no frames.
  if (c_entry_fp(thread_local_top()) == 0) return;

  accumulator->Add(
      "\n==== JS stack trace =========================================\n\n");
  PrintFrames(this, accumulator, StackFrame::OVERVIEW);
  if (mode == kPrintStackVerbose) {
    accumulator->Add(
        "\n==== Details ================================================\n\n");
    PrintFrames(this, accumulator, StackFrame::DETAILS);
    accumulator->PrintMentionedObjectCache(this);
  }
  accumulator->Add("=====================\n\n");
}
1190 
1191 
// Installs the embedder callback invoked when an access check fails
// (consumed by ReportFailedAccessCheck below).
void Isolate::SetFailedAccessCheckCallback(
    v8::FailedAccessCheckCallback callback) {
  thread_local_top()->failed_access_check_callback_ = callback;
}
1196 
1197 
// Reports a failed access check on {receiver}: schedules a TypeError when
// the embedder installed no callback, otherwise invokes the callback with
// the AccessCheckInfo's data object.
void Isolate::ReportFailedAccessCheck(Handle<JSObject> receiver) {
  if (!thread_local_top()->failed_access_check_callback_) {
    return ScheduleThrow(*factory()->NewTypeError(MessageTemplate::kNoAccess));
  }

  DCHECK(receiver->IsAccessCheckNeeded());
  DCHECK(!context().is_null());

  // Get the data object from access check info.
  HandleScope scope(this);
  Handle<Object> data;
  { DisallowHeapAllocation no_gc;
    // Raw AccessCheckInfo* is only safe while allocation is disallowed.
    AccessCheckInfo* access_check_info = AccessCheckInfo::Get(this, receiver);
    if (!access_check_info) {
      AllowHeapAllocation doesnt_matter_anymore;
      return ScheduleThrow(
          *factory()->NewTypeError(MessageTemplate::kNoAccess));
    }
    data = handle(access_check_info->data(), this);
  }

  // Leaving JavaScript.
  VMState<EXTERNAL> state(this);
  thread_local_top()->failed_access_check_callback_(
      v8::Utils::ToLocal(receiver), v8::ACCESS_HAS, v8::Utils::ToLocal(data));
}
1224 
1225 
// Security check: returns true when code running in {accessing_context} may
// access {receiver}. Fast path compares native contexts / security tokens
// for global proxies; otherwise defers to the embedder's access-check
// callback from the receiver's AccessCheckInfo.
bool Isolate::MayAccess(Handle<Context> accessing_context,
                        Handle<JSObject> receiver) {
  DCHECK(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded());

  // Check for compatibility between the security tokens in the
  // current lexical context and the accessed object.

  // During bootstrapping, callback functions are not enabled yet.
  if (bootstrapper()->IsActive()) return true;
  {
    DisallowHeapAllocation no_gc;

    if (receiver->IsJSGlobalProxy()) {
      Object* receiver_context =
          JSGlobalProxy::cast(*receiver)->native_context();
      // A detached global proxy has no native context: deny access.
      if (!receiver_context->IsContext()) return false;

      // Get the native context of current top context.
      // avoid using Isolate::native_context() because it uses Handle.
      Context native_context =
          accessing_context->global_object()->native_context();
      if (receiver_context == native_context) return true;

      if (Context::cast(receiver_context)->security_token() ==
          native_context->security_token())
        return true;
    }
  }

  HandleScope scope(this);
  Handle<Object> data;
  v8::AccessCheckCallback callback = nullptr;
  { DisallowHeapAllocation no_gc;
    // Raw AccessCheckInfo* is only safe while allocation is disallowed.
    AccessCheckInfo* access_check_info = AccessCheckInfo::Get(this, receiver);
    if (!access_check_info) return false;
    Object* fun_obj = access_check_info->callback();
    callback = v8::ToCData<v8::AccessCheckCallback>(fun_obj);
    data = handle(access_check_info->data(), this);
  }

  LOG(this, ApiSecurityCheck());

  {
    // Leaving JavaScript.
    VMState<EXTERNAL> state(this);
    return callback(v8::Utils::ToLocal(accessing_context),
                    v8::Utils::ToLocal(receiver), v8::Utils::ToLocal(data));
  }
}
1275 
1276 
// Constructs and throws a RangeError for stack overflow. Returns the
// exception sentinel. With --abort-on-stack-or-string-length-overflow the
// process is aborted instead.
Object* Isolate::StackOverflow() {
  if (FLAG_abort_on_stack_or_string_length_overflow) {
    FATAL("Aborting on stack overflow");
  }

  // We are already out of stack; do not run any more JS.
  DisallowJavascriptExecution no_js(this);
  HandleScope scope(this);

  Handle<JSFunction> fun = range_error_function();
  Handle<Object> msg = factory()->NewStringFromAsciiChecked(
      MessageFormatter::TemplateString(MessageTemplate::kStackOverflow));
  Handle<Object> no_caller;
  Handle<Object> exception;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      this, exception,
      ErrorUtils::Construct(this, fun, fun, msg, SKIP_NONE, no_caller, true));

  Throw(*exception, nullptr);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && FLAG_stress_compaction) {
    heap()->CollectAllGarbage(Heap::kNoGCFlags,
                              GarbageCollectionReason::kTesting);
  }
#endif  // VERIFY_HEAP

  return ReadOnlyRoots(heap()).exception();
}
1305 
1306 
// Throws the special termination exception (uncatchable by JS) to force
// execution to unwind.
Object* Isolate::TerminateExecution() {
  return Throw(ReadOnlyRoots(this).termination_exception(), nullptr);
}
1310 
1311 
// Cancels a previously requested termination: clears the termination
// exception from the TryCatch handler and from both the pending and
// scheduled exception slots, if present there.
void Isolate::CancelTerminateExecution() {
  if (try_catch_handler()) {
    try_catch_handler()->has_terminated_ = false;
  }
  if (has_pending_exception() &&
      pending_exception() == ReadOnlyRoots(this).termination_exception()) {
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
  if (has_scheduled_exception() &&
      scheduled_exception() == ReadOnlyRoots(this).termination_exception()) {
    thread_local_top()->external_caught_exception_ = false;
    clear_scheduled_exception();
  }
}
1327 
1328 
// Queues an API interrupt ({callback}, {data}) under the execution-access
// lock and requests the stack guard to deliver it; consumed by
// InvokeApiInterruptCallbacks.
void Isolate::RequestInterrupt(InterruptCallback callback, void* data) {
  ExecutionAccess access(this);
  api_interrupts_queue_.push(InterruptEntry(callback, data));
  stack_guard()->RequestApiInterrupt();
}
1334 
1335 
// Drains the API interrupt queue, invoking each queued callback. Each entry
// is popped under the execution-access lock but invoked outside it, so a
// callback may itself queue further interrupts.
void Isolate::InvokeApiInterruptCallbacks() {
  RuntimeCallTimerScope runtimeTimer(
      this, RuntimeCallCounterId::kInvokeApiInterruptCallbacks);
  // Note: callback below should be called outside of execution access lock.
  while (true) {
    InterruptEntry entry;
    {
      ExecutionAccess access(this);
      if (api_interrupts_queue_.empty()) return;
      entry = api_interrupts_queue_.front();
      api_interrupts_queue_.pop();
    }
    VMState<EXTERNAL> state(this);
    HandleScope handle_scope(this);
    // entry.first is the callback, entry.second its opaque data argument.
    entry.first(reinterpret_cast<v8::Isolate*>(this), entry.second);
  }
}
1353 
1354 
// Reports an exception thrown while the bootstrapper is active (i.e. before
// normal message/stack-trace machinery is safe to use) directly to stderr,
// including the script name and line number when available.
void ReportBootstrappingException(Handle<Object> exception,
                                  MessageLocation* location) {
  base::OS::PrintError("Exception thrown during bootstrapping\n");
  if (location == nullptr || location->script().is_null()) return;
  // We are bootstrapping and caught an error where the location is set
  // and we have a script for the location.
  // In this case we could have an extension (or an internal error
  // somewhere) and we print out the line number at which the error occurred
  // to the console for easier debugging.
  int line_number =
      location->script()->GetLineNumber(location->start_pos()) + 1;
  if (exception->IsString() && location->script()->name()->IsString()) {
    base::OS::PrintError(
        "Extension or internal compilation error: %s in %s at line %d.\n",
        String::cast(*exception)->ToCString().get(),
        String::cast(location->script()->name())->ToCString().get(),
        line_number);
  } else if (location->script()->name()->IsString()) {
    base::OS::PrintError(
        "Extension or internal compilation error in %s at line %d.\n",
        String::cast(location->script()->name())->ToCString().get(),
        line_number);
  } else if (exception->IsString()) {
    base::OS::PrintError("Extension or internal compilation error: %s.\n",
                         String::cast(*exception)->ToCString().get());
  } else {
    base::OS::PrintError("Extension or internal compilation error.\n");
  }
#ifdef OBJECT_PRINT
  // Since comments and empty lines have been stripped from the source of
  // builtins, print the actual source here so that line numbers match.
  if (location->script()->source()->IsString()) {
    Handle<String> src(String::cast(location->script()->source()),
                       location->script()->GetIsolate());
    PrintF("Failing script:");
    int len = src->length();
    if (len == 0) {
      PrintF(" <not available>\n");
    } else {
      PrintF("\n");
      // Note: this {line_number} intentionally shadows the outer one; it
      // counts lines while echoing the script source.
      int line_number = 1;
      PrintF("%5d: ", line_number);
      for (int i = 0; i < len; i++) {
        uint16_t character = src->Get(i);
        PrintF("%c", character);
        if (character == '\n' && i < len - 2) {
          PrintF("%5d: ", ++line_number);
        }
      }
      PrintF("\n");
    }
  }
#endif
}
1409 
// Throws {raw_exception}: optionally prints diagnostics, notifies the
// debugger, creates a pending message object when required, honors
// --abort-on-uncaught-exception, and finally records the pending exception.
// Returns the exception sentinel for the caller to propagate.
Object* Isolate::Throw(Object* raw_exception, MessageLocation* location) {
  DCHECK(!has_pending_exception());

  HandleScope scope(this);
  Handle<Object> exception(raw_exception, this);

  if (FLAG_print_all_exceptions) {
    printf("=========================================================\n");
    printf("Exception thrown:\n");
    if (location) {
      Handle<Script> script = location->script();
      Handle<Object> name(script->GetNameOrSourceURL(), this);
      printf("at ");
      if (name->IsString() && String::cast(*name)->length() > 0)
        String::cast(*name)->PrintOn(stdout);
      else
        printf("<anonymous>");
// Script::GetLineNumber and Script::GetColumnNumber can allocate on the heap to
// initialize the line_ends array, so be careful when calling them.
#ifdef DEBUG
      if (AllowHeapAllocation::IsAllowed()) {
#else
      if ((false)) {
#endif
        printf(", %d:%d - %d:%d\n",
               Script::GetLineNumber(script, location->start_pos()) + 1,
               Script::GetColumnNumber(script, location->start_pos()),
               Script::GetLineNumber(script, location->end_pos()) + 1,
               Script::GetColumnNumber(script, location->end_pos()));
        // Make sure to update the raw exception pointer in case it moved.
        raw_exception = *exception;
      } else {
        printf(", line %d\n", script->GetLineNumber(location->start_pos()) + 1);
      }
    }
    raw_exception->Print();
    printf("Stack Trace:\n");
    PrintStack(stdout);
    printf("=========================================================\n");
  }

  // Determine whether a message needs to be created for the given exception
  // depending on the following criteria:
  // 1) External v8::TryCatch missing: Always create a message because any
  //    JavaScript handler for a finally-block might re-throw to top-level.
  // 2) External v8::TryCatch exists: Only create a message if the handler
  //    captures messages or is verbose (which reports despite the catch).
  // 3) ReThrow from v8::TryCatch: The message from a previous throw still
  //    exists and we preserve it instead of creating a new message.
  bool requires_message = try_catch_handler() == nullptr ||
                          try_catch_handler()->is_verbose_ ||
                          try_catch_handler()->capture_message_;
  bool rethrowing_message = thread_local_top()->rethrowing_message_;

  thread_local_top()->rethrowing_message_ = false;

  // Notify debugger of exception.
  if (is_catchable_by_javascript(raw_exception)) {
    debug()->OnThrow(exception);
  }

  // Generate the message if required.
  if (requires_message && !rethrowing_message) {
    MessageLocation computed_location;
    // If no location was specified we try to use a computed one instead.
    if (location == nullptr && ComputeLocation(&computed_location)) {
      location = &computed_location;
    }

    if (bootstrapper()->IsActive()) {
      // It's not safe to try to make message objects or collect stack traces
      // while the bootstrapper is active since the infrastructure may not have
      // been properly initialized.
      ReportBootstrappingException(exception, location);
    } else {
      Handle<Object> message_obj = CreateMessage(exception, location);
      thread_local_top()->pending_message_obj_ = *message_obj;

      // For any exception not caught by JavaScript, even when an external
      // handler is present:
      // If the abort-on-uncaught-exception flag is specified, and if the
      // embedder didn't specify a custom uncaught exception callback,
      // or if the custom callback determined that V8 should abort, then
      // abort.
      if (FLAG_abort_on_uncaught_exception) {
        CatchType prediction = PredictExceptionCatcher();
        if ((prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) &&
            (!abort_on_uncaught_exception_callback_ ||
             abort_on_uncaught_exception_callback_(
                 reinterpret_cast<v8::Isolate*>(this)))) {
          // Prevent endless recursion.
          FLAG_abort_on_uncaught_exception = false;
          // This flag is intended for use by JavaScript developers, so
          // print a user-friendly stack trace (not an internal one).
          PrintF(stderr, "%s\n\nFROM\n",
                 MessageHandler::GetLocalizedMessage(this, message_obj).get());
          PrintCurrentStackTrace(stderr);
          base::OS::Abort();
        }
      }
    }
  }

  // Set the exception being thrown.
  set_pending_exception(*exception);
  return ReadOnlyRoots(heap()).exception();
}
1517 
1518 
// Re-throws {exception} without creating a new message object (the message
// from the original throw is preserved). Returns the exception sentinel.
Object* Isolate::ReThrow(Object* exception) {
  DCHECK(!has_pending_exception());

  // Set the exception being re-thrown.
  set_pending_exception(exception);
  return ReadOnlyRoots(heap()).exception();
}
1526 
1527 
1528 Object* Isolate::UnwindAndFindHandler() {
1529  Object* exception = pending_exception();
1530 
1531  auto FoundHandler = [&](Context context, Address instruction_start,
1532  intptr_t handler_offset,
1533  Address constant_pool_address, Address handler_sp,
1534  Address handler_fp) {
1535  // Store information to be consumed by the CEntry.
1536  thread_local_top()->pending_handler_context_ = context;
1537  thread_local_top()->pending_handler_entrypoint_ =
1538  instruction_start + handler_offset;
1539  thread_local_top()->pending_handler_constant_pool_ = constant_pool_address;
1540  thread_local_top()->pending_handler_fp_ = handler_fp;
1541  thread_local_top()->pending_handler_sp_ = handler_sp;
1542 
1543  // Return and clear pending exception.
1544  clear_pending_exception();
1545  return exception;
1546  };
1547 
1548  // Special handling of termination exceptions, uncatchable by JavaScript and
1549  // Wasm code, we unwind the handlers until the top ENTRY handler is found.
1550  bool catchable_by_js = is_catchable_by_javascript(exception);
1551 
1552  // Compute handler and stack unwinding information by performing a full walk
1553  // over the stack and dispatching according to the frame type.
1554  for (StackFrameIterator iter(this);; iter.Advance()) {
1555  // Handler must exist.
1556  DCHECK(!iter.done());
1557 
1558  StackFrame* frame = iter.frame();
1559 
1560  switch (frame->type()) {
1561  case StackFrame::ENTRY:
1562  case StackFrame::CONSTRUCT_ENTRY: {
1563  // For JSEntryStub frames we always have a handler.
1564  StackHandler* handler = frame->top_handler();
1565 
1566  // Restore the next handler.
1567  thread_local_top()->handler_ = handler->next()->address();
1568 
1569  // Gather information from the handler.
1570  Code code = frame->LookupCode();
1571  HandlerTable table(code);
1572  return FoundHandler(Context(), code->InstructionStart(),
1573  table.LookupReturn(0), code->constant_pool(),
1574  handler->address() + StackHandlerConstants::kSize,
1575  0);
1576  }
1577 
1578  case StackFrame::WASM_COMPILED: {
1579  if (trap_handler::IsThreadInWasm()) {
1580  trap_handler::ClearThreadInWasm();
1581  }
1582 
1583  // For WebAssembly frames we perform a lookup in the handler table.
1584  if (!catchable_by_js) break;
1585  WasmCompiledFrame* wasm_frame = static_cast<WasmCompiledFrame*>(frame);
1586  int stack_slots = 0; // Will contain stack slot count of frame.
1587  int offset = wasm_frame->LookupExceptionHandlerInTable(&stack_slots);
1588  if (offset < 0) break;
1589  // Compute the stack pointer from the frame pointer. This ensures that
1590  // argument slots on the stack are dropped as returning would.
1591  Address return_sp = frame->fp() +
1592  StandardFrameConstants::kFixedFrameSizeAboveFp -
1593  stack_slots * kPointerSize;
1594 
1595  // This is going to be handled by Wasm, so we need to set the TLS flag
1596  // again. It was cleared above assuming the frame would be unwound.
1597  trap_handler::SetThreadInWasm();
1598 
1599  // Gather information from the frame.
1600  wasm::WasmCode* wasm_code =
1601  wasm_engine()->code_manager()->LookupCode(frame->pc());
1602  return FoundHandler(Context(), wasm_code->instruction_start(), offset,
1603  wasm_code->constant_pool(), return_sp, frame->fp());
1604  }
1605 
1606  case StackFrame::OPTIMIZED: {
1607  // For optimized frames we perform a lookup in the handler table.
1608  if (!catchable_by_js) break;
1609  OptimizedFrame* js_frame = static_cast<OptimizedFrame*>(frame);
1610  int stack_slots = 0; // Will contain stack slot count of frame.
1611  int offset =
1612  js_frame->LookupExceptionHandlerInTable(&stack_slots, nullptr);
1613  if (offset < 0) break;
1614  // Compute the stack pointer from the frame pointer. This ensures
1615  // that argument slots on the stack are dropped as returning would.
1616  Address return_sp = frame->fp() +
1617  StandardFrameConstants::kFixedFrameSizeAboveFp -
1618  stack_slots * kPointerSize;
1619 
1620  // Gather information from the frame.
1621  Code code = frame->LookupCode();
1622 
1623  // TODO(bmeurer): Turbofanned BUILTIN frames appear as OPTIMIZED,
1624  // but do not have a code kind of OPTIMIZED_FUNCTION.
1625  if (code->kind() == Code::OPTIMIZED_FUNCTION &&
1626  code->marked_for_deoptimization()) {
1627  // If the target code is lazy deoptimized, we jump to the original
1628  // return address, but we make a note that we are throwing, so
1629  // that the deoptimizer can do the right thing.
1630  offset = static_cast<int>(frame->pc() - code->entry());
1631  set_deoptimizer_lazy_throw(true);
1632  }
1633 
1634  return FoundHandler(Context(), code->InstructionStart(), offset,
1635  code->constant_pool(), return_sp, frame->fp());
1636  }
1637 
1638  case StackFrame::STUB: {
1639  // Some stubs are able to handle exceptions.
1640  if (!catchable_by_js) break;
1641  StubFrame* stub_frame = static_cast<StubFrame*>(frame);
1642  Code code = stub_frame->LookupCode();
1643  if (!code->IsCode() || code->kind() != Code::BUILTIN ||
1644  !code->handler_table_offset() || !code->is_turbofanned()) {
1645  break;
1646  }
1647 
1648  int stack_slots = 0; // Will contain stack slot count of frame.
1649  int offset = stub_frame->LookupExceptionHandlerInTable(&stack_slots);
1650  if (offset < 0) break;
1651 
1652  // Compute the stack pointer from the frame pointer. This ensures
1653  // that argument slots on the stack are dropped as returning would.
1654  Address return_sp = frame->fp() +
1655  StandardFrameConstants::kFixedFrameSizeAboveFp -
1656  stack_slots * kPointerSize;
1657 
1658  return FoundHandler(Context(), code->InstructionStart(), offset,
1659  code->constant_pool(), return_sp, frame->fp());
1660  }
1661 
1662  case StackFrame::INTERPRETED: {
1663  // For interpreted frame we perform a range lookup in the handler table.
1664  if (!catchable_by_js) break;
1665  InterpretedFrame* js_frame = static_cast<InterpretedFrame*>(frame);
1666  int register_slots = InterpreterFrameConstants::RegisterStackSlotCount(
1667  js_frame->GetBytecodeArray()->register_count());
1668  int context_reg = 0; // Will contain register index holding context.
1669  int offset =
1670  js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
1671  if (offset < 0) break;
1672  // Compute the stack pointer from the frame pointer. This ensures that
1673  // argument slots on the stack are dropped as returning would.
1674  // Note: This is only needed for interpreted frames that have been
1675  // materialized by the deoptimizer. If there is a handler frame
1676  // in between then {frame->sp()} would already be correct.
1677  Address return_sp = frame->fp() -
1678  InterpreterFrameConstants::kFixedFrameSizeFromFp -
1679  register_slots * kPointerSize;
1680 
1681  // Patch the bytecode offset in the interpreted frame to reflect the
1682  // position of the exception handler. The special builtin below will
1683  // take care of continuing to dispatch at that position. Also restore
1684  // the correct context for the handler from the interpreter register.
1685  Context context =
1686  Context::cast(js_frame->ReadInterpreterRegister(context_reg));
1687  js_frame->PatchBytecodeOffset(static_cast<int>(offset));
1688 
1689  Code code =
1690  builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
1691  return FoundHandler(context, code->InstructionStart(), 0,
1692  code->constant_pool(), return_sp, frame->fp());
1693  }
1694 
1695  case StackFrame::BUILTIN:
1696  // For builtin frames we are guaranteed not to find a handler.
1697  if (catchable_by_js) {
1698  CHECK_EQ(-1,
1699  JavaScriptFrame::cast(frame)->LookupExceptionHandlerInTable(
1700  nullptr, nullptr));
1701  }
1702  break;
1703 
1704  case StackFrame::WASM_INTERPRETER_ENTRY: {
1705  if (trap_handler::IsThreadInWasm()) {
1706  trap_handler::ClearThreadInWasm();
1707  }
1708  WasmInterpreterEntryFrame* interpreter_frame =
1709  WasmInterpreterEntryFrame::cast(frame);
1710  // TODO(wasm): Implement try-catch in the interpreter.
1711  interpreter_frame->debug_info()->Unwind(frame->fp());
1712  } break;
1713 
1714  case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
1715  // Builtin continuation frames with catch can handle exceptions.
1716  if (!catchable_by_js) break;
1717  JavaScriptBuiltinContinuationWithCatchFrame* js_frame =
1718  JavaScriptBuiltinContinuationWithCatchFrame::cast(frame);
1719  js_frame->SetException(exception);
1720 
1721  // Reconstruct the stack pointer from the frame pointer.
1722  Address return_sp = js_frame->fp() - js_frame->GetSPToFPDelta();
1723  Code code = js_frame->LookupCode();
1724  return FoundHandler(Context(), code->InstructionStart(), 0,
1725  code->constant_pool(), return_sp, frame->fp());
1726  } break;
1727 
1728  default:
1729  // All other types can not handle exception.
1730  break;
1731  }
1732 
1733  if (frame->is_optimized()) {
1734  // Remove per-frame stored materialized objects.
1735  bool removed = materialized_object_store_->Remove(frame->fp());
1736  USE(removed);
1737  // If there were any materialized objects, the code should be
1738  // marked for deopt.
1739  DCHECK_IMPLIES(removed, frame->LookupCode()->marked_for_deoptimization());
1740  }
1741  }
1742 
1743  UNREACHABLE();
1744 }
1745 
namespace {
// Predicts whether an exception thrown with |frame| on top of the stack would
// be caught, and if so, how (catch block, promise reaction, async/await, ...),
// by consulting the handler tables of the frame's code objects.
HandlerTable::CatchPrediction PredictException(JavaScriptFrame* frame) {
  HandlerTable::CatchPrediction prediction;
  if (frame->is_optimized()) {
    if (frame->LookupExceptionHandlerInTable(nullptr, nullptr) > 0) {
      // This optimized frame will catch. Its handler table does not include
      // exception prediction, and we need to use the corresponding handler
      // tables on the unoptimized code objects.
      std::vector<FrameSummary> summaries;
      frame->Summarize(&summaries);
      // Walk the inlined frames innermost-first (summaries are ordered
      // outermost-first, hence the reverse iteration).
      for (size_t i = summaries.size(); i != 0; i--) {
        const FrameSummary& summary = summaries[i - 1];
        Handle<AbstractCode> code = summary.AsJavaScript().abstract_code();
        if (code->IsCode() && code->kind() == AbstractCode::BUILTIN) {
          // Builtins carry their catch prediction on the Code object itself.
          prediction = code->GetCode()->GetBuiltinCatchPrediction();
          if (prediction == HandlerTable::UNCAUGHT) continue;
          return prediction;
        }

        // Must have been constructed from a bytecode array.
        CHECK_EQ(AbstractCode::INTERPRETED_FUNCTION, code->kind());
        int code_offset = summary.code_offset();
        HandlerTable table(code->GetBytecodeArray());
        // LookupRange fills in |prediction| for the innermost handler
        // covering |code_offset|, if any.
        int index = table.LookupRange(code_offset, nullptr, &prediction);
        if (index <= 0) continue;
        if (prediction == HandlerTable::UNCAUGHT) continue;
        return prediction;
      }
    }
  } else if (frame->LookupExceptionHandlerInTable(nullptr, &prediction) > 0) {
    // Unoptimized frames have the prediction directly in their handler table.
    return prediction;
  }
  return HandlerTable::UNCAUGHT;
}

// Translates a handler-table catch prediction into the Isolate-level
// CatchType enum used by the public prediction API.
Isolate::CatchType ToCatchType(HandlerTable::CatchPrediction prediction) {
  switch (prediction) {
    case HandlerTable::UNCAUGHT:
      return Isolate::NOT_CAUGHT;
    case HandlerTable::CAUGHT:
      return Isolate::CAUGHT_BY_JAVASCRIPT;
    case HandlerTable::PROMISE:
      return Isolate::CAUGHT_BY_PROMISE;
    case HandlerTable::DESUGARING:
      return Isolate::CAUGHT_BY_DESUGARING;
    case HandlerTable::ASYNC_AWAIT:
      return Isolate::CAUGHT_BY_ASYNC_AWAIT;
    default:
      UNREACHABLE();
  }
}
}  // anonymous namespace
1798 
// Predicts who would catch an exception thrown right now: JavaScript, a
// promise reaction, async/await machinery, an external v8::TryCatch, or
// nobody. Performs a full stack walk; used e.g. for catch prediction rather
// than actual unwinding.
Isolate::CatchType Isolate::PredictExceptionCatcher() {
  Address external_handler = thread_local_top()->try_catch_handler_address();
  if (IsExternalHandlerOnTop(nullptr)) return CAUGHT_BY_EXTERNAL;

  // Search for an exception handler by performing a full walk over the stack.
  for (StackFrameIterator iter(this); !iter.done(); iter.Advance()) {
    StackFrame* frame = iter.frame();

    switch (frame->type()) {
      case StackFrame::ENTRY:
      case StackFrame::CONSTRUCT_ENTRY: {
        // Crossing a JS entry frame: check whether the external handler (if
        // any, and if it is not verbose) sits above the next JS_ENTRY
        // handler, in which case the embedder catches.
        Address entry_handler = frame->top_handler()->next()->address();
        // The exception has been externally caught if and only if there is an
        // external handler which is on top of the top-most JS_ENTRY handler.
        if (external_handler != kNullAddress &&
            !try_catch_handler()->is_verbose_) {
          if (entry_handler == kNullAddress ||
              entry_handler > external_handler) {
            return CAUGHT_BY_EXTERNAL;
          }
        }
      } break;

      // For JavaScript frames we perform a lookup in the handler table.
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BUILTIN: {
        JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
        Isolate::CatchType prediction = ToCatchType(PredictException(js_frame));
        if (prediction == NOT_CAUGHT) break;
        return prediction;
      } break;

      case StackFrame::STUB: {
        // Only turbofanned builtins with a handler table can catch; same
        // filter as the STUB case in the unwinder above.
        Handle<Code> code(frame->LookupCode(), this);
        if (!code->IsCode() || code->kind() != Code::BUILTIN ||
            !code->handler_table_offset() || !code->is_turbofanned()) {
          break;
        }

        CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
        if (prediction != NOT_CAUGHT) return prediction;
      } break;

      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
        // Builtin continuation frames with catch always have a prediction on
        // their Code object.
        Handle<Code> code(frame->LookupCode(), this);
        CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
        if (prediction != NOT_CAUGHT) return prediction;
      } break;

      default:
        // All other types can not handle exception.
        break;
    }
  }

  // Handler not found.
  return NOT_CAUGHT;
}
1858 
1859 Object* Isolate::ThrowIllegalOperation() {
1860  if (FLAG_stack_trace_on_illegal) PrintStack(stdout);
1861  return Throw(ReadOnlyRoots(heap()).illegal_access_string());
1862 }
1863 
1864 
// Schedules |exception| to be (re-)thrown later: first throws it normally so
// uncaught-exception reporting happens, then moves it from the pending slot
// to the scheduled slot.
void Isolate::ScheduleThrow(Object* exception) {
  // When scheduling a throw we first throw the exception to get the
  // error reporting if it is uncaught before rescheduling it.
  Throw(exception);
  PropagatePendingExceptionToExternalTryCatch();
  if (has_pending_exception()) {
    // Move the exception from "pending" to "scheduled" state so it survives
    // until the next opportunity to re-throw it.
    thread_local_top()->scheduled_exception_ = pending_exception();
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
}
1876 
1877 
// Restores the pending message object from a re-throwing v8::TryCatch
// |handler| (which must be the current, top-most one and must have captured
// both an exception and a message).
void Isolate::RestorePendingMessageFromTryCatch(v8::TryCatch* handler) {
  DCHECK(handler == try_catch_handler());
  DCHECK(handler->HasCaught());
  DCHECK(handler->rethrow_);
  DCHECK(handler->capture_message_);
  // The TryCatch stores the message as an opaque pointer; it is either a
  // JSMessageObject or the hole (when no message was captured).
  Object* message = reinterpret_cast<Object*>(handler->message_obj_);
  DCHECK(message->IsJSMessageObject() || message->IsTheHole(this));
  thread_local_top()->pending_message_obj_ = message;
}
1887 
1888 
// Cancels the scheduled exception (and pending message) if they originated
// from the given v8::TryCatch |handler|, e.g. when the handler goes out of
// scope without re-throwing.
void Isolate::CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler) {
  DCHECK(has_scheduled_exception());
  if (scheduled_exception() == handler->exception_) {
    // Termination exceptions are never cancelled this way.
    DCHECK(scheduled_exception() !=
           ReadOnlyRoots(heap()).termination_exception());
    clear_scheduled_exception();
  }
  // Also drop the pending message if it belongs to this handler.
  if (thread_local_top_.pending_message_obj_ == handler->message_obj_) {
    clear_pending_message();
  }
}
1900 
1901 
1902 Object* Isolate::PromoteScheduledException() {
1903  Object* thrown = scheduled_exception();
1904  clear_scheduled_exception();
1905  // Re-throw the exception to avoid getting repeated error reporting.
1906  return ReThrow(thrown);
1907 }
1908 
1909 
// Prints the current JavaScript stack trace, one formatted line per JS frame,
// to |out|. Non-JavaScript frames are skipped.
void Isolate::PrintCurrentStackTrace(FILE* out) {
  for (StackTraceFrameIterator it(this); !it.done(); it.Advance()) {
    if (!it.is_javascript()) continue;

    HandleScope scope(this);
    JavaScriptFrame* frame = it.javascript_frame();

    Handle<Object> receiver(frame->receiver(), this);
    Handle<JSFunction> function(frame->function(), this);
    Handle<AbstractCode> code;
    int offset;
    if (frame->is_interpreted()) {
      // For interpreted frames, report the position via the bytecode array
      // and the current bytecode offset.
      InterpretedFrame* interpreted_frame = InterpretedFrame::cast(frame);
      code = handle(AbstractCode::cast(interpreted_frame->GetBytecodeArray()),
                    this);
      offset = interpreted_frame->GetBytecodeOffset();
    } else {
      // Otherwise use the machine code object and the pc-relative offset.
      code = handle(AbstractCode::cast(frame->LookupCode()), this);
      offset = static_cast<int>(frame->pc() - code->InstructionStart());
    }

    // Format the frame the same way a JS stack-trace line is formatted.
    JSStackFrame site(this, receiver, function, code, offset);
    Handle<String> line = site.ToString().ToHandleChecked();
    if (line->length() > 0) {
      line->PrintOn(out);
      PrintF(out, "\n");
    }
  }
}
1939 
// Computes a message location from the top-most frame of the current stack.
// Returns false if there is no frame or no usable script source.
bool Isolate::ComputeLocation(MessageLocation* target) {
  StackTraceFrameIterator it(this);
  if (it.done()) return false;
  StandardFrame* frame = it.frame();
  // Compute the location from the function and the relocation info of the
  // baseline code. For optimized code this will use the deoptimization
  // information to get canonical location information.
  std::vector<FrameSummary> frames;
  frame->Summarize(&frames);
  // The back() summary is the innermost (physically top-most) frame.
  FrameSummary& summary = frames.back();
  int pos = summary.SourcePosition();
  Handle<SharedFunctionInfo> shared;
  Handle<Object> script = summary.script();
  if (!script->IsScript() ||
      (Script::cast(*script)->source()->IsUndefined(this))) {
    return false;
  }

  // Wasm summaries have no JS function; |shared| stays empty in that case.
  if (summary.IsJavaScript()) {
    shared = handle(summary.AsJavaScript().function()->shared(), this);
  }
  *target = MessageLocation(Handle<Script>::cast(script), pos, pos + 1, shared);
  return true;
}
1964 
1965 bool Isolate::ComputeLocationFromException(MessageLocation* target,
1966  Handle<Object> exception) {
1967  if (!exception->IsJSObject()) return false;
1968 
1969  Handle<Name> start_pos_symbol = factory()->error_start_pos_symbol();
1970  Handle<Object> start_pos = JSReceiver::GetDataProperty(
1971  Handle<JSObject>::cast(exception), start_pos_symbol);
1972  if (!start_pos->IsSmi()) return false;
1973  int start_pos_value = Handle<Smi>::cast(start_pos)->value();
1974 
1975  Handle<Name> end_pos_symbol = factory()->error_end_pos_symbol();
1976  Handle<Object> end_pos = JSReceiver::GetDataProperty(
1977  Handle<JSObject>::cast(exception), end_pos_symbol);
1978  if (!end_pos->IsSmi()) return false;
1979  int end_pos_value = Handle<Smi>::cast(end_pos)->value();
1980 
1981  Handle<Name> script_symbol = factory()->error_script_symbol();
1982  Handle<Object> script = JSReceiver::GetDataProperty(
1983  Handle<JSObject>::cast(exception), script_symbol);
1984  if (!script->IsScript()) return false;
1985 
1986  Handle<Script> cast_script(Script::cast(*script), this);
1987  *target = MessageLocation(cast_script, start_pos_value, end_pos_value);
1988  return true;
1989 }
1990 
1991 
// Computes a message location from the simple (symbol-keyed) stack trace
// attached to |exception|, using the first frame that has usable script
// source. Handles both wasm/asm.js frames and regular JS frames. Returns
// false if no suitable frame is found.
bool Isolate::ComputeLocationFromStackTrace(MessageLocation* target,
                                            Handle<Object> exception) {
  if (!exception->IsJSObject()) return false;
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> property =
      JSReceiver::GetDataProperty(Handle<JSObject>::cast(exception), key);
  if (!property->IsJSArray()) return false;
  Handle<JSArray> simple_stack_trace = Handle<JSArray>::cast(property);

  Handle<FrameArray> elements(FrameArray::cast(simple_stack_trace->elements()),
                              this);

  const int frame_count = elements->FrameCount();
  for (int i = 0; i < frame_count; i++) {
    if (elements->IsWasmFrame(i) || elements->IsAsmJsWasmFrame(i)) {
      // Wasm frame: translate the code offset into a source position in the
      // wasm module's script.
      Handle<WasmInstanceObject> instance(elements->WasmInstance(i), this);
      uint32_t func_index =
          static_cast<uint32_t>(elements->WasmFunctionIndex(i)->value());
      // The code object is stored as a Foreign wrapping the native WasmCode.
      wasm::WasmCode* wasm_code = reinterpret_cast<wasm::WasmCode*>(
          elements->WasmCodeObject(i)->foreign_address());
      int code_offset = elements->Offset(i)->value();
      // asm.js frames may point at a ToNumber conversion of a return value,
      // which maps to a different source position.
      bool is_at_number_conversion =
          elements->IsAsmJsWasmFrame(i) &&
          elements->Flags(i)->value() & FrameArray::kAsmJsAtNumberConversion;
      int byte_offset =
          FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
              wasm_code, code_offset);
      int pos = WasmModuleObject::GetSourcePosition(
          handle(instance->module_object(), this), func_index, byte_offset,
          is_at_number_conversion);
      Handle<Script> script(instance->module_object()->script(), this);

      *target = MessageLocation(script, pos, pos + 1);
      return true;
    }

    // Regular JS frame: skip functions that are not subject to debugging.
    Handle<JSFunction> fun = handle(elements->Function(i), this);
    if (!fun->shared()->IsSubjectToDebugging()) continue;

    Object* script = fun->shared()->script();
    if (script->IsScript() &&
        !(Script::cast(script)->source()->IsUndefined(this))) {
      AbstractCode abstract_code = elements->Code(i);
      const int code_offset = elements->Offset(i)->value();
      const int pos = abstract_code->SourcePosition(code_offset);

      Handle<Script> casted_script(Script::cast(script), this);
      *target = MessageLocation(casted_script, pos, pos + 1);
      return true;
    }
  }
  return false;
}
2045 
2046 
// Creates a JSMessageObject describing an uncaught |exception|. If |location|
// is null, a location is computed from the exception's own properties, its
// attached stack trace, or (as a last resort) the current stack.
Handle<JSMessageObject> Isolate::CreateMessage(Handle<Object> exception,
                                               MessageLocation* location) {
  Handle<FixedArray> stack_trace_object;
  if (capture_stack_trace_for_uncaught_exceptions_) {
    if (exception->IsJSError()) {
      // We fetch the stack trace that corresponds to this error object.
      // If the lookup fails, the exception is probably not a valid Error
      // object. In that case, we fall through and capture the stack trace
      // at this throw site.
      stack_trace_object =
          GetDetailedStackTrace(Handle<JSObject>::cast(exception));
    }
    if (stack_trace_object.is_null()) {
      // Not an error object, we capture stack and location at throw site.
      stack_trace_object = CaptureCurrentStackTrace(
          stack_trace_for_uncaught_exceptions_frame_limit_,
          stack_trace_for_uncaught_exceptions_options_);
    }
  }
  MessageLocation computed_location;
  // Try location sources from most to least specific; all may fail, in which
  // case the message is created without a location.
  if (location == nullptr &&
      (ComputeLocationFromException(&computed_location, exception) ||
       ComputeLocationFromStackTrace(&computed_location, exception) ||
       ComputeLocation(&computed_location))) {
    location = &computed_location;
  }

  return MessageHandler::MakeMessageObject(
      this, MessageTemplate::kUncaughtException, location, exception,
      stack_trace_object);
}
2078 
2079 
2080 bool Isolate::IsJavaScriptHandlerOnTop(Object* exception) {
2081  DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);
2082 
2083  // For uncatchable exceptions, the JavaScript handler cannot be on top.
2084  if (!is_catchable_by_javascript(exception)) return false;
2085 
2086  // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
2087  Address entry_handler = Isolate::handler(thread_local_top());
2088  if (entry_handler == kNullAddress) return false;
2089 
2090  // Get the address of the external handler so we can compare the address to
2091  // determine which one is closer to the top of the stack.
2092  Address external_handler = thread_local_top()->try_catch_handler_address();
2093  if (external_handler == kNullAddress) return true;
2094 
2095  // The exception has been externally caught if and only if there is an
2096  // external handler which is on top of the top-most JS_ENTRY handler.
2097  //
2098  // Note, that finally clauses would re-throw an exception unless it's aborted
2099  // by jumps in control flow (like return, break, etc.) and we'll have another
2100  // chance to set proper v8::TryCatch later.
2101  return (entry_handler < external_handler);
2102 }
2103 
2104 
2105 bool Isolate::IsExternalHandlerOnTop(Object* exception) {
2106  DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);
2107 
2108  // Get the address of the external handler so we can compare the address to
2109  // determine which one is closer to the top of the stack.
2110  Address external_handler = thread_local_top()->try_catch_handler_address();
2111  if (external_handler == kNullAddress) return false;
2112 
2113  // For uncatchable exceptions, the external handler is always on top.
2114  if (!is_catchable_by_javascript(exception)) return true;
2115 
2116  // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
2117  Address entry_handler = Isolate::handler(thread_local_top());
2118  if (entry_handler == kNullAddress) return true;
2119 
2120  // The exception has been externally caught if and only if there is an
2121  // external handler which is on top of the top-most JS_ENTRY handler.
2122  //
2123  // Note, that finally clauses would re-throw an exception unless it's aborted
2124  // by jumps in control flow (like return, break, etc.) and we'll have another
2125  // chance to set proper v8::TryCatch later.
2126  return (entry_handler > external_handler);
2127 }
2128 
// Reports the pending message (if any) to the registered message handlers.
// |report_externally| selects whether the decision to report is based on the
// external v8::TryCatch's verbosity or on whether a JS handler is on top.
void Isolate::ReportPendingMessagesImpl(bool report_externally) {
  Object* exception = pending_exception();

  // Clear the pending message object early to avoid endless recursion.
  Object* message_obj = thread_local_top_.pending_message_obj_;
  clear_pending_message();

  // For uncatchable exceptions we do nothing. If needed, the exception and the
  // message have already been propagated to v8::TryCatch.
  if (!is_catchable_by_javascript(exception)) return;

  // Determine whether the message needs to be reported to all message handlers
  // depending on whether and external v8::TryCatch or an internal JavaScript
  // handler is on top.
  bool should_report_exception;
  if (report_externally) {
    // Only report the exception if the external handler is verbose.
    should_report_exception = try_catch_handler()->is_verbose_;
  } else {
    // Report the exception if it isn't caught by JavaScript code.
    should_report_exception = !IsJavaScriptHandlerOnTop(exception);
  }

  // Actually report the pending message to all message handlers.
  if (!message_obj->IsTheHole(this) && should_report_exception) {
    HandleScope scope(this);
    Handle<JSMessageObject> message(JSMessageObject::cast(message_obj), this);
    Handle<Script> script(message->script(), this);
    int start_pos = message->start_position();
    int end_pos = message->end_position();
    MessageLocation location(script, start_pos, end_pos);
    MessageHandler::ReportMessage(this, &location, message);
  }
}
2163 
// Propagates the pending exception to an external v8::TryCatch (if possible)
// and then reports the pending message to message handlers.
void Isolate::ReportPendingMessages() {
  DCHECK(AllowExceptions::IsAllowed(this));

  // The embedder might run script in response to an exception.
  AllowJavascriptExecutionDebugOnly allow_script(this);

  Object* exception = pending_exception();

  // Try to propagate the exception to an external v8::TryCatch handler. If
  // propagation was unsuccessful, then we will get another chance at reporting
  // the pending message if the exception is re-thrown.
  bool has_been_propagated = PropagatePendingExceptionToExternalTryCatch();
  if (!has_been_propagated) return;

  ReportPendingMessagesImpl(IsExternalHandlerOnTop(exception));
}
2180 
// Variant of ReportPendingMessages used when the exception originates from
// JavaScript: decides between the JS handler and an external v8::TryCatch by
// comparing handler addresses, propagates to the TryCatch if it is on top,
// and then reports the message externally.
void Isolate::ReportPendingMessagesFromJavaScript() {
  DCHECK(AllowExceptions::IsAllowed(this));

  auto IsHandledByJavaScript = [=]() {
    // In this situation, the exception is always a non-terminating exception.

    // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
    // Skip the top-most handler itself; we compare against the next one.
    Address entry_handler = Isolate::handler(thread_local_top());
    DCHECK_NE(entry_handler, kNullAddress);
    entry_handler =
        reinterpret_cast<StackHandler*>(entry_handler)->next()->address();

    // Get the address of the external handler so we can compare the address to
    // determine which one is closer to the top of the stack.
    Address external_handler = thread_local_top()->try_catch_handler_address();
    if (external_handler == kNullAddress) return true;

    return (entry_handler < external_handler);
  };

  auto IsHandledExternally = [=]() {
    Address external_handler = thread_local_top()->try_catch_handler_address();
    if (external_handler == kNullAddress) return false;

    // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
    // As above, compare against the handler below the top-most one.
    Address entry_handler = Isolate::handler(thread_local_top());
    DCHECK_NE(entry_handler, kNullAddress);
    entry_handler =
        reinterpret_cast<StackHandler*>(entry_handler)->next()->address();
    return (entry_handler > external_handler);
  };

  auto PropagateToExternalHandler = [=]() {
    // Returns true if the message should be reported externally afterwards.
    if (IsHandledByJavaScript()) {
      thread_local_top_.external_caught_exception_ = false;
      return false;
    }

    if (!IsHandledExternally()) {
      thread_local_top_.external_caught_exception_ = false;
      return true;
    }

    // Hand the exception (and message, if any) over to the v8::TryCatch.
    thread_local_top_.external_caught_exception_ = true;
    v8::TryCatch* handler = try_catch_handler();
    DCHECK(thread_local_top_.pending_message_obj_->IsJSMessageObject() ||
           thread_local_top_.pending_message_obj_->IsTheHole(this));
    handler->can_continue_ = true;
    handler->has_terminated_ = false;
    handler->exception_ = pending_exception();
    // Propagate to the external try-catch only if we got an actual message.
    if (thread_local_top_.pending_message_obj_->IsTheHole(this)) return true;

    handler->message_obj_ = thread_local_top_.pending_message_obj_;
    return true;
  };

  // Try to propagate to an external v8::TryCatch handler.
  if (!PropagateToExternalHandler()) return;

  ReportPendingMessagesImpl(true);
}
2243 
2244 MessageLocation Isolate::GetMessageLocation() {
2245  DCHECK(has_pending_exception());
2246 
2247  if (thread_local_top_.pending_exception_ !=
2248  ReadOnlyRoots(heap()).termination_exception() &&
2249  !thread_local_top_.pending_message_obj_->IsTheHole(this)) {
2250  Handle<JSMessageObject> message_obj(
2251  JSMessageObject::cast(thread_local_top_.pending_message_obj_), this);
2252  Handle<Script> script(message_obj->script(), this);
2253  int start_pos = message_obj->start_position();
2254  int end_pos = message_obj->end_position();
2255  return MessageLocation(script, start_pos, end_pos);
2256  }
2257 
2258  return MessageLocation();
2259 }
2260 
2261 
// Decides whether the pending exception should be rescheduled (moved to the
// scheduled slot) or cleared. Returns true if it was rescheduled, false if it
// was cleared. |is_bottom_call| indicates this is the outermost V8 entry.
bool Isolate::OptionalRescheduleException(bool is_bottom_call) {
  DCHECK(has_pending_exception());
  PropagatePendingExceptionToExternalTryCatch();

  bool is_termination_exception =
      pending_exception() == ReadOnlyRoots(this).termination_exception();

  // Do not reschedule the exception if this is the bottom call.
  bool clear_exception = is_bottom_call;

  if (is_termination_exception) {
    if (is_bottom_call) {
      // A termination exception at the bottom call is simply dropped.
      thread_local_top()->external_caught_exception_ = false;
      clear_pending_exception();
      return false;
    }
  } else if (thread_local_top()->external_caught_exception_) {
    // If the exception is externally caught, clear it if there are no
    // JavaScript frames on the way to the C++ frame that has the
    // external handler.
    DCHECK_NE(thread_local_top()->try_catch_handler_address(), kNullAddress);
    Address external_handler_address =
        thread_local_top()->try_catch_handler_address();
    JavaScriptFrameIterator it(this);
    if (it.done() || (it.frame()->sp() > external_handler_address)) {
      clear_exception = true;
    }
  }

  // Clear the exception if needed.
  if (clear_exception) {
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
    return false;
  }

  // Reschedule the exception.
  thread_local_top()->scheduled_exception_ = pending_exception();
  clear_pending_exception();
  return true;
}
2303 
2304 void Isolate::PushPromise(Handle<JSObject> promise) {
2305  ThreadLocalTop* tltop = thread_local_top();
2306  PromiseOnStack* prev = tltop->promise_on_stack_;
2307  Handle<JSObject> global_promise = global_handles()->Create(*promise);
2308  tltop->promise_on_stack_ = new PromiseOnStack(global_promise, prev);
2309 }
2310 
2311 
2312 void Isolate::PopPromise() {
2313  ThreadLocalTop* tltop = thread_local_top();
2314  if (tltop->promise_on_stack_ == nullptr) return;
2315  PromiseOnStack* prev = tltop->promise_on_stack_->prev();
2316  Handle<Object> global_promise = tltop->promise_on_stack_->promise();
2317  delete tltop->promise_on_stack_;
2318  tltop->promise_on_stack_ = prev;
2319  global_handles()->Destroy(global_promise.location());
2320 }
2321 
namespace {
// Forward declaration; mutually recursive with PromiseHandlerCheck below.
bool InternalPromiseHasUserDefinedRejectHandler(Isolate* isolate,
                                                Handle<JSPromise> promise);

// Returns true if |handler| is a real user-defined reject handler (as opposed
// to one of V8's internal forwarding handlers), or if the forwarding chain
// eventually reaches a promise with a user-defined reject handler.
bool PromiseHandlerCheck(Isolate* isolate, Handle<JSReceiver> handler,
                         Handle<JSReceiver> deferred_promise) {
  // Recurse to the forwarding Promise, if any. This may be due to
  //  - await reaction forwarding to the throwaway Promise, which has
  //    a dependency edge to the outer Promise.
  //  - PromiseIdResolveHandler forwarding to the output of .then
  //  - Promise.all/Promise.race forwarding to a throwaway Promise, which
  //    has a dependency edge to the generated outer Promise.
  // Otherwise, this is a real reject handler for the Promise.
  Handle<Symbol> key = isolate->factory()->promise_forwarding_handler_symbol();
  Handle<Object> forwarding_handler = JSReceiver::GetDataProperty(handler, key);
  if (forwarding_handler->IsUndefined(isolate)) {
    // Not a forwarding handler, so it is a genuine user-defined one.
    return true;
  }

  if (!deferred_promise->IsJSPromise()) {
    return true;
  }

  // Follow the forwarding edge to the deferred promise.
  return InternalPromiseHasUserDefinedRejectHandler(
      isolate, Handle<JSPromise>::cast(deferred_promise));
}

// Returns true if |promise| (or a promise it forwards to) has a user-defined
// reject handler attached, meaning a rejection would not be "unhandled".
bool InternalPromiseHasUserDefinedRejectHandler(Isolate* isolate,
                                                Handle<JSPromise> promise) {
  // If this promise was marked as being handled by a catch block
  // in an async function, then it has a user-defined reject handler.
  if (promise->handled_hint()) return true;

  // If this Promise is subsumed by another Promise (a Promise resolved
  // with another Promise, or an intermediate, hidden, throwaway Promise
  // within async/await), then recurse on the outer Promise.
  // In this case, the dependency is one possible way that the Promise
  // could be resolved, so it does not subsume the other following cases.
  Handle<Symbol> key = isolate->factory()->promise_handled_by_symbol();
  Handle<Object> outer_promise_obj = JSObject::GetDataProperty(promise, key);
  if (outer_promise_obj->IsJSPromise() &&
      InternalPromiseHasUserDefinedRejectHandler(
          isolate, Handle<JSPromise>::cast(outer_promise_obj))) {
    return true;
  }

  if (promise->status() == Promise::kPending) {
    // Walk the linked list of reactions (terminated by a Smi).
    for (Handle<Object> current(promise->reactions(), isolate);
         !current->IsSmi();) {
      Handle<PromiseReaction> reaction = Handle<PromiseReaction>::cast(current);
      Handle<HeapObject> promise_or_capability(
          reaction->promise_or_capability(), isolate);
      if (!promise_or_capability->IsUndefined(isolate)) {
        // NOTE: this |promise| shadows the outer parameter; it is the
        // reaction's dependent promise.
        Handle<JSPromise> promise = Handle<JSPromise>::cast(
            promise_or_capability->IsJSPromise()
                ? promise_or_capability
                : handle(Handle<PromiseCapability>::cast(promise_or_capability)
                             ->promise(),
                         isolate));
        if (reaction->reject_handler()->IsUndefined(isolate)) {
          // No reject handler on this reaction; check the dependent promise.
          if (InternalPromiseHasUserDefinedRejectHandler(isolate, promise)) {
            return true;
          }
        } else {
          // There is a reject handler; check whether it is user-defined.
          Handle<JSReceiver> current_handler(
              JSReceiver::cast(reaction->reject_handler()), isolate);
          if (PromiseHandlerCheck(isolate, current_handler, promise)) {
            return true;
          }
        }
      }
      current = handle(reaction->next(), isolate);
    }
  }

  return false;
}

}  // namespace
2401 
2402 bool Isolate::PromiseHasUserDefinedRejectHandler(Handle<Object> promise) {
2403  if (!promise->IsJSPromise()) return false;
2404  return InternalPromiseHasUserDefinedRejectHandler(
2405  this, Handle<JSPromise>::cast(promise));
2406 }
2407 
// Walks the JS stack at throw time to find the promise (if any) that the
// pending exception will reject, based on catch-handler prediction. Returns
// undefined when no promise is involved or the exception escapes to an
// external/uncaught handler.
Handle<Object> Isolate::GetPromiseOnStackOnThrow() {
  Handle<Object> undefined = factory()->undefined_value();
  ThreadLocalTop* tltop = thread_local_top();
  if (tltop->promise_on_stack_ == nullptr) return undefined;
  // Find the top-most try-catch or try-finally handler.
  CatchType prediction = PredictExceptionCatcher();
  if (prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) {
    return undefined;
  }
  Handle<Object> retval = undefined;
  PromiseOnStack* promise_on_stack = tltop->promise_on_stack_;
  for (StackFrameIterator it(this); !it.done(); it.Advance()) {
    StackFrame* frame = it.frame();
    HandlerTable::CatchPrediction catch_prediction;
    if (frame->is_java_script()) {
      catch_prediction = PredictException(JavaScriptFrame::cast(frame));
    } else if (frame->type() == StackFrame::STUB) {
      // Only turbofanned BUILTIN code objects with a handler table can
      // provide a catch prediction; skip all other stub frames.
      Code code = frame->LookupCode();
      if (!code->IsCode() || code->kind() != Code::BUILTIN ||
          !code->handler_table_offset() || !code->is_turbofanned()) {
        continue;
      }
      catch_prediction = code->GetBuiltinCatchPrediction();
    } else {
      continue;
    }

    switch (catch_prediction) {
      case HandlerTable::UNCAUGHT:
        continue;
      case HandlerTable::CAUGHT:
      case HandlerTable::DESUGARING:
        if (retval->IsJSPromise()) {
          // Caught the result of an inner async/await invocation.
          // Mark the inner promise as caught in the "synchronous case" so
          // that Debug::OnException will see. In the synchronous case,
          // namely in the code in an async function before the first
          // await, the function which has this exception event has not yet
          // returned, so the generated Promise has not yet been marked
          // by AsyncFunctionAwaitCaught with promiseHandledHintSymbol.
          Handle<JSPromise>::cast(retval)->set_handled_hint(true);
        }
        return retval;
      case HandlerTable::PROMISE:
        // The exception rejects the promise on top of the promise stack.
        return promise_on_stack
                   ? Handle<Object>::cast(promise_on_stack->promise())
                   : undefined;
      case HandlerTable::ASYNC_AWAIT: {
        // If in the initial portion of async/await, continue the loop to pop up
        // successive async/await stack frames until an asynchronous one with
        // dependents is found, or a non-async stack frame is encountered, in
        // order to handle the synchronous async/await catch prediction case:
        // assume that async function calls are awaited.
        if (!promise_on_stack) return retval;
        retval = promise_on_stack->promise();
        if (PromiseHasUserDefinedRejectHandler(retval)) {
          return retval;
        }
        promise_on_stack = promise_on_stack->prev();
        continue;
      }
    }
  }
  return retval;
}
2473 
2474 
2475 void Isolate::SetCaptureStackTraceForUncaughtExceptions(
2476  bool capture,
2477  int frame_limit,
2479  capture_stack_trace_for_uncaught_exceptions_ = capture;
2480  stack_trace_for_uncaught_exceptions_frame_limit_ = frame_limit;
2481  stack_trace_for_uncaught_exceptions_options_ = options;
2482 }
2483 
2484 
// Installs the embedder callback consulted before aborting on an uncaught
// exception (see v8::Isolate::SetAbortOnUncaughtExceptionCallback).
void Isolate::SetAbortOnUncaughtExceptionCallback(
    v8::Isolate::AbortOnUncaughtExceptionCallback callback) {
  abort_on_uncaught_exception_callback_ = callback;
}
2489 
2490 bool Isolate::AreWasmThreadsEnabled(Handle<Context> context) {
2491  if (wasm_threads_enabled_callback()) {
2492  v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
2493  return wasm_threads_enabled_callback()(api_context);
2494  }
2495  return FLAG_experimental_wasm_threads;
2496 }
2497 
// Determines the "incumbent realm" (in the HTML-spec sense) for the current
// VM state by inspecting the JS stack and BackupIncumbentScope entries.
Handle<Context> Isolate::GetIncumbentContext() {
  JavaScriptFrameIterator it(this);

  // 1st candidate: most-recently-entered author function's context
  // if it's newer than the last Context::BackupIncumbentScope entry.
  //
  // NOTE: This code assumes that the stack grows downward.
#if defined(V8_USE_ADDRESS_SANITIZER)
  // |it.frame()->sp()| points to an address in the real stack frame, but
  // |top_backup_incumbent_scope()| points to an address in a fake stack frame.
  // In order to compare them, convert the latter into the address in the real
  // stack frame.
  void* maybe_fake_top = const_cast<void*>(
      reinterpret_cast<const void*>(top_backup_incumbent_scope()));
  void* maybe_real_top = __asan_addr_is_in_fake_stack(
      __asan_get_current_fake_stack(), maybe_fake_top, nullptr, nullptr);
  Address top_backup_incumbent = reinterpret_cast<Address>(
      maybe_real_top ? maybe_real_top : maybe_fake_top);
#else
  Address top_backup_incumbent =
      reinterpret_cast<Address>(top_backup_incumbent_scope());
#endif
  if (!it.done() &&
      (!top_backup_incumbent || it.frame()->sp() < top_backup_incumbent)) {
    Context context = Context::cast(it.frame()->context());
    return Handle<Context>(context->native_context(), this);
  }

  // 2nd candidate: the last Context::Scope's incumbent context if any.
  if (top_backup_incumbent_scope()) {
    return Utils::OpenHandle(
        *top_backup_incumbent_scope()->backup_incumbent_context_);
  }

  // Last candidate: the entered context or microtask context.
  // Given that there is no other author function is running, there must be
  // no cross-context function running, then the incumbent realm must match
  // the entry realm.
  v8::Local<v8::Context> entered_context =
      reinterpret_cast<v8::Isolate*>(this)->GetEnteredOrMicrotaskContext();
  return Utils::OpenHandle(*entered_context);
}
2540 
// Saves this thread's ThreadLocalTop into the archive buffer |to|, then
// re-initializes the live thread state so no stale exception or message
// survives on the (now detached) thread.
char* Isolate::ArchiveThread(char* to) {
  MemCopy(to, reinterpret_cast<char*>(thread_local_top()),
          sizeof(ThreadLocalTop));
  InitializeThreadLocal();
  clear_pending_exception();
  clear_pending_message();
  clear_scheduled_exception();
  // Return the position just past the archived data.
  return to + sizeof(ThreadLocalTop);
}
2550 
2551 
// Restores a ThreadLocalTop previously saved by ArchiveThread from the
// archive buffer |from|.
char* Isolate::RestoreThread(char* from) {
  MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
          sizeof(ThreadLocalTop));
// This might be just paranoia, but it seems to be needed in case a
// thread_local_top_ is restored on a separate OS thread.
#ifdef USE_SIMULATOR
  thread_local_top()->simulator_ = Simulator::current(this);
#endif
  DCHECK(context().is_null() || context()->IsContext());
  // Return the position just past the consumed archive data.
  return from + sizeof(ThreadLocalTop);
}
2563 
2564 void Isolate::ReleaseSharedPtrs() {
2565  base::MutexGuard lock(&managed_ptr_destructors_mutex_);
2566  while (managed_ptr_destructors_head_) {
2567  ManagedPtrDestructor* l = managed_ptr_destructors_head_;
2568  ManagedPtrDestructor* n = nullptr;
2569  managed_ptr_destructors_head_ = nullptr;
2570  for (; l != nullptr; l = n) {
2571  l->destructor_(l->shared_ptr_ptr_);
2572  n = l->next_;
2573  delete l;
2574  }
2575  }
2576 }
2577 
2578 void Isolate::RegisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
2579  base::MutexGuard lock(&managed_ptr_destructors_mutex_);
2580  DCHECK_NULL(destructor->prev_);
2581  DCHECK_NULL(destructor->next_);
2582  if (managed_ptr_destructors_head_) {
2583  managed_ptr_destructors_head_->prev_ = destructor;
2584  }
2585  destructor->next_ = managed_ptr_destructors_head_;
2586  managed_ptr_destructors_head_ = destructor;
2587 }
2588 
2589 void Isolate::UnregisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
2590  base::MutexGuard lock(&managed_ptr_destructors_mutex_);
2591  if (destructor->prev_) {
2592  destructor->prev_->next_ = destructor->next_;
2593  } else {
2594  DCHECK_EQ(destructor, managed_ptr_destructors_head_);
2595  managed_ptr_destructors_head_ = destructor->next_;
2596  }
2597  if (destructor->next_) destructor->next_->prev_ = destructor->prev_;
2598  destructor->prev_ = nullptr;
2599  destructor->next_ = nullptr;
2600 }
2601 
// Installs the (possibly process-shared) wasm engine for this isolate and
// registers the isolate with it. Must be called at most once.
void Isolate::SetWasmEngine(std::shared_ptr<wasm::WasmEngine> engine) {
  DCHECK_NULL(wasm_engine_);  // Only call once before {Init}.
  wasm_engine_ = std::move(engine);
  wasm_engine_->AddIsolate(this);
  wasm::WasmCodeManager::InstallSamplingGCCallback(this);
}
2608 
// NOLINTNEXTLINE
Isolate::PerIsolateThreadData::~PerIsolateThreadData() {
#if defined(USE_SIMULATOR)
  // The simulator instance is per (isolate, thread) and owned here.
  delete simulator_;
#endif
}
2615 
2616 Isolate::PerIsolateThreadData* Isolate::ThreadDataTable::Lookup(
2617  ThreadId thread_id) {
2618  auto t = table_.find(thread_id);
2619  if (t == table_.end()) return nullptr;
2620  return t->second;
2621 }
2622 
2623 
2624 void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
2625  bool inserted = table_.insert(std::make_pair(data->thread_id_, data)).second;
2626  CHECK(inserted);
2627 }
2628 
2629 
2630 void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
2631  table_.erase(data->thread_id_);
2632  delete data;
2633 }
2634 
2635 void Isolate::ThreadDataTable::RemoveAllThreads() {
2636  for (auto& x : table_) {
2637  delete x.second;
2638  }
2639  table_.clear();
2640 }
2641 
2643  public:
2644  VerboseAccountingAllocator(Heap* heap, size_t allocation_sample_bytes,
2645  size_t pool_sample_bytes)
2646  : heap_(heap),
2647  last_memory_usage_(0),
2648  last_pool_size_(0),
2649  nesting_deepth_(0),
2650  allocation_sample_bytes_(allocation_sample_bytes),
2651  pool_sample_bytes_(pool_sample_bytes) {}
2652 
2653  v8::internal::Segment* GetSegment(size_t size) override {
2654  v8::internal::Segment* memory = AccountingAllocator::GetSegment(size);
2655  if (memory) {
2656  size_t malloced_current = GetCurrentMemoryUsage();
2657  size_t pooled_current = GetCurrentPoolSize();
2658 
2659  if (last_memory_usage_ + allocation_sample_bytes_ < malloced_current ||
2660  last_pool_size_ + pool_sample_bytes_ < pooled_current) {
2661  PrintMemoryJSON(malloced_current, pooled_current);
2662  last_memory_usage_ = malloced_current;
2663  last_pool_size_ = pooled_current;
2664  }
2665  }
2666  return memory;
2667  }
2668 
2669  void ReturnSegment(v8::internal::Segment* memory) override {
2670  AccountingAllocator::ReturnSegment(memory);
2671  size_t malloced_current = GetCurrentMemoryUsage();
2672  size_t pooled_current = GetCurrentPoolSize();
2673 
2674  if (malloced_current + allocation_sample_bytes_ < last_memory_usage_ ||
2675  pooled_current + pool_sample_bytes_ < last_pool_size_) {
2676  PrintMemoryJSON(malloced_current, pooled_current);
2677  last_memory_usage_ = malloced_current;
2678  last_pool_size_ = pooled_current;
2679  }
2680  }
2681 
2682  void ZoneCreation(const Zone* zone) override {
2683  PrintZoneModificationSample(zone, "zonecreation");
2684  nesting_deepth_++;
2685  }
2686 
2687  void ZoneDestruction(const Zone* zone) override {
2688  nesting_deepth_--;
2689  PrintZoneModificationSample(zone, "zonedestruction");
2690  }
2691 
2692  private:
2693  void PrintZoneModificationSample(const Zone* zone, const char* type) {
2694  PrintF(
2695  "{"
2696  "\"type\": \"%s\", "
2697  "\"isolate\": \"%p\", "
2698  "\"time\": %f, "
2699  "\"ptr\": \"%p\", "
2700  "\"name\": \"%s\", "
2701  "\"size\": %" PRIuS
2702  ","
2703  "\"nesting\": %zu}\n",
2704  type, reinterpret_cast<void*>(heap_->isolate()),
2705  heap_->isolate()->time_millis_since_init(),
2706  reinterpret_cast<const void*>(zone), zone->name(),
2707  zone->allocation_size(), nesting_deepth_.load());
2708  }
2709 
2710  void PrintMemoryJSON(size_t malloced, size_t pooled) {
2711  // Note: Neither isolate, nor heap is locked, so be careful with accesses
2712  // as the allocator is potentially used on a concurrent thread.
2713  double time = heap_->isolate()->time_millis_since_init();
2714  PrintF(
2715  "{"
2716  "\"type\": \"zone\", "
2717  "\"isolate\": \"%p\", "
2718  "\"time\": %f, "
2719  "\"allocated\": %" PRIuS
2720  ","
2721  "\"pooled\": %" PRIuS "}\n",
2722  reinterpret_cast<void*>(heap_->isolate()), time, malloced, pooled);
2723  }
2724 
2725  Heap* heap_;
2726  std::atomic<size_t> last_memory_usage_;
2727  std::atomic<size_t> last_pool_size_;
2728  std::atomic<size_t> nesting_deepth_;
2729  size_t allocation_sample_bytes_, pool_sample_bytes_;
2730 };
2731 
2732 #ifdef DEBUG
2733 std::atomic<size_t> Isolate::non_disposed_isolates_;
2734 #endif // DEBUG
2735 
// static
Isolate* Isolate::New(IsolateAllocationMode mode) {
  // IsolateAllocator allocates the memory for the Isolate object according to
  // the given allocation mode.
  std::unique_ptr<IsolateAllocator> isolate_allocator =
      base::make_unique<IsolateAllocator>(mode);
  // Construct Isolate object in the allocated memory.
  void* isolate_ptr = isolate_allocator->isolate_memory();
  Isolate* isolate = new (isolate_ptr) Isolate(std::move(isolate_allocator));
#ifdef V8_TARGET_ARCH_64_BIT
  // In-heap allocation must produce a root aligned for pointer compression.
  DCHECK_IMPLIES(
      mode == IsolateAllocationMode::kInV8Heap,
      IsAligned(isolate->isolate_root(), kPtrComprIsolateRootAlignment));
#endif

#ifdef DEBUG
  // Leak-check bookkeeping; balanced by the decrement in Isolate::Delete.
  non_disposed_isolates_++;
#endif  // DEBUG

  return isolate;
}
2757 
// static
void Isolate::Delete(Isolate* isolate) {
  DCHECK_NOT_NULL(isolate);
  // Temporarily set this isolate as current so that various parts of
  // the isolate can access it in their destructors without having a
  // direct pointer. We don't use Enter/Exit here to avoid
  // initializing the thread data.
  PerIsolateThreadData* saved_data = isolate->CurrentPerIsolateThreadData();
  DCHECK_EQ(base::Relaxed_Load(&isolate_key_created_), 1);
  Isolate* saved_isolate = reinterpret_cast<Isolate*>(
      base::Thread::GetThreadLocal(isolate->isolate_key_));
  SetIsolateThreadLocals(isolate, nullptr);

  isolate->Deinit();

#ifdef DEBUG
  non_disposed_isolates_--;
#endif  // DEBUG

  // Take ownership of the IsolateAllocator to ensure the Isolate memory will
  // be available during Isolate destructor call.
  std::unique_ptr<IsolateAllocator> isolate_allocator =
      std::move(isolate->isolate_allocator_);
  // The isolate was constructed with placement new (see Isolate::New), so it
  // must be destroyed explicitly.
  isolate->~Isolate();
  // Now free the memory owned by the allocator.
  isolate_allocator.reset();

  // Restore the previous current isolate.
  SetIsolateThreadLocals(saved_isolate, saved_data);
}
2788 
// Returns the page allocator backing this isolate's address space.
v8::PageAllocator* Isolate::page_allocator() {
  return isolate_allocator_->page_allocator();
}
2792 
Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator)
    : isolate_allocator_(std::move(isolate_allocator)),
      id_(base::Relaxed_AtomicIncrement(&isolate_counter_, 1)),
      stack_guard_(this),
      // With --trace-zone-stats, wrap the allocator so zone usage is sampled
      // and printed as JSON (see VerboseAccountingAllocator above).
      allocator_(FLAG_trace_zone_stats ? new VerboseAccountingAllocator(
                                             &heap_, 256 * KB, 128 * KB)
                                       : new AccountingAllocator()),
      builtins_(this),
      rail_mode_(PERFORMANCE_ANIMATION),
      code_event_dispatcher_(new CodeEventDispatcher()),
      cancelable_task_manager_(new CancelableTaskManager()) {
  TRACE_ISOLATE(constructor);
  CheckIsolateLayout();

  // ThreadManager is initialized early to support locking an isolate
  // before it is entered.
  thread_manager_ = new ThreadManager(this);

  handle_scope_data_.Initialize();

// Assign every ISOLATE_INIT_LIST field its declared initial value.
#define ISOLATE_INIT_EXECUTE(type, name, initial_value) \
  name##_ = (initial_value);
  ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
#undef ISOLATE_INIT_EXECUTE

// Zero-fill every ISOLATE_INIT_ARRAY_LIST array member.
#define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length) \
  memset(name##_, 0, sizeof(type) * length);
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
#undef ISOLATE_INIT_ARRAY_EXECUTE

  InitializeLoggingAndCounters();
  debug_ = new Debug(this);

  InitializeDefaultEmbeddedBlob();

  MicrotaskQueue::SetUpDefaultMicrotaskQueue(this);
}
2830 
// Verifies that the field offsets assumed by the public API (Internals) and
// by generated code match the actual Isolate layout; aborts on mismatch.
void Isolate::CheckIsolateLayout() {
  CHECK_EQ(OFFSET_OF(Isolate, isolate_data_), 0);
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.embedder_data_)),
           Internals::kIsolateEmbedderDataOffset);
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_)),
           Internals::kIsolateRootsOffset);
  // The external-memory counters must be 8-byte aligned for atomic access.
  CHECK_EQ(Internals::kExternalMemoryOffset % 8, 0);
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.external_memory_)),
           Internals::kExternalMemoryOffset);
  CHECK_EQ(Internals::kExternalMemoryLimitOffset % 8, 0);
  CHECK_EQ(static_cast<int>(
               OFFSET_OF(Isolate, isolate_data_.external_memory_limit_)),
           Internals::kExternalMemoryLimitOffset);
  CHECK_EQ(Internals::kExternalMemoryAtLastMarkCompactOffset % 8, 0);
  CHECK_EQ(static_cast<int>(OFFSET_OF(
               Isolate, isolate_data_.external_memory_at_last_mark_compact_)),
           Internals::kExternalMemoryAtLastMarkCompactOffset);
}
2849 
// Frees the external-reference map built for (de)serialization.
void Isolate::ClearSerializerData() {
  delete external_reference_map_;
  external_reference_map_ = nullptr;
}
2854 
2855 bool Isolate::LogObjectRelocation() {
2856  return FLAG_verify_predictable || logger()->is_logging() || is_profiling() ||
2857  heap()->isolate()->logger()->is_listening_to_code_events() ||
2858  (heap_profiler() != nullptr &&
2859  heap_profiler()->is_tracking_object_moves()) ||
2860  heap()->has_heap_object_allocation_tracker();
2861 }
2862 
// Tears down the isolate's components. The ordering below is significant:
// profilers, the debugger, and compiler dispatchers are stopped first, the
// heap is torn down before components it may reference during GC, and the
// logger is torn down after the heap.
void Isolate::Deinit() {
  TRACE_ISOLATE(deinit);

  tracing_cpu_profiler_.reset();
  if (FLAG_stress_sampling_allocation_profiler > 0) {
    heap_profiler()->StopSamplingHeapProfiler();
  }

  debug()->Unload();

  wasm_engine()->DeleteCompileJobsOnIsolate(this);

  if (concurrent_recompilation_enabled()) {
    optimizing_compile_dispatcher_->Stop();
    delete optimizing_compile_dispatcher_;
    optimizing_compile_dispatcher_ = nullptr;
  }

  // Finish any in-flight background GC work before shutting down.
  heap_.mark_compact_collector()->EnsureSweepingCompleted();
  heap_.memory_allocator()->unmapper()->EnsureUnmappingCompleted();

  DumpAndResetStats();

  if (FLAG_print_deopt_stress) {
    PrintF(stdout, "=== Stress deopt counter: %u\n", stress_deopt_count_);
  }

  // We must stop the logger before we tear down other components.
  sampler::Sampler* sampler = logger_->sampler();
  if (sampler && sampler->IsActive()) sampler->Stop();

  FreeThreadResources();
  logger_->StopProfilerThread();

  // We start with the heap tear down so that releasing managed objects does
  // not cause a GC.
  heap_.StartTearDown();

  ReleaseSharedPtrs();

  delete deoptimizer_data_;
  deoptimizer_data_ = nullptr;
  builtins_.TearDown();
  bootstrapper_->TearDown();

  if (runtime_profiler_ != nullptr) {
    delete runtime_profiler_;
    runtime_profiler_ = nullptr;
  }

  delete heap_profiler_;
  heap_profiler_ = nullptr;

  compiler_dispatcher_->AbortAll();
  delete compiler_dispatcher_;
  compiler_dispatcher_ = nullptr;

  // This stops cancelable tasks (i.e. concurrent marking tasks)
  cancelable_task_manager()->CancelAndWait();

  heap_.TearDown();
  logger_->TearDown();

  if (wasm_engine_) {
    wasm_engine_->RemoveIsolate(this);
    wasm_engine_.reset();
  }

  TearDownEmbeddedBlob();

  delete interpreter_;
  interpreter_ = nullptr;

  delete ast_string_constants_;
  ast_string_constants_ = nullptr;

  code_event_dispatcher_.reset();

  delete root_index_map_;
  root_index_map_ = nullptr;

  delete compiler_zone_;
  compiler_zone_ = nullptr;
  compiler_cache_ = nullptr;

  ClearSerializerData();

  {
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    thread_data_table_.RemoveAllThreads();
  }
}
2955 
2956 
// Publishes |isolate| and its per-thread data as this OS thread's "current"
// values in the thread-local slots.
void Isolate::SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data) {
  base::Thread::SetThreadLocal(isolate_key_, isolate);
  base::Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
}
2962 
2963 
// Frees the isolate's remaining owned components. Runs after Deinit() (see
// Isolate::Delete); the deletion order below is deliberate — e.g. the
// default microtask queue goes last.
Isolate::~Isolate() {
  TRACE_ISOLATE(destructor);

  // The entry stack must be empty when we get here.
  DCHECK(entry_stack_ == nullptr || entry_stack_->previous_item == nullptr);

  delete entry_stack_;
  entry_stack_ = nullptr;

  delete unicode_cache_;
  unicode_cache_ = nullptr;

  delete date_cache_;
  date_cache_ = nullptr;

  delete regexp_stack_;
  regexp_stack_ = nullptr;

  delete descriptor_lookup_cache_;
  descriptor_lookup_cache_ = nullptr;

  delete load_stub_cache_;
  load_stub_cache_ = nullptr;
  delete store_stub_cache_;
  store_stub_cache_ = nullptr;

  delete materialized_object_store_;
  materialized_object_store_ = nullptr;

  delete logger_;
  logger_ = nullptr;

  delete handle_scope_implementer_;
  handle_scope_implementer_ = nullptr;

  delete code_tracer();
  set_code_tracer(nullptr);

  delete compilation_cache_;
  compilation_cache_ = nullptr;
  delete bootstrapper_;
  bootstrapper_ = nullptr;
  delete inner_pointer_to_code_cache_;
  inner_pointer_to_code_cache_ = nullptr;

  delete thread_manager_;
  thread_manager_ = nullptr;

  delete global_handles_;
  global_handles_ = nullptr;
  delete eternal_handles_;
  eternal_handles_ = nullptr;

  delete string_stream_debug_object_cache_;
  string_stream_debug_object_cache_ = nullptr;

  delete random_number_generator_;
  random_number_generator_ = nullptr;

  delete fuzzer_rng_;
  fuzzer_rng_ = nullptr;

  delete debug_;
  debug_ = nullptr;

  delete cancelable_task_manager_;
  cancelable_task_manager_ = nullptr;

  delete allocator_;
  allocator_ = nullptr;

  // Assert that |default_microtask_queue_| is the last MicrotaskQueue instance.
  DCHECK_IMPLIES(default_microtask_queue_,
                 default_microtask_queue_ == default_microtask_queue_->next());
  delete default_microtask_queue_;
  default_microtask_queue_ = nullptr;
}
3041 
// (Re)initializes this thread's ThreadLocalTop for this isolate.
void Isolate::InitializeThreadLocal() { thread_local_top_.Initialize(this); }
3043 
3044 void Isolate::SetTerminationOnExternalTryCatch() {
3045  if (try_catch_handler() == nullptr) return;
3046  try_catch_handler()->can_continue_ = false;
3047  try_catch_handler()->has_terminated_ = true;
3048  try_catch_handler()->exception_ = ReadOnlyRoots(heap()).null_value();
3049 }
3050 
// Returns false when a JavaScript handler is on top (the exception stays
// internal); returns true otherwise, populating the innermost external
// v8::TryCatch with the exception and — when available — the pending message.
bool Isolate::PropagatePendingExceptionToExternalTryCatch() {
  Object* exception = pending_exception();

  if (IsJavaScriptHandlerOnTop(exception)) {
    thread_local_top_.external_caught_exception_ = false;
    return false;
  }

  if (!IsExternalHandlerOnTop(exception)) {
    thread_local_top_.external_caught_exception_ = false;
    return true;
  }

  thread_local_top_.external_caught_exception_ = true;
  if (!is_catchable_by_javascript(exception)) {
    // Termination exceptions cannot be caught by JS; mark the external
    // handler as terminated instead of handing it the exception.
    SetTerminationOnExternalTryCatch();
  } else {
    v8::TryCatch* handler = try_catch_handler();
    DCHECK(thread_local_top_.pending_message_obj_->IsJSMessageObject() ||
           thread_local_top_.pending_message_obj_->IsTheHole(this));
    handler->can_continue_ = true;
    handler->has_terminated_ = false;
    handler->exception_ = pending_exception();
    // Propagate to the external try-catch only if we got an actual message.
    if (thread_local_top_.pending_message_obj_->IsTheHole(this)) return true;

    handler->message_obj_ = thread_local_top_.pending_message_obj_;
  }
  return true;
}
3081 
3082 bool Isolate::InitializeCounters() {
3083  if (async_counters_) return false;
3084  async_counters_ = std::make_shared<Counters>(this);
3085  return true;
3086 }
3087 
3088 void Isolate::InitializeLoggingAndCounters() {
3089  if (logger_ == nullptr) {
3090  logger_ = new Logger(this);
3091  }
3092  InitializeCounters();
3093 }
3094 
namespace {

// Replaces each isolate-independent builtin's on-heap Code object with a
// small trampoline that jumps into the embedded (off-heap) blob.
void CreateOffHeapTrampolines(Isolate* isolate) {
  DCHECK_NOT_NULL(isolate->embedded_blob());
  DCHECK_NE(0, isolate->embedded_blob_size());

  HandleScope scope(isolate);
  Builtins* builtins = isolate->builtins();

  EmbeddedData d = EmbeddedData::FromBlob();

  for (int i = 0; i < Builtins::builtin_count; i++) {
    if (!Builtins::IsIsolateIndependent(i)) continue;

    Address instruction_start = d.InstructionStartOfBuiltin(i);
    Handle<Code> trampoline = isolate->factory()->NewOffHeapTrampolineFor(
        builtins->builtin_handle(i), instruction_start);

    // Note that references to the old, on-heap code objects may still exist on
    // the heap. This is fine for the sake of serialization, as serialization
    // will canonicalize all builtins in MaybeCanonicalizeBuiltin().
    //
    // From this point onwards, some builtin code objects may be unreachable and
    // thus collected by the GC.
    builtins->set_builtin(i, *trampoline);

    if (isolate->logger()->is_listening_to_code_events() ||
        isolate->is_profiling()) {
      isolate->logger()->LogCodeObject(*trampoline);
    }
  }
}

}  // namespace
3129 
// Selects which embedded-builtins blob this isolate starts with: the
// compiled-in default, the trusted variant (multi-snapshot builds), or a
// "sticky" blob pinned by a previously-created isolate.
void Isolate::InitializeDefaultEmbeddedBlob() {
  const uint8_t* blob = DefaultEmbeddedBlob();
  uint32_t size = DefaultEmbeddedBlobSize();

#ifdef V8_MULTI_SNAPSHOTS
  if (!FLAG_untrusted_code_mitigations) {
    blob = TrustedEmbeddedBlob();
    size = TrustedEmbeddedBlobSize();
  }
#endif

  // Double-checked: the first test avoids taking the lock in the common case.
  if (StickyEmbeddedBlob() != nullptr) {
    base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
    // Check again now that we hold the lock.
    if (StickyEmbeddedBlob() != nullptr) {
      blob = StickyEmbeddedBlob();
      size = StickyEmbeddedBlobSize();
      current_embedded_blob_refs_++;
    }
  }

  if (blob == nullptr) {
    CHECK_EQ(0, size);
  } else {
    SetEmbeddedBlob(blob, size);
  }
}
3157 
// Creates (or reuses) the off-heap embedded-builtins blob for this isolate
// and redirects isolate-independent builtins to it via trampolines.
void Isolate::CreateAndSetEmbeddedBlob() {
  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());

  // If a sticky blob has been set, we reuse it.
  if (StickyEmbeddedBlob() != nullptr) {
    CHECK_EQ(embedded_blob(), StickyEmbeddedBlob());
    CHECK_EQ(CurrentEmbeddedBlob(), StickyEmbeddedBlob());
  } else {
    // Create and set a new embedded blob.
    uint8_t* data;
    uint32_t size;
    InstructionStream::CreateOffHeapInstructionStream(this, &data, &size);

    CHECK_EQ(0, current_embedded_blob_refs_);
    const uint8_t* const_data = const_cast<const uint8_t*>(data);
    SetEmbeddedBlob(const_data, size);
    current_embedded_blob_refs_++;

    // Pin the blob so later isolates reuse it instead of building their own.
    SetStickyEmbeddedBlob(const_data, size);
  }

  CreateOffHeapTrampolines(this);
}
3181 
// Drops this isolate's reference to the sticky embedded blob and frees the
// blob when this was the last holder (and refcounting is enabled).
void Isolate::TearDownEmbeddedBlob() {
  // Nothing to do in case the blob is embedded into the binary or unset.
  if (StickyEmbeddedBlob() == nullptr) return;

  CHECK_EQ(embedded_blob(), StickyEmbeddedBlob());
  CHECK_EQ(CurrentEmbeddedBlob(), StickyEmbeddedBlob());

  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
  current_embedded_blob_refs_--;
  if (current_embedded_blob_refs_ == 0 && enable_embedded_blob_refcounting_) {
    // We own the embedded blob and are the last holder. Free it.
    InstructionStream::FreeOffHeapInstructionStream(
        const_cast<uint8_t*>(embedded_blob()), embedded_blob_size());
    ClearEmbeddedBlob();
  }
}
3198 
3199 bool Isolate::Init(StartupDeserializer* des) {
3200  TRACE_ISOLATE(init);
3201 
3202  base::ElapsedTimer timer;
3203  if (des == nullptr && FLAG_profile_deserialization) timer.Start();
3204 
3205  time_millis_at_init_ = heap_.MonotonicallyIncreasingTimeInMs();
3206 
3207  stress_deopt_count_ = FLAG_deopt_every_n_times;
3208  force_slow_path_ = FLAG_force_slow_path;
3209 
3210  has_fatal_error_ = false;
3211 
3212  // The initialization process does not handle memory exhaustion.
3213  AlwaysAllocateScope always_allocate(this);
3214 
3215  // Safe after setting Heap::isolate_, and initializing StackGuard
3216  heap_.SetStackLimits();
3217 
3218 #define ASSIGN_ELEMENT(CamelName, hacker_name) \
3219  isolate_addresses_[IsolateAddressId::k##CamelName##Address] = \
3220  reinterpret_cast<Address>(hacker_name##_address());
3221  FOR_EACH_ISOLATE_ADDRESS_NAME(ASSIGN_ELEMENT)
3222 #undef ASSIGN_ELEMENT
3223 
3224  compilation_cache_ = new CompilationCache(this);
3225  descriptor_lookup_cache_ = new DescriptorLookupCache();
3226  unicode_cache_ = new UnicodeCache();
3227  inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
3228  global_handles_ = new GlobalHandles(this);
3229  eternal_handles_ = new EternalHandles();
3230  bootstrapper_ = new Bootstrapper(this);
3231  handle_scope_implementer_ = new HandleScopeImplementer(this);
3232  load_stub_cache_ = new StubCache(this);
3233  store_stub_cache_ = new StubCache(this);
3234  materialized_object_store_ = new MaterializedObjectStore(this);
3235  regexp_stack_ = new RegExpStack();
3236  regexp_stack_->isolate_ = this;
3237  date_cache_ = new DateCache();
3238  heap_profiler_ = new HeapProfiler(heap());
3239  interpreter_ = new interpreter::Interpreter(this);
3240 
3241  compiler_dispatcher_ =
3242  new CompilerDispatcher(this, V8::GetCurrentPlatform(), FLAG_stack_size);
3243 
3244  // Enable logging before setting up the heap
3245  logger_->SetUp(this);
3246 
3247  { // NOLINT
3248  // Ensure that the thread has a valid stack guard. The v8::Locker object
3249  // will ensure this too, but we don't have to use lockers if we are only
3250  // using one thread.
3251  ExecutionAccess lock(this);
3252  stack_guard_.InitThread(lock);
3253  }
3254 
3255  // SetUp the object heap.
3256  DCHECK(!heap_.HasBeenSetUp());
3257  heap_.SetUp();
3258 
3259  isolate_data_.external_reference_table()->Init(this);
3260 
3261  // Setup the wasm engine.
3262  if (wasm_engine_ == nullptr) {
3263  SetWasmEngine(wasm::WasmEngine::GetWasmEngine());
3264  }
3265  DCHECK_NOT_NULL(wasm_engine_);
3266 
3267  deoptimizer_data_ = new DeoptimizerData(heap());
3268 
3269  const bool create_heap_objects = (des == nullptr);
3270  if (setup_delegate_ == nullptr) {
3271  setup_delegate_ = new SetupIsolateDelegate(create_heap_objects);
3272  }
3273 
3274  if (!setup_delegate_->SetupHeap(&heap_)) {
3275  V8::FatalProcessOutOfMemory(this, "heap object creation");
3276  return false;
3277  }
3278 
3279  if (create_heap_objects) {
3280  // Terminate the partial snapshot cache so we can iterate.
3281  partial_snapshot_cache_.push_back(ReadOnlyRoots(this).undefined_value());
3282  }
3283 
3284  InitializeThreadLocal();
3285 
3286  // Profiler has to be created after ThreadLocal is initialized
3287  // because it makes use of interrupts.
3288  tracing_cpu_profiler_.reset(new TracingCpuProfilerImpl(this));
3289 
3290  bootstrapper_->Initialize(create_heap_objects);
3291 
3292  if (FLAG_embedded_builtins && create_heap_objects) {
3293  builtins_constants_table_builder_ = new BuiltinsConstantsTableBuilder(this);
3294  }
3295  setup_delegate_->SetupBuiltins(this);
3296  if (create_heap_objects) {
3297  // Create a copy of the the interpreter entry trampoline and store it
3298  // on the root list. It is used as a template for further copies that
3299  // may later be created to help profile interpreted code.
3300  // TODO(jgruber): Merge this with the block below once
3301  // FLAG_embedded_builtins is always true.
3302  HandleScope handle_scope(this);
3303  Handle<Code> code =
3304  factory()->CopyCode(BUILTIN_CODE(this, InterpreterEntryTrampoline));
3305  heap_.SetInterpreterEntryTrampolineForProfiling(*code);
3306  }
3307  if (FLAG_embedded_builtins && create_heap_objects) {
3308  builtins_constants_table_builder_->Finalize();
3309  delete builtins_constants_table_builder_;
3310  builtins_constants_table_builder_ = nullptr;
3311 
3312  CreateAndSetEmbeddedBlob();
3313  }
3314 
3315  if (create_heap_objects) heap_.CreateFixedStubs();
3316 
3317  if (FLAG_log_internal_timer_events) {
3318  set_event_logger(Logger::DefaultEventLoggerSentinel);
3319  }
3320 
3321  if (FLAG_trace_turbo || FLAG_trace_turbo_graph || FLAG_turbo_profiling) {
3322  PrintF("Concurrent recompilation has been disabled for tracing.\n");
3323  } else if (OptimizingCompileDispatcher::Enabled()) {
3324  optimizing_compile_dispatcher_ = new OptimizingCompileDispatcher(this);
3325  }
3326 
3327  // Initialize runtime profiler before deserialization, because collections may
3328  // occur, clearing/updating ICs.
3329  runtime_profiler_ = new RuntimeProfiler(this);
3330 
3331  // If we are deserializing, read the state into the now-empty heap.
3332  {
3333  AlwaysAllocateScope always_allocate(this);
3334  CodeSpaceMemoryModificationScope modification_scope(&heap_);
3335 
3336  if (!create_heap_objects) des->DeserializeInto(this);
3337  load_stub_cache_->Initialize();
3338  store_stub_cache_->Initialize();
3339  interpreter_->Initialize();
3340  heap_.NotifyDeserializationComplete();
3341  }
3342  delete setup_delegate_;
3343  setup_delegate_ = nullptr;
3344 
3345  if (FLAG_print_builtin_code) builtins()->PrintBuiltinCode();
3346  if (FLAG_print_builtin_size) builtins()->PrintBuiltinSize();
3347 
3348  // Finish initialization of ThreadLocal after deserialization is done.
3349  clear_pending_exception();
3350  clear_pending_message();
3351  clear_scheduled_exception();
3352 
3353  // Deserializing may put strange things in the root array's copy of the
3354  // stack guard.
3355  heap_.SetStackLimits();
3356 
3357  // Quiet the heap NaN if needed on target platform.
3358  if (!create_heap_objects)
3359  Assembler::QuietNaN(ReadOnlyRoots(this).nan_value());
3360 
3361  if (FLAG_trace_turbo) {
3362  // Create an empty file.
3363  std::ofstream(GetTurboCfgFileName(this).c_str(), std::ios_base::trunc);
3364  }
3365 
3366  {
3367  HandleScope scope(this);
3368  ast_string_constants_ = new AstStringConstants(this, heap()->HashSeed());
3369  }
3370 
3371  initialized_from_snapshot_ = (des != nullptr);
3372 
3373  if (!FLAG_inline_new) heap_.DisableInlineAllocation();
3374 
3375  if (FLAG_stress_sampling_allocation_profiler > 0) {
3376  uint64_t sample_interval = FLAG_stress_sampling_allocation_profiler;
3377  int stack_depth = 128;
3378  v8::HeapProfiler::SamplingFlags sampling_flags =
3379  v8::HeapProfiler::SamplingFlags::kSamplingForceGC;
3380  heap_profiler()->StartSamplingHeapProfiler(sample_interval, stack_depth,
3381  sampling_flags);
3382  }
3383 
3384  if (des == nullptr && FLAG_profile_deserialization) {
3385  double ms = timer.Elapsed().InMillisecondsF();
3386  PrintF("[Initializing isolate from scratch took %0.3f ms]\n", ms);
3387  }
3388 
3389  return true;
3390 }
3391 
3392 
// Makes this isolate the current one for the calling thread. The previously
// current isolate/thread-data pair (if any) is pushed onto |entry_stack_| so
// that Exit() can restore it. Re-entry from the same thread is counted, not
// re-initialized.
void Isolate::Enter() {
  Isolate* current_isolate = nullptr;
  PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
  if (current_data != nullptr) {
    current_isolate = current_data->isolate_;
    DCHECK_NOT_NULL(current_isolate);
    if (current_isolate == this) {
      DCHECK(Current() == this);
      DCHECK_NOT_NULL(entry_stack_);
      DCHECK(entry_stack_->previous_thread_data == nullptr ||
             entry_stack_->previous_thread_data->thread_id().Equals(
                 ThreadId::Current()));
      // Same thread re-enters the isolate, no need to re-init anything.
      entry_stack_->entry_count++;
      return;
    }
  }

  // First entry from this thread (or entry from a thread currently running a
  // different isolate): allocate per-thread data lazily.
  PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
  DCHECK_NOT_NULL(data);
  DCHECK(data->isolate_ == this);

  // Remember what was current so Exit() can restore it.
  EntryStackItem* item = new EntryStackItem(current_data,
                                            current_isolate,
                                            entry_stack_);
  entry_stack_ = item;

  SetIsolateThreadLocals(this, data);

  // In case it's the first time some thread enters the isolate.
  set_thread_id(data->thread_id());
}
3425 
3426 
// Undoes the most recent Enter() on this thread. Nested entries only
// decrement the counter; the outermost Exit() pops the entry stack and
// restores whichever isolate (possibly none) was current before.
void Isolate::Exit() {
  DCHECK_NOT_NULL(entry_stack_);
  DCHECK(entry_stack_->previous_thread_data == nullptr ||
         entry_stack_->previous_thread_data->thread_id().Equals(
             ThreadId::Current()));

  // Nested Enter()/Exit() pair: just unwind one level of the counter.
  if (--entry_stack_->entry_count > 0) return;

  DCHECK_NOT_NULL(CurrentPerIsolateThreadData());
  DCHECK(CurrentPerIsolateThreadData()->isolate_ == this);

  // Pop the stack.
  EntryStackItem* item = entry_stack_;
  entry_stack_ = item->previous_item;

  // Copy out the fields before deleting the stack item that owns them.
  PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
  Isolate* previous_isolate = item->previous_isolate;

  delete item;

  // Reinit the current thread for the isolate it was running before this one.
  SetIsolateThreadLocals(previous_isolate, previous_thread_data);
}
3450 
3451 
3452 void Isolate::LinkDeferredHandles(DeferredHandles* deferred) {
3453  deferred->next_ = deferred_handles_head_;
3454  if (deferred_handles_head_ != nullptr) {
3455  deferred_handles_head_->previous_ = deferred;
3456  }
3457  deferred_handles_head_ = deferred;
3458 }
3459 
3460 
// Removes |deferred| from the doubly-linked list of deferred handle blocks,
// fixing up the head pointer and both neighbours' links.
void Isolate::UnlinkDeferredHandles(DeferredHandles* deferred) {
#ifdef DEBUG
  // In debug mode assert that the linked list is well-formed: walking the
  // previous_ links from |deferred| must reach the recorded list head.
  DeferredHandles* deferred_iterator = deferred;
  while (deferred_iterator->previous_ != nullptr) {
    deferred_iterator = deferred_iterator->previous_;
  }
  DCHECK(deferred_handles_head_ == deferred_iterator);
#endif
  // Advance the head if we are removing the first element.
  if (deferred_handles_head_ == deferred) {
    deferred_handles_head_ = deferred_handles_head_->next_;
  }
  // Splice |deferred| out by rewiring its neighbours to each other.
  if (deferred->next_ != nullptr) {
    deferred->next_->previous_ = deferred->previous_;
  }
  if (deferred->previous_ != nullptr) {
    deferred->previous_->next_ = deferred->next_;
  }
}
3480 
// Prints any accumulated TurboFan/wasm/runtime-call statistics to stdout and
// resets the underlying counters, so subsequent runs start from zero.
void Isolate::DumpAndResetStats() {
  if (turbo_statistics() != nullptr) {
    DCHECK(FLAG_turbo_stats || FLAG_turbo_stats_nvp);
    StdoutStream os;
    if (FLAG_turbo_stats) {
      // Human-readable table.
      AsPrintableStatistics ps = {*turbo_statistics(), false};
      os << ps << std::endl;
    }
    if (FLAG_turbo_stats_nvp) {
      // Machine-readable name=value pairs.
      AsPrintableStatistics ps = {*turbo_statistics(), true};
      os << ps << std::endl;
    }
    // Dumping resets: drop the statistics object entirely.
    delete turbo_statistics_;
    turbo_statistics_ = nullptr;
  }
  // TODO(7424): There is no public API for the {WasmEngine} yet. So for now we
  // just dump and reset the engines statistics together with the Isolate.
  if (FLAG_turbo_stats_wasm) {
    wasm_engine()->DumpAndResetTurboStatistics();
  }
  if (V8_UNLIKELY(FLAG_runtime_stats ==
                  v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE)) {
    // Fold worker-thread runtime call stats into the main table before
    // printing, then reset for the next measurement interval.
    counters()->worker_thread_runtime_call_stats()->AddToMainTable(
        counters()->runtime_call_stats());
    counters()->runtime_call_stats()->Print();
    counters()->runtime_call_stats()->Reset();
  }
}
3509 
3510 void Isolate::AbortConcurrentOptimization(BlockingBehavior behavior) {
3511  if (concurrent_recompilation_enabled()) {
3512  DisallowHeapAllocation no_recursive_gc;
3513  optimizing_compile_dispatcher()->Flush(behavior);
3514  }
3515 }
3516 
3517 CompilationStatistics* Isolate::GetTurboStatistics() {
3518  if (turbo_statistics() == nullptr)
3519  set_turbo_statistics(new CompilationStatistics());
3520  return turbo_statistics();
3521 }
3522 
3523 
3524 CodeTracer* Isolate::GetCodeTracer() {
3525  if (code_tracer() == nullptr) set_code_tracer(new CodeTracer(id()));
3526  return code_tracer();
3527 }
3528 
3529 bool Isolate::use_optimizer() {
3530  return FLAG_opt && !serializer_enabled_ && CpuFeatures::SupportsOptimizer() &&
3531  !is_precise_count_code_coverage() && !is_block_count_code_coverage();
3532 }
3533 
3534 bool Isolate::NeedsDetailedOptimizedCodeLineInfo() const {
3535  return NeedsSourcePositionsForProfiling() ||
3536  detailed_source_positions_for_profiling();
3537 }
3538 
3539 bool Isolate::NeedsSourcePositionsForProfiling() const {
3540  return FLAG_trace_deopt || FLAG_trace_turbo || FLAG_trace_turbo_graph ||
3541  FLAG_turbo_profiling || FLAG_perf_prof || is_profiling() ||
3542  debug_->is_active() || logger_->is_logging() || FLAG_trace_maps;
3543 }
3544 
// Stores the list of feedback vectors kept alive for profiling/coverage
// tools on the heap; |value| is either undefined (disabled) or an ArrayList.
void Isolate::SetFeedbackVectorsForProfilingTools(Object* value) {
  DCHECK(value->IsUndefined(this) || value->IsArrayList());
  heap()->set_feedback_vectors_for_profiling_tools(value);
}
3549 
// Lazily builds the root-list ArrayList of feedback vectors used by
// profiling tools by scanning the heap for existing vectors. Idempotent:
// returns early if the list already exists.
void Isolate::MaybeInitializeVectorListFromHeap() {
  if (!heap()->feedback_vectors_for_profiling_tools()->IsUndefined(this)) {
    // Already initialized, return early.
    DCHECK(heap()->feedback_vectors_for_profiling_tools()->IsArrayList());
    return;
  }

  // Collect existing feedback vectors.
  std::vector<Handle<FeedbackVector>> vectors;

  {
    // Scope the iterator so it is destroyed before we allocate below;
    // handles (not raw pointers) keep the vectors safe across GC.
    HeapIterator heap_iterator(heap());
    while (HeapObject* current_obj = heap_iterator.next()) {
      if (!current_obj->IsFeedbackVector()) continue;

      FeedbackVector* vector = FeedbackVector::cast(current_obj);
      SharedFunctionInfo* shared = vector->shared_function_info();

      // No need to preserve the feedback vector for non-user-visible functions.
      if (!shared->IsSubjectToDebugging()) continue;

      vectors.emplace_back(vector, this);
    }
  }

  // Add collected feedback vectors to the root list lest we lose them to GC.
  Handle<ArrayList> list =
      ArrayList::New(this, static_cast<int>(vectors.size()));
  for (const auto& vector : vectors) list = ArrayList::Add(this, list, vector);
  SetFeedbackVectorsForProfilingTools(*list);
}
3581 
3582 void Isolate::set_date_cache(DateCache* date_cache) {
3583  if (date_cache != date_cache_) {
3584  delete date_cache_;
3585  }
3586  date_cache_ = date_cache;
3587 }
3588 
3589 bool Isolate::IsArrayOrObjectOrStringPrototype(Object* object) {
3590  Object* context = heap()->native_contexts_list();
3591  while (!context->IsUndefined(this)) {
3592  Context current_context = Context::cast(context);
3593  if (current_context->initial_object_prototype() == object ||
3594  current_context->initial_array_prototype() == object ||
3595  current_context->initial_string_prototype() == object) {
3596  return true;
3597  }
3598  context = current_context->next_context_link();
3599  }
3600  return false;
3601 }
3602 
3603 bool Isolate::IsInAnyContext(Object* object, uint32_t index) {
3604  DisallowHeapAllocation no_gc;
3605  Object* context = heap()->native_contexts_list();
3606  while (!context->IsUndefined(this)) {
3607  Context current_context = Context::cast(context);
3608  if (current_context->get(index) == object) {
3609  return true;
3610  }
3611  context = current_context->next_context_link();
3612  }
3613  return false;
3614 }
3615 
// Returns whether the no-elements protector cell still reports "valid",
// meaning the Array/Object/String prototypes have not grown own elements.
// In debug builds, cross-checks the cell against the actual prototype state
// and asserts the cell is not stale-valid.
bool Isolate::IsNoElementsProtectorIntact(Context context) {
  PropertyCell* no_elements_cell = heap()->no_elements_protector();
  bool cell_reports_intact =
      no_elements_cell->value()->IsSmi() &&
      Smi::ToInt(no_elements_cell->value()) == kProtectorValid;

#ifdef DEBUG
  Context native_context = context->native_context();

  Map root_array_map =
      native_context->GetInitialJSArrayMap(GetInitialFastElementsKind());
  JSObject* initial_array_proto = JSObject::cast(
      native_context->get(Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  JSObject* initial_object_proto = JSObject::cast(
      native_context->get(Context::INITIAL_OBJECT_PROTOTYPE_INDEX));
  JSObject* initial_string_proto = JSObject::cast(
      native_context->get(Context::INITIAL_STRING_PROTOTYPE_INDEX));

  if (root_array_map.is_null() || initial_array_proto == initial_object_proto) {
    // We are in the bootstrapping process, and the entire check sequence
    // shouldn't be performed.
    return cell_reports_intact;
  }

  // Check that the array prototype hasn't been altered WRT empty elements.
  if (root_array_map->prototype() != initial_array_proto) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // The protector only holds while prototypes have no own elements: their
  // backing stores must still be one of the canonical empty stores.
  FixedArrayBase elements = initial_array_proto->elements();
  ReadOnlyRoots roots(heap());
  if (elements != roots.empty_fixed_array() &&
      elements != roots.empty_slow_element_dictionary()) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // Check that the Object.prototype hasn't been altered WRT empty elements.
  elements = initial_object_proto->elements();
  if (elements != roots.empty_fixed_array() &&
      elements != roots.empty_slow_element_dictionary()) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // Check that the Array.prototype has the Object.prototype as its
  // [[Prototype]] and that the Object.prototype has a null [[Prototype]].
  PrototypeIterator iter(this, initial_array_proto);
  if (iter.IsAtEnd() || iter.GetCurrent() != initial_object_proto) {
    DCHECK_EQ(false, cell_reports_intact);
    DCHECK(!has_pending_exception());
    return cell_reports_intact;
  }
  iter.Advance();
  if (!iter.IsAtEnd()) {
    DCHECK_EQ(false, cell_reports_intact);
    DCHECK(!has_pending_exception());
    return cell_reports_intact;
  }
  DCHECK(!has_pending_exception());

  // Check that the String.prototype hasn't been altered WRT empty elements.
  elements = initial_string_proto->elements();
  if (elements != roots.empty_fixed_array() &&
      elements != roots.empty_slow_element_dictionary()) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // Check that the String.prototype has the Object.prototype
  // as its [[Prototype]] still.
  if (initial_string_proto->map()->prototype() != initial_object_proto) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }
#endif

  return cell_reports_intact;
}
3696 
3697 bool Isolate::IsNoElementsProtectorIntact() {
3698  return Isolate::IsNoElementsProtectorIntact(context());
3699 }
3700 
// Returns whether the @@isConcatSpreadable protector is still valid, i.e.
// the symbol has not been installed anywhere that would affect concat.
// In debug builds, additionally verifies the cell against the actual
// Array.prototype state (one direction only; see TODO below).
bool Isolate::IsIsConcatSpreadableLookupChainIntact() {
  Cell* is_concat_spreadable_cell = heap()->is_concat_spreadable_protector();
  // Note: the cell stores the *invalidated* state, hence the negation below.
  bool is_is_concat_spreadable_set =
      Smi::ToInt(is_concat_spreadable_cell->value()) == kProtectorInvalid;
#ifdef DEBUG
  Map root_array_map =
      raw_native_context()->GetInitialJSArrayMap(GetInitialFastElementsKind());
  if (root_array_map.is_null()) {
    // Ignore the value of is_concat_spreadable during bootstrap.
    return !is_is_concat_spreadable_set;
  }
  Handle<Object> array_prototype(array_function()->prototype(), this);
  Handle<Symbol> key = factory()->is_concat_spreadable_symbol();
  Handle<Object> value;
  LookupIterator it(this, array_prototype, key);
  if (it.IsFound() && !JSReceiver::GetDataProperty(&it)->IsUndefined(this)) {
    // TODO(cbruni): Currently we do not revert if we unset the
    // @@isConcatSpreadable property on Array.prototype or Object.prototype
    // hence the reverse implication doesn't hold.
    DCHECK(is_is_concat_spreadable_set);
    return false;
  }
#endif  // DEBUG

  return !is_is_concat_spreadable_set;
}
3727 
3728 bool Isolate::IsIsConcatSpreadableLookupChainIntact(JSReceiver* receiver) {
3729  if (!IsIsConcatSpreadableLookupChainIntact()) return false;
3730  return !receiver->HasProxyInPrototype(this);
3731 }
3732 
// Returns whether the promise-hook protector is still valid. While intact,
// no promise hook or async event delegate can be installed (asserted below),
// so promise fast paths may skip hook dispatch.
bool Isolate::IsPromiseHookProtectorIntact() {
  PropertyCell* promise_hook_cell = heap()->promise_hook_protector();
  bool is_promise_hook_protector_intact =
      Smi::ToInt(promise_hook_cell->value()) == kProtectorValid;
  DCHECK_IMPLIES(is_promise_hook_protector_intact,
                 !promise_hook_or_async_event_delegate_);
  DCHECK_IMPLIES(is_promise_hook_protector_intact,
                 !promise_hook_or_debug_is_active_or_async_event_delegate_);
  return is_promise_hook_protector_intact;
}
3743 
3744 bool Isolate::IsPromiseResolveLookupChainIntact() {
3745  Cell* promise_resolve_cell = heap()->promise_resolve_protector();
3746  bool is_promise_resolve_protector_intact =
3747  Smi::ToInt(promise_resolve_cell->value()) == kProtectorValid;
3748  return is_promise_resolve_protector_intact;
3749 }
3750 
3751 bool Isolate::IsPromiseThenLookupChainIntact() {
3752  PropertyCell* promise_then_cell = heap()->promise_then_protector();
3753  bool is_promise_then_protector_intact =
3754  Smi::ToInt(promise_then_cell->value()) == kProtectorValid;
3755  return is_promise_then_protector_intact;
3756 }
3757 
3758 bool Isolate::IsPromiseThenLookupChainIntact(Handle<JSReceiver> receiver) {
3759  DisallowHeapAllocation no_gc;
3760  if (!receiver->IsJSPromise()) return false;
3761  if (!IsInAnyContext(receiver->map()->prototype(),
3762  Context::PROMISE_PROTOTYPE_INDEX)) {
3763  return false;
3764  }
3765  return IsPromiseThenLookupChainIntact();
3766 }
3767 
// Invalidates the no-elements protector when an element is added to one of
// the guarded prototypes. Cheap checks run first; the expensive prototype
// walk happens only if the protector is still intact.
void Isolate::UpdateNoElementsProtectorOnSetElement(Handle<JSObject> object) {
  DisallowHeapAllocation no_gc;
  if (!object->map()->is_prototype_map()) return;
  if (!IsNoElementsProtectorIntact()) return;
  if (!IsArrayOrObjectOrStringPrototype(*object)) return;
  PropertyCell::SetValueWithInvalidation(
      this, factory()->no_elements_protector(),
      handle(Smi::FromInt(kProtectorInvalid), this));
}
3777 
3778 void Isolate::InvalidateIsConcatSpreadableProtector() {
3779  DCHECK(factory()->is_concat_spreadable_protector()->value()->IsSmi());
3780  DCHECK(IsIsConcatSpreadableLookupChainIntact());
3781  factory()->is_concat_spreadable_protector()->set_value(
3782  Smi::FromInt(kProtectorInvalid));
3783  DCHECK(!IsIsConcatSpreadableLookupChainIntact());
3784 }
3785 
3786 void Isolate::InvalidateArrayConstructorProtector() {
3787  DCHECK(factory()->array_constructor_protector()->value()->IsSmi());
3788  DCHECK(IsArrayConstructorIntact());
3789  factory()->array_constructor_protector()->set_value(
3790  Smi::FromInt(kProtectorInvalid));
3791  DCHECK(!IsArrayConstructorIntact());
3792 }
3793 
3794 void Isolate::InvalidateArraySpeciesProtector() {
3795  DCHECK(factory()->array_species_protector()->value()->IsSmi());
3796  DCHECK(IsArraySpeciesLookupChainIntact());
3797  PropertyCell::SetValueWithInvalidation(
3798  this, factory()->array_species_protector(),
3799  handle(Smi::FromInt(kProtectorInvalid), this));
3800  DCHECK(!IsArraySpeciesLookupChainIntact());
3801 }
3802 
3803 void Isolate::InvalidateTypedArraySpeciesProtector() {
3804  DCHECK(factory()->typed_array_species_protector()->value()->IsSmi());
3805  DCHECK(IsTypedArraySpeciesLookupChainIntact());
3806  PropertyCell::SetValueWithInvalidation(
3807  this, factory()->typed_array_species_protector(),
3808  handle(Smi::FromInt(kProtectorInvalid), this));
3809  DCHECK(!IsTypedArraySpeciesLookupChainIntact());
3810 }
3811 
3812 void Isolate::InvalidateRegExpSpeciesProtector() {
3813  DCHECK(factory()->regexp_species_protector()->value()->IsSmi());
3814  DCHECK(IsRegExpSpeciesLookupChainIntact());
3815  PropertyCell::SetValueWithInvalidation(
3816  this, factory()->regexp_species_protector(),
3817  handle(Smi::FromInt(kProtectorInvalid), this));
3818  DCHECK(!IsRegExpSpeciesLookupChainIntact());
3819 }
3820 
3821 void Isolate::InvalidatePromiseSpeciesProtector() {
3822  DCHECK(factory()->promise_species_protector()->value()->IsSmi());
3823  DCHECK(IsPromiseSpeciesLookupChainIntact());
3824  PropertyCell::SetValueWithInvalidation(
3825  this, factory()->promise_species_protector(),
3826  handle(Smi::FromInt(kProtectorInvalid), this));
3827  DCHECK(!IsPromiseSpeciesLookupChainIntact());
3828 }
3829 
3830 void Isolate::InvalidateStringLengthOverflowProtector() {
3831  DCHECK(factory()->string_length_protector()->value()->IsSmi());
3832  DCHECK(IsStringLengthOverflowIntact());
3833  factory()->string_length_protector()->set_value(
3834  Smi::FromInt(kProtectorInvalid));
3835  DCHECK(!IsStringLengthOverflowIntact());
3836 }
3837 
3838 void Isolate::InvalidateArrayIteratorProtector() {
3839  DCHECK(factory()->array_iterator_protector()->value()->IsSmi());
3840  DCHECK(IsArrayIteratorLookupChainIntact());
3841  PropertyCell::SetValueWithInvalidation(
3842  this, factory()->array_iterator_protector(),
3843  handle(Smi::FromInt(kProtectorInvalid), this));
3844  DCHECK(!IsArrayIteratorLookupChainIntact());
3845 }
3846 
3847 void Isolate::InvalidateMapIteratorProtector() {
3848  DCHECK(factory()->map_iterator_protector()->value()->IsSmi());
3849  DCHECK(IsMapIteratorLookupChainIntact());
3850  PropertyCell::SetValueWithInvalidation(
3851  this, factory()->map_iterator_protector(),
3852  handle(Smi::FromInt(kProtectorInvalid), this));
3853  DCHECK(!IsMapIteratorLookupChainIntact());
3854 }
3855 
3856 void Isolate::InvalidateSetIteratorProtector() {
3857  DCHECK(factory()->set_iterator_protector()->value()->IsSmi());
3858  DCHECK(IsSetIteratorLookupChainIntact());
3859  PropertyCell::SetValueWithInvalidation(
3860  this, factory()->set_iterator_protector(),
3861  handle(Smi::FromInt(kProtectorInvalid), this));
3862  DCHECK(!IsSetIteratorLookupChainIntact());
3863 }
3864 
3865 void Isolate::InvalidateStringIteratorProtector() {
3866  DCHECK(factory()->string_iterator_protector()->value()->IsSmi());
3867  DCHECK(IsStringIteratorLookupChainIntact());
3868  PropertyCell::SetValueWithInvalidation(
3869  this, factory()->string_iterator_protector(),
3870  handle(Smi::FromInt(kProtectorInvalid), this));
3871  DCHECK(!IsStringIteratorLookupChainIntact());
3872 }
3873 
3874 void Isolate::InvalidateArrayBufferNeuteringProtector() {
3875  DCHECK(factory()->array_buffer_neutering_protector()->value()->IsSmi());
3876  DCHECK(IsArrayBufferNeuteringIntact());
3877  PropertyCell::SetValueWithInvalidation(
3878  this, factory()->array_buffer_neutering_protector(),
3879  handle(Smi::FromInt(kProtectorInvalid), this));
3880  DCHECK(!IsArrayBufferNeuteringIntact());
3881 }
3882 
3883 void Isolate::InvalidatePromiseHookProtector() {
3884  DCHECK(factory()->promise_hook_protector()->value()->IsSmi());
3885  DCHECK(IsPromiseHookProtectorIntact());
3886  PropertyCell::SetValueWithInvalidation(
3887  this, factory()->promise_hook_protector(),
3888  handle(Smi::FromInt(kProtectorInvalid), this));
3889  DCHECK(!IsPromiseHookProtectorIntact());
3890 }
3891 
3892 void Isolate::InvalidatePromiseResolveProtector() {
3893  DCHECK(factory()->promise_resolve_protector()->value()->IsSmi());
3894  DCHECK(IsPromiseResolveLookupChainIntact());
3895  factory()->promise_resolve_protector()->set_value(
3896  Smi::FromInt(kProtectorInvalid));
3897  DCHECK(!IsPromiseResolveLookupChainIntact());
3898 }
3899 
3900 void Isolate::InvalidatePromiseThenProtector() {
3901  DCHECK(factory()->promise_then_protector()->value()->IsSmi());
3902  DCHECK(IsPromiseThenLookupChainIntact());
3903  PropertyCell::SetValueWithInvalidation(
3904  this, factory()->promise_then_protector(),
3905  handle(Smi::FromInt(kProtectorInvalid), this));
3906  DCHECK(!IsPromiseThenLookupChainIntact());
3907 }
3908 
// Returns true if |array| is the initial Array.prototype of any native
// context.
bool Isolate::IsAnyInitialArrayPrototype(Handle<JSArray> array) {
  DisallowHeapAllocation no_gc;
  return IsInAnyContext(*array, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
}
3913 
3914 static base::RandomNumberGenerator* ensure_rng_exists(
3915  base::RandomNumberGenerator** rng, int seed) {
3916  if (*rng == nullptr) {
3917  if (seed != 0) {
3918  *rng = new base::RandomNumberGenerator(seed);
3919  } else {
3920  *rng = new base::RandomNumberGenerator();
3921  }
3922  }
3923  return *rng;
3924 }
3925 
// Returns the isolate's RNG, creating it lazily (seeded by --random-seed if
// set).
base::RandomNumberGenerator* Isolate::random_number_generator() {
  // TODO(bmeurer) Initialized lazily because it depends on flags; can
  // be fixed once the default isolate cleanup is done.
  return ensure_rng_exists(&random_number_generator_, FLAG_random_seed);
}
3931 
3932 base::RandomNumberGenerator* Isolate::fuzzer_rng() {
3933  if (fuzzer_rng_ == nullptr) {
3934  int64_t seed = FLAG_fuzzer_random_seed;
3935  if (seed == 0) {
3936  seed = random_number_generator()->initial_seed();
3937  }
3938 
3939  fuzzer_rng_ = new base::RandomNumberGenerator(seed);
3940  }
3941 
3942  return fuzzer_rng_;
3943 }
3944 
3945 int Isolate::GenerateIdentityHash(uint32_t mask) {
3946  int hash;
3947  int attempts = 0;
3948  do {
3949  hash = random_number_generator()->NextInt() & mask;
3950  } while (hash == 0 && attempts++ < 30);
3951  return hash != 0 ? hash : 1;
3952 }
3953 
// Maps an arbitrary inner pointer |a| back to the Code object containing it;
// safe to call during GC.
Code Isolate::FindCodeObject(Address a) {
  return heap()->GcSafeFindCodeForInnerPointer(a);
}
3957 
3958 
#ifdef DEBUG
// Emit, for every isolate field in the init lists, a static member holding
// its byte offset within Isolate. Debug-only: used to inspect isolate state
// from debuggers/tools without access to private members.
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif
3966 
// Returns the symbol registered under |name| in the root-list dictionary
// selected by |dictionary_index|, creating and registering a new (public or
// private) symbol on first lookup. Because NameDictionary::Add may allocate
// a new backing store, the updated dictionary must be written back to the
// corresponding heap root.
Handle<Symbol> Isolate::SymbolFor(RootIndex dictionary_index,
                                  Handle<String> name, bool private_symbol) {
  // Internalize so identical strings map to the same dictionary key.
  Handle<String> key = factory()->InternalizeString(name);
  Handle<NameDictionary> dictionary =
      Handle<NameDictionary>::cast(root_handle(dictionary_index));
  int entry = dictionary->FindEntry(this, key);
  Handle<Symbol> symbol;
  if (entry == NameDictionary::kNotFound) {
    symbol =
        private_symbol ? factory()->NewPrivateSymbol() : factory()->NewSymbol();
    symbol->set_name(*key);
    dictionary = NameDictionary::Add(this, dictionary, key, symbol,
                                     PropertyDetails::Empty(), &entry);
    // Store the (possibly reallocated) dictionary back into its root slot.
    switch (dictionary_index) {
      case RootIndex::kPublicSymbolTable:
        symbol->set_is_public(true);
        heap()->set_public_symbol_table(*dictionary);
        break;
      case RootIndex::kApiSymbolTable:
        heap()->set_api_symbol_table(*dictionary);
        break;
      case RootIndex::kApiPrivateSymbolTable:
        heap()->set_api_private_symbol_table(*dictionary);
        break;
      default:
        UNREACHABLE();
    }
  } else {
    // Already registered: return the existing symbol.
    symbol = Handle<Symbol>(Symbol::cast(dictionary->ValueAt(entry)), this);
  }
  return symbol;
}
3999 
4000 void Isolate::AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback) {
4001  auto pos = std::find(before_call_entered_callbacks_.begin(),
4002  before_call_entered_callbacks_.end(), callback);
4003  if (pos != before_call_entered_callbacks_.end()) return;
4004  before_call_entered_callbacks_.push_back(callback);
4005 }
4006 
4007 void Isolate::RemoveBeforeCallEnteredCallback(
4008  BeforeCallEnteredCallback callback) {
4009  auto pos = std::find(before_call_entered_callbacks_.begin(),
4010  before_call_entered_callbacks_.end(), callback);
4011  if (pos == before_call_entered_callbacks_.end()) return;
4012  before_call_entered_callbacks_.erase(pos);
4013 }
4014 
4015 void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) {
4016  auto pos = std::find(call_completed_callbacks_.begin(),
4017  call_completed_callbacks_.end(), callback);
4018  if (pos != call_completed_callbacks_.end()) return;
4019  call_completed_callbacks_.push_back(callback);
4020 }
4021 
4022 void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) {
4023  auto pos = std::find(call_completed_callbacks_.begin(),
4024  call_completed_callbacks_.end(), callback);
4025  if (pos == call_completed_callbacks_.end()) return;
4026  call_completed_callbacks_.erase(pos);
4027 }
4028 
4029 void Isolate::AddMicrotasksCompletedCallback(
4030  MicrotasksCompletedCallback callback) {
4031  auto pos = std::find(microtasks_completed_callbacks_.begin(),
4032  microtasks_completed_callbacks_.end(), callback);
4033  if (pos != microtasks_completed_callbacks_.end()) return;
4034  microtasks_completed_callbacks_.push_back(callback);
4035 }
4036 
4037 void Isolate::RemoveMicrotasksCompletedCallback(
4038  MicrotasksCompletedCallback callback) {
4039  auto pos = std::find(microtasks_completed_callbacks_.begin(),
4040  microtasks_completed_callbacks_.end(), callback);
4041  if (pos == microtasks_completed_callbacks_.end()) return;
4042  microtasks_completed_callbacks_.erase(pos);
4043 }
4044 
4045 void Isolate::FireCallCompletedCallback() {
4046  if (!handle_scope_implementer()->CallDepthIsZero()) return;
4047 
4048  bool run_microtasks =
4049  default_microtask_queue()->size() &&
4050  !handle_scope_implementer()->HasMicrotasksSuppressions() &&
4051  handle_scope_implementer()->microtasks_policy() ==
4052  v8::MicrotasksPolicy::kAuto;
4053 
4054  if (run_microtasks) {
4055  RunMicrotasks();
4056  } else {
4057  // TODO(marja): (spec) The discussion about when to clear the KeepDuringJob
4058  // set is still open (whether to clear it after every microtask or once
4059  // during a microtask checkpoint). See also
4060  // https://github.com/tc39/proposal-weakrefs/issues/39 .
4061  heap()->ClearKeepDuringJobSet();
4062  }
4063 
4064  if (call_completed_callbacks_.empty()) return;
4065  // Fire callbacks. Increase call depth to prevent recursive callbacks.
4066  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this);
4068  std::vector<CallCompletedCallback> callbacks(call_completed_callbacks_);
4069  for (auto& callback : callbacks) {
4070  callback(reinterpret_cast<v8::Isolate*>(this));
4071  }
4072 }
4073 
// Recomputes the cached promise-hook dispatch flags whenever a hook, async
// event delegate, or debugger activation changes, and invalidates the
// promise-hook protector the first time any such consumer appears.
void Isolate::PromiseHookStateUpdated() {
  bool promise_hook_or_async_event_delegate =
      promise_hook_ || async_event_delegate_;
  bool promise_hook_or_debug_is_active_or_async_event_delegate =
      promise_hook_or_async_event_delegate || debug()->is_active();
  if (promise_hook_or_debug_is_active_or_async_event_delegate &&
      IsPromiseHookProtectorIntact()) {
    // A consumer now exists: optimized code relying on the protector must
    // deopt so hooks actually fire.
    HandleScope scope(this);
    InvalidatePromiseHookProtector();
  }
  promise_hook_or_async_event_delegate_ = promise_hook_or_async_event_delegate;
  promise_hook_or_debug_is_active_or_async_event_delegate_ =
      promise_hook_or_debug_is_active_or_async_event_delegate;
}
4088 
4089 namespace {
4090 
4091 MaybeHandle<JSPromise> NewRejectedPromise(Isolate* isolate,
4092  v8::Local<v8::Context> api_context,
4093  Handle<Object> exception) {
4095  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4096  isolate, resolver, v8::Promise::Resolver::New(api_context),
4097  MaybeHandle<JSPromise>());
4098 
4099  RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4100  isolate, resolver->Reject(api_context, v8::Utils::ToLocal(exception)),
4101  MaybeHandle<JSPromise>());
4102 
4103  v8::Local<v8::Promise> promise = resolver->GetPromise();
4104  return v8::Utils::OpenHandle(*promise);
4105 }
4106 
4107 } // namespace
4108 
// Implements dynamic import(): resolves |specifier| to a string and hands it
// to the embedder's callback, returning the promise the embedder produces.
// Errors (no callback installed, or ToString failure) yield an
// already-rejected promise rather than throwing.
MaybeHandle<JSPromise> Isolate::RunHostImportModuleDynamicallyCallback(
    Handle<Script> referrer, Handle<Object> specifier) {
  v8::Local<v8::Context> api_context =
      v8::Utils::ToLocal(Handle<Context>(native_context()));

  if (host_import_module_dynamically_callback_ == nullptr) {
    // Embedder never installed a callback: dynamic import is unsupported.
    Handle<Object> exception =
        factory()->NewError(error_function(), MessageTemplate::kUnsupported);
    return NewRejectedPromise(this, api_context, exception);
  }

  Handle<String> specifier_str;
  MaybeHandle<String> maybe_specifier = Object::ToString(this, specifier);
  if (!maybe_specifier.ToHandle(&specifier_str)) {
    // ToString threw: convert the pending exception into a rejection.
    Handle<Object> exception(pending_exception(), this);
    clear_pending_exception();

    return NewRejectedPromise(this, api_context, exception);
  }
  DCHECK(!has_pending_exception());

  v8::Local<v8::Promise> promise;
  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
      this, promise,
      host_import_module_dynamically_callback_(
          api_context, v8::Utils::ScriptOrModuleToLocal(referrer),
          v8::Utils::ToLocal(specifier_str)),
      MaybeHandle<JSPromise>());
  return v8::Utils::OpenHandle(*promise);
}
4139 
// Installs the embedder callback that services dynamic import(); a nullptr
// leaves dynamic import unsupported.
void Isolate::SetHostImportModuleDynamicallyCallback(
    HostImportModuleDynamicallyCallback callback) {
  host_import_module_dynamically_callback_ = callback;
}
4144 
// Returns |module|'s import.meta object, creating it lazily on first access:
// a null-prototype object is allocated, the embedder callback (if any) is
// given a chance to populate it, and the result is cached on the module.
Handle<JSObject> Isolate::RunHostInitializeImportMetaObjectCallback(
    Handle<Module> module) {
  Handle<Object> host_meta(module->import_meta(), this);
  if (host_meta->IsTheHole(this)) {
    // Not yet created for this module.
    host_meta = factory()->NewJSObjectWithNullProto();
    if (host_initialize_import_meta_object_callback_ != nullptr) {
      v8::Local<v8::Context> api_context =
          v8::Utils::ToLocal(Handle<Context>(native_context()));
      host_initialize_import_meta_object_callback_(
          api_context, Utils::ToLocal(module),
          v8::Local<v8::Object>::Cast(v8::Utils::ToLocal(host_meta)));
    }
    // Cache so subsequent accesses return the same object.
    module->set_import_meta(*host_meta);
  }
  return Handle<JSObject>::cast(host_meta);
}
4161 
// Registers the embedder hook that populates a module's import.meta object
// the first time it is accessed.
void Isolate::SetHostInitializeImportMetaObjectCallback(
    HostInitializeImportMetaObjectCallback callback) {
  host_initialize_import_meta_object_callback_ = callback;
}
4166 
4167 MaybeHandle<Object> Isolate::RunPrepareStackTraceCallback(
4168  Handle<Context> context, Handle<JSObject> error, Handle<JSArray> sites) {
4169  v8::Local<v8::Context> api_context = Utils::ToLocal(context);
4170 
4171  v8::Local<v8::Value> stack;
4172  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
4173  this, stack,
4174  prepare_stack_trace_callback_(api_context, Utils::ToLocal(error),
4175  Utils::ToLocal(sites)),
4176  MaybeHandle<Object>());
4177  return Utils::OpenHandle(*stack);
4178 }
4179 
// Registers the embedder's custom stack trace formatter; see
// RunPrepareStackTraceCallback for how it is invoked.
void Isolate::SetPrepareStackTraceCallback(PrepareStackTraceCallback callback) {
  prepare_stack_trace_callback_ = callback;
}
4183 
// Returns whether the embedder registered a custom stack trace formatter.
bool Isolate::HasPrepareStackTraceCallback() const {
  return prepare_stack_trace_callback_ != nullptr;
}
4187 
// Registers the callback (plus an opaque |data| cookie passed back to it)
// that RunAtomicsWaitCallback invokes around Atomics.wait operations; a null
// callback disables the notifications.
void Isolate::SetAtomicsWaitCallback(v8::Isolate::AtomicsWaitCallback callback,
                                     void* data) {
  atomics_wait_callback_ = callback;
  atomics_wait_callback_data_ = data;
}
4193 
4194 void Isolate::RunAtomicsWaitCallback(v8::Isolate::AtomicsWaitEvent event,
4195  Handle<JSArrayBuffer> array_buffer,
4196  size_t offset_in_bytes, int32_t value,
4197  double timeout_in_ms,
4198  AtomicsWaitWakeHandle* stop_handle) {
4199  DCHECK(array_buffer->is_shared());
4200  if (atomics_wait_callback_ == nullptr) return;
4201  HandleScope handle_scope(this);
4202  atomics_wait_callback_(
4203  event, v8::Utils::ToLocalShared(array_buffer), offset_in_bytes, value,
4204  timeout_in_ms,
4205  reinterpret_cast<v8::Isolate::AtomicsWaitWakeHandle*>(stop_handle),
4206  atomics_wait_callback_data_);
4207 }
4208 
// Installs the promise lifecycle hook and recomputes the isolate's
// promise-hook state (so the fast paths know a hook is active).
void Isolate::SetPromiseHook(PromiseHook hook) {
  promise_hook_ = hook;
  PromiseHookStateUpdated();
}
4213 
4214 void Isolate::RunPromiseHook(PromiseHookType type, Handle<JSPromise> promise,
4215  Handle<Object> parent) {
4216  RunPromiseHookForAsyncEventDelegate(type, promise);
4217  if (promise_hook_ == nullptr) return;
4218  promise_hook_(type, v8::Utils::PromiseToLocal(promise),
4219  v8::Utils::ToLocal(parent));
4220 }
4221 
// Reports promise lifecycle events to the debugger's async event delegate.
void Isolate::RunPromiseHookForAsyncEventDelegate(PromiseHookType type,
                                                  Handle<JSPromise> promise) {
  if (!async_event_delegate_) return;
  // Resolve events are not reported to the delegate.
  if (type == PromiseHookType::kResolve) return;

  if (type == PromiseHookType::kBefore) {
    // Only promises previously registered (non-zero task id) are reported.
    if (!promise->async_task_id()) return;
    async_event_delegate_->AsyncEventOccurred(debug::kDebugWillHandle,
                                              promise->async_task_id(), false);
  } else if (type == PromiseHookType::kAfter) {
    if (!promise->async_task_id()) return;
    async_event_delegate_->AsyncEventOccurred(debug::kDebugDidHandle,
                                              promise->async_task_id(), false);
  } else {
    DCHECK(type == PromiseHookType::kInit);
    // For kInit, walk the JS stack to decide whether this promise was
    // created by a then/catch/finally builtin invoked directly from user
    // code; only such promises get a task id and are announced.
    // NOTE: this local deliberately shadows the |type| parameter.
    debug::DebugAsyncActionType type = debug::kDebugPromiseThen;
    bool last_frame_was_promise_builtin = false;
    JavaScriptFrameIterator it(this);
    while (!it.done()) {
      std::vector<Handle<SharedFunctionInfo>> infos;
      it.frame()->GetFunctions(&infos);
      // Visit |infos| back-to-front (i counts from the end of the vector).
      for (size_t i = 1; i <= infos.size(); ++i) {
        Handle<SharedFunctionInfo> info = infos[infos.size() - i];
        if (info->IsUserJavaScript()) {
          // We should not report PromiseThen and PromiseCatch which is called
          // indirectly, e.g. Promise.all calls Promise.then internally.
          if (last_frame_was_promise_builtin) {
            // First time this promise is seen: hand out a fresh task id.
            if (!promise->async_task_id()) {
              promise->set_async_task_id(++async_task_count_);
            }
            async_event_delegate_->AsyncEventOccurred(
                type, promise->async_task_id(), debug()->IsBlackboxed(info));
          }
          // Stop at the first user-JS function either way.
          return;
        }
        last_frame_was_promise_builtin = false;
        if (info->HasBuiltinId()) {
          if (info->builtin_id() == Builtins::kPromisePrototypeThen) {
            type = debug::kDebugPromiseThen;
            last_frame_was_promise_builtin = true;
          } else if (info->builtin_id() == Builtins::kPromisePrototypeCatch) {
            type = debug::kDebugPromiseCatch;
            last_frame_was_promise_builtin = true;
          } else if (info->builtin_id() == Builtins::kPromisePrototypeFinally) {
            type = debug::kDebugPromiseFinally;
            last_frame_was_promise_builtin = true;
          }
        }
      }
      it.Advance();
    }
  }
}
4275 
4276 void Isolate::OnAsyncFunctionStateChanged(Handle<JSPromise> promise,
4277  debug::DebugAsyncActionType event) {
4278  if (!async_event_delegate_) return;
4279  if (!promise->async_task_id()) {
4280  promise->set_async_task_id(++async_task_count_);
4281  }
4282  async_event_delegate_->AsyncEventOccurred(event, promise->async_task_id(),
4283  false);
4284 }
4285 
// Registers the embedder callback invoked by ReportPromiseReject for
// unhandled promise rejections (and related events).
void Isolate::SetPromiseRejectCallback(PromiseRejectCallback callback) {
  promise_reject_callback_ = callback;
}
4289 
4290 void Isolate::ReportPromiseReject(Handle<JSPromise> promise,
4291  Handle<Object> value,
4292  v8::PromiseRejectEvent event) {
4293  if (promise_reject_callback_ == nullptr) return;
4294  Handle<FixedArray> stack_trace;
4295  if (event != v8::kPromiseHandlerAddedAfterReject && value->IsJSObject()) {
4296  stack_trace = GetDetailedStackTrace(Handle<JSObject>::cast(value));
4297  }
4298  promise_reject_callback_(v8::PromiseRejectMessage(
4299  v8::Utils::PromiseToLocal(promise), event, v8::Utils::ToLocal(value),
4300  v8::Utils::StackTraceToLocal(stack_trace)));
4301 }
4302 
// Appends |microtask| to the isolate's default microtask queue; it runs at
// the next RunMicrotasks() checkpoint.
void Isolate::EnqueueMicrotask(Handle<Microtask> microtask) {
  default_microtask_queue()->EnqueueMicrotask(*microtask);
}
4306 
4307 
4308 void Isolate::RunMicrotasks() {
4309  // TODO(tzik): Move the suppression, |is_running_microtask_|, and the
4310  // completion callbacks into MicrotaskQueue.
4311 
4312  // Increase call depth to prevent recursive callbacks.
4314  reinterpret_cast<v8::Isolate*>(this));
4315  if (default_microtask_queue()->size()) {
4316  is_running_microtasks_ = true;
4317  TRACE_EVENT0("v8.execute", "RunMicrotasks");
4318  TRACE_EVENT_CALL_STATS_SCOPED(this, "v8", "V8.RunMicrotasks");
4319 
4320  // If execution is terminating, bail out, clean up, and propagate to
4321  // TryCatch scope.
4322  if (default_microtask_queue()->RunMicrotasks(this) < 0) {
4323  handle_scope_implementer()->LeaveMicrotaskContext();
4324  SetTerminationOnExternalTryCatch();
4325  }
4326  DCHECK_EQ(0, default_microtask_queue()->size());
4327  is_running_microtasks_ = false;
4328  }
4329  // TODO(marja): (spec) The discussion about when to clear the KeepDuringJob
4330  // set is still open (whether to clear it after every microtask or once
4331  // during a microtask checkpoint). See also
4332  // https://github.com/tc39/proposal-weakrefs/issues/39 .
4333  heap()->ClearKeepDuringJobSet();
4334 
4335  FireMicrotasksCompletedCallback();
4336 }
4337 
// Installs the embedder's use-counter callback. The DCHECK enforces that it
// is set at most once per isolate.
void Isolate::SetUseCounterCallback(v8::Isolate::UseCounterCallback callback) {
  DCHECK(!use_counter_callback_);
  use_counter_callback_ = callback;
}
4342 
4343 
4344 void Isolate::CountUsage(v8::Isolate::UseCounterFeature feature) {
4345  // The counter callback may cause the embedder to call into V8, which is not
4346  // generally possible during GC.
4347  if (heap_.gc_state() == Heap::NOT_IN_GC) {
4348  if (use_counter_callback_) {
4349  HandleScope handle_scope(this);
4350  use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
4351  }
4352  } else {
4353  heap_.IncrementDeferredCount(feature);
4354  }
4355 }
4356 
4357 // static
4358 std::string Isolate::GetTurboCfgFileName(Isolate* isolate) {
4359  if (FLAG_trace_turbo_cfg_file == nullptr) {
4360  std::ostringstream os;
4361  os << "turbo-" << base::OS::GetCurrentProcessId() << "-";
4362  if (isolate != nullptr) {
4363  os << isolate->id();
4364  } else {
4365  os << "any";
4366  }
4367  os << ".cfg";
4368  return os.str();
4369  } else {
4370  return FLAG_trace_turbo_cfg_file;
4371  }
4372 }
4373 
4374 // Heap::detached_contexts tracks detached contexts as pairs
4375 // (number of GC since the context was detached, the context).
4376 void Isolate::AddDetachedContext(Handle<Context> context) {
4377  HandleScope scope(this);
4378  Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
4379  detached_contexts = WeakArrayList::AddToEnd(
4380  this, detached_contexts, MaybeObjectHandle(Smi::kZero, this));
4381  detached_contexts = WeakArrayList::AddToEnd(this, detached_contexts,
4382  MaybeObjectHandle::Weak(context));
4383  heap()->set_detached_contexts(*detached_contexts);
4384 }
4385 
4386 
4387 void Isolate::CheckDetachedContextsAfterGC() {
4388  HandleScope scope(this);
4389  Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
4390  int length = detached_contexts->length();
4391  if (length == 0) return;
4392  int new_length = 0;
4393  for (int i = 0; i < length; i += 2) {
4394  int mark_sweeps = detached_contexts->Get(i).ToSmi().value();
4395  MaybeObject context = detached_contexts->Get(i + 1);
4396  DCHECK(context->IsWeakOrCleared());
4397  if (!context->IsCleared()) {
4398  detached_contexts->Set(
4399  new_length, MaybeObject::FromSmi(Smi::FromInt(mark_sweeps + 1)));
4400  detached_contexts->Set(new_length + 1, context);
4401  new_length += 2;
4402  }
4403  }
4404  detached_contexts->set_length(new_length);
4405  while (new_length < length) {
4406  detached_contexts->Set(new_length, MaybeObject::FromSmi(Smi::zero()));
4407  ++new_length;
4408  }
4409 
4410  if (FLAG_trace_detached_contexts) {
4411  PrintF("%d detached contexts are collected out of %d\n",
4412  length - new_length, length);
4413  for (int i = 0; i < new_length; i += 2) {
4414  int mark_sweeps = detached_contexts->Get(i).ToSmi().value();
4415  MaybeObject context = detached_contexts->Get(i + 1);
4416  DCHECK(context->IsWeakOrCleared());
4417  if (mark_sweeps > 3) {
4418  PrintF("detached context %p\n survived %d GCs (leak?)\n",
4419  reinterpret_cast<void*>(context.ptr()), mark_sweeps);
4420  }
4421  }
4422  }
4423 }
4424 
// Returns the timestamp recorded when the isolate last entered the
// PERFORMANCE_LOAD RAIL mode; |rail_mutex_| guards against a concurrent
// SetRAILMode updating it.
double Isolate::LoadStartTimeMs() {
  base::MutexGuard guard(&rail_mutex_);
  return load_start_time_ms_;
}
4429 
// Switches the isolate's RAIL performance mode and performs the associated
// bookkeeping for entering/leaving PERFORMANCE_LOAD.
void Isolate::SetRAILMode(RAILMode rail_mode) {
  RAILMode old_rail_mode = rail_mode_.Value();
  // Entering PERFORMANCE_LOAD: record the load start time under the mutex so
  // LoadStartTimeMs() reads a consistent value.
  if (old_rail_mode != PERFORMANCE_LOAD && rail_mode == PERFORMANCE_LOAD) {
    base::MutexGuard guard(&rail_mutex_);
    load_start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
  }
  rail_mode_.SetValue(rail_mode);
  // Leaving PERFORMANCE_LOAD: schedule an incremental-marking task —
  // presumably to catch up on GC work deferred during loading (TODO: confirm
  // against heap scheduling policy).
  if (old_rail_mode == PERFORMANCE_LOAD && rail_mode != PERFORMANCE_LOAD) {
    heap()->incremental_marking()->incremental_marking_job()->ScheduleTask(
        heap());
  }
  if (FLAG_trace_rail) {
    PrintIsolate(this, "RAIL mode: %s\n", RAILModeName(rail_mode));
  }
}
4445 
// Marks the isolate as backgrounded and lets the heap activate its memory
// reducer if its own conditions are met.
void Isolate::IsolateInBackgroundNotification() {
  is_isolate_in_background_ = true;
  heap()->ActivateMemoryReducerIfNeeded();
}
4450 
// Marks the isolate as foregrounded again.
void Isolate::IsolateInForegroundNotification() {
  is_isolate_in_background_ = false;
}
4454 
// printf-style logging helper that prefixes each message with
// "[pid:isolate-ptr] <ms since isolate init>".
void Isolate::PrintWithTimestamp(const char* format, ...) {
  base::OS::Print("[%d:%p] %8.0f ms: ", base::OS::GetCurrentProcessId(),
                  static_cast<void*>(this), time_millis_since_init());
  va_list arguments;
  va_start(arguments, format);
  base::OS::VPrint(format, arguments);
  va_end(arguments);
}
4463 
4464 void Isolate::SetIdle(bool is_idle) {
4465  if (!is_profiling()) return;
4466  StateTag state = current_vm_state();
4467  DCHECK(state == EXTERNAL || state == IDLE);
4468  if (js_entry_sp() != kNullAddress) return;
4469  if (is_idle) {
4470  set_current_vm_state(IDLE);
4471  } else if (state == IDLE) {
4472  set_current_vm_state(EXTERNAL);
4473  }
4474 }
4475 
#ifdef V8_INTL_SUPPORT
// Returns the cached ICU object for |cache_type|, or nullptr if none is
// cached. NOTE(review): operator[] on the cache will default-insert an empty
// entry for a missing key if |icu_object_cache_| is a std::map-like type —
// confirm that is intended.
icu::UObject* Isolate::get_cached_icu_object(ICUObjectCacheType cache_type) {
  return icu_object_cache_[cache_type].get();
}

// Stores (or replaces) the cached ICU object for |cache_type|; ownership is
// shared with the caller via shared_ptr.
void Isolate::set_icu_object_in_cache(ICUObjectCacheType cache_type,
                                      std::shared_ptr<icu::UObject> obj) {
  icu_object_cache_[cache_type] = obj;
}

// Drops the cache entry for |cache_type|, releasing this isolate's reference.
void Isolate::clear_cached_icu_object(ICUObjectCacheType cache_type) {
  icu_object_cache_.erase(cache_type);
}
#endif  // V8_INTL_SUPPORT
4490 
// Returns true when the current stack position is within |gap| bytes of the
// real (uninterrupted) stack limit, i.e. JS execution is about to overflow.
bool StackLimitCheck::JsHasOverflowed(uintptr_t gap) const {
  StackGuard* stack_guard = isolate_->stack_guard();
#ifdef USE_SIMULATOR
  // The simulator uses a separate JS stack.
  Address jssp_address = Simulator::current(isolate_)->get_sp();
  uintptr_t jssp = static_cast<uintptr_t>(jssp_address);
  if (jssp - gap < stack_guard->real_jslimit()) return true;
#endif  // USE_SIMULATOR
  // Also check the C stack limit, which applies with or without a simulator.
  return GetCurrentStackPosition() - gap < stack_guard->real_climit();
}
4501 
// Saves the isolate's current context and pushes this scope onto the
// isolate's save-context chain; the destructor restores both.
SaveContext::SaveContext(Isolate* isolate)
    : isolate_(isolate), prev_(isolate->save_context()) {
  // A null current context is recorded as a null handle (restored as an
  // empty Context by the destructor).
  if (!isolate->context().is_null()) {
    context_ = Handle<Context>(isolate->context(), isolate);
  }
  isolate->set_save_context(this);

  // Snapshot the C entry frame pointer; used by IsBelowFrame().
  c_entry_fp_ = isolate->c_entry_fp(isolate->thread_local_top());
}
4511 
// Restores the context saved at construction (an empty Context when none was
// set) and unlinks this scope from the save-context chain.
SaveContext::~SaveContext() {
  isolate_->set_context(context_.is_null() ? Context() : *context_);
  isolate_->set_save_context(prev_);
}
4516 
// Returns whether this scope's saved C entry frame pointer lies below
// |frame|; a zero c_entry_fp_ (no C entry frame recorded) always counts as
// below.
bool SaveContext::IsBelowFrame(StandardFrame* frame) {
  return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
}
4520 
#ifdef DEBUG
// Debug-only helper: captures the current context at construction so it can
// later be compared against the isolate's context (presumably in the
// destructor declared in the header — verify there).
AssertNoContextChange::AssertNoContextChange(Isolate* isolate)
    : isolate_(isolate), context_(isolate->context(), isolate) {}
#endif  // DEBUG
4525 
// Decides whether interrupt |flag| should be intercepted by a scope on the
// current chain. Returns true when an enclosing kPostponeInterrupts scope
// absorbed the flag; false when the interrupt should be handled now.
bool InterruptsScope::Intercept(StackGuard::InterruptFlag flag) {
  InterruptsScope* last_postpone_scope = nullptr;
  // Walk outward from the innermost scope.
  for (InterruptsScope* current = this; current; current = current->prev_) {
    // We only consider scopes related to passed flag.
    if (!(current->intercept_mask_ & flag)) continue;
    if (current->mode_ == kRunInterrupts) {
      // If innermost scope is kRunInterrupts scope, prevent interrupt from
      // being intercepted.
      break;
    } else {
      DCHECK_EQ(current->mode_, kPostponeInterrupts);
      last_postpone_scope = current;
    }
  }
  // If there is no postpone scope for passed flag then we should not intercept.
  if (!last_postpone_scope) return false;
  // Record the flag on the outermost matching postpone scope.
  last_postpone_scope->intercepted_flags_ |= flag;
  return true;
}
4545 
4546 #undef TRACE_ISOLATE
4547 
4548 } // namespace internal
4549 } // namespace v8
static V8_WARN_UNUSED_RESULT MaybeLocal< Resolver > New(Local< Context > context)
Definition: api.cc:7240
STL namespace.
bool(* AccessCheckCallback)(Local< Context > accessing_context, Local< Object > accessed_object, Local< Value > data)
Definition: v8.h:5614
Definition: libplatform.h:13
Local< Promise > GetPromise()
Definition: api.cc:7250
StackTraceOptions
Definition: v8.h:1691