V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
heap-snapshot-generator.cc
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/profiler/heap-snapshot-generator.h"
6 
7 #include <utility>
8 
9 #include "src/api-inl.h"
10 #include "src/code-stubs.h"
11 #include "src/conversions.h"
12 #include "src/debug/debug.h"
13 #include "src/global-handles.h"
14 #include "src/layout-descriptor.h"
15 #include "src/objects-body-descriptors.h"
16 #include "src/objects-inl.h"
17 #include "src/objects/api-callbacks.h"
18 #include "src/objects/hash-table-inl.h"
19 #include "src/objects/js-array-buffer-inl.h"
20 #include "src/objects/js-array-inl.h"
21 #include "src/objects/js-collection-inl.h"
22 #include "src/objects/js-generator-inl.h"
23 #include "src/objects/js-promise-inl.h"
24 #include "src/objects/js-regexp-inl.h"
25 #include "src/objects/literal-objects-inl.h"
26 #include "src/objects/slots-inl.h"
27 #include "src/profiler/allocation-tracker.h"
28 #include "src/profiler/heap-profiler.h"
29 #include "src/profiler/heap-snapshot-generator-inl.h"
30 #include "src/prototype.h"
31 #include "src/transitions.h"
32 #include "src/visitors.h"
33 
34 namespace v8 {
35 namespace internal {
36 
37 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, HeapEntry* from,
38  HeapEntry* to)
39  : bit_field_(TypeField::encode(type) |
40  FromIndexField::encode(from->index())),
41  to_entry_(to),
42  name_(name) {
43  DCHECK(type == kContextVariable
44  || type == kProperty
45  || type == kInternal
46  || type == kShortcut
47  || type == kWeak);
48 }
49 
50 HeapGraphEdge::HeapGraphEdge(Type type, int index, HeapEntry* from,
51  HeapEntry* to)
52  : bit_field_(TypeField::encode(type) |
53  FromIndexField::encode(from->index())),
54  to_entry_(to),
55  index_(index) {
56  DCHECK(type == kElement || type == kHidden);
57 }
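// The two constructors above pack the edge type and the index of the source
// entry into the single bit_field_ word via the TypeField / FromIndexField
// bit fields declared in the header. A minimal standalone sketch of that
// packing technique, with illustrative (not V8's actual) field widths:
#include <cstdint>

namespace bitfield_sketch {
enum class EdgeType : uint32_t { kContextVariable, kElement, kProperty };

constexpr uint32_t kTypeBits = 3;  // assumed width of the type field
constexpr uint32_t kTypeMask = (1u << kTypeBits) - 1;

constexpr uint32_t Encode(EdgeType type, uint32_t from_index) {
  return static_cast<uint32_t>(type) | (from_index << kTypeBits);
}
constexpr EdgeType DecodeType(uint32_t bits) {
  return static_cast<EdgeType>(bits & kTypeMask);
}
constexpr uint32_t DecodeFromIndex(uint32_t bits) { return bits >> kTypeBits; }

// One word holds both values; the decode helpers recover them, which is what
// the real type() and from() accessors rely on.
static_assert(DecodeType(Encode(EdgeType::kProperty, 42)) == EdgeType::kProperty,
              "type survives the round trip");
static_assert(DecodeFromIndex(Encode(EdgeType::kProperty, 42)) == 42,
              "from-index survives the round trip");
}  // namespace bitfield_sketch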
58 
59 HeapEntry::HeapEntry(HeapSnapshot* snapshot, int index, Type type,
60  const char* name, SnapshotObjectId id, size_t self_size,
61  unsigned trace_node_id)
62  : type_(type),
63  index_(index),
64  children_count_(0),
65  self_size_(self_size),
66  snapshot_(snapshot),
67  name_(name),
68  id_(id),
69  trace_node_id_(trace_node_id) {
70  DCHECK_GE(index, 0);
71 }
72 
73 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
74  const char* name,
75  HeapEntry* entry) {
76  ++children_count_;
77  snapshot_->edges().emplace_back(type, name, this, entry);
78 }
79 
80 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
81  int index,
82  HeapEntry* entry) {
83  ++children_count_;
84  snapshot_->edges().emplace_back(type, index, this, entry);
85 }
86 
87 void HeapEntry::SetNamedAutoIndexReference(HeapGraphEdge::Type type,
88  const char* description,
89  HeapEntry* child,
90  StringsStorage* names) {
91  int index = children_count_ + 1;
92  const char* name = description
93  ? names->GetFormatted("%d / %s", index, description)
94  : names->GetName(index);
95  SetNamedReference(type, name, child);
96 }
97 
98 void HeapEntry::Print(
99  const char* prefix, const char* edge_name, int max_depth, int indent) {
100  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
101  base::OS::Print("%6" PRIuS " @%6u %*c %s%s: ", self_size(), id(), indent, ' ',
102  prefix, edge_name);
103  if (type() != kString) {
104  base::OS::Print("%s %.40s\n", TypeAsString(), name_);
105  } else {
106  base::OS::Print("\"");
107  const char* c = name_;
108  while (*c && (c - name_) <= 40) {
109  if (*c != '\n')
110  base::OS::Print("%c", *c);
111  else
112  base::OS::Print("\\n");
113  ++c;
114  }
115  base::OS::Print("\"\n");
116  }
117  if (--max_depth == 0) return;
118  for (auto i = children_begin(); i != children_end(); ++i) {
119  HeapGraphEdge& edge = **i;
120  const char* edge_prefix = "";
121  EmbeddedVector<char, 64> index;
122  const char* edge_name = index.start();
123  switch (edge.type()) {
124  case HeapGraphEdge::kContextVariable:
125  edge_prefix = "#";
126  edge_name = edge.name();
127  break;
128  case HeapGraphEdge::kElement:
129  SNPrintF(index, "%d", edge.index());
130  break;
131  case HeapGraphEdge::kInternal:
132  edge_prefix = "$";
133  edge_name = edge.name();
134  break;
135  case HeapGraphEdge::kProperty:
136  edge_name = edge.name();
137  break;
138  case HeapGraphEdge::kHidden:
139  edge_prefix = "$";
140  SNPrintF(index, "%d", edge.index());
141  break;
142  case HeapGraphEdge::kShortcut:
143  edge_prefix = "^";
144  edge_name = edge.name();
145  break;
146  case HeapGraphEdge::kWeak:
147  edge_prefix = "w";
148  edge_name = edge.name();
149  break;
150  default:
151  SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
152  }
153  edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
154  }
155 }
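// Legend for the debug output produced by Print() above: "#" prefixes context
// variables, "$" internal and hidden edges, "^" shortcuts, and "w" weak edges;
// element and hidden edges print their numeric index in place of a name.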
156 
157 const char* HeapEntry::TypeAsString() {
158  switch (type()) {
159  case kHidden: return "/hidden/";
160  case kObject: return "/object/";
161  case kClosure: return "/closure/";
162  case kString: return "/string/";
163  case kCode: return "/code/";
164  case kArray: return "/array/";
165  case kRegExp: return "/regexp/";
166  case kHeapNumber: return "/number/";
167  case kNative: return "/native/";
168  case kSynthetic: return "/synthetic/";
169  case kConsString: return "/concatenated string/";
170  case kSlicedString: return "/sliced string/";
171  case kSymbol: return "/symbol/";
172  case kBigInt:
173  return "/bigint/";
174  default: return "???";
175  }
176 }
177 
178 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler) : profiler_(profiler) {
179  // It is very important to keep objects that form a heap snapshot
180  // as small as possible. Check assumptions about data structure sizes.
181  STATIC_ASSERT((kPointerSize == 4 && sizeof(HeapGraphEdge) == 12) ||
182  (kPointerSize == 8 && sizeof(HeapGraphEdge) == 24));
183  STATIC_ASSERT((kPointerSize == 4 && sizeof(HeapEntry) == 28) ||
184  (kPointerSize == 8 && sizeof(HeapEntry) == 40));
185  memset(&gc_subroot_entries_, 0, sizeof(gc_subroot_entries_));
186 }
187 
188 void HeapSnapshot::Delete() {
189  profiler_->RemoveSnapshot(this);
190 }
191 
192 void HeapSnapshot::RememberLastJSObjectId() {
193  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
194 }
195 
196 void HeapSnapshot::AddSyntheticRootEntries() {
197  AddRootEntry();
198  AddGcRootsEntry();
199  SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
200  for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) {
201  AddGcSubrootEntry(static_cast<Root>(root), id);
202  id += HeapObjectsMap::kObjectIdStep;
203  }
204  DCHECK_EQ(HeapObjectsMap::kFirstAvailableObjectId, id);
205 }
206 
207 void HeapSnapshot::AddRootEntry() {
208  DCHECK_NULL(root_entry_);
209  DCHECK(entries_.empty()); // Root entry must be the first one.
210  root_entry_ = AddEntry(HeapEntry::kSynthetic, "",
211  HeapObjectsMap::kInternalRootObjectId, 0, 0);
212  DCHECK_EQ(1u, entries_.size());
213  DCHECK_EQ(root_entry_, &entries_.front());
214 }
215 
216 void HeapSnapshot::AddGcRootsEntry() {
217  DCHECK_NULL(gc_roots_entry_);
218  gc_roots_entry_ = AddEntry(HeapEntry::kSynthetic, "(GC roots)",
219  HeapObjectsMap::kGcRootsObjectId, 0, 0);
220 }
221 
222 void HeapSnapshot::AddGcSubrootEntry(Root root, SnapshotObjectId id) {
223  DCHECK_NULL(gc_subroot_entries_[static_cast<int>(root)]);
224  gc_subroot_entries_[static_cast<int>(root)] =
225  AddEntry(HeapEntry::kSynthetic, RootVisitor::RootName(root), id, 0, 0);
226 }
227 
228 void HeapSnapshot::AddLocation(HeapEntry* entry, int scriptId, int line,
229  int col) {
230  locations_.emplace_back(entry->index(), scriptId, line, col);
231 }
232 
233 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
234  const char* name,
235  SnapshotObjectId id,
236  size_t size,
237  unsigned trace_node_id) {
238  DCHECK(!is_complete());
239  entries_.emplace_back(this, static_cast<int>(entries_.size()), type, name, id,
240  size, trace_node_id);
241  return &entries_.back();
242 }
243 
244 void HeapSnapshot::FillChildren() {
245  DCHECK(children().empty());
246  int children_index = 0;
247  for (HeapEntry& entry : entries()) {
248  children_index = entry.set_children_index(children_index);
249  }
250  DCHECK_EQ(edges().size(), static_cast<size_t>(children_index));
251  children().resize(edges().size());
252  for (HeapGraphEdge& edge : edges()) {
253  edge.from()->add_child(&edge);
254  }
255 }
256 
257 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
258  if (entries_by_id_cache_.empty()) {
259  CHECK(is_complete());
260  entries_by_id_cache_.reserve(entries_.size());
261  for (HeapEntry& entry : entries_) {
262  entries_by_id_cache_.emplace(entry.id(), &entry);
263  }
264  }
265  auto it = entries_by_id_cache_.find(id);
266  return it != entries_by_id_cache_.end() ? it->second : nullptr;
267 }
268 
269 void HeapSnapshot::Print(int max_depth) {
270  root()->Print("", "", max_depth, 0);
271 }
272 
273 // We split IDs on evens for embedder objects (see
274 // HeapObjectsMap::GenerateId) and odds for native objects.
275 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
276 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
277  HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
278 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
279  HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
280 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
281  HeapObjectsMap::kGcRootsFirstSubrootId +
282  static_cast<int>(Root::kNumberOfRoots) * HeapObjectsMap::kObjectIdStep;
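// A worked example of the ID layout defined above, assuming kObjectIdStep == 2
// as declared in the header: the synthetic root gets ID 1, "(GC roots)" gets
// ID 3, the per-root subroots start at 5, and ordinary V8 heap objects follow
// from kFirstAvailableObjectId, always advancing by kObjectIdStep so their IDs
// stay odd. GenerateId() below shifts embedder hashes left by one bit, giving
// embedder objects even IDs, so the two ranges never collide.
#include <cstdint>

namespace id_sketch {
using SnapshotObjectId = uint32_t;
constexpr SnapshotObjectId kObjectIdStep = 2;  // assumed header value
constexpr SnapshotObjectId kInternalRootObjectId = 1;
constexpr SnapshotObjectId kGcRootsObjectId =
    kInternalRootObjectId + kObjectIdStep;  // 3
constexpr SnapshotObjectId kGcRootsFirstSubrootId =
    kGcRootsObjectId + kObjectIdStep;  // 5
constexpr uint32_t kNumberOfRoots = 10;  // illustrative; the real count is Root::kNumberOfRoots
constexpr SnapshotObjectId kFirstAvailableObjectId =
    kGcRootsFirstSubrootId + kNumberOfRoots * kObjectIdStep;  // 25
static_assert(kFirstAvailableObjectId % 2 == 1, "heap object IDs stay odd");
}  // namespace id_sketch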
283 
284 HeapObjectsMap::HeapObjectsMap(Heap* heap)
285  : next_id_(kFirstAvailableObjectId), heap_(heap) {
286  // The dummy element at zero index is needed as entries_map_ cannot hold
287  // an entry with zero value. Otherwise it's impossible to tell if
288  // LookupOrInsert has added a new item or just returned an existing one
289  // with the value of zero.
290  entries_.emplace_back(0, kNullAddress, 0, true);
291 }
292 
293 bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
294  DCHECK_NE(kNullAddress, to);
295  DCHECK_NE(kNullAddress, from);
296  if (from == to) return false;
297  void* from_value = entries_map_.Remove(reinterpret_cast<void*>(from),
298  ComputeAddressHash(from));
299  if (from_value == nullptr) {
300  // It may occur that some untracked object moves to an address X and there
301  // is a tracked object at that address. In this case we should remove the
302  // entry as we know that the object has died.
303  void* to_value = entries_map_.Remove(reinterpret_cast<void*>(to),
304  ComputeAddressHash(to));
305  if (to_value != nullptr) {
306  int to_entry_info_index =
307  static_cast<int>(reinterpret_cast<intptr_t>(to_value));
308  entries_.at(to_entry_info_index).addr = kNullAddress;
309  }
310  } else {
311  base::HashMap::Entry* to_entry = entries_map_.LookupOrInsert(
312  reinterpret_cast<void*>(to), ComputeAddressHash(to));
313  if (to_entry->value != nullptr) {
314  // We found an existing entry with the 'to' address that belongs to an
315  // old object. Without this step we would have two EntryInfos with the
316  // same value in the addr field. That is a problem because later, in
317  // RemoveDeadEntries, one of these entries would be removed together
318  // with the corresponding entries_map_ entry.
319  int to_entry_info_index =
320  static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
321  entries_.at(to_entry_info_index).addr = kNullAddress;
322  }
323  int from_entry_info_index =
324  static_cast<int>(reinterpret_cast<intptr_t>(from_value));
325  entries_.at(from_entry_info_index).addr = to;
326  // The size of an object can change during its life, so to keep the
327  // information about the object in entries_ consistent, we have to adjust
328  // its size when the object is migrated.
329  if (FLAG_heap_profiler_trace_objects) {
330  PrintF("Move object from %p to %p old size %6d new size %6d\n",
331  reinterpret_cast<void*>(from), reinterpret_cast<void*>(to),
332  entries_.at(from_entry_info_index).size, object_size);
333  }
334  entries_.at(from_entry_info_index).size = object_size;
335  to_entry->value = from_value;
336  }
337  return from_value != nullptr;
338 }
339 
340 
341 void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
342  FindOrAddEntry(addr, size, false);
343 }
344 
345 
346 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
347  base::HashMap::Entry* entry = entries_map_.Lookup(
348  reinterpret_cast<void*>(addr), ComputeAddressHash(addr));
349  if (entry == nullptr) return 0;
350  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
351  EntryInfo& entry_info = entries_.at(entry_index);
352  DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
353  return entry_info.id;
354 }
355 
356 
357 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
358  unsigned int size,
359  bool accessed) {
360  DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
361  base::HashMap::Entry* entry = entries_map_.LookupOrInsert(
362  reinterpret_cast<void*>(addr), ComputeAddressHash(addr));
363  if (entry->value != nullptr) {
364  int entry_index =
365  static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
366  EntryInfo& entry_info = entries_.at(entry_index);
367  entry_info.accessed = accessed;
368  if (FLAG_heap_profiler_trace_objects) {
369  PrintF("Update object size : %p with old size %d and new size %d\n",
370  reinterpret_cast<void*>(addr), entry_info.size, size);
371  }
372  entry_info.size = size;
373  return entry_info.id;
374  }
375  entry->value = reinterpret_cast<void*>(entries_.size());
376  SnapshotObjectId id = next_id_;
377  next_id_ += kObjectIdStep;
378  entries_.push_back(EntryInfo(id, addr, size, accessed));
379  DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
380  return id;
381 }
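// A standalone sketch of the bookkeeping implemented by FindEntry() and
// FindOrAddEntry() above, with std::unordered_map standing in for
// base::HashMap (which is why the real code needs the dummy EntryInfo at
// index 0 and the reinterpret_casts). All names here are illustrative.
#include <cstdint>
#include <unordered_map>
#include <vector>

namespace map_sketch {
using Address = uintptr_t;
using SnapshotObjectId = uint32_t;

struct EntryInfo {
  SnapshotObjectId id;
  Address addr;
  unsigned size;
};

class ObjectsMapSketch {
 public:
  // Returns the stable snapshot ID for |addr|, creating one if needed and
  // refreshing the recorded size, mirroring FindOrAddEntry().
  SnapshotObjectId FindOrAdd(Address addr, unsigned size) {
    auto it = index_by_addr_.find(addr);
    if (it != index_by_addr_.end()) {
      entries_[it->second].size = size;  // the object may have been resized
      return entries_[it->second].id;
    }
    index_by_addr_.emplace(addr, entries_.size());
    SnapshotObjectId id = next_id_;
    next_id_ += 2;  // mirrors kObjectIdStep: heap object IDs stay odd
    entries_.push_back({id, addr, size});
    return id;
  }

  // Returns 0 if |addr| has never been seen, mirroring FindEntry().
  SnapshotObjectId Find(Address addr) const {
    auto it = index_by_addr_.find(addr);
    return it == index_by_addr_.end() ? 0 : entries_[it->second].id;
  }

 private:
  SnapshotObjectId next_id_ = 5;  // illustrative first non-synthetic ID
  std::vector<EntryInfo> entries_;
  std::unordered_map<Address, size_t> index_by_addr_;
};
}  // namespace map_sketch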
382 
383 void HeapObjectsMap::StopHeapObjectsTracking() { time_intervals_.clear(); }
384 
385 void HeapObjectsMap::UpdateHeapObjectsMap() {
386  if (FLAG_heap_profiler_trace_objects) {
387  PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
388  entries_map_.occupancy());
389  }
390  heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
391  GarbageCollectionReason::kHeapProfiler);
392  HeapIterator iterator(heap_);
393  for (HeapObject* obj = iterator.next(); obj != nullptr;
394  obj = iterator.next()) {
395  FindOrAddEntry(obj->address(), obj->Size());
396  if (FLAG_heap_profiler_trace_objects) {
397  PrintF("Update object : %p %6d. Next address is %p\n",
398  reinterpret_cast<void*>(obj->address()), obj->Size(),
399  reinterpret_cast<void*>(obj->address() + obj->Size()));
400  }
401  }
402  RemoveDeadEntries();
403  if (FLAG_heap_profiler_trace_objects) {
404  PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
405  entries_map_.occupancy());
406  }
407 }
408 
409 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
410  int64_t* timestamp_us) {
411  UpdateHeapObjectsMap();
412  time_intervals_.emplace_back(next_id_);
413  int prefered_chunk_size = stream->GetChunkSize();
414  std::vector<v8::HeapStatsUpdate> stats_buffer;
415  DCHECK(!entries_.empty());
416  EntryInfo* entry_info = &entries_.front();
417  EntryInfo* end_entry_info = &entries_.back() + 1;
418  for (size_t time_interval_index = 0;
419  time_interval_index < time_intervals_.size(); ++time_interval_index) {
420  TimeInterval& time_interval = time_intervals_[time_interval_index];
421  SnapshotObjectId time_interval_id = time_interval.id;
422  uint32_t entries_size = 0;
423  EntryInfo* start_entry_info = entry_info;
424  while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
425  entries_size += entry_info->size;
426  ++entry_info;
427  }
428  uint32_t entries_count =
429  static_cast<uint32_t>(entry_info - start_entry_info);
430  if (time_interval.count != entries_count ||
431  time_interval.size != entries_size) {
432  stats_buffer.emplace_back(static_cast<uint32_t>(time_interval_index),
433  time_interval.count = entries_count,
434  time_interval.size = entries_size);
435  if (static_cast<int>(stats_buffer.size()) >= prefered_chunk_size) {
436  OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
437  &stats_buffer.front(), static_cast<int>(stats_buffer.size()));
438  if (result == OutputStream::kAbort) return last_assigned_id();
439  stats_buffer.clear();
440  }
441  }
442  }
443  DCHECK(entry_info == end_entry_info);
444  if (!stats_buffer.empty()) {
445  OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
446  &stats_buffer.front(), static_cast<int>(stats_buffer.size()));
447  if (result == OutputStream::kAbort) return last_assigned_id();
448  }
449  stream->EndOfStream();
450  if (timestamp_us) {
451  *timestamp_us =
452  (time_intervals_.back().timestamp - time_intervals_.front().timestamp)
453  .InMicroseconds();
454  }
455  return last_assigned_id();
456 }
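// The loop above implements the engine side of v8::HeapProfiler::GetHeapStats():
// per recorded time interval it pushes (index, count, size) updates to the
// embedder in chunks of roughly GetChunkSize() entries. A hedged sketch of the
// embedder side using the public v8-profiler.h interface (the class and helper
// names other than the v8:: ones are illustrative):
#include <cstdio>

#include "v8-profiler.h"

class StderrStatsStream : public v8::OutputStream {
 public:
  void EndOfStream() override {}
  WriteResult WriteAsciiChunk(char*, int) override { return kContinue; }
  WriteResult WriteHeapStatsChunk(v8::HeapStatsUpdate* data,
                                  int count) override {
    for (int i = 0; i < count; ++i) {
      std::fprintf(stderr, "time interval %u: %u objects, %u bytes\n",
                   data[i].index, data[i].count, data[i].size);
    }
    return kContinue;  // returning kAbort stops the stream early
  }
};

void DumpHeapStats(v8::Isolate* isolate) {
  v8::HeapProfiler* profiler = isolate->GetHeapProfiler();
  profiler->StartTrackingHeapObjects();  // begins recording time intervals
  // ... run some JavaScript ...
  StderrStatsStream stream;
  profiler->GetHeapStats(&stream);  // drives PushHeapObjectsStats() above
}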
457 
458 
459 void HeapObjectsMap::RemoveDeadEntries() {
460  DCHECK(entries_.size() > 0 && entries_.at(0).id == 0 &&
461  entries_.at(0).addr == kNullAddress);
462  size_t first_free_entry = 1;
463  for (size_t i = 1; i < entries_.size(); ++i) {
464  EntryInfo& entry_info = entries_.at(i);
465  if (entry_info.accessed) {
466  if (first_free_entry != i) {
467  entries_.at(first_free_entry) = entry_info;
468  }
469  entries_.at(first_free_entry).accessed = false;
470  base::HashMap::Entry* entry =
471  entries_map_.Lookup(reinterpret_cast<void*>(entry_info.addr),
472  ComputeAddressHash(entry_info.addr));
473  DCHECK(entry);
474  entry->value = reinterpret_cast<void*>(first_free_entry);
475  ++first_free_entry;
476  } else {
477  if (entry_info.addr) {
478  entries_map_.Remove(reinterpret_cast<void*>(entry_info.addr),
479  ComputeAddressHash(entry_info.addr));
480  }
481  }
482  }
483  entries_.erase(entries_.begin() + first_free_entry, entries_.end());
484 
485  DCHECK(static_cast<uint32_t>(entries_.size()) - 1 ==
486  entries_map_.occupancy());
487 }
488 
489 
490 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
491  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
492  const char* label = info->GetLabel();
493  id ^= StringHasher::HashSequentialString(label,
494  static_cast<int>(strlen(label)),
495  heap_->HashSeed());
496  intptr_t element_count = info->GetElementCount();
497  if (element_count != -1) {
498  id ^= ComputeUnseededHash(static_cast<uint32_t>(element_count));
499  }
500  return id << 1;
501 }
502 
503 V8HeapExplorer::V8HeapExplorer(HeapSnapshot* snapshot,
504  SnapshottingProgressReportingInterface* progress,
505  v8::HeapProfiler::ObjectNameResolver* resolver)
506  : heap_(snapshot->profiler()->heap_object_map()->heap()),
507  snapshot_(snapshot),
508  names_(snapshot_->profiler()->names()),
509  heap_object_map_(snapshot_->profiler()->heap_object_map()),
510  progress_(progress),
511  generator_(nullptr),
512  global_object_name_resolver_(resolver) {}
513 
514 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
515  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
516 }
517 
518 void V8HeapExplorer::ExtractLocation(HeapEntry* entry, HeapObject* object) {
519  if (object->IsJSFunction()) {
520  JSFunction* func = JSFunction::cast(object);
521  ExtractLocationForJSFunction(entry, func);
522 
523  } else if (object->IsJSGeneratorObject()) {
524  JSGeneratorObject* gen = JSGeneratorObject::cast(object);
525  ExtractLocationForJSFunction(entry, gen->function());
526 
527  } else if (object->IsJSObject()) {
528  JSObject* obj = JSObject::cast(object);
529  JSFunction* maybe_constructor = GetConstructor(obj);
530 
531  if (maybe_constructor)
532  ExtractLocationForJSFunction(entry, maybe_constructor);
533  }
534 }
535 
536 void V8HeapExplorer::ExtractLocationForJSFunction(HeapEntry* entry,
537  JSFunction* func) {
538  if (!func->shared()->script()->IsScript()) return;
539  Script* script = Script::cast(func->shared()->script());
540  int scriptId = script->id();
541  int start = func->shared()->StartPosition();
542  int line = script->GetLineNumber(start);
543  int col = script->GetColumnNumber(start);
544  snapshot_->AddLocation(entry, scriptId, line, col);
545 }
546 
547 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
548  if (object->IsJSFunction()) {
549  JSFunction* func = JSFunction::cast(object);
550  SharedFunctionInfo* shared = func->shared();
551  const char* name = names_->GetName(shared->Name());
552  return AddEntry(object, HeapEntry::kClosure, name);
553  } else if (object->IsJSBoundFunction()) {
554  return AddEntry(object, HeapEntry::kClosure, "native_bind");
555  } else if (object->IsJSRegExp()) {
556  JSRegExp* re = JSRegExp::cast(object);
557  return AddEntry(object,
558  HeapEntry::kRegExp,
559  names_->GetName(re->Pattern()));
560  } else if (object->IsJSObject()) {
561  const char* name = names_->GetName(
562  GetConstructorName(JSObject::cast(object)));
563  if (object->IsJSGlobalObject()) {
564  auto it = objects_tags_.find(JSGlobalObject::cast(object));
565  if (it != objects_tags_.end()) {
566  name = names_->GetFormatted("%s / %s", name, it->second);
567  }
568  }
569  return AddEntry(object, HeapEntry::kObject, name);
570  } else if (object->IsString()) {
571  String string = String::cast(object);
572  if (string->IsConsString()) {
573  return AddEntry(object, HeapEntry::kConsString, "(concatenated string)");
574  } else if (string->IsSlicedString()) {
575  return AddEntry(object, HeapEntry::kSlicedString, "(sliced string)");
576  } else {
577  return AddEntry(object, HeapEntry::kString,
578  names_->GetName(String::cast(object)));
579  }
580  } else if (object->IsSymbol()) {
581  if (Symbol::cast(object)->is_private())
582  return AddEntry(object, HeapEntry::kHidden, "private symbol");
583  else
584  return AddEntry(object, HeapEntry::kSymbol, "symbol");
585  } else if (object->IsBigInt()) {
586  return AddEntry(object, HeapEntry::kBigInt, "bigint");
587  } else if (object->IsCode()) {
588  return AddEntry(object, HeapEntry::kCode, "");
589  } else if (object->IsSharedFunctionInfo()) {
590  String name = SharedFunctionInfo::cast(object)->Name();
591  return AddEntry(object, HeapEntry::kCode, names_->GetName(name));
592  } else if (object->IsScript()) {
593  Object* name = Script::cast(object)->name();
594  return AddEntry(
595  object, HeapEntry::kCode,
596  name->IsString() ? names_->GetName(String::cast(name)) : "");
597  } else if (object->IsNativeContext()) {
598  return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
599  } else if (object->IsContext()) {
600  return AddEntry(object, HeapEntry::kObject, "system / Context");
601  } else if (object->IsFixedArray() || object->IsFixedDoubleArray() ||
602  object->IsByteArray()) {
603  return AddEntry(object, HeapEntry::kArray, "");
604  } else if (object->IsHeapNumber()) {
605  return AddEntry(object, HeapEntry::kHeapNumber, "number");
606  }
607  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
608 }
609 
610 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
611  HeapEntry::Type type,
612  const char* name) {
613  return AddEntry(object->address(), type, name, object->Size());
614 }
615 
616 HeapEntry* V8HeapExplorer::AddEntry(Address address,
617  HeapEntry::Type type,
618  const char* name,
619  size_t size) {
620  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
621  address, static_cast<unsigned int>(size));
622  unsigned trace_node_id = 0;
623  if (AllocationTracker* allocation_tracker =
624  snapshot_->profiler()->allocation_tracker()) {
625  trace_node_id =
626  allocation_tracker->address_to_trace()->GetTraceNodeId(address);
627  }
628  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
629 }
630 
631 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
632  switch (object->map()->instance_type()) {
633  case MAP_TYPE:
634  switch (Map::cast(object)->instance_type()) {
635 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
636  case instance_type: return "system / Map (" #Name ")";
637  STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
638 #undef MAKE_STRING_MAP_CASE
639  default: return "system / Map";
640  }
641  case CELL_TYPE: return "system / Cell";
642  case PROPERTY_CELL_TYPE: return "system / PropertyCell";
643  case FOREIGN_TYPE: return "system / Foreign";
644  case ODDBALL_TYPE: return "system / Oddball";
645  case ALLOCATION_SITE_TYPE:
646  return "system / AllocationSite";
647 #define MAKE_STRUCT_CASE(TYPE, Name, name) \
648  case TYPE: \
649  return "system / " #Name;
650  STRUCT_LIST(MAKE_STRUCT_CASE)
651 #undef MAKE_STRUCT_CASE
652  default: return "system";
653  }
654 }
655 
656 int V8HeapExplorer::EstimateObjectsCount() {
657  HeapIterator it(heap_, HeapIterator::kFilterUnreachable);
658  int objects_count = 0;
659  while (it.next()) ++objects_count;
660  return objects_count;
661 }
662 
663 class IndexedReferencesExtractor : public ObjectVisitor {
664  public:
665  IndexedReferencesExtractor(V8HeapExplorer* generator, HeapObject* parent_obj,
666  HeapEntry* parent)
667  : generator_(generator),
668  parent_obj_(parent_obj),
669  parent_start_(HeapObject::RawMaybeWeakField(parent_obj_, 0)),
670  parent_end_(
671  HeapObject::RawMaybeWeakField(parent_obj_, parent_obj_->Size())),
672  parent_(parent) {}
673  void VisitPointers(HeapObject* host, ObjectSlot start,
674  ObjectSlot end) override {
675  VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
676  }
677  void VisitPointers(HeapObject* host, MaybeObjectSlot start,
678  MaybeObjectSlot end) override {
679  int next_index = 0;
680  for (MaybeObjectSlot p = start; p < end; ++p) {
681  int index = -1;
682  // |p| could be outside of the object, e.g., while visiting RelocInfo of
683  // code objects.
684  if (parent_start_ <= p && p < parent_end_) {
685  index = static_cast<int>(p - parent_start_);
686  if (generator_->visited_fields_[index]) {
687  generator_->visited_fields_[index] = false;
688  continue;
689  }
690  }
691  HeapObject* heap_object;
692  if ((*p)->GetHeapObject(&heap_object)) {
693  // The last parameter {field_offset} is only used to check some
694  // well-known skipped references, so passing -1 * kPointerSize
695  // for out-of-object slots is fine.
696  generator_->SetHiddenReference(parent_obj_, parent_, next_index++,
697  heap_object, index * kPointerSize);
698  }
699  }
700  }
701 
702  private:
703  V8HeapExplorer* generator_;
704  HeapObject* parent_obj_;
705  MaybeObjectSlot parent_start_;
706  MaybeObjectSlot parent_end_;
707  HeapEntry* parent_;
708 };
709 
710 void V8HeapExplorer::ExtractReferences(HeapEntry* entry, HeapObject* obj) {
711  if (obj->IsJSGlobalProxy()) {
712  ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
713  } else if (obj->IsJSArrayBuffer()) {
714  ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
715  } else if (obj->IsJSObject()) {
716  if (obj->IsJSWeakSet()) {
717  ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
718  } else if (obj->IsJSWeakMap()) {
719  ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
720  } else if (obj->IsJSSet()) {
721  ExtractJSCollectionReferences(entry, JSSet::cast(obj));
722  } else if (obj->IsJSMap()) {
723  ExtractJSCollectionReferences(entry, JSMap::cast(obj));
724  } else if (obj->IsJSPromise()) {
725  ExtractJSPromiseReferences(entry, JSPromise::cast(obj));
726  } else if (obj->IsJSGeneratorObject()) {
727  ExtractJSGeneratorObjectReferences(entry, JSGeneratorObject::cast(obj));
728  }
729  ExtractJSObjectReferences(entry, JSObject::cast(obj));
730  } else if (obj->IsString()) {
731  ExtractStringReferences(entry, String::cast(obj));
732  } else if (obj->IsSymbol()) {
733  ExtractSymbolReferences(entry, Symbol::cast(obj));
734  } else if (obj->IsMap()) {
735  ExtractMapReferences(entry, Map::cast(obj));
736  } else if (obj->IsSharedFunctionInfo()) {
737  ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
738  } else if (obj->IsScript()) {
739  ExtractScriptReferences(entry, Script::cast(obj));
740  } else if (obj->IsAccessorInfo()) {
741  ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
742  } else if (obj->IsAccessorPair()) {
743  ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
744  } else if (obj->IsCode()) {
745  ExtractCodeReferences(entry, Code::cast(obj));
746  } else if (obj->IsCell()) {
747  ExtractCellReferences(entry, Cell::cast(obj));
748  } else if (obj->IsFeedbackCell()) {
749  ExtractFeedbackCellReferences(entry, FeedbackCell::cast(obj));
750  } else if (obj->IsPropertyCell()) {
751  ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
752  } else if (obj->IsAllocationSite()) {
753  ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
754  } else if (obj->IsArrayBoilerplateDescription()) {
755  ExtractArrayBoilerplateDescriptionReferences(
756  entry, ArrayBoilerplateDescription::cast(obj));
757  } else if (obj->IsFeedbackVector()) {
758  ExtractFeedbackVectorReferences(entry, FeedbackVector::cast(obj));
759  } else if (obj->IsDescriptorArray()) {
760  ExtractDescriptorArrayReferences(entry, DescriptorArray::cast(obj));
761  } else if (obj->IsWeakFixedArray()) {
762  ExtractWeakArrayReferences(WeakFixedArray::kHeaderSize, entry,
763  WeakFixedArray::cast(obj));
764  } else if (obj->IsWeakArrayList()) {
765  ExtractWeakArrayReferences(WeakArrayList::kHeaderSize, entry,
766  WeakArrayList::cast(obj));
767  } else if (obj->IsContext()) {
768  ExtractContextReferences(entry, Context::cast(obj));
769  } else if (obj->IsEphemeronHashTable()) {
770  ExtractEphemeronHashTableReferences(entry, EphemeronHashTable::cast(obj));
771  } else if (obj->IsFixedArray()) {
772  ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
773  }
774 }
775 
776 void V8HeapExplorer::ExtractJSGlobalProxyReferences(HeapEntry* entry,
777  JSGlobalProxy* proxy) {
778  SetInternalReference(entry, "native_context", proxy->native_context(),
779  JSGlobalProxy::kNativeContextOffset);
780 }
781 
782 void V8HeapExplorer::ExtractJSObjectReferences(HeapEntry* entry,
783  JSObject* js_obj) {
784  HeapObject* obj = js_obj;
785  ExtractPropertyReferences(js_obj, entry);
786  ExtractElementReferences(js_obj, entry);
787  ExtractInternalReferences(js_obj, entry);
788  PrototypeIterator iter(heap_->isolate(), js_obj);
789  ReadOnlyRoots roots(heap_);
790  SetPropertyReference(entry, roots.proto_string(), iter.GetCurrent());
791  if (obj->IsJSBoundFunction()) {
792  JSBoundFunction* js_fun = JSBoundFunction::cast(obj);
793  TagObject(js_fun->bound_arguments(), "(bound arguments)");
794  SetInternalReference(entry, "bindings", js_fun->bound_arguments(),
795  JSBoundFunction::kBoundArgumentsOffset);
796  SetInternalReference(entry, "bound_this", js_fun->bound_this(),
797  JSBoundFunction::kBoundThisOffset);
798  SetInternalReference(entry, "bound_function",
799  js_fun->bound_target_function(),
800  JSBoundFunction::kBoundTargetFunctionOffset);
801  FixedArray bindings = js_fun->bound_arguments();
802  for (int i = 0; i < bindings->length(); i++) {
803  const char* reference_name = names_->GetFormatted("bound_argument_%d", i);
804  SetNativeBindReference(entry, reference_name, bindings->get(i));
805  }
806  } else if (obj->IsJSFunction()) {
807  JSFunction* js_fun = JSFunction::cast(js_obj);
808  if (js_fun->has_prototype_slot()) {
809  Object* proto_or_map = js_fun->prototype_or_initial_map();
810  if (!proto_or_map->IsTheHole(heap_->isolate())) {
811  if (!proto_or_map->IsMap()) {
812  SetPropertyReference(entry, roots.prototype_string(), proto_or_map,
813  nullptr,
814  JSFunction::kPrototypeOrInitialMapOffset);
815  } else {
816  SetPropertyReference(entry, roots.prototype_string(),
817  js_fun->prototype());
818  SetInternalReference(entry, "initial_map", proto_or_map,
819  JSFunction::kPrototypeOrInitialMapOffset);
820  }
821  }
822  }
823  SharedFunctionInfo* shared_info = js_fun->shared();
824  TagObject(js_fun->feedback_cell(), "(function feedback cell)");
825  SetInternalReference(entry, "feedback_cell", js_fun->feedback_cell(),
826  JSFunction::kFeedbackCellOffset);
827  TagObject(shared_info, "(shared function info)");
828  SetInternalReference(entry, "shared", shared_info,
829  JSFunction::kSharedFunctionInfoOffset);
830  TagObject(js_fun->context(), "(context)");
831  SetInternalReference(entry, "context", js_fun->context(),
832  JSFunction::kContextOffset);
833  TagCodeObject(js_fun->code());
834  SetInternalReference(entry, "code", js_fun->code(),
835  JSFunction::kCodeOffset);
836  } else if (obj->IsJSGlobalObject()) {
837  JSGlobalObject* global_obj = JSGlobalObject::cast(obj);
838  SetInternalReference(entry, "native_context", global_obj->native_context(),
839  JSGlobalObject::kNativeContextOffset);
840  SetInternalReference(entry, "global_proxy", global_obj->global_proxy(),
841  JSGlobalObject::kGlobalProxyOffset);
842  STATIC_ASSERT(JSGlobalObject::kSize - JSObject::kHeaderSize ==
843  2 * kPointerSize);
844  } else if (obj->IsJSArrayBufferView()) {
845  JSArrayBufferView* view = JSArrayBufferView::cast(obj);
846  SetInternalReference(entry, "buffer", view->buffer(),
847  JSArrayBufferView::kBufferOffset);
848  }
849 
850  TagObject(js_obj->raw_properties_or_hash(), "(object properties)");
851  SetInternalReference(entry, "properties", js_obj->raw_properties_or_hash(),
852  JSObject::kPropertiesOrHashOffset);
853 
854  TagObject(js_obj->elements(), "(object elements)");
855  SetInternalReference(entry, "elements", js_obj->elements(),
856  JSObject::kElementsOffset);
857 }
858 
859 void V8HeapExplorer::ExtractStringReferences(HeapEntry* entry, String string) {
860  if (string->IsConsString()) {
861  ConsString cs = ConsString::cast(string);
862  SetInternalReference(entry, "first", cs->first(), ConsString::kFirstOffset);
863  SetInternalReference(entry, "second", cs->second(),
864  ConsString::kSecondOffset);
865  } else if (string->IsSlicedString()) {
866  SlicedString ss = SlicedString::cast(string);
867  SetInternalReference(entry, "parent", ss->parent(),
868  SlicedString::kParentOffset);
869  } else if (string->IsThinString()) {
870  ThinString ts = ThinString::cast(string);
871  SetInternalReference(entry, "actual", ts->actual(),
872  ThinString::kActualOffset);
873  }
874 }
875 
876 void V8HeapExplorer::ExtractSymbolReferences(HeapEntry* entry, Symbol symbol) {
877  SetInternalReference(entry, "name", symbol->name(), Symbol::kNameOffset);
878 }
879 
880 void V8HeapExplorer::ExtractJSCollectionReferences(HeapEntry* entry,
881  JSCollection* collection) {
882  SetInternalReference(entry, "table", collection->table(),
883  JSCollection::kTableOffset);
884 }
885 
886 void V8HeapExplorer::ExtractJSWeakCollectionReferences(HeapEntry* entry,
887  JSWeakCollection* obj) {
888  SetInternalReference(entry, "table", obj->table(),
889  JSWeakCollection::kTableOffset);
890 }
891 
892 void V8HeapExplorer::ExtractEphemeronHashTableReferences(
893  HeapEntry* entry, EphemeronHashTable table) {
894  for (int i = 0, capacity = table->Capacity(); i < capacity; ++i) {
895  int key_index = EphemeronHashTable::EntryToIndex(i) +
896  EphemeronHashTable::kEntryKeyIndex;
897  int value_index = EphemeronHashTable::EntryToValueIndex(i);
898  Object* key = table->get(key_index);
899  Object* value = table->get(value_index);
900  SetWeakReference(entry, key_index, key,
901  table->OffsetOfElementAt(key_index));
902  SetWeakReference(entry, value_index, value,
903  table->OffsetOfElementAt(value_index));
904  HeapEntry* key_entry = GetEntry(key);
905  HeapEntry* value_entry = GetEntry(value);
906  if (key_entry && value_entry) {
907  const char* edge_name =
908  names_->GetFormatted("key %s in WeakMap", key_entry->name());
909  key_entry->SetNamedAutoIndexReference(HeapGraphEdge::kInternal, edge_name,
910  value_entry, names_);
911  }
912  }
913 }
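// Note on the loop above: every WeakMap/WeakSet entry contributes two weak
// edges from the table (one for the key slot, one for the value slot) plus an
// internal "key <...> in WeakMap" edge from the key's entry to the value's
// entry, so in the resulting snapshot the value is reachable through the key,
// matching ephemeron semantics.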
914 
915 // These static arrays are used to prevent excessive code-size in
916 // ExtractContextReferences below, which would happen if we called
917 // SetInternalReference for every native context field in a macro.
918 static const struct {
919  int index;
920  const char* name;
921 } native_context_names[] = {
922 #define CONTEXT_FIELD_INDEX_NAME(index, _, name) {Context::index, #name},
923  NATIVE_CONTEXT_FIELDS(CONTEXT_FIELD_INDEX_NAME)
924 #undef CONTEXT_FIELD_INDEX_NAME
925 };
926 
927 void V8HeapExplorer::ExtractContextReferences(HeapEntry* entry,
928  Context context) {
929  if (!context->IsNativeContext() && context->is_declaration_context()) {
930  ScopeInfo scope_info = context->scope_info();
931  // Add context allocated locals.
932  int context_locals = scope_info->ContextLocalCount();
933  for (int i = 0; i < context_locals; ++i) {
934  String local_name = scope_info->ContextLocalName(i);
935  int idx = Context::MIN_CONTEXT_SLOTS + i;
936  SetContextReference(entry, local_name, context->get(idx),
937  Context::OffsetOfElementAt(idx));
938  }
939  if (scope_info->HasFunctionName()) {
940  String name = String::cast(scope_info->FunctionName());
941  int idx = scope_info->FunctionContextSlotIndex(name);
942  if (idx >= 0) {
943  SetContextReference(entry, name, context->get(idx),
944  Context::OffsetOfElementAt(idx));
945  }
946  }
947  }
948 
949  SetInternalReference(
950  entry, "scope_info", context->get(Context::SCOPE_INFO_INDEX),
951  FixedArray::OffsetOfElementAt(Context::SCOPE_INFO_INDEX));
952  SetInternalReference(entry, "previous", context->get(Context::PREVIOUS_INDEX),
953  FixedArray::OffsetOfElementAt(Context::PREVIOUS_INDEX));
954  SetInternalReference(entry, "extension",
955  context->get(Context::EXTENSION_INDEX),
956  FixedArray::OffsetOfElementAt(Context::EXTENSION_INDEX));
957  SetInternalReference(
958  entry, "native_context", context->get(Context::NATIVE_CONTEXT_INDEX),
959  FixedArray::OffsetOfElementAt(Context::NATIVE_CONTEXT_INDEX));
960 
961  if (context->IsNativeContext()) {
962  TagObject(context->normalized_map_cache(), "(context norm. map cache)");
963  TagObject(context->embedder_data(), "(context data)");
964  for (size_t i = 0; i < arraysize(native_context_names); i++) {
965  int index = native_context_names[i].index;
966  const char* name = native_context_names[i].name;
967  SetInternalReference(entry, name, context->get(index),
968  FixedArray::OffsetOfElementAt(index));
969  }
970 
971  SetWeakReference(
972  entry, "optimized_code_list",
973  context->get(Context::OPTIMIZED_CODE_LIST),
974  FixedArray::OffsetOfElementAt(Context::OPTIMIZED_CODE_LIST));
975  SetWeakReference(
976  entry, "deoptimized_code_list",
977  context->get(Context::DEOPTIMIZED_CODE_LIST),
978  FixedArray::OffsetOfElementAt(Context::DEOPTIMIZED_CODE_LIST));
979  STATIC_ASSERT(Context::OPTIMIZED_CODE_LIST == Context::FIRST_WEAK_SLOT);
980  STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
981  Context::NATIVE_CONTEXT_SLOTS);
982  STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 3 ==
983  Context::NATIVE_CONTEXT_SLOTS);
984  }
985 }
986 
987 void V8HeapExplorer::ExtractMapReferences(HeapEntry* entry, Map map) {
988  MaybeObject maybe_raw_transitions_or_prototype_info = map->raw_transitions();
989  HeapObject* raw_transitions_or_prototype_info;
990  if (maybe_raw_transitions_or_prototype_info->GetHeapObjectIfWeak(
991  &raw_transitions_or_prototype_info)) {
992  DCHECK(raw_transitions_or_prototype_info->IsMap());
993  SetWeakReference(entry, "transition", raw_transitions_or_prototype_info,
994  Map::kTransitionsOrPrototypeInfoOffset);
995  } else if (maybe_raw_transitions_or_prototype_info->GetHeapObjectIfStrong(
996  &raw_transitions_or_prototype_info)) {
997  if (raw_transitions_or_prototype_info->IsTransitionArray()) {
998  TransitionArray* transitions =
999  TransitionArray::cast(raw_transitions_or_prototype_info);
1000  if (map->CanTransition() && transitions->HasPrototypeTransitions()) {
1001  TagObject(transitions->GetPrototypeTransitions(),
1002  "(prototype transitions)");
1003  }
1004  TagObject(transitions, "(transition array)");
1005  SetInternalReference(entry, "transitions", transitions,
1006  Map::kTransitionsOrPrototypeInfoOffset);
1007  } else if (raw_transitions_or_prototype_info->IsTuple3() ||
1008  raw_transitions_or_prototype_info->IsFixedArray()) {
1009  TagObject(raw_transitions_or_prototype_info, "(transition)");
1010  SetInternalReference(entry, "transition",
1011  raw_transitions_or_prototype_info,
1012  Map::kTransitionsOrPrototypeInfoOffset);
1013  } else if (map->is_prototype_map()) {
1014  TagObject(raw_transitions_or_prototype_info, "prototype_info");
1015  SetInternalReference(entry, "prototype_info",
1016  raw_transitions_or_prototype_info,
1017  Map::kTransitionsOrPrototypeInfoOffset);
1018  }
1019  }
1020  DescriptorArray* descriptors = map->instance_descriptors();
1021  TagObject(descriptors, "(map descriptors)");
1022  SetInternalReference(entry, "descriptors", descriptors,
1023  Map::kDescriptorsOffset);
1024  SetInternalReference(entry, "prototype", map->prototype(),
1025  Map::kPrototypeOffset);
1026  if (FLAG_unbox_double_fields) {
1027  SetInternalReference(entry, "layout_descriptor", map->layout_descriptor(),
1028  Map::kLayoutDescriptorOffset);
1029  }
1030  Object* constructor_or_backpointer = map->constructor_or_backpointer();
1031  if (constructor_or_backpointer->IsMap()) {
1032  TagObject(constructor_or_backpointer, "(back pointer)");
1033  SetInternalReference(entry, "back_pointer", constructor_or_backpointer,
1034  Map::kConstructorOrBackPointerOffset);
1035  } else if (constructor_or_backpointer->IsFunctionTemplateInfo()) {
1036  TagObject(constructor_or_backpointer, "(constructor function data)");
1037  SetInternalReference(entry, "constructor_function_data",
1038  constructor_or_backpointer,
1039  Map::kConstructorOrBackPointerOffset);
1040  } else {
1041  SetInternalReference(entry, "constructor", constructor_or_backpointer,
1042  Map::kConstructorOrBackPointerOffset);
1043  }
1044  TagObject(map->dependent_code(), "(dependent code)");
1045  SetInternalReference(entry, "dependent_code", map->dependent_code(),
1046  Map::kDependentCodeOffset);
1047 }
1048 
1049 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1050  HeapEntry* entry, SharedFunctionInfo* shared) {
1051  String shared_name = shared->DebugName();
1052  const char* name = nullptr;
1053  if (shared_name != ReadOnlyRoots(heap_).empty_string()) {
1054  name = names_->GetName(shared_name);
1055  TagObject(shared->GetCode(), names_->GetFormatted("(code for %s)", name));
1056  } else {
1057  TagObject(shared->GetCode(),
1058  names_->GetFormatted(
1059  "(%s code)", Code::Kind2String(shared->GetCode()->kind())));
1060  }
1061 
1062  if (shared->name_or_scope_info()->IsScopeInfo()) {
1063  TagObject(shared->name_or_scope_info(), "(function scope info)");
1064  }
1065  SetInternalReference(entry, "name_or_scope_info",
1066  shared->name_or_scope_info(),
1067  SharedFunctionInfo::kNameOrScopeInfoOffset);
1068  SetInternalReference(entry, "script_or_debug_info",
1069  shared->script_or_debug_info(),
1070  SharedFunctionInfo::kScriptOrDebugInfoOffset);
1071  SetInternalReference(entry, "function_data", shared->function_data(),
1072  SharedFunctionInfo::kFunctionDataOffset);
1073  SetInternalReference(
1074  entry, "raw_outer_scope_info_or_feedback_metadata",
1075  shared->raw_outer_scope_info_or_feedback_metadata(),
1076  SharedFunctionInfo::kOuterScopeInfoOrFeedbackMetadataOffset);
1077 }
1078 
1079 void V8HeapExplorer::ExtractScriptReferences(HeapEntry* entry, Script* script) {
1080  SetInternalReference(entry, "source", script->source(),
1081  Script::kSourceOffset);
1082  SetInternalReference(entry, "name", script->name(), Script::kNameOffset);
1083  SetInternalReference(entry, "context_data", script->context_data(),
1084  Script::kContextOffset);
1085  TagObject(script->line_ends(), "(script line ends)");
1086  SetInternalReference(entry, "line_ends", script->line_ends(),
1087  Script::kLineEndsOffset);
1088 }
1089 
1090 void V8HeapExplorer::ExtractAccessorInfoReferences(
1091  HeapEntry* entry, AccessorInfo* accessor_info) {
1092  SetInternalReference(entry, "name", accessor_info->name(),
1093  AccessorInfo::kNameOffset);
1094  SetInternalReference(entry, "expected_receiver_type",
1095  accessor_info->expected_receiver_type(),
1096  AccessorInfo::kExpectedReceiverTypeOffset);
1097  SetInternalReference(entry, "getter", accessor_info->getter(),
1098  AccessorInfo::kGetterOffset);
1099  SetInternalReference(entry, "setter", accessor_info->setter(),
1100  AccessorInfo::kSetterOffset);
1101  SetInternalReference(entry, "data", accessor_info->data(),
1102  AccessorInfo::kDataOffset);
1103 }
1104 
1105 void V8HeapExplorer::ExtractAccessorPairReferences(HeapEntry* entry,
1106  AccessorPair* accessors) {
1107  SetInternalReference(entry, "getter", accessors->getter(),
1108  AccessorPair::kGetterOffset);
1109  SetInternalReference(entry, "setter", accessors->setter(),
1110  AccessorPair::kSetterOffset);
1111 }
1112 
1113 void V8HeapExplorer::TagBuiltinCodeObject(Code code, const char* name) {
1114  TagObject(code, names_->GetFormatted("(%s builtin)", name));
1115 }
1116 
1117 void V8HeapExplorer::TagCodeObject(Code code) {
1118  if (code->kind() == Code::STUB) {
1119  TagObject(code, names_->GetFormatted(
1120  "(%s code)",
1121  CodeStub::MajorName(CodeStub::GetMajorKey(code))));
1122  }
1123 }
1124 
1125 void V8HeapExplorer::ExtractCodeReferences(HeapEntry* entry, Code code) {
1126  TagCodeObject(code);
1127  TagObject(code->relocation_info(), "(code relocation info)");
1128  SetInternalReference(entry, "relocation_info", code->relocation_info(),
1129  Code::kRelocationInfoOffset);
1130  TagObject(code->deoptimization_data(), "(code deopt data)");
1131  SetInternalReference(entry, "deoptimization_data",
1132  code->deoptimization_data(),
1133  Code::kDeoptimizationDataOffset);
1134  TagObject(code->source_position_table(), "(source position table)");
1135  SetInternalReference(entry, "source_position_table",
1136  code->source_position_table(),
1137  Code::kSourcePositionTableOffset);
1138 }
1139 
1140 void V8HeapExplorer::ExtractCellReferences(HeapEntry* entry, Cell* cell) {
1141  SetInternalReference(entry, "value", cell->value(), Cell::kValueOffset);
1142 }
1143 
1144 void V8HeapExplorer::ExtractFeedbackCellReferences(
1145  HeapEntry* entry, FeedbackCell* feedback_cell) {
1146  TagObject(feedback_cell, "(feedback cell)");
1147  SetInternalReference(entry, "value", feedback_cell->value(),
1148  FeedbackCell::kValueOffset);
1149 }
1150 
1151 void V8HeapExplorer::ExtractPropertyCellReferences(HeapEntry* entry,
1152  PropertyCell* cell) {
1153  SetInternalReference(entry, "value", cell->value(),
1154  PropertyCell::kValueOffset);
1155  TagObject(cell->dependent_code(), "(dependent code)");
1156  SetInternalReference(entry, "dependent_code", cell->dependent_code(),
1157  PropertyCell::kDependentCodeOffset);
1158 }
1159 
1160 void V8HeapExplorer::ExtractAllocationSiteReferences(HeapEntry* entry,
1161  AllocationSite* site) {
1162  SetInternalReference(entry, "transition_info",
1163  site->transition_info_or_boilerplate(),
1164  AllocationSite::kTransitionInfoOrBoilerplateOffset);
1165  SetInternalReference(entry, "nested_site", site->nested_site(),
1166  AllocationSite::kNestedSiteOffset);
1167  TagObject(site->dependent_code(), "(dependent code)");
1168  SetInternalReference(entry, "dependent_code", site->dependent_code(),
1169  AllocationSite::kDependentCodeOffset);
1170 }
1171 
1172 void V8HeapExplorer::ExtractArrayBoilerplateDescriptionReferences(
1173  HeapEntry* entry, ArrayBoilerplateDescription* value) {
1174  SetInternalReference(entry, "constant_elements", value->constant_elements(),
1175  ArrayBoilerplateDescription::kConstantElementsOffset);
1176 }
1177 
1178 class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1179  public:
1180  JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1181  : size_(size)
1182  , explorer_(explorer) {
1183  }
1184  HeapEntry* AllocateEntry(HeapThing ptr) override {
1185  return explorer_->AddEntry(reinterpret_cast<Address>(ptr),
1186  HeapEntry::kNative, "system / JSArrayBufferData",
1187  size_);
1188  }
1189  private:
1190  size_t size_;
1191  V8HeapExplorer* explorer_;
1192 };
1193 
1194 void V8HeapExplorer::ExtractJSArrayBufferReferences(HeapEntry* entry,
1195  JSArrayBuffer* buffer) {
1196  // Set up a reference to a native memory backing_store object.
1197  if (!buffer->backing_store())
1198  return;
1199  size_t data_size = buffer->byte_length();
1200  JSArrayBufferDataEntryAllocator allocator(data_size, this);
1201  HeapEntry* data_entry =
1202  generator_->FindOrAddEntry(buffer->backing_store(), &allocator);
1203  entry->SetNamedReference(HeapGraphEdge::kInternal, "backing_store",
1204  data_entry);
1205 }
1206 
1207 void V8HeapExplorer::ExtractJSPromiseReferences(HeapEntry* entry,
1208  JSPromise* promise) {
1209  SetInternalReference(entry, "reactions_or_result",
1210  promise->reactions_or_result(),
1211  JSPromise::kReactionsOrResultOffset);
1212 }
1213 
1214 void V8HeapExplorer::ExtractJSGeneratorObjectReferences(
1215  HeapEntry* entry, JSGeneratorObject* generator) {
1216  SetInternalReference(entry, "function", generator->function(),
1217  JSGeneratorObject::kFunctionOffset);
1218  SetInternalReference(entry, "context", generator->context(),
1219  JSGeneratorObject::kContextOffset);
1220  SetInternalReference(entry, "receiver", generator->receiver(),
1221  JSGeneratorObject::kReceiverOffset);
1222  SetInternalReference(entry, "parameters_and_registers",
1223  generator->parameters_and_registers(),
1224  JSGeneratorObject::kParametersAndRegistersOffset);
1225 }
1226 
1227 void V8HeapExplorer::ExtractFixedArrayReferences(HeapEntry* entry,
1228  FixedArray array) {
1229  for (int i = 0, l = array->length(); i < l; ++i) {
1230  DCHECK(!HasWeakHeapObjectTag(array->get(i)));
1231  SetInternalReference(entry, i, array->get(i), array->OffsetOfElementAt(i));
1232  }
1233 }
1234 
1235 void V8HeapExplorer::ExtractFeedbackVectorReferences(
1236  HeapEntry* entry, FeedbackVector* feedback_vector) {
1237  MaybeObject code = feedback_vector->optimized_code_weak_or_smi();
1238  HeapObject* code_heap_object;
1239  if (code->GetHeapObjectIfWeak(&code_heap_object)) {
1240  SetWeakReference(entry, "optimized code", code_heap_object,
1241  FeedbackVector::kOptimizedCodeOffset);
1242  }
1243 }
1244 
1245 void V8HeapExplorer::ExtractDescriptorArrayReferences(HeapEntry* entry,
1246  DescriptorArray* array) {
1247  SetInternalReference(entry, "enum_cache", array->enum_cache(),
1248  DescriptorArray::kEnumCacheOffset);
1249  MaybeObjectSlot start = MaybeObjectSlot(array->GetDescriptorSlot(0));
1250  MaybeObjectSlot end = MaybeObjectSlot(
1251  array->GetDescriptorSlot(array->number_of_all_descriptors()));
1252  for (int i = 0; start + i < end; ++i) {
1253  MaybeObjectSlot slot = start + i;
1254  int offset = static_cast<int>(slot.address() - array->address());
1255  MaybeObject object = *slot;
1256  HeapObject* heap_object;
1257  if (object->GetHeapObjectIfWeak(&heap_object)) {
1258  SetWeakReference(entry, i, heap_object, offset);
1259  } else if (object->GetHeapObjectIfStrong(&heap_object)) {
1260  SetInternalReference(entry, i, heap_object, offset);
1261  }
1262  }
1263 }
1264 
1265 template <typename T>
1266 void V8HeapExplorer::ExtractWeakArrayReferences(int header_size,
1267  HeapEntry* entry, T* array) {
1268  for (int i = 0; i < array->length(); ++i) {
1269  MaybeObject object = array->Get(i);
1270  HeapObject* heap_object;
1271  if (object->GetHeapObjectIfWeak(&heap_object)) {
1272  SetWeakReference(entry, i, heap_object, header_size + i * kPointerSize);
1273  } else if (object->GetHeapObjectIfStrong(&heap_object)) {
1274  SetInternalReference(entry, i, heap_object,
1275  header_size + i * kPointerSize);
1276  }
1277  }
1278 }
1279 
1280 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj,
1281  HeapEntry* entry) {
1282  Isolate* isolate = js_obj->GetIsolate();
1283  if (js_obj->HasFastProperties()) {
1284  DescriptorArray* descs = js_obj->map()->instance_descriptors();
1285  int real_size = js_obj->map()->NumberOfOwnDescriptors();
1286  for (int i = 0; i < real_size; i++) {
1287  PropertyDetails details = descs->GetDetails(i);
1288  switch (details.location()) {
1289  case kField: {
1290  Representation r = details.representation();
1291  if (r.IsSmi() || r.IsDouble()) break;
1292 
1293  Name k = descs->GetKey(i);
1294  FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i);
1295  Object* value = js_obj->RawFastPropertyAt(field_index);
1296  int field_offset =
1297  field_index.is_inobject() ? field_index.offset() : -1;
1298 
1299  SetDataOrAccessorPropertyReference(details.kind(), entry, k, value,
1300  nullptr, field_offset);
1301  break;
1302  }
1303  case kDescriptor:
1304  SetDataOrAccessorPropertyReference(details.kind(), entry,
1305  descs->GetKey(i),
1306  descs->GetStrongValue(i));
1307  break;
1308  }
1309  }
1310  } else if (js_obj->IsJSGlobalObject()) {
1311  // We assume that global objects can only have slow properties.
1312  GlobalDictionary dictionary =
1313  JSGlobalObject::cast(js_obj)->global_dictionary();
1314  int length = dictionary->Capacity();
1315  ReadOnlyRoots roots(isolate);
1316  for (int i = 0; i < length; ++i) {
1317  if (!dictionary->IsKey(roots, dictionary->KeyAt(i))) continue;
1318  PropertyCell* cell = dictionary->CellAt(i);
1319  Name name = cell->name();
1320  Object* value = cell->value();
1321  PropertyDetails details = cell->property_details();
1322  SetDataOrAccessorPropertyReference(details.kind(), entry, name, value);
1323  }
1324  } else {
1325  NameDictionary dictionary = js_obj->property_dictionary();
1326  int length = dictionary->Capacity();
1327  ReadOnlyRoots roots(isolate);
1328  for (int i = 0; i < length; ++i) {
1329  Object* k = dictionary->KeyAt(i);
1330  if (!dictionary->IsKey(roots, k)) continue;
1331  Object* value = dictionary->ValueAt(i);
1332  PropertyDetails details = dictionary->DetailsAt(i);
1333  SetDataOrAccessorPropertyReference(details.kind(), entry, Name::cast(k),
1334  value);
1335  }
1336  }
1337 }
1338 
1339 void V8HeapExplorer::ExtractAccessorPairProperty(HeapEntry* entry, Name key,
1340  Object* callback_obj,
1341  int field_offset) {
1342  if (!callback_obj->IsAccessorPair()) return;
1343  AccessorPair* accessors = AccessorPair::cast(callback_obj);
1344  SetPropertyReference(entry, key, accessors, nullptr, field_offset);
1345  Object* getter = accessors->getter();
1346  if (!getter->IsOddball()) {
1347  SetPropertyReference(entry, key, getter, "get %s");
1348  }
1349  Object* setter = accessors->setter();
1350  if (!setter->IsOddball()) {
1351  SetPropertyReference(entry, key, setter, "set %s");
1352  }
1353 }
1354 
1355 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj,
1356  HeapEntry* entry) {
1357  ReadOnlyRoots roots = js_obj->GetReadOnlyRoots();
1358  if (js_obj->HasObjectElements()) {
1359  FixedArray elements = FixedArray::cast(js_obj->elements());
1360  int length = js_obj->IsJSArray()
1361  ? Smi::ToInt(JSArray::cast(js_obj)->length())
1362  : elements->length();
1363  for (int i = 0; i < length; ++i) {
1364  if (!elements->get(i)->IsTheHole(roots)) {
1365  SetElementReference(entry, i, elements->get(i));
1366  }
1367  }
1368  } else if (js_obj->HasDictionaryElements()) {
1369  NumberDictionary dictionary = js_obj->element_dictionary();
1370  int length = dictionary->Capacity();
1371  for (int i = 0; i < length; ++i) {
1372  Object* k = dictionary->KeyAt(i);
1373  if (!dictionary->IsKey(roots, k)) continue;
1374  DCHECK(k->IsNumber());
1375  uint32_t index = static_cast<uint32_t>(k->Number());
1376  SetElementReference(entry, index, dictionary->ValueAt(i));
1377  }
1378  }
1379 }
1380 
1381 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj,
1382  HeapEntry* entry) {
1383  int length = js_obj->GetEmbedderFieldCount();
1384  for (int i = 0; i < length; ++i) {
1385  Object* o = js_obj->GetEmbedderField(i);
1386  SetInternalReference(entry, i, o, js_obj->GetEmbedderFieldOffset(i));
1387  }
1388 }
1389 
1390 JSFunction* V8HeapExplorer::GetConstructor(JSReceiver* receiver) {
1391  Isolate* isolate = receiver->GetIsolate();
1392  DisallowHeapAllocation no_gc;
1393  HandleScope scope(isolate);
1394  MaybeHandle<JSFunction> maybe_constructor =
1395  JSReceiver::GetConstructor(handle(receiver, isolate));
1396 
1397  if (maybe_constructor.is_null()) return nullptr;
1398 
1399  return *maybe_constructor.ToHandleChecked();
1400 }
1401 
1402 String V8HeapExplorer::GetConstructorName(JSObject* object) {
1403  Isolate* isolate = object->GetIsolate();
1404  if (object->IsJSFunction()) return ReadOnlyRoots(isolate).closure_string();
1405  DisallowHeapAllocation no_gc;
1406  HandleScope scope(isolate);
1407  return *JSReceiver::GetConstructorName(handle(object, isolate));
1408 }
1409 
1410 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1411  return obj->IsHeapObject() ? generator_->FindOrAddEntry(obj, this) : nullptr;
1412 }
1413 
1414 class RootsReferencesExtractor : public RootVisitor {
1415  public:
1416  explicit RootsReferencesExtractor(V8HeapExplorer* explorer)
1417  : explorer_(explorer), visiting_weak_roots_(false) {}
1418 
1419  void SetVisitingWeakRoots() { visiting_weak_roots_ = true; }
1420 
1421  void VisitRootPointer(Root root, const char* description,
1422  ObjectSlot object) override {
1423  if (root == Root::kBuiltins) {
1424  explorer_->TagBuiltinCodeObject(Code::cast(*object), description);
1425  }
1426  explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
1427  *object);
1428  }
1429 
1430  void VisitRootPointers(Root root, const char* description, ObjectSlot start,
1431  ObjectSlot end) override {
1432  for (ObjectSlot p = start; p < end; ++p)
1433  VisitRootPointer(root, description, p);
1434  }
1435 
1436  private:
1437  V8HeapExplorer* explorer_;
1438  bool visiting_weak_roots_;
1439 };
1440 
1441 bool V8HeapExplorer::IterateAndExtractReferences(
1442  HeapSnapshotGenerator* generator) {
1443  generator_ = generator;
1444 
1445  // Create references to the synthetic roots.
1446  SetRootGcRootsReference();
1447  for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) {
1448  SetGcRootsReference(static_cast<Root>(root));
1449  }
1450 
1451  // Make sure builtin code objects get their builtin tags
1452  // first. Otherwise a particular JSFunction object could set
1453  // its custom name to a generic builtin.
1454  RootsReferencesExtractor extractor(this);
1455  ReadOnlyRoots(heap_).Iterate(&extractor);
1456  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1457  extractor.SetVisitingWeakRoots();
1458  heap_->IterateWeakGlobalHandles(&extractor);
1459 
1460  bool interrupted = false;
1461 
1462  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
1463  // Heap iteration with filtering must be finished in any case.
1464  for (HeapObject *obj = iterator.next(); obj != nullptr;
1465  obj = iterator.next(), progress_->ProgressStep()) {
1466  if (interrupted) continue;
1467 
1468  size_t max_pointer = obj->Size() / kPointerSize;
1469  if (max_pointer > visited_fields_.size()) {
1470  // Clear the current bits.
1471  std::vector<bool>().swap(visited_fields_);
1472  // Reallocate to right size.
1473  visited_fields_.resize(max_pointer, false);
1474  }
1475 
1476  HeapEntry* entry = GetEntry(obj);
1477  ExtractReferences(entry, obj);
1478  SetInternalReference(entry, "map", obj->map(), HeapObject::kMapOffset);
1479  // Extract unvisited fields as hidden references and restore tags
1480  // of visited fields.
1481  IndexedReferencesExtractor refs_extractor(this, obj, entry);
1482  obj->Iterate(&refs_extractor);
1483 
1484  // Ensure visited_fields_ doesn't leak to the next object.
1485  for (size_t i = 0; i < max_pointer; ++i) {
1486  DCHECK(!visited_fields_[i]);
1487  }
1488 
1489  // Extract location for specific object types
1490  ExtractLocation(entry, obj);
1491 
1492  if (!progress_->ProgressReport(false)) interrupted = true;
1493  }
1494 
1495  generator_ = nullptr;
1496  return interrupted ? false : progress_->ProgressReport(true);
1497 }
1498 
1499 
1500 bool V8HeapExplorer::IsEssentialObject(Object* object) {
1501  ReadOnlyRoots roots(heap_);
1502  return object->IsHeapObject() && !object->IsOddball() &&
1503  object != roots.empty_byte_array() &&
1504  object != roots.empty_fixed_array() &&
1505  object != roots.empty_weak_fixed_array() &&
1506  object != roots.empty_descriptor_array() &&
1507  object != roots.fixed_array_map() && object != roots.cell_map() &&
1508  object != roots.global_property_cell_map() &&
1509  object != roots.shared_function_info_map() &&
1510  object != roots.free_space_map() &&
1511  object != roots.one_pointer_filler_map() &&
1512  object != roots.two_pointer_filler_map();
1513 }
1514 
1515 bool V8HeapExplorer::IsEssentialHiddenReference(Object* parent,
1516  int field_offset) {
1517  if (parent->IsAllocationSite() &&
1518  field_offset == AllocationSite::kWeakNextOffset)
1519  return false;
1520  if (parent->IsCodeDataContainer() &&
1521  field_offset == CodeDataContainer::kNextCodeLinkOffset)
1522  return false;
1523  if (parent->IsContext() &&
1524  field_offset == Context::OffsetOfElementAt(Context::NEXT_CONTEXT_LINK))
1525  return false;
1526  return true;
1527 }
1528 
1529 void V8HeapExplorer::SetContextReference(HeapEntry* parent_entry,
1530  String reference_name,
1531  Object* child_obj, int field_offset) {
1532  HeapEntry* child_entry = GetEntry(child_obj);
1533  if (child_entry == nullptr) return;
1534  parent_entry->SetNamedReference(HeapGraphEdge::kContextVariable,
1535  names_->GetName(reference_name), child_entry);
1536  MarkVisitedField(field_offset);
1537 }
1538 
1539 void V8HeapExplorer::MarkVisitedField(int offset) {
1540  if (offset < 0) return;
1541  int index = offset / kPointerSize;
1542  DCHECK(!visited_fields_[index]);
1543  visited_fields_[index] = true;
1544 }
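// Note: visited_fields_ is indexed by word offset (offset / kPointerSize), so
// each flag marks one tagged-pointer-sized field of the object currently being
// iterated; IterateAndExtractReferences() checks (via DCHECK) that all flags
// are cleared again before it moves on to the next object.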
1545 
1546 void V8HeapExplorer::SetNativeBindReference(HeapEntry* parent_entry,
1547  const char* reference_name,
1548  Object* child_obj) {
1549  HeapEntry* child_entry = GetEntry(child_obj);
1550  if (child_entry == nullptr) return;
1551  parent_entry->SetNamedReference(HeapGraphEdge::kShortcut, reference_name,
1552  child_entry);
1553 }
1554 
1555 void V8HeapExplorer::SetElementReference(HeapEntry* parent_entry, int index,
1556  Object* child_obj) {
1557  HeapEntry* child_entry = GetEntry(child_obj);
1558  if (child_entry == nullptr) return;
1559  parent_entry->SetIndexedReference(HeapGraphEdge::kElement, index,
1560  child_entry);
1561 }
1562 
1563 void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry,
1564  const char* reference_name,
1565  Object* child_obj, int field_offset) {
1566  HeapEntry* child_entry = GetEntry(child_obj);
1567  if (child_entry == nullptr) return;
1568  if (IsEssentialObject(child_obj)) {
1569  parent_entry->SetNamedReference(HeapGraphEdge::kInternal, reference_name,
1570  child_entry);
1571  }
1572  MarkVisitedField(field_offset);
1573 }
1574 
1575 void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry, int index,
1576  Object* child_obj, int field_offset) {
1577  HeapEntry* child_entry = GetEntry(child_obj);
1578  if (child_entry == nullptr) return;
1579  if (IsEssentialObject(child_obj)) {
1580  parent_entry->SetNamedReference(HeapGraphEdge::kInternal,
1581  names_->GetName(index), child_entry);
1582  }
1583  MarkVisitedField(field_offset);
1584 }
1585 
1586 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1587  HeapEntry* parent_entry, int index,
1588  Object* child_obj, int field_offset) {
1589  DCHECK_EQ(parent_entry, GetEntry(parent_obj));
1590  HeapEntry* child_entry = GetEntry(child_obj);
1591  if (child_entry != nullptr && IsEssentialObject(child_obj) &&
1592  IsEssentialHiddenReference(parent_obj, field_offset)) {
1593  parent_entry->SetIndexedReference(HeapGraphEdge::kHidden, index,
1594  child_entry);
1595  }
1596 }
1597 
1598 void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry,
1599  const char* reference_name,
1600  Object* child_obj, int field_offset) {
1601  HeapEntry* child_entry = GetEntry(child_obj);
1602  if (child_entry == nullptr) return;
1603  if (IsEssentialObject(child_obj)) {
1604  parent_entry->SetNamedReference(HeapGraphEdge::kWeak, reference_name,
1605  child_entry);
1606  }
1607  MarkVisitedField(field_offset);
1608 }
1609 
1610 void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry, int index,
1611  Object* child_obj, int field_offset) {
1612  HeapEntry* child_entry = GetEntry(child_obj);
1613  if (child_entry == nullptr) return;
1614  if (IsEssentialObject(child_obj)) {
1615  parent_entry->SetNamedReference(
1616  HeapGraphEdge::kWeak, names_->GetFormatted("%d", index), child_entry);
1617  }
1618  MarkVisitedField(field_offset);
1619 }
1620 
1621 void V8HeapExplorer::SetDataOrAccessorPropertyReference(
1622  PropertyKind kind, HeapEntry* parent_entry, Name reference_name,
1623  Object* child_obj, const char* name_format_string, int field_offset) {
1624  if (kind == kAccessor) {
1625  ExtractAccessorPairProperty(parent_entry, reference_name, child_obj,
1626  field_offset);
1627  } else {
1628  SetPropertyReference(parent_entry, reference_name, child_obj,
1629  name_format_string, field_offset);
1630  }
1631 }
1632 
1633 void V8HeapExplorer::SetPropertyReference(HeapEntry* parent_entry,
1634  Name reference_name,
1635  Object* child_obj,
1636  const char* name_format_string,
1637  int field_offset) {
1638  HeapEntry* child_entry = GetEntry(child_obj);
1639  if (child_entry == nullptr) return;
1640  HeapGraphEdge::Type type =
1641  reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
1642  ? HeapGraphEdge::kProperty
1643  : HeapGraphEdge::kInternal;
1644  const char* name =
1645  name_format_string != nullptr && reference_name->IsString()
1646  ? names_->GetFormatted(
1647  name_format_string,
1648  String::cast(reference_name)
1649  ->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)
1650  .get())
1651  : names_->GetName(reference_name);
1652 
1653  parent_entry->SetNamedReference(type, name, child_entry);
1654  MarkVisitedField(field_offset);
1655 }
1656 
1657 void V8HeapExplorer::SetRootGcRootsReference() {
1658  snapshot_->root()->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
1659  snapshot_->gc_roots());
1660 }
1661 
1662 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
1663  HeapEntry* child_entry = GetEntry(child_obj);
1664  DCHECK_NOT_NULL(child_entry);
1665  snapshot_->root()->SetNamedAutoIndexReference(HeapGraphEdge::kShortcut,
1666  nullptr, child_entry, names_);
1667 }
1668 
1669 void V8HeapExplorer::SetGcRootsReference(Root root) {
1670  snapshot_->gc_roots()->SetIndexedAutoIndexReference(
1671  HeapGraphEdge::kElement, snapshot_->gc_subroot(root));
1672 }
1673 
1674 void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description,
1675  bool is_weak, Object* child_obj) {
1676  HeapEntry* child_entry = GetEntry(child_obj);
1677  if (child_entry == nullptr) return;
1678  const char* name = GetStrongGcSubrootName(child_obj);
1679  HeapGraphEdge::Type edge_type =
1680  is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kInternal;
1681  if (name != nullptr) {
1682  snapshot_->gc_subroot(root)->SetNamedReference(edge_type, name,
1683  child_entry);
1684  } else {
1685  snapshot_->gc_subroot(root)->SetNamedAutoIndexReference(
1686  edge_type, description, child_entry, names_);
1687  }
1688 
1689  // Add a shortcut to JS global object reference at snapshot root.
1690  // That allows the user to easily find global objects. They are
1691  // also used as starting points in distance calculations.
1692  if (is_weak || !child_obj->IsNativeContext()) return;
1693 
1694  JSGlobalObject* global = Context::cast(child_obj)->global_object();
1695  if (!global->IsJSGlobalObject()) return;
1696 
1697  if (!user_roots_.insert(global).second) return;
1698 
1699  SetUserGlobalReference(global);
1700 }
1701 
1702 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
1703  if (strong_gc_subroot_names_.empty()) {
1704  Isolate* isolate = heap_->isolate();
1705  for (RootIndex root_index = RootIndex::kFirstStrongOrReadOnlyRoot;
1706  root_index <= RootIndex::kLastStrongOrReadOnlyRoot; ++root_index) {
1707  const char* name = RootsTable::name(root_index);
1708  strong_gc_subroot_names_.emplace(isolate->root(root_index), name);
1709  }
1710  CHECK(!strong_gc_subroot_names_.empty());
1711  }
1712  auto it = strong_gc_subroot_names_.find(object);
1713  return it != strong_gc_subroot_names_.end() ? it->second : nullptr;
1714 }
1715 
1716 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
1717  if (IsEssentialObject(obj)) {
1718  HeapEntry* entry = GetEntry(obj);
1719  if (entry->name()[0] == '\0') {
1720  entry->set_name(tag);
1721  }
1722  }
1723 }
1724 
1725 class GlobalObjectsEnumerator : public RootVisitor {
1726  public:
1727  void VisitRootPointers(Root root, const char* description, ObjectSlot start,
1728  ObjectSlot end) override {
1729  for (ObjectSlot p = start; p < end; ++p) {
1730  if (!(*p)->IsNativeContext()) continue;
1731  JSObject* proxy = Context::cast(*p)->global_proxy();
1732  if (!proxy->IsJSGlobalProxy()) continue;
1733  Object* global = proxy->map()->prototype();
1734  if (!global->IsJSGlobalObject()) continue;
1735  objects_.push_back(Handle<JSGlobalObject>(JSGlobalObject::cast(global),
1736  proxy->GetIsolate()));
1737  }
1738  }
1739  int count() const { return static_cast<int>(objects_.size()); }
1740  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
1741 
1742  private:
1743  std::vector<Handle<JSGlobalObject>> objects_;
1744 };
1745 
1746 
1747 // Modifies heap. Must not be run during heap traversal.
1748 void V8HeapExplorer::TagGlobalObjects() {
1749  Isolate* isolate = heap_->isolate();
1750  HandleScope scope(isolate);
1751  GlobalObjectsEnumerator enumerator;
1752  isolate->global_handles()->IterateAllRoots(&enumerator);
1753  std::vector<const char*> urls(enumerator.count());
1754  for (int i = 0, l = enumerator.count(); i < l; ++i) {
1755  urls[i] = global_object_name_resolver_
1756  ? global_object_name_resolver_->GetName(Utils::ToLocal(
1757  Handle<JSObject>::cast(enumerator.at(i))))
1758  : nullptr;
1759  }
1760 
1761  DisallowHeapAllocation no_allocation;
1762  for (int i = 0, l = enumerator.count(); i < l; ++i) {
1763  if (urls[i]) objects_tags_.emplace(*enumerator.at(i), urls[i]);
1764  }
1765 }
1766 
1767 class EmbedderGraphImpl : public EmbedderGraph {
1768  public:
1769  struct Edge {
1770  Node* from;
1771  Node* to;
1772  const char* name;
1773  };
1774 
1775  class V8NodeImpl : public Node {
1776  public:
1777  explicit V8NodeImpl(Object* object) : object_(object) {}
1778  Object* GetObject() { return object_; }
1779 
1780  // Node overrides.
1781  bool IsEmbedderNode() override { return false; }
1782  const char* Name() override {
1783  // The name should be retrieved via GetObject().
1784  UNREACHABLE();
1785  return "";
1786  }
1787  size_t SizeInBytes() override {
1788  // The size should be retrieved via GetObject().
1789  UNREACHABLE();
1790  return 0;
1791  }
1792 
1793  private:
1794  Object* object_;
1795  };
1796 
1797  Node* V8Node(const v8::Local<v8::Value>& value) final {
1798  Handle<Object> object = v8::Utils::OpenHandle(*value);
1799  DCHECK(!object.is_null());
1800  return AddNode(std::unique_ptr<Node>(new V8NodeImpl(*object)));
1801  }
1802 
1803  Node* AddNode(std::unique_ptr<Node> node) final {
1804  Node* result = node.get();
1805  nodes_.push_back(std::move(node));
1806  return result;
1807  }
1808 
1809  void AddEdge(Node* from, Node* to, const char* name) final {
1810  edges_.push_back({from, to, name});
1811  }
1812 
1813  const std::vector<std::unique_ptr<Node>>& nodes() { return nodes_; }
1814  const std::vector<Edge>& edges() { return edges_; }
1815 
1816  private:
1817  std::vector<std::unique_ptr<Node>> nodes_;
1818  std::vector<Edge> edges_;
1819 };
1820 
1821 class GlobalHandlesExtractor : public PersistentHandleVisitor {
1822  public:
1823  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
1824  : explorer_(explorer) {}
1825  ~GlobalHandlesExtractor() override = default;
1826  void VisitPersistentHandle(Persistent<Value>* value,
1827  uint16_t class_id) override {
1828  Handle<Object> object = Utils::OpenPersistent(value);
1829  // TODO(3770): Get rid of Object** here.
1830  explorer_->VisitSubtreeWrapper(
1831  reinterpret_cast<Object**>(object.location()), class_id);
1832  }
1833 
1834  private:
1835  NativeObjectsExplorer* explorer_;
1836 };
1837 
1838 
1839 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
1840  public:
1841  BasicHeapEntriesAllocator(
1842  HeapSnapshot* snapshot,
1843  HeapEntry::Type entries_type)
1844  : snapshot_(snapshot),
1845  names_(snapshot_->profiler()->names()),
1846  heap_object_map_(snapshot_->profiler()->heap_object_map()),
1847  entries_type_(entries_type) {
1848  }
1849  HeapEntry* AllocateEntry(HeapThing ptr) override;
1850  private:
1851  HeapSnapshot* snapshot_;
1852  StringsStorage* names_;
1853  HeapObjectsMap* heap_object_map_;
1854  HeapEntry::Type entries_type_;
1855 };
1856 
1857 
1858 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
1859  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
1860  intptr_t elements = info->GetElementCount();
1861  intptr_t size = info->GetSizeInBytes();
1862  const char* name = elements != -1
1863  ? names_->GetFormatted("%s / %" V8PRIdPTR " entries",
1864  info->GetLabel(), elements)
1865  : names_->GetCopy(info->GetLabel());
1866  return snapshot_->AddEntry(
1867  entries_type_,
1868  name,
1869  heap_object_map_->GenerateId(info),
1870  size != -1 ? static_cast<int>(size) : 0,
1871  0);
1872 }
1873 
1874 class EmbedderGraphEntriesAllocator : public HeapEntriesAllocator {
1875  public:
1876  explicit EmbedderGraphEntriesAllocator(HeapSnapshot* snapshot)
1877  : snapshot_(snapshot),
1878  names_(snapshot_->profiler()->names()),
1879  heap_object_map_(snapshot_->profiler()->heap_object_map()) {}
1880  HeapEntry* AllocateEntry(HeapThing ptr) override;
1881 
1882  private:
1883  HeapSnapshot* snapshot_;
1884  StringsStorage* names_;
1885  HeapObjectsMap* heap_object_map_;
1886 };
1887 
1888 namespace {
1889 
1890 const char* EmbedderGraphNodeName(StringsStorage* names,
1891  EmbedderGraphImpl::Node* node) {
1892  const char* prefix = node->NamePrefix();
1893  return prefix ? names->GetFormatted("%s %s", prefix, node->Name())
1894  : names->GetCopy(node->Name());
1895 }
1896 
1897 HeapEntry::Type EmbedderGraphNodeType(EmbedderGraphImpl::Node* node) {
1898  return HeapEntry::kNative;
1899 }
1900 
1901 // Merges the names of an embedder node and its wrapper node.
1902 // If the wrapper node name contains a tag suffix (part after '/') then the
1903 // result is the embedder node name concatenated with the tag suffix.
1904 // Otherwise, the result is the embedder node name.
1905 const char* MergeNames(StringsStorage* names, const char* embedder_name,
1906  const char* wrapper_name) {
1907  const char* suffix = strchr(wrapper_name, '/');
1908  return suffix ? names->GetFormatted("%s %s", embedder_name, suffix)
1909  : embedder_name;
1910 }
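// Example with illustrative names: MergeNames(names, "HTMLDivElement",
// "Wrapper / 0x1234") yields "HTMLDivElement / 0x1234", while
// MergeNames(names, "HTMLDivElement", "Wrapper") yields "HTMLDivElement".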
1911 
1912 } // anonymous namespace
1913 
1914 HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(HeapThing ptr) {
1915  EmbedderGraphImpl::Node* node =
1916  reinterpret_cast<EmbedderGraphImpl::Node*>(ptr);
1917  DCHECK(node->IsEmbedderNode());
1918  size_t size = node->SizeInBytes();
1919  return snapshot_->AddEntry(
1920  EmbedderGraphNodeType(node), EmbedderGraphNodeName(names_, node),
1921  static_cast<SnapshotObjectId>(reinterpret_cast<uintptr_t>(node) << 1),
1922  static_cast<int>(size), 0);
1923 }
1924 
1925 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
1926  public:
1927  explicit NativeGroupRetainedObjectInfo(const char* label)
1928  : disposed_(false),
1929  hash_(reinterpret_cast<intptr_t>(label)),
1930  label_(label) {}
1931 
1932  ~NativeGroupRetainedObjectInfo() override = default;
1933  void Dispose() override {
1934  CHECK(!disposed_);
1935  disposed_ = true;
1936  delete this;
1937  }
1938  bool IsEquivalent(RetainedObjectInfo* other) override {
1939  return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
1940  }
1941  intptr_t GetHash() override { return hash_; }
1942  const char* GetLabel() override { return label_; }
1943 
1944  private:
1945  bool disposed_;
1946  intptr_t hash_;
1947  const char* label_;
1948 };
1949 
1950 NativeObjectsExplorer::NativeObjectsExplorer(
1951  HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
1952  : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
1953  snapshot_(snapshot),
1954  names_(snapshot_->profiler()->names()),
1955  embedder_queried_(false),
1956  native_groups_(0, SeededStringHasher(isolate_->heap()->HashSeed())),
1957  synthetic_entries_allocator_(
1958  new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic)),
1959  native_entries_allocator_(
1960  new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative)),
1961  embedder_graph_entries_allocator_(
1962  new EmbedderGraphEntriesAllocator(snapshot)) {}
1963 
1964 NativeObjectsExplorer::~NativeObjectsExplorer() {
1965  for (auto map_entry : objects_by_info_) {
1966  v8::RetainedObjectInfo* info = map_entry.first;
1967  info->Dispose();
1968  std::vector<HeapObject*>* objects = map_entry.second;
1969  delete objects;
1970  }
1971  for (auto map_entry : native_groups_) {
1972  NativeGroupRetainedObjectInfo* info = map_entry.second;
1973  info->Dispose();
1974  }
1975 }
1976 
1977 
1978 int NativeObjectsExplorer::EstimateObjectsCount() {
1979  FillRetainedObjects();
1980  return static_cast<int>(objects_by_info_.size());
1981 }
1982 
1983 
1984 void NativeObjectsExplorer::FillRetainedObjects() {
1985  if (embedder_queried_) return;
1986  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
1987  v8::HeapProfiler::RetainerInfos infos =
1988  snapshot_->profiler()->GetRetainerInfos(isolate_);
1989  for (auto& pair : infos.groups) {
1990  std::vector<HeapObject*>* info = GetVectorMaybeDisposeInfo(pair.first);
1991  for (auto& persistent : pair.second) {
1992  if (persistent->IsEmpty()) continue;
1993 
1994  Handle<Object> object = v8::Utils::OpenHandle(
1995  *persistent->Get(reinterpret_cast<v8::Isolate*>(isolate_)));
1996  DCHECK(!object.is_null());
1997  HeapObject* heap_object = HeapObject::cast(*object);
1998  info->push_back(heap_object);
1999  in_groups_.insert(heap_object);
2000  }
2001  }
2002 
2003  // Record objects that are not in ObjectGroups, but have class ID.
2004  GlobalHandlesExtractor extractor(this);
2005  isolate_->global_handles()->IterateAllRootsWithClassIds(&extractor);
2006 
2007  edges_ = std::move(infos.edges);
2008  embedder_queried_ = true;
2009 }
2010 
2011 void NativeObjectsExplorer::FillEdges() {
2012  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
2013  // Fill in actual edges found.
2014  for (auto& pair : edges_) {
2015  if (pair.first->IsEmpty() || pair.second->IsEmpty()) continue;
2016 
2017  Handle<Object> parent_object = v8::Utils::OpenHandle(
2018  *pair.first->Get(reinterpret_cast<v8::Isolate*>(isolate_)));
2019  HeapObject* parent = HeapObject::cast(*parent_object);
2020  HeapEntry* parent_entry =
2021  generator_->FindOrAddEntry(parent, native_entries_allocator_.get());
2022  DCHECK_NOT_NULL(parent_entry);
2023  Handle<Object> child_object = v8::Utils::OpenHandle(
2024  *pair.second->Get(reinterpret_cast<v8::Isolate*>(isolate_)));
2025  HeapObject* child = HeapObject::cast(*child_object);
2026  HeapEntry* child_entry =
2027  generator_->FindOrAddEntry(child, native_entries_allocator_.get());
2028  parent_entry->SetNamedReference(HeapGraphEdge::kInternal, "native",
2029  child_entry);
2030  }
2031  edges_.clear();
2032 }
2033 
2034 std::vector<HeapObject*>* NativeObjectsExplorer::GetVectorMaybeDisposeInfo(
2035  v8::RetainedObjectInfo* info) {
2036  if (objects_by_info_.count(info)) {
2037  info->Dispose();
2038  } else {
2039  objects_by_info_[info] = new std::vector<HeapObject*>();
2040  }
2041  return objects_by_info_[info];
2042 }
2043 
2044 HeapEntry* NativeObjectsExplorer::EntryForEmbedderGraphNode(
2045  EmbedderGraphImpl::Node* node) {
2046  EmbedderGraphImpl::Node* wrapper = node->WrapperNode();
2047  if (wrapper) {
2048  node = wrapper;
2049  }
2050  if (node->IsEmbedderNode()) {
2051  return generator_->FindOrAddEntry(node,
2052  embedder_graph_entries_allocator_.get());
2053  } else {
2054  EmbedderGraphImpl::V8NodeImpl* v8_node =
2055  static_cast<EmbedderGraphImpl::V8NodeImpl*>(node);
2056  Object* object = v8_node->GetObject();
2057  if (object->IsSmi()) return nullptr;
2058  return generator_->FindEntry(HeapObject::cast(object));
2059  }
2060 }
2061 
2062 bool NativeObjectsExplorer::IterateAndExtractReferences(
2063  HeapSnapshotGenerator* generator) {
2064  generator_ = generator;
2065 
2066  if (FLAG_heap_profiler_use_embedder_graph &&
2067  snapshot_->profiler()->HasBuildEmbedderGraphCallback()) {
2068  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
2069  DisallowHeapAllocation no_allocation;
2070  EmbedderGraphImpl graph;
2071  snapshot_->profiler()->BuildEmbedderGraph(isolate_, &graph);
2072  for (const auto& node : graph.nodes()) {
2073  if (node->IsRootNode()) {
2074  snapshot_->root()->SetIndexedAutoIndexReference(
2075  HeapGraphEdge::kElement, EntryForEmbedderGraphNode(node.get()));
2076  }
2077  // Adjust the name and the type of the V8 wrapper node.
2078  auto wrapper = node->WrapperNode();
2079  if (wrapper) {
2080  HeapEntry* wrapper_entry = EntryForEmbedderGraphNode(wrapper);
2081  wrapper_entry->set_name(
2082  MergeNames(names_, EmbedderGraphNodeName(names_, node.get()),
2083  wrapper_entry->name()));
2084  wrapper_entry->set_type(EmbedderGraphNodeType(node.get()));
2085  }
2086  }
2087  // Fill edges of the graph.
2088  for (const auto& edge : graph.edges()) {
2089  HeapEntry* from = EntryForEmbedderGraphNode(edge.from);
2090  // |from| and |to| can be nullptr if the corresponding node is a V8 node
2091  // pointing to a Smi.
2092  if (!from) continue;
2093  HeapEntry* to = EntryForEmbedderGraphNode(edge.to);
2094  if (!to) continue;
2095  if (edge.name == nullptr) {
2096  from->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, to);
2097  } else {
2098  from->SetNamedReference(HeapGraphEdge::kInternal, edge.name, to);
2099  }
2100  }
2101  } else {
2102  FillRetainedObjects();
2103  FillEdges();
2104  if (EstimateObjectsCount() > 0) {
2105  for (auto map_entry : objects_by_info_) {
2106  v8::RetainedObjectInfo* info = map_entry.first;
2107  SetNativeRootReference(info);
2108  std::vector<HeapObject*>* objects = map_entry.second;
2109  for (HeapObject* object : *objects) {
2110  SetWrapperNativeReferences(object, info);
2111  }
2112  }
2113  SetRootNativeRootsReference();
2114  }
2115  }
2116  generator_ = nullptr;
2117  return true;
2118 }
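// Two extraction paths are taken above: if FLAG_heap_profiler_use_embedder_graph
// is set and the embedder registered a BuildEmbedderGraph callback, nodes and
// edges come from the EmbedderGraph built by that callback; otherwise the older
// RetainedObjectInfo / object-group data gathered by FillRetainedObjects() and
// FillEdges() is used.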
2119 
2120 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2121  const char* label) {
2122  const char* label_copy = names_->GetCopy(label);
2123  if (!native_groups_.count(label_copy)) {
2124  native_groups_[label_copy] = new NativeGroupRetainedObjectInfo(label);
2125  }
2126  return native_groups_[label_copy];
2127 }
2128 
2129 void NativeObjectsExplorer::SetNativeRootReference(
2130  v8::RetainedObjectInfo* info) {
2131  HeapEntry* child_entry =
2132  generator_->FindOrAddEntry(info, native_entries_allocator_.get());
2133  DCHECK_NOT_NULL(child_entry);
2134  NativeGroupRetainedObjectInfo* group_info =
2135  FindOrAddGroupInfo(info->GetGroupLabel());
2136  HeapEntry* group_entry = generator_->FindOrAddEntry(
2137  group_info, synthetic_entries_allocator_.get());
2138  group_entry->SetNamedAutoIndexReference(HeapGraphEdge::kInternal, nullptr,
2139  child_entry, names_);
2140 }
2141 
2142 void NativeObjectsExplorer::SetWrapperNativeReferences(
2143  HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2144  HeapEntry* wrapper_entry = generator_->FindEntry(wrapper);
2145  DCHECK_NOT_NULL(wrapper_entry);
2146  HeapEntry* info_entry =
2147  generator_->FindOrAddEntry(info, native_entries_allocator_.get());
2148  DCHECK_NOT_NULL(info_entry);
2149  wrapper_entry->SetNamedReference(HeapGraphEdge::kInternal, "native",
2150  info_entry);
2151  info_entry->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2152  wrapper_entry);
2153 }
2154 
2155 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2156  for (auto map_entry : native_groups_) {
2157  NativeGroupRetainedObjectInfo* group_info = map_entry.second;
2158  HeapEntry* group_entry =
2159  generator_->FindOrAddEntry(group_info, native_entries_allocator_.get());
2160  DCHECK_NOT_NULL(group_entry);
2161  snapshot_->root()->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2162  group_entry);
2163  }
2164 }
2165 
2166 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2167  if (in_groups_.count(*p)) return;
2168  v8::RetainedObjectInfo* info =
2169  isolate_->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2170  if (info == nullptr) return;
2171  GetVectorMaybeDisposeInfo(info)->push_back(HeapObject::cast(*p));
2172 }
2173 
2174 HeapSnapshotGenerator::HeapSnapshotGenerator(
2175  HeapSnapshot* snapshot,
2176  v8::ActivityControl* control,
2177  v8::HeapProfiler::ObjectNameResolver* resolver,
2178  Heap* heap)
2179  : snapshot_(snapshot),
2180  control_(control),
2181  v8_heap_explorer_(snapshot_, this, resolver),
2182  dom_explorer_(snapshot_, this),
2183  heap_(heap) {
2184 }
2185 
2186 namespace {
2187 class NullContextScope {
2188  public:
2189  explicit NullContextScope(Isolate* isolate)
2190  : isolate_(isolate), prev_(isolate->context()) {
2191  isolate_->set_context(Context());
2192  }
2193  ~NullContextScope() { isolate_->set_context(prev_); }
2194 
2195  private:
2196  Isolate* isolate_;
2197  Context prev_;
2198 };
2199 } // namespace
2200 
2201 bool HeapSnapshotGenerator::GenerateSnapshot() {
2202  v8_heap_explorer_.TagGlobalObjects();
2203 
2204  // TODO(1562) Profiler assumes that any object that is in the heap after
2205  // full GC is reachable from the root when computing dominators.
2206  // This is not true for weakly reachable objects.
2207  // As a temporary solution we call GC twice.
2208  heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
2209  GarbageCollectionReason::kHeapProfiler);
2210  heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
2211  GarbageCollectionReason::kHeapProfiler);
2212 
2213  NullContextScope null_context_scope(heap_->isolate());
2214 
2215 #ifdef VERIFY_HEAP
2216  Heap* debug_heap = heap_;
2217  if (FLAG_verify_heap) {
2218  debug_heap->Verify();
2219  }
2220 #endif
2221 
2222  InitProgressCounter();
2223 
2224 #ifdef VERIFY_HEAP
2225  if (FLAG_verify_heap) {
2226  debug_heap->Verify();
2227  }
2228 #endif
2229 
2230  snapshot_->AddSyntheticRootEntries();
2231 
2232  if (!FillReferences()) return false;
2233 
2234  snapshot_->FillChildren();
2235  snapshot_->RememberLastJSObjectId();
2236 
2237  progress_counter_ = progress_total_;
2238  if (!ProgressReport(true)) return false;
2239  return true;
2240 }
2241 
2242 void HeapSnapshotGenerator::ProgressStep() {
2243  ++progress_counter_;
2244 }
2245 
2246 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2247  const int kProgressReportGranularity = 10000;
2248  if (control_ != nullptr &&
2249  (force || progress_counter_ % kProgressReportGranularity == 0)) {
2250  return control_->ReportProgressValue(progress_counter_, progress_total_) ==
2251  v8::ActivityControl::kContinue;
2252  }
2253  return true;
2254 }
2255 
2256 void HeapSnapshotGenerator::InitProgressCounter() {
2257  if (control_ == nullptr) return;
2258  // The +1 ensures that intermediate ProgressReport calls will never signal
2259  // that the work is finished (i.e. progress_counter_ == progress_total_).
2260  // Only the forced ProgressReport() at the end of GenerateSnapshot()
2261  // should signal that the work is finished because signalling finished twice
2262  // breaks the DevTools frontend.
2263  progress_total_ = v8_heap_explorer_.EstimateObjectsCount() +
2264  dom_explorer_.EstimateObjectsCount() + 1;
2265  progress_counter_ = 0;
2266 }
2267 
2268 bool HeapSnapshotGenerator::FillReferences() {
2269  return v8_heap_explorer_.IterateAndExtractReferences(this) &&
2270  dom_explorer_.IterateAndExtractReferences(this);
2271 }
2272 
2273 template<int bytes> struct MaxDecimalDigitsIn;
2274 template<> struct MaxDecimalDigitsIn<4> {
2275  static const int kSigned = 11;
2276  static const int kUnsigned = 10;
2277 };
2278 template<> struct MaxDecimalDigitsIn<8> {
2279  static const int kSigned = 20;
2280  static const int kUnsigned = 20;
2281 };
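// The limits above cover the widest decimal renderings: a signed 32-bit value
// needs up to 11 characters ("-2147483648"), an unsigned one 10
// ("4294967295"); both signed and unsigned 64-bit values need up to 20
// ("-9223372036854775808" / "18446744073709551615").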
2282 
2283 class OutputStreamWriter {
2284  public:
2285  explicit OutputStreamWriter(v8::OutputStream* stream)
2286  : stream_(stream),
2287  chunk_size_(stream->GetChunkSize()),
2288  chunk_(chunk_size_),
2289  chunk_pos_(0),
2290  aborted_(false) {
2291  DCHECK_GT(chunk_size_, 0);
2292  }
2293  bool aborted() { return aborted_; }
2294  void AddCharacter(char c) {
2295  DCHECK_NE(c, '\0');
2296  DCHECK(chunk_pos_ < chunk_size_);
2297  chunk_[chunk_pos_++] = c;
2298  MaybeWriteChunk();
2299  }
2300  void AddString(const char* s) {
2301  AddSubstring(s, StrLength(s));
2302  }
2303  void AddSubstring(const char* s, int n) {
2304  if (n <= 0) return;
2305  DCHECK(static_cast<size_t>(n) <= strlen(s));
2306  const char* s_end = s + n;
2307  while (s < s_end) {
2308  int s_chunk_size =
2309  Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2310  DCHECK_GT(s_chunk_size, 0);
2311  MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2312  s += s_chunk_size;
2313  chunk_pos_ += s_chunk_size;
2314  MaybeWriteChunk();
2315  }
2316  }
2317  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2318  void Finalize() {
2319  if (aborted_) return;
2320  DCHECK(chunk_pos_ < chunk_size_);
2321  if (chunk_pos_ != 0) {
2322  WriteChunk();
2323  }
2324  stream_->EndOfStream();
2325  }
2326 
2327  private:
2328  template<typename T>
2329  void AddNumberImpl(T n, const char* format) {
2330  // Buffer for the longest value plus trailing \0
2331  static const int kMaxNumberSize =
2332  MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2333  if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2334  int result = SNPrintF(
2335  chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2336  DCHECK_NE(result, -1);
2337  chunk_pos_ += result;
2338  MaybeWriteChunk();
2339  } else {
2340  EmbeddedVector<char, kMaxNumberSize> buffer;
2341  int result = SNPrintF(buffer, format, n);
2342  USE(result);
2343  DCHECK_NE(result, -1);
2344  AddString(buffer.start());
2345  }
2346  }
2347  void MaybeWriteChunk() {
2348  DCHECK(chunk_pos_ <= chunk_size_);
2349  if (chunk_pos_ == chunk_size_) {
2350  WriteChunk();
2351  }
2352  }
2353  void WriteChunk() {
2354  if (aborted_) return;
2355  if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2356  v8::OutputStream::kAbort) aborted_ = true;
2357  chunk_pos_ = 0;
2358  }
2359 
2360  v8::OutputStream* stream_;
2361  int chunk_size_;
2362  ScopedVector<char> chunk_;
2363  int chunk_pos_;
2364  bool aborted_;
2365 };
2366 
2367 
2368 // type, name|index, to_node.
2369 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2370 // type, name, id, self_size, edge_count, trace_node_id.
2371 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2372 
2373 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2374  if (AllocationTracker* allocation_tracker =
2375  snapshot_->profiler()->allocation_tracker()) {
2376  allocation_tracker->PrepareForSerialization();
2377  }
2378  DCHECK_NULL(writer_);
2379  writer_ = new OutputStreamWriter(stream);
2380  SerializeImpl();
2381  delete writer_;
2382  writer_ = nullptr;
2383 }
2384 
2385 
2386 void HeapSnapshotJSONSerializer::SerializeImpl() {
2387  DCHECK_EQ(0, snapshot_->root()->index());
2388  writer_->AddCharacter('{');
2389  writer_->AddString("\"snapshot\":{");
2390  SerializeSnapshot();
2391  if (writer_->aborted()) return;
2392  writer_->AddString("},\n");
2393  writer_->AddString("\"nodes\":[");
2394  SerializeNodes();
2395  if (writer_->aborted()) return;
2396  writer_->AddString("],\n");
2397  writer_->AddString("\"edges\":[");
2398  SerializeEdges();
2399  if (writer_->aborted()) return;
2400  writer_->AddString("],\n");
2401 
2402  writer_->AddString("\"trace_function_infos\":[");
2403  SerializeTraceNodeInfos();
2404  if (writer_->aborted()) return;
2405  writer_->AddString("],\n");
2406  writer_->AddString("\"trace_tree\":[");
2407  SerializeTraceTree();
2408  if (writer_->aborted()) return;
2409  writer_->AddString("],\n");
2410 
2411  writer_->AddString("\"samples\":[");
2412  SerializeSamples();
2413  if (writer_->aborted()) return;
2414  writer_->AddString("],\n");
2415 
2416  writer_->AddString("\"locations\":[");
2417  SerializeLocations();
2418  if (writer_->aborted()) return;
2419  writer_->AddString("],\n");
2420 
2421  writer_->AddString("\"strings\":[");
2422  SerializeStrings();
2423  if (writer_->aborted()) return;
2424  writer_->AddCharacter(']');
2425  writer_->AddCharacter('}');
2426  writer_->Finalize();
2427 }
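// The emitted stream is one JSON object of roughly this shape (abbreviated):
//   {"snapshot":{...},"nodes":[...],"edges":[...],
//    "trace_function_infos":[...],"trace_tree":[...],
//    "samples":[...],"locations":[...],"strings":[...]}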
2428 
2429 
2430 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2431  base::HashMap::Entry* cache_entry =
2432  strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
2433  if (cache_entry->value == nullptr) {
2434  cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2435  }
2436  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2437 }
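// String ids handed out here are 1-based; slot 0 of the serialized "strings"
// array is the "<dummy>" placeholder written by SerializeStrings().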
2438 
2439 
2440 namespace {
2441 
2442 template<size_t size> struct ToUnsigned;
2443 
2444 template<> struct ToUnsigned<4> {
2445  typedef uint32_t Type;
2446 };
2447 
2448 template<> struct ToUnsigned<8> {
2449  typedef uint64_t Type;
2450 };
2451 
2452 } // namespace
2453 
2454 
2455 template<typename T>
2456 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2457  STATIC_ASSERT(static_cast<T>(-1) > 0); // Check that T is unsigned
2458  int number_of_digits = 0;
2459  T t = value;
2460  do {
2461  ++number_of_digits;
2462  } while (t /= 10);
2463 
2464  buffer_pos += number_of_digits;
2465  int result = buffer_pos;
2466  do {
2467  int last_digit = static_cast<int>(value % 10);
2468  buffer[--buffer_pos] = '0' + last_digit;
2469  value /= 10;
2470  } while (value);
2471  return result;
2472 }
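// Example: utoa_impl(407u, buffer, 2) stores '4','0','7' at buffer[2..4] and
// returns 5, the position just past the last digit (no terminating '\0' is
// written).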
2473 
2474 
2475 template<typename T>
2476 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2477  typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2478  STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2479  return utoa_impl(unsigned_value, buffer, buffer_pos);
2480 }
2481 
2482 
2483 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2484  bool first_edge) {
2485  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2486  static const int kBufferSize =
2487  MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2488  EmbeddedVector<char, kBufferSize> buffer;
2489  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2490  || edge->type() == HeapGraphEdge::kHidden
2491  ? edge->index() : GetStringId(edge->name());
2492  int buffer_pos = 0;
2493  if (!first_edge) {
2494  buffer[buffer_pos++] = ',';
2495  }
2496  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2497  buffer[buffer_pos++] = ',';
2498  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2499  buffer[buffer_pos++] = ',';
2500  buffer_pos = utoa(to_node_index(edge->to()), buffer, buffer_pos);
2501  buffer[buffer_pos++] = '\n';
2502  buffer[buffer_pos++] = '\0';
2503  writer_->AddString(buffer.start());
2504 }
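// Each edge row is kEdgeFieldsCount (= 3) comma-separated unsigned ints, e.g.
// "2,345,42\n": the edge type, a string id (or the element/hidden index), and
// the value of to_node_index(edge->to()), which identifies the target entry in
// the serialized "nodes" array.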
2505 
2506 void HeapSnapshotJSONSerializer::SerializeEdges() {
2507  std::vector<HeapGraphEdge*>& edges = snapshot_->children();
2508  for (size_t i = 0; i < edges.size(); ++i) {
2509  DCHECK(i == 0 ||
2510  edges[i - 1]->from()->index() <= edges[i]->from()->index());
2511  SerializeEdge(edges[i], i == 0);
2512  if (writer_->aborted()) return;
2513  }
2514 }
2515 
2516 void HeapSnapshotJSONSerializer::SerializeNode(const HeapEntry* entry) {
2517  // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0
2518  static const int kBufferSize =
2519  5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2520  + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned // NOLINT
2521  + 6 + 1 + 1;
2522  EmbeddedVector<char, kBufferSize> buffer;
2523  int buffer_pos = 0;
2524  if (to_node_index(entry) != 0) {
2525  buffer[buffer_pos++] = ',';
2526  }
2527  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2528  buffer[buffer_pos++] = ',';
2529  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2530  buffer[buffer_pos++] = ',';
2531  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2532  buffer[buffer_pos++] = ',';
2533  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2534  buffer[buffer_pos++] = ',';
2535  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2536  buffer[buffer_pos++] = ',';
2537  buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2538  buffer[buffer_pos++] = '\n';
2539  buffer[buffer_pos++] = '\0';
2540  writer_->AddString(buffer.start());
2541 }
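// Each node row carries kNodeFieldsCount (= 6) values, in the order declared
// under "node_fields" in the meta section: type, name string id, snapshot
// object id, self_size, edge_count (children_count) and trace_node_id.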
2542 
2543 void HeapSnapshotJSONSerializer::SerializeNodes() {
2544  const std::deque<HeapEntry>& entries = snapshot_->entries();
2545  for (const HeapEntry& entry : entries) {
2546  SerializeNode(&entry);
2547  if (writer_->aborted()) return;
2548  }
2549 }
2550 
2551 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2552  writer_->AddString("\"meta\":");
2553  // The object describing node serialization layout.
2554  // We use a set of macros to improve readability.
2555 
2556 // clang-format off
2557 #define JSON_A(s) "[" s "]"
2558 #define JSON_O(s) "{" s "}"
2559 #define JSON_S(s) "\"" s "\""
2560  writer_->AddString(JSON_O(
2561  JSON_S("node_fields") ":" JSON_A(
2562  JSON_S("type") ","
2563  JSON_S("name") ","
2564  JSON_S("id") ","
2565  JSON_S("self_size") ","
2566  JSON_S("edge_count") ","
2567  JSON_S("trace_node_id")) ","
2568  JSON_S("node_types") ":" JSON_A(
2569  JSON_A(
2570  JSON_S("hidden") ","
2571  JSON_S("array") ","
2572  JSON_S("string") ","
2573  JSON_S("object") ","
2574  JSON_S("code") ","
2575  JSON_S("closure") ","
2576  JSON_S("regexp") ","
2577  JSON_S("number") ","
2578  JSON_S("native") ","
2579  JSON_S("synthetic") ","
2580  JSON_S("concatenated string") ","
2581  JSON_S("sliced string") ","
2582  JSON_S("symbol") ","
2583  JSON_S("bigint")) ","
2584  JSON_S("string") ","
2585  JSON_S("number") ","
2586  JSON_S("number") ","
2587  JSON_S("number") ","
2588  JSON_S("number") ","
2589  JSON_S("number")) ","
2590  JSON_S("edge_fields") ":" JSON_A(
2591  JSON_S("type") ","
2592  JSON_S("name_or_index") ","
2593  JSON_S("to_node")) ","
2594  JSON_S("edge_types") ":" JSON_A(
2595  JSON_A(
2596  JSON_S("context") ","
2597  JSON_S("element") ","
2598  JSON_S("property") ","
2599  JSON_S("internal") ","
2600  JSON_S("hidden") ","
2601  JSON_S("shortcut") ","
2602  JSON_S("weak")) ","
2603  JSON_S("string_or_number") ","
2604  JSON_S("node")) ","
2605  JSON_S("trace_function_info_fields") ":" JSON_A(
2606  JSON_S("function_id") ","
2607  JSON_S("name") ","
2608  JSON_S("script_name") ","
2609  JSON_S("script_id") ","
2610  JSON_S("line") ","
2611  JSON_S("column")) ","
2612  JSON_S("trace_node_fields") ":" JSON_A(
2613  JSON_S("id") ","
2614  JSON_S("function_info_index") ","
2615  JSON_S("count") ","
2616  JSON_S("size") ","
2617  JSON_S("children")) ","
2618  JSON_S("sample_fields") ":" JSON_A(
2619  JSON_S("timestamp_us") ","
2620  JSON_S("last_assigned_id")) ","
2621  JSON_S("location_fields") ":" JSON_A(
2622  JSON_S("object_index") ","
2623  JSON_S("script_id") ","
2624  JSON_S("line") ","
2625  JSON_S("column"))));
2626 // clang-format on
2627 #undef JSON_S
2628 #undef JSON_O
2629 #undef JSON_A
2630  writer_->AddString(",\"node_count\":");
2631  writer_->AddNumber(static_cast<unsigned>(snapshot_->entries().size()));
2632  writer_->AddString(",\"edge_count\":");
2633  writer_->AddNumber(static_cast<double>(snapshot_->edges().size()));
2634  writer_->AddString(",\"trace_function_count\":");
2635  uint32_t count = 0;
2636  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2637  if (tracker) {
2638  count = static_cast<uint32_t>(tracker->function_info_list().size());
2639  }
2640  writer_->AddNumber(count);
2641 }
2642 
2643 
2644 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2645  static const char hex_chars[] = "0123456789ABCDEF";
2646  w->AddString("\\u");
2647  w->AddCharacter(hex_chars[(u >> 12) & 0xF]);
2648  w->AddCharacter(hex_chars[(u >> 8) & 0xF]);
2649  w->AddCharacter(hex_chars[(u >> 4) & 0xF]);
2650  w->AddCharacter(hex_chars[u & 0xF]);
2651 }
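// Example: WriteUChar(w, 0x1F) emits the six characters "\u001F".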
2652 
2653 
2654 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2655  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2656  if (!tracker) return;
2657  AllocationTraceTree* traces = tracker->trace_tree();
2658  SerializeTraceNode(traces->root());
2659 }
2660 
2661 
2662 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2663  // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2664  const int kBufferSize =
2665  4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2666  + 4 + 1 + 1;
2667  EmbeddedVector<char, kBufferSize> buffer;
2668  int buffer_pos = 0;
2669  buffer_pos = utoa(node->id(), buffer, buffer_pos);
2670  buffer[buffer_pos++] = ',';
2671  buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
2672  buffer[buffer_pos++] = ',';
2673  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
2674  buffer[buffer_pos++] = ',';
2675  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
2676  buffer[buffer_pos++] = ',';
2677  buffer[buffer_pos++] = '[';
2678  buffer[buffer_pos++] = '\0';
2679  writer_->AddString(buffer.start());
2680 
2681  int i = 0;
2682  for (AllocationTraceNode* child : node->children()) {
2683  if (i++ > 0) {
2684  writer_->AddCharacter(',');
2685  }
2686  SerializeTraceNode(child);
2687  }
2688  writer_->AddCharacter(']');
2689 }
2690 
2691 
2692 // 0-based position is converted to 1-based during the serialization.
2693 static int SerializePosition(int position, const Vector<char>& buffer,
2694  int buffer_pos) {
2695  if (position == -1) {
2696  buffer[buffer_pos++] = '0';
2697  } else {
2698  DCHECK_GE(position, 0);
2699  buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
2700  }
2701  return buffer_pos;
2702 }
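// Example: a 0-based position 41 is serialized as "42"; the sentinel -1
// (no position available) is serialized as "0".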
2703 
2704 
2705 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
2706  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2707  if (!tracker) return;
2708  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
2709  const int kBufferSize =
2710  6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2711  + 6 + 1 + 1;
2712  EmbeddedVector<char, kBufferSize> buffer;
2713  int i = 0;
2714  for (AllocationTracker::FunctionInfo* info : tracker->function_info_list()) {
2715  int buffer_pos = 0;
2716  if (i++ > 0) {
2717  buffer[buffer_pos++] = ',';
2718  }
2719  buffer_pos = utoa(info->function_id, buffer, buffer_pos);
2720  buffer[buffer_pos++] = ',';
2721  buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
2722  buffer[buffer_pos++] = ',';
2723  buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
2724  buffer[buffer_pos++] = ',';
2725  // The cast is safe because script id is a non-negative Smi.
2726  buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
2727  buffer_pos);
2728  buffer[buffer_pos++] = ',';
2729  buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
2730  buffer[buffer_pos++] = ',';
2731  buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
2732  buffer[buffer_pos++] = '\n';
2733  buffer[buffer_pos++] = '\0';
2734  writer_->AddString(buffer.start());
2735  }
2736 }
2737 
2738 
2739 void HeapSnapshotJSONSerializer::SerializeSamples() {
2740  const std::vector<HeapObjectsMap::TimeInterval>& samples =
2741  snapshot_->profiler()->heap_object_map()->samples();
2742  if (samples.empty()) return;
2743  base::TimeTicks start_time = samples[0].timestamp;
2744  // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0
2745  const int kBufferSize = MaxDecimalDigitsIn<sizeof(
2746  base::TimeDelta().InMicroseconds())>::kUnsigned +
2747  MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned +
2748  2 + 1 + 1;
2749  EmbeddedVector<char, kBufferSize> buffer;
2750  int i = 0;
2751  for (const HeapObjectsMap::TimeInterval& sample : samples) {
2752  int buffer_pos = 0;
2753  if (i++ > 0) {
2754  buffer[buffer_pos++] = ',';
2755  }
2756  base::TimeDelta time_delta = sample.timestamp - start_time;
2757  buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos);
2758  buffer[buffer_pos++] = ',';
2759  buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos);
2760  buffer[buffer_pos++] = '\n';
2761  buffer[buffer_pos++] = '\0';
2762  writer_->AddString(buffer.start());
2763  }
2764 }
2765 
2766 
2767 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
2768  writer_->AddCharacter('\n');
2769  writer_->AddCharacter('\"');
2770  for ( ; *s != '\0'; ++s) {
2771  switch (*s) {
2772  case '\b':
2773  writer_->AddString("\\b");
2774  continue;
2775  case '\f':
2776  writer_->AddString("\\f");
2777  continue;
2778  case '\n':
2779  writer_->AddString("\\n");
2780  continue;
2781  case '\r':
2782  writer_->AddString("\\r");
2783  continue;
2784  case '\t':
2785  writer_->AddString("\\t");
2786  continue;
2787  case '\"':
2788  case '\\':
2789  writer_->AddCharacter('\\');
2790  writer_->AddCharacter(*s);
2791  continue;
2792  default:
2793  if (*s > 31 && *s < 128) {
2794  writer_->AddCharacter(*s);
2795  } else if (*s <= 31) {
2796  // Special character with no dedicated literal.
2797  WriteUChar(writer_, *s);
2798  } else {
2799  // Convert UTF-8 into \u UTF-16 literal.
2800  size_t length = 1, cursor = 0;
2801  for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
2802  unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
2803  if (c != unibrow::Utf8::kBadChar) {
2804  WriteUChar(writer_, c);
2805  DCHECK_NE(cursor, 0);
2806  s += cursor - 1;
2807  } else {
2808  writer_->AddCharacter('?');
2809  }
2810  }
2811  }
2812  }
2813  writer_->AddCharacter('\"');
2814 }
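// Example: plain ASCII is copied through (with '"' and '\\' escaped), control
// bytes such as 0x1F become "\u001F", and multi-byte UTF-8 sequences, e.g.
// 0xCF 0x80 for U+03C0, are re-emitted as "\u03C0"; undecodable bytes are
// replaced with '?'.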
2815 
2816 
2817 void HeapSnapshotJSONSerializer::SerializeStrings() {
2818  ScopedVector<const unsigned char*> sorted_strings(
2819  strings_.occupancy() + 1);
2820  for (base::HashMap::Entry* entry = strings_.Start(); entry != nullptr;
2821  entry = strings_.Next(entry)) {
2822  int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
2823  sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
2824  }
2825  writer_->AddString("\"<dummy>\"");
2826  for (int i = 1; i < sorted_strings.length(); ++i) {
2827  writer_->AddCharacter(',');
2828  SerializeString(sorted_strings[i]);
2829  if (writer_->aborted()) return;
2830  }
2831 }
2832 
2833 void HeapSnapshotJSONSerializer::SerializeLocation(
2834  const SourceLocation& location) {
2835  // The buffer needs space for 4 unsigned ints, 3 commas, \n and \0
2836  static const int kBufferSize =
2837  MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 4 + 3 + 2;
2838  EmbeddedVector<char, kBufferSize> buffer;
2839  int buffer_pos = 0;
2840  buffer_pos = utoa(to_node_index(location.entry_index), buffer, buffer_pos);
2841  buffer[buffer_pos++] = ',';
2842  buffer_pos = utoa(location.scriptId, buffer, buffer_pos);
2843  buffer[buffer_pos++] = ',';
2844  buffer_pos = utoa(location.line, buffer, buffer_pos);
2845  buffer[buffer_pos++] = ',';
2846  buffer_pos = utoa(location.col, buffer, buffer_pos);
2847  buffer[buffer_pos++] = '\n';
2848  buffer[buffer_pos++] = '\0';
2849  writer_->AddString(buffer.start());
2850 }
2851 
2852 void HeapSnapshotJSONSerializer::SerializeLocations() {
2853  const std::vector<SourceLocation>& locations = snapshot_->locations();
2854  for (size_t i = 0; i < locations.size(); i++) {
2855  if (i > 0) writer_->AddCharacter(',');
2856  SerializeLocation(locations[i]);
2857  if (writer_->aborted()) return;
2858  }
2859 }
2860 
2861 } // namespace internal
2862 } // namespace v8