V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
object-stats.cc
1 // Copyright 2015 the V8 project authors. All rights reserved.
2 //
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 
6 #include "src/heap/object-stats.h"
7 
8 #include <unordered_set>
9 
10 #include "src/assembler-inl.h"
11 #include "src/base/bits.h"
12 #include "src/compilation-cache.h"
13 #include "src/counters.h"
14 #include "src/globals.h"
15 #include "src/heap/heap-inl.h"
16 #include "src/heap/mark-compact.h"
17 #include "src/isolate.h"
18 #include "src/objects/compilation-cache-inl.h"
19 #include "src/objects/heap-object.h"
20 #include "src/objects/js-collection-inl.h"
21 #include "src/objects/literal-objects-inl.h"
22 #include "src/objects/slots.h"
23 #include "src/objects/templates.h"
24 #include "src/utils.h"
25 
26 namespace v8 {
27 namespace internal {
28 
// Serializes ObjectStats::CheckpointObjectStats() calls (see below).
static base::LazyMutex object_stats_mutex = LAZY_MUTEX_INITIALIZER;
30 
 public:
  // The collector owns no storage; it accumulates into the four counters
  // provided by the caller (the ObjectStats instance).
  FieldStatsCollector(size_t* tagged_fields_count,
                      size_t* embedder_fields_count,
                      size_t* unboxed_double_fields_count,
                      size_t* raw_fields_count)
      : tagged_fields_count_(tagged_fields_count),
        embedder_fields_count_(embedder_fields_count),
        unboxed_double_fields_count_(unboxed_double_fields_count),
        raw_fields_count_(raw_fields_count) {}
41 
  // Classifies every word of |host| into the tagged / embedder /
  // unboxed-double / raw counters. Tagged words are counted by the
  // VisitPointers() callbacks during host->Iterate(this); the remainder is
  // treated as raw, then JSObjects get embedder and unboxed-double words
  // reclassified out of the tagged/raw buckets below.
  void RecordStats(HeapObject* host) {
    size_t old_pointer_fields_count = *tagged_fields_count_;
    host->Iterate(this);
    // Delta of the global counter = tagged words in this object.
    size_t tagged_fields_count_in_object =
        *tagged_fields_count_ - old_pointer_fields_count;

    int object_size_in_words = host->Size() / kPointerSize;
    DCHECK_LE(tagged_fields_count_in_object, object_size_in_words);
    size_t raw_fields_count_in_object =
        object_size_in_words - tagged_fields_count_in_object;

    if (host->IsJSObject()) {
      JSObjectFieldStats field_stats = GetInobjectFieldStats(host->map());
      // Embedder fields are already included into pointer words.
      DCHECK_LE(field_stats.embedded_fields_count_,
                tagged_fields_count_in_object);
      // Move embedder fields from the tagged bucket into their own bucket.
      tagged_fields_count_in_object -= field_stats.embedded_fields_count_;
      *tagged_fields_count_ -= field_stats.embedded_fields_count_;
      *embedder_fields_count_ += field_stats.embedded_fields_count_;

      // The rest are data words.
      DCHECK_LE(field_stats.unboxed_double_fields_count_,
                raw_fields_count_in_object);
      raw_fields_count_in_object -= field_stats.unboxed_double_fields_count_;
      *unboxed_double_fields_count_ += field_stats.unboxed_double_fields_count_;
    }
    *raw_fields_count_ += raw_fields_count_in_object;
  }
70 
  // ObjectVisitor callbacks: each visited slot (strong or weak) counts as
  // one tagged field.
  void VisitPointers(HeapObject* host, ObjectSlot start,
                     ObjectSlot end) override {
    *tagged_fields_count_ += (end - start);
  }
  void VisitPointers(HeapObject* host, MaybeObjectSlot start,
                     MaybeObjectSlot end) override {
    *tagged_fields_count_ += (end - start);
  }
79 
 private:
  // Per-map summary of in-object field categories; cached because many
  // objects share the same map.
  struct JSObjectFieldStats {
    JSObjectFieldStats()
        : embedded_fields_count_(0), unboxed_double_fields_count_(0) {}

    unsigned embedded_fields_count_ : kDescriptorIndexBitCount;
    unsigned unboxed_double_fields_count_ : kDescriptorIndexBitCount;
  };
  // Lazily filled by GetInobjectFieldStats().
  std::unordered_map<Map, JSObjectFieldStats, ObjectPtr::Hasher>
      field_stats_cache_;

  JSObjectFieldStats GetInobjectFieldStats(Map map);

  // Output counters, owned by the ObjectStats instance this collector feeds.
  size_t* const tagged_fields_count_;
  size_t* const embedder_fields_count_;
  size_t* const unboxed_double_fields_count_;
  size_t* const raw_fields_count_;
};
98 
// Computes (and memoizes per map) how many in-object fields of |map|'s
// instances are embedder fields and how many are unboxed doubles.
FieldStatsCollector::JSObjectFieldStats
FieldStatsCollector::GetInobjectFieldStats(Map map) {
  auto iter = field_stats_cache_.find(map);
  if (iter != field_stats_cache_.end()) {
    return iter->second;
  }
  // Iterate descriptor array and calculate stats.
  JSObjectFieldStats stats;
  stats.embedded_fields_count_ = JSObject::GetEmbedderFieldCount(map);
  // Dictionary-mode maps have no meaningful descriptor array to walk.
  if (!map->is_dictionary_map()) {
    int nof = map->NumberOfOwnDescriptors();
    DescriptorArray* descriptors = map->instance_descriptors();
    for (int descriptor = 0; descriptor < nof; descriptor++) {
      PropertyDetails details = descriptors->GetDetails(descriptor);
      if (details.location() == kField) {
        FieldIndex index = FieldIndex::ForDescriptor(map, descriptor);
        // Stop on first out-of-object field.
        if (!index.is_inobject()) break;
        if (details.representation().IsDouble() &&
            map->IsUnboxedDoubleField(index)) {
          ++stats.unboxed_double_fields_count_;
        }
      }
    }
  }
  field_stats_cache_.insert(std::make_pair(map, stats));
  return stats;
}
127 
128 void ObjectStats::ClearObjectStats(bool clear_last_time_stats) {
129  memset(object_counts_, 0, sizeof(object_counts_));
130  memset(object_sizes_, 0, sizeof(object_sizes_));
131  memset(over_allocated_, 0, sizeof(over_allocated_));
132  memset(size_histogram_, 0, sizeof(size_histogram_));
133  memset(over_allocated_histogram_, 0, sizeof(over_allocated_histogram_));
134  if (clear_last_time_stats) {
135  memset(object_counts_last_time_, 0, sizeof(object_counts_last_time_));
136  memset(object_sizes_last_time_, 0, sizeof(object_sizes_last_time_));
137  }
138  tagged_fields_count_ = 0;
139  embedder_fields_count_ = 0;
140  unboxed_double_fields_count_ = 0;
141  raw_fields_count_ = 0;
142 }
143 
144 // Tell the compiler to never inline this: occasionally, the optimizer will
145 // decide to inline this and unroll the loop, making the compiled code more than
146 // 100KB larger.
147 V8_NOINLINE static void PrintJSONArray(size_t* array, const int len) {
148  PrintF("[ ");
149  for (int i = 0; i < len; i++) {
150  PrintF("%zu", array[i]);
151  if (i != (len - 1)) PrintF(", ");
152  }
153  PrintF(" ]");
154 }
155 
// Streams |array| into |stream| via PrintCollection's collection formatting.
V8_NOINLINE static void DumpJSONArray(std::stringstream& stream, size_t* array,
                                      const int len) {
  stream << PrintCollection(Vector<size_t>(array, len));
}
160 
// Prints the common JSON prefix identifying the isolate, the GC counter
// ("id") and the caller-supplied key. Ends with ", " so callers can append
// further fields.
void ObjectStats::PrintKeyAndId(const char* key, int gc_count) {
  PrintF("\"isolate\": \"%p\", \"id\": %d, \"key\": \"%s\", ",
         reinterpret_cast<void*>(isolate()), gc_count, key);
}
165 
// Prints one (virtual) instance type's totals and both histograms as a
// single JSON object line.
void ObjectStats::PrintInstanceTypeJSON(const char* key, int gc_count,
                                        const char* name, int index) {
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"instance_type_data\", ");
  PrintF("\"instance_type\": %d, ", index);
  PrintF("\"instance_type_name\": \"%s\", ", name);
  PrintF("\"overall\": %zu, ", object_sizes_[index]);
  PrintF("\"count\": %zu, ", object_counts_[index]);
  PrintF("\"over_allocated\": %zu, ", over_allocated_[index]);
  PrintF("\"histogram\": ");
  PrintJSONArray(size_histogram_[index], kNumberOfBuckets);
  PrintF(",");
  PrintF("\"over_allocated_histogram\": ");
  PrintJSONArray(over_allocated_histogram_[index], kNumberOfBuckets);
  PrintF(" }\n");
}
183 
// Dumps all statistics as line-delimited JSON records via PrintF: one
// gc_descriptor line, one field_data line, one bucket_sizes line, then one
// line per (virtual) instance type.
void ObjectStats::PrintJSON(const char* key) {
  double time = isolate()->time_millis_since_init();
  int gc_count = heap()->gc_count();

  // gc_descriptor
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"gc_descriptor\", \"time\": %f }\n", time);
  // field_data
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"field_data\"");
  // Field counters are in words; convert to bytes for the report.
  PrintF(", \"tagged_fields\": %zu", tagged_fields_count_ * kPointerSize);
  PrintF(", \"embedder_fields\": %zu", embedder_fields_count_ * kPointerSize);
  PrintF(", \"unboxed_double_fields\": %zu",
         unboxed_double_fields_count_ * kDoubleSize);
  PrintF(", \"other_raw_fields\": %zu", raw_fields_count_ * kPointerSize);
  PrintF(" }\n");
  // bucket_sizes: the i-th histogram bucket covers sizes up to this value.
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"bucket_sizes\", \"sizes\": [ ");
  for (int i = 0; i < kNumberOfBuckets; i++) {
    PrintF("%d", 1 << (kFirstBucketShift + i));
    if (i != (kNumberOfBuckets - 1)) PrintF(", ");
  }
  PrintF(" ] }\n");

// Virtual instance types are stored after the regular ones, hence the
// FIRST_VIRTUAL_TYPE offset in the second wrapper.
#define INSTANCE_TYPE_WRAPPER(name) \
  PrintInstanceTypeJSON(key, gc_count, #name, name);

#define VIRTUAL_INSTANCE_TYPE_WRAPPER(name) \
  PrintInstanceTypeJSON(key, gc_count, #name, FIRST_VIRTUAL_TYPE + name);

  INSTANCE_TYPE_LIST(INSTANCE_TYPE_WRAPPER)
  VIRTUAL_INSTANCE_TYPE_LIST(VIRTUAL_INSTANCE_TYPE_WRAPPER)

#undef INSTANCE_TYPE_WRAPPER
#undef VIRTUAL_INSTANCE_TYPE_WRAPPER
}
224 
225 void ObjectStats::DumpInstanceTypeData(std::stringstream& stream,
226  const char* name, int index) {
227  stream << "\"" << name << "\":{";
228  stream << "\"type\":" << static_cast<int>(index) << ",";
229  stream << "\"overall\":" << object_sizes_[index] << ",";
230  stream << "\"count\":" << object_counts_[index] << ",";
231  stream << "\"over_allocated\":" << over_allocated_[index] << ",";
232  stream << "\"histogram\":";
233  DumpJSONArray(stream, size_histogram_[index], kNumberOfBuckets);
234  stream << ",\"over_allocated_histogram\":";
235  DumpJSONArray(stream, over_allocated_histogram_[index], kNumberOfBuckets);
236  stream << "},";
237 }
238 
// Streams all statistics as one JSON document into |stream|. Mirrors
// PrintJSON(), but produces a single document instead of line-delimited
// records.
void ObjectStats::Dump(std::stringstream& stream) {
  double time = isolate()->time_millis_since_init();
  int gc_count = heap()->gc_count();

  stream << "{";
  stream << "\"isolate\":\"" << reinterpret_cast<void*>(isolate()) << "\",";
  stream << "\"id\":" << gc_count << ",";
  stream << "\"time\":" << time << ",";

  // field_data (word counters converted to bytes).
  stream << "\"field_data\":{";
  stream << "\"tagged_fields\":" << (tagged_fields_count_ * kPointerSize);
  stream << ",\"embedder_fields\":" << (embedder_fields_count_ * kPointerSize);
  stream << ",\"unboxed_double_fields\": "
         << (unboxed_double_fields_count_ * kDoubleSize);
  stream << ",\"other_raw_fields\":" << (raw_fields_count_ * kPointerSize);
  stream << "}, ";

  stream << "\"bucket_sizes\":[";
  for (int i = 0; i < kNumberOfBuckets; i++) {
    stream << (1 << (kFirstBucketShift + i));
    if (i != (kNumberOfBuckets - 1)) stream << ",";
  }
  stream << "],";
  stream << "\"type_data\":{";

#define INSTANCE_TYPE_WRAPPER(name) DumpInstanceTypeData(stream, #name, name);

#define VIRTUAL_INSTANCE_TYPE_WRAPPER(name) \
  DumpInstanceTypeData(stream, #name, FIRST_VIRTUAL_TYPE + name);

  INSTANCE_TYPE_LIST(INSTANCE_TYPE_WRAPPER);
  VIRTUAL_INSTANCE_TYPE_LIST(VIRTUAL_INSTANCE_TYPE_WRAPPER)
  // Every DumpInstanceTypeData() call emits a trailing comma; this dummy
  // "END" entry keeps the JSON well-formed.
  stream << "\"END\":{}}}";

#undef INSTANCE_TYPE_WRAPPER
#undef VIRTUAL_INSTANCE_TYPE_WRAPPER
}
277 
// Snapshots the current counters into the *_last_time_ arrays and resets the
// live counters. The mutex serializes concurrent checkpoints.
void ObjectStats::CheckpointObjectStats() {
  base::MutexGuard lock_guard(object_stats_mutex.Pointer());
  MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
  MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
  ClearObjectStats();
}
284 
namespace {

// Floor of log2(size); requires size > 0 (checked in debug builds).
int Log2ForSize(size_t size) {
  DCHECK_GT(size, 0);
  return kSizetSize * 8 - 1 - base::bits::CountLeadingZeros(size);
}

}  // namespace
293 
294 int ObjectStats::HistogramIndexFromSize(size_t size) {
295  if (size == 0) return 0;
296  return Min(Max(Log2ForSize(size) + 1 - kFirstBucketShift, 0),
297  kLastValueBucketIndex);
298 }
299 
300 void ObjectStats::RecordObjectStats(InstanceType type, size_t size) {
301  DCHECK_LE(type, LAST_TYPE);
302  object_counts_[type]++;
303  object_sizes_[type] += size;
304  size_histogram_[type][HistogramIndexFromSize(size)]++;
305 }
306 
307 void ObjectStats::RecordVirtualObjectStats(VirtualInstanceType type,
308  size_t size, size_t over_allocated) {
309  DCHECK_LE(type, LAST_VIRTUAL_TYPE);
310  object_counts_[FIRST_VIRTUAL_TYPE + type]++;
311  object_sizes_[FIRST_VIRTUAL_TYPE + type] += size;
312  size_histogram_[FIRST_VIRTUAL_TYPE + type][HistogramIndexFromSize(size)]++;
313  over_allocated_[FIRST_VIRTUAL_TYPE + type] += over_allocated;
314  over_allocated_histogram_[FIRST_VIRTUAL_TYPE + type]
315  [HistogramIndexFromSize(size)]++;
316 }
317 
// The isolate is reached through the heap rather than stored redundantly.
Isolate* ObjectStats::isolate() { return heap()->isolate(); }
319 
 public:
  // Collection runs in two passes over the heap; some virtual categories
  // must be claimed before others (see CollectStatistics()).
  enum Phase {
    kPhase1,
    kPhase2,
  };
  static const int kNumberOfPhases = kPhase2 + 1;

  // Records root/global structures (caches, lists, boilerplates).
  void CollectGlobalStatistics();

  enum class CollectFieldStats { kNo, kYes };
  // Records statistics for a single heap object in the given phase.
  void CollectStatistics(HeapObject* obj, Phase phase,
                         CollectFieldStats collect_field_stats);

 private:
  enum CowMode {
    kCheckCow,
    kIgnoreCow,
  };

  Isolate* isolate() { return heap_->isolate(); }

  bool RecordVirtualObjectStats(HeapObject* parent, HeapObject* obj,
                                ObjectStats::VirtualInstanceType type,
                                size_t size, size_t over_allocated,
                                CowMode check_cow_array = kCheckCow);
  void RecordExternalResourceStats(Address resource,
                                   ObjectStats::VirtualInstanceType type,
                                   size_t size);
  // Gets size from |ob| and assumes no over allocating.
  bool RecordSimpleVirtualObjectStats(HeapObject* parent, HeapObject* obj,
                                      ObjectStats::VirtualInstanceType type);
  // For HashTable it is possible to compute over allocated memory.
  void RecordHashTableVirtualObjectStats(HeapObject* parent,
                                         FixedArray hash_table,
                                         ObjectStats::VirtualInstanceType type);

  bool SameLiveness(HeapObject* obj1, HeapObject* obj2);
  bool CanRecordFixedArray(FixedArrayBase array);
  bool IsCowArray(FixedArrayBase array);

  // Blacklist for objects that should not be recorded using
  // VirtualObjectStats and RecordSimpleVirtualObjectStats. For recording those
  // objects dispatch to the low level ObjectStats::RecordObjectStats manually.
  bool ShouldRecordObject(HeapObject* object, CowMode check_cow_array);

  void RecordObjectStats(HeapObject* obj, InstanceType type, size_t size);

  // Specific recursion into constant pool or embedded code objects. Records
  // FixedArrays and Tuple2.
  void RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
      HeapObject* parent, HeapObject* object,
      ObjectStats::VirtualInstanceType type);

  // Details.
  void RecordVirtualAllocationSiteDetails(AllocationSite* site);
  void RecordVirtualBytecodeArrayDetails(BytecodeArray bytecode);
  void RecordVirtualCodeDetails(Code code);
  void RecordVirtualContext(Context context);
  void RecordVirtualFeedbackVectorDetails(FeedbackVector* vector);
  void RecordVirtualFixedArrayDetails(FixedArray array);
  void RecordVirtualFunctionTemplateInfoDetails(FunctionTemplateInfo* fti);
  void RecordVirtualJSGlobalObjectDetails(JSGlobalObject* object);
  void RecordVirtualJSCollectionDetails(JSObject* object);
  void RecordVirtualJSObjectDetails(JSObject* object);
  void RecordVirtualMapDetails(Map map);
  void RecordVirtualScriptDetails(Script* script);
  void RecordVirtualExternalStringDetails(ExternalString script);
  void RecordVirtualSharedFunctionInfoDetails(SharedFunctionInfo* info);
  void RecordVirtualJSFunctionDetails(JSFunction* function);

  void RecordVirtualArrayBoilerplateDescription(
      ArrayBoilerplateDescription* description);
  Heap* heap_;
  ObjectStats* stats_;
  // Objects already attributed to a virtual category; used to avoid
  // double-counting in RecordObjectStats().
  std::unordered_set<HeapObject*> virtual_objects_;
  // Off-heap resources recorded so far (deduplicated by address).
  std::unordered_set<Address> external_resources_;
  FieldStatsCollector field_stats_collector_;
};
402 
// Wires the field-stats collector directly to the counters owned by |stats|
// and caches the collector's non-atomic marking state for liveness checks.
ObjectStatsCollectorImpl::ObjectStatsCollectorImpl(Heap* heap,
                                                   ObjectStats* stats)
    : heap_(heap),
      stats_(stats),
      marking_state_(
          heap->mark_compact_collector()->non_atomic_marking_state()),
      field_stats_collector_(
          &stats->tagged_fields_count_, &stats->embedder_fields_count_,
          &stats->unboxed_double_fields_count_, &stats->raw_fields_count_) {}
412 
413 bool ObjectStatsCollectorImpl::ShouldRecordObject(HeapObject* obj,
414  CowMode check_cow_array) {
415  if (obj->IsFixedArrayExact()) {
416  FixedArray fixed_array = FixedArray::cast(obj);
417  bool cow_check = check_cow_array == kIgnoreCow || !IsCowArray(fixed_array);
418  return CanRecordFixedArray(fixed_array) && cow_check;
419  }
420  if (obj == ReadOnlyRoots(heap_).empty_property_array()) return false;
421  return true;
422 }
423 
// Records |hash_table| under |type|. Over-allocation is reported as zero
// until computed (see TODO below).
void ObjectStatsCollectorImpl::RecordHashTableVirtualObjectStats(
    HeapObject* parent, FixedArray hash_table,
    ObjectStats::VirtualInstanceType type) {
  CHECK(hash_table->IsHashTable());
  // TODO(mlippautz): Implement over allocation for hash tables.
  RecordVirtualObjectStats(parent, hash_table, type, hash_table->Size(),
                           ObjectStats::kNoOverAllocation);
}
432 
// Convenience wrapper: records |obj| with its own Size(), no over-allocation
// and the default COW check. Returns whether the object was recorded.
bool ObjectStatsCollectorImpl::RecordSimpleVirtualObjectStats(
    HeapObject* parent, HeapObject* obj,
    ObjectStats::VirtualInstanceType type) {
  return RecordVirtualObjectStats(parent, obj, type, obj->Size(),
                                  ObjectStats::kNoOverAllocation, kCheckCow);
}
439 
440 bool ObjectStatsCollectorImpl::RecordVirtualObjectStats(
441  HeapObject* parent, HeapObject* obj, ObjectStats::VirtualInstanceType type,
442  size_t size, size_t over_allocated, CowMode check_cow_array) {
443  if (!SameLiveness(parent, obj) || !ShouldRecordObject(obj, check_cow_array)) {
444  return false;
445  }
446 
447  if (virtual_objects_.find(obj) == virtual_objects_.end()) {
448  virtual_objects_.insert(obj);
449  stats_->RecordVirtualObjectStats(type, size, over_allocated);
450  return true;
451  }
452  return false;
453 }
454 
455 void ObjectStatsCollectorImpl::RecordExternalResourceStats(
456  Address resource, ObjectStats::VirtualInstanceType type, size_t size) {
457  if (external_resources_.find(resource) == external_resources_.end()) {
458  external_resources_.insert(resource);
459  stats_->RecordVirtualObjectStats(type, size, 0);
460  }
461 }
462 
// Attributes an allocation site's boilerplate object and its backing stores
// (properties and elements) to boilerplate-specific virtual categories.
void ObjectStatsCollectorImpl::RecordVirtualAllocationSiteDetails(
    AllocationSite* site) {
  if (!site->PointsToLiteral()) return;
  JSObject* boilerplate = site->boilerplate();
  if (boilerplate->IsJSArray()) {
    RecordSimpleVirtualObjectStats(site, boilerplate,
                                   ObjectStats::JS_ARRAY_BOILERPLATE_TYPE);
    // Array boilerplates cannot have properties.
  } else {
    RecordVirtualObjectStats(
        site, boilerplate, ObjectStats::JS_OBJECT_BOILERPLATE_TYPE,
        boilerplate->Size(), ObjectStats::kNoOverAllocation);
    if (boilerplate->HasFastProperties()) {
      // We'll mis-classify the empty_property_array here. Given that there is a
      // single instance, this is negligible.
      PropertyArray properties = boilerplate->property_array();
      RecordSimpleVirtualObjectStats(
          site, properties, ObjectStats::BOILERPLATE_PROPERTY_ARRAY_TYPE);
    } else {
      NameDictionary properties = boilerplate->property_dictionary();
      RecordSimpleVirtualObjectStats(
          site, properties, ObjectStats::BOILERPLATE_PROPERTY_DICTIONARY_TYPE);
    }
  }
  // Elements are recorded for both the array and the object case.
  FixedArrayBase elements = boilerplate->elements();
  RecordSimpleVirtualObjectStats(site, elements,
                                 ObjectStats::BOILERPLATE_ELEMENTS_TYPE);
}
491 
// Records the CallHandlerInfo objects hanging off a FunctionTemplateInfo.
void ObjectStatsCollectorImpl::RecordVirtualFunctionTemplateInfoDetails(
    FunctionTemplateInfo* fti) {
  // named_property_handler and indexed_property_handler are recorded as
  // INTERCEPTOR_INFO_TYPE.
  if (!fti->call_code()->IsUndefined(isolate())) {
    RecordSimpleVirtualObjectStats(
        fti, CallHandlerInfo::cast(fti->call_code()),
        ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
  }
  if (!fti->GetInstanceCallHandler()->IsUndefined(isolate())) {
    RecordSimpleVirtualObjectStats(
        fti, CallHandlerInfo::cast(fti->GetInstanceCallHandler()),
        ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
  }
}
507 
508 void ObjectStatsCollectorImpl::RecordVirtualJSGlobalObjectDetails(
509  JSGlobalObject* object) {
510  // Properties.
511  GlobalDictionary properties = object->global_dictionary();
512  RecordHashTableVirtualObjectStats(object, properties,
513  ObjectStats::GLOBAL_PROPERTIES_TYPE);
514  // Elements.
515  FixedArrayBase elements = object->elements();
516  RecordSimpleVirtualObjectStats(object, elements,
517  ObjectStats::GLOBAL_ELEMENTS_TYPE);
518 }
519 
520 void ObjectStatsCollectorImpl::RecordVirtualJSCollectionDetails(
521  JSObject* object) {
522  if (object->IsJSMap()) {
523  RecordSimpleVirtualObjectStats(
524  object, FixedArray::cast(JSMap::cast(object)->table()),
525  ObjectStats::JS_COLLECTION_TABLE_TYPE);
526  }
527  if (object->IsJSSet()) {
528  RecordSimpleVirtualObjectStats(
529  object, FixedArray::cast(JSSet::cast(object)->table()),
530  ObjectStats::JS_COLLECTION_TABLE_TYPE);
531  }
532 }
533 
// Attributes a plain JSObject's property and elements backing stores.
void ObjectStatsCollectorImpl::RecordVirtualJSObjectDetails(JSObject* object) {
  // JSGlobalObject is recorded separately.
  if (object->IsJSGlobalObject()) return;

  // Properties.
  if (object->HasFastProperties()) {
    // Fast property arrays are only sanity-checked here; they are counted
    // under the regular PROPERTY_ARRAY_TYPE instance type instead.
    PropertyArray properties = object->property_array();
    CHECK_EQ(PROPERTY_ARRAY_TYPE, properties->map()->instance_type());
  } else {
    NameDictionary properties = object->property_dictionary();
    RecordHashTableVirtualObjectStats(
        object, properties, ObjectStats::OBJECT_PROPERTY_DICTIONARY_TYPE);
  }
  // Elements.
  FixedArrayBase elements = object->elements();
  RecordSimpleVirtualObjectStats(object, elements, ObjectStats::ELEMENTS_TYPE);
}
551 
552 static ObjectStats::VirtualInstanceType GetFeedbackSlotType(
553  MaybeObject maybe_obj, FeedbackSlotKind kind, Isolate* isolate) {
554  if (maybe_obj->IsCleared())
555  return ObjectStats::FEEDBACK_VECTOR_SLOT_OTHER_TYPE;
556  Object* obj = maybe_obj->GetHeapObjectOrSmi();
557  switch (kind) {
558  case FeedbackSlotKind::kCall:
559  if (obj == *isolate->factory()->uninitialized_symbol() ||
560  obj == *isolate->factory()->premonomorphic_symbol()) {
561  return ObjectStats::FEEDBACK_VECTOR_SLOT_CALL_UNUSED_TYPE;
562  }
563  return ObjectStats::FEEDBACK_VECTOR_SLOT_CALL_TYPE;
564 
565  case FeedbackSlotKind::kLoadProperty:
566  case FeedbackSlotKind::kLoadGlobalInsideTypeof:
567  case FeedbackSlotKind::kLoadGlobalNotInsideTypeof:
568  case FeedbackSlotKind::kLoadKeyed:
569  if (obj == *isolate->factory()->uninitialized_symbol() ||
570  obj == *isolate->factory()->premonomorphic_symbol()) {
571  return ObjectStats::FEEDBACK_VECTOR_SLOT_LOAD_UNUSED_TYPE;
572  }
573  return ObjectStats::FEEDBACK_VECTOR_SLOT_LOAD_TYPE;
574 
575  case FeedbackSlotKind::kStoreNamedSloppy:
576  case FeedbackSlotKind::kStoreNamedStrict:
577  case FeedbackSlotKind::kStoreOwnNamed:
578  case FeedbackSlotKind::kStoreGlobalSloppy:
579  case FeedbackSlotKind::kStoreGlobalStrict:
580  case FeedbackSlotKind::kStoreKeyedSloppy:
581  case FeedbackSlotKind::kStoreKeyedStrict:
582  if (obj == *isolate->factory()->uninitialized_symbol() ||
583  obj == *isolate->factory()->premonomorphic_symbol()) {
584  return ObjectStats::FEEDBACK_VECTOR_SLOT_STORE_UNUSED_TYPE;
585  }
586  return ObjectStats::FEEDBACK_VECTOR_SLOT_STORE_TYPE;
587 
588  case FeedbackSlotKind::kBinaryOp:
589  case FeedbackSlotKind::kCompareOp:
590  return ObjectStats::FEEDBACK_VECTOR_SLOT_ENUM_TYPE;
591 
592  default:
593  return ObjectStats::FEEDBACK_VECTOR_SLOT_OTHER_TYPE;
594  }
595 }
596 
// Splits a FeedbackVector into header and per-slot virtual categories
// instead of recording it as one object.
void ObjectStatsCollectorImpl::RecordVirtualFeedbackVectorDetails(
    FeedbackVector* vector) {
  if (virtual_objects_.find(vector) == virtual_objects_.end()) {
    // Manually insert the feedback vector into the virtual object list, since
    // we're logging its component parts separately.
    virtual_objects_.insert(vector);

    // Accumulates the bytes logged below; cross-checked against the real
    // object size at the end.
    size_t calculated_size = 0;

    // Log the feedback vector's header (fixed fields).
    size_t header_size = vector->slots_start().address() - vector->address();
    stats_->RecordVirtualObjectStats(ObjectStats::FEEDBACK_VECTOR_HEADER_TYPE,
                                     header_size,
                                     ObjectStats::kNoOverAllocation);
    calculated_size += header_size;

    // Iterate over the feedback slots and log each one.
    // Without metadata the slots cannot be interpreted; bail out (this also
    // skips the final size cross-check).
    if (!vector->shared_function_info()->HasFeedbackMetadata()) return;

    FeedbackMetadataIterator it(vector->metadata());
    while (it.HasNext()) {
      FeedbackSlot slot = it.Next();
      // Log the entry (or entries) taken up by this slot.
      size_t slot_size = it.entry_size() * kPointerSize;
      stats_->RecordVirtualObjectStats(
          GetFeedbackSlotType(vector->Get(slot), it.kind(), heap_->isolate()),
          slot_size, ObjectStats::kNoOverAllocation);
      calculated_size += slot_size;

      // Log the monomorphic/polymorphic helper objects that this slot owns.
      for (int i = 0; i < it.entry_size(); i++) {
        MaybeObject raw_object = vector->get(slot.ToInt() + i);
        HeapObject* object;
        if (raw_object->GetHeapObject(&object)) {
          if (object->IsCell() || object->IsWeakFixedArray()) {
            RecordSimpleVirtualObjectStats(
                vector, object, ObjectStats::FEEDBACK_VECTOR_ENTRY_TYPE);
          }
        }
      }
    }

    CHECK_EQ(calculated_size, vector->Size());
  }
}
642 
643 void ObjectStatsCollectorImpl::RecordVirtualFixedArrayDetails(
644  FixedArray array) {
645  if (IsCowArray(array)) {
646  RecordVirtualObjectStats(nullptr, array, ObjectStats::COW_ARRAY_TYPE,
647  array->Size(), ObjectStats::kNoOverAllocation,
648  kIgnoreCow);
649  }
650 }
651 
// Dispatches |obj| to the type-specific recorders. Phase 1 attributes parts
// of objects to virtual categories; phase 2 does the plain instance-type
// (and optional field) accounting, skipping anything already recorded
// virtually. The if/else order below is significant.
void ObjectStatsCollectorImpl::CollectStatistics(
    HeapObject* obj, Phase phase, CollectFieldStats collect_field_stats) {
  Map map = obj->map();
  switch (phase) {
    case kPhase1:
      if (obj->IsFeedbackVector()) {
        RecordVirtualFeedbackVectorDetails(FeedbackVector::cast(obj));
      } else if (obj->IsMap()) {
        RecordVirtualMapDetails(Map::cast(obj));
      } else if (obj->IsBytecodeArray()) {
        RecordVirtualBytecodeArrayDetails(BytecodeArray::cast(obj));
      } else if (obj->IsCode()) {
        RecordVirtualCodeDetails(Code::cast(obj));
      } else if (obj->IsFunctionTemplateInfo()) {
        RecordVirtualFunctionTemplateInfoDetails(
            FunctionTemplateInfo::cast(obj));
      } else if (obj->IsJSFunction()) {
        RecordVirtualJSFunctionDetails(JSFunction::cast(obj));
      } else if (obj->IsJSGlobalObject()) {
        RecordVirtualJSGlobalObjectDetails(JSGlobalObject::cast(obj));
      } else if (obj->IsJSObject()) {
        // This phase needs to come after RecordVirtualAllocationSiteDetails
        // to properly split among boilerplates.
        RecordVirtualJSObjectDetails(JSObject::cast(obj));
      } else if (obj->IsJSCollection()) {
        RecordVirtualJSCollectionDetails(JSObject::cast(obj));
      } else if (obj->IsSharedFunctionInfo()) {
        RecordVirtualSharedFunctionInfoDetails(SharedFunctionInfo::cast(obj));
      } else if (obj->IsContext()) {
        RecordVirtualContext(Context::cast(obj));
      } else if (obj->IsScript()) {
        RecordVirtualScriptDetails(Script::cast(obj));
      } else if (obj->IsArrayBoilerplateDescription()) {
        RecordVirtualArrayBoilerplateDescription(
            ArrayBoilerplateDescription::cast(obj));
      } else if (obj->IsFixedArrayExact()) {
        // Has to go last as it triggers too eagerly.
        RecordVirtualFixedArrayDetails(FixedArray::cast(obj));
      }
      break;
    case kPhase2:
      if (obj->IsExternalString()) {
        // This has to be in Phase2 to avoid conflicting with recording Script
        // sources. We still want to run RecordObjectStats after though.
        RecordVirtualExternalStringDetails(ExternalString::cast(obj));
      }
      RecordObjectStats(obj, map->instance_type(), obj->Size());
      if (collect_field_stats == CollectFieldStats::kYes) {
        field_stats_collector_.RecordStats(obj);
      }
      break;
  }
}
705 
// Records root-owned structures up front so they are attributed to their
// dedicated categories rather than to whatever later pass finds them.
void ObjectStatsCollectorImpl::CollectGlobalStatistics() {
  // Iterate boilerplates first to disambiguate them from regular JS objects.
  Object* list = heap_->allocation_sites_list();
  while (list->IsAllocationSite()) {
    AllocationSite* site = AllocationSite::cast(list);
    RecordVirtualAllocationSiteDetails(site);
    list = site->weak_next();
  }

  // FixedArray.
  RecordSimpleVirtualObjectStats(nullptr, heap_->serialized_objects(),
                                 ObjectStats::SERIALIZED_OBJECTS_TYPE);
  RecordSimpleVirtualObjectStats(nullptr, heap_->number_string_cache(),
                                 ObjectStats::NUMBER_STRING_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(
      nullptr, heap_->single_character_string_cache(),
      ObjectStats::SINGLE_CHARACTER_STRING_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(nullptr, heap_->string_split_cache(),
                                 ObjectStats::STRING_SPLIT_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(nullptr, heap_->regexp_multiple_cache(),
                                 ObjectStats::REGEXP_MULTIPLE_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(nullptr, heap_->retained_maps(),
                                 ObjectStats::RETAINED_MAPS_TYPE);

  // WeakArrayList.
  RecordSimpleVirtualObjectStats(
      nullptr, WeakArrayList::cast(heap_->noscript_shared_function_infos()),
      ObjectStats::NOSCRIPT_SHARED_FUNCTION_INFOS_TYPE);
  RecordSimpleVirtualObjectStats(nullptr,
                                 WeakArrayList::cast(heap_->script_list()),
                                 ObjectStats::SCRIPT_LIST_TYPE);

  // HashTable.
  RecordHashTableVirtualObjectStats(nullptr, heap_->code_stubs(),
                                    ObjectStats::CODE_STUBS_TABLE_TYPE);
}
742 
743 void ObjectStatsCollectorImpl::RecordObjectStats(HeapObject* obj,
744  InstanceType type,
745  size_t size) {
746  if (virtual_objects_.find(obj) == virtual_objects_.end()) {
747  stats_->RecordObjectStats(type, size);
748  }
749 }
750 
751 bool ObjectStatsCollectorImpl::CanRecordFixedArray(FixedArrayBase array) {
752  ReadOnlyRoots roots(heap_);
753  return array != roots.empty_fixed_array() &&
754  array != roots.empty_sloppy_arguments_elements() &&
755  array != roots.empty_slow_element_dictionary() &&
756  array != roots.empty_property_dictionary();
757 }
758 
// A fixed array is copy-on-write iff it uses the dedicated COW map.
bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase array) {
  return array->map() == ReadOnlyRoots(heap_).fixed_cow_array_map();
}
762 
763 bool ObjectStatsCollectorImpl::SameLiveness(HeapObject* obj1,
764  HeapObject* obj2) {
765  return obj1 == nullptr || obj2 == nullptr ||
766  marking_state_->Color(obj1) == marking_state_->Color(obj2);
767 }
768 
// Attributes a map's enum caches and (for prototype maps) the prototype
// users list to their virtual categories.
void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map map) {
  // TODO(mlippautz): map->dependent_code(): DEPENDENT_CODE_TYPE.

  DescriptorArray* array = map->instance_descriptors();
  // Only the map owning the descriptors records the caches, so shared
  // descriptor arrays are counted once.
  if (map->owns_descriptors() &&
      array != ReadOnlyRoots(heap_).empty_descriptor_array()) {
    // DescriptorArray has its own instance type.
    EnumCache* enum_cache = array->enum_cache();
    RecordSimpleVirtualObjectStats(array, enum_cache->keys(),
                                   ObjectStats::ENUM_CACHE_TYPE);
    RecordSimpleVirtualObjectStats(array, enum_cache->indices(),
                                   ObjectStats::ENUM_INDICES_CACHE_TYPE);
  }

  if (map->is_prototype_map()) {
    if (map->prototype_info()->IsPrototypeInfo()) {
      PrototypeInfo* info = PrototypeInfo::cast(map->prototype_info());
      Object* users = info->prototype_users();
      if (users->IsWeakFixedArray()) {
        RecordSimpleVirtualObjectStats(map, WeakArrayList::cast(users),
                                       ObjectStats::PROTOTYPE_USERS_TYPE);
      }
    }
  }
}
794 
// Attributes a script's SFI list and its source string; external sources are
// tracked by their off-heap payload size.
void ObjectStatsCollectorImpl::RecordVirtualScriptDetails(Script* script) {
  RecordSimpleVirtualObjectStats(
      script, script->shared_function_infos(),
      ObjectStats::SCRIPT_SHARED_FUNCTION_INFOS_TYPE);

  // Log the size of external source code.
  Object* raw_source = script->source();
  if (raw_source->IsExternalString()) {
    // The contents of external strings aren't on the heap, so we have to record
    // them manually. The on-heap String object is recorded independently in
    // the normal pass.
    ExternalString string = ExternalString::cast(raw_source);
    Address resource = string->resource_as_address();
    size_t off_heap_size = string->ExternalPayloadSize();
    RecordExternalResourceStats(
        resource,
        string->IsOneByteRepresentation()
            ? ObjectStats::SCRIPT_SOURCE_EXTERNAL_ONE_BYTE_TYPE
            : ObjectStats::SCRIPT_SOURCE_EXTERNAL_TWO_BYTE_TYPE,
        off_heap_size);
  } else if (raw_source->IsString()) {
    String source = String::cast(raw_source);
    RecordSimpleVirtualObjectStats(
        script, source,
        source->IsOneByteRepresentation()
            ? ObjectStats::SCRIPT_SOURCE_NON_EXTERNAL_ONE_BYTE_TYPE
            : ObjectStats::SCRIPT_SOURCE_NON_EXTERNAL_TWO_BYTE_TYPE);
  }
}
824 
825 void ObjectStatsCollectorImpl::RecordVirtualExternalStringDetails(
826  ExternalString string) {
827  // Track the external string resource size in a separate category.
828 
829  Address resource = string->resource_as_address();
830  size_t off_heap_size = string->ExternalPayloadSize();
831  RecordExternalResourceStats(
832  resource,
833  string->IsOneByteRepresentation()
834  ? ObjectStats::STRING_EXTERNAL_RESOURCE_ONE_BYTE_TYPE
835  : ObjectStats::STRING_EXTERNAL_RESOURCE_TWO_BYTE_TYPE,
836  off_heap_size);
837 }
838 
839 void ObjectStatsCollectorImpl::RecordVirtualSharedFunctionInfoDetails(
840  SharedFunctionInfo* info) {
841  // Uncompiled SharedFunctionInfo gets its own category.
842  if (!info->is_compiled()) {
843  RecordSimpleVirtualObjectStats(
844  nullptr, info, ObjectStats::UNCOMPILED_SHARED_FUNCTION_INFO_TYPE);
845  }
846 }
847 
848 void ObjectStatsCollectorImpl::RecordVirtualJSFunctionDetails(
849  JSFunction* function) {
850  // Uncompiled JSFunctions get their own category.
851  if (!function->is_compiled()) {
852  RecordSimpleVirtualObjectStats(nullptr, function,
853  ObjectStats::UNCOMPILED_JS_FUNCTION_TYPE);
854  }
855 }
void ObjectStatsCollectorImpl::RecordVirtualArrayBoilerplateDescription(
    ArrayBoilerplateDescription* description) {
  // Attribute the boilerplate's constant elements — and, via the helper's
  // recursion, any fixed arrays nested inside them — to a dedicated category.
  RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
      description, description->constant_elements(),
      ObjectStats::ARRAY_BOILERPLATE_DESCRIPTION_ELEMENTS_TYPE);
}
862 
863 void ObjectStatsCollectorImpl::
864  RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
865  HeapObject* parent, HeapObject* object,
866  ObjectStats::VirtualInstanceType type) {
867  if (!RecordSimpleVirtualObjectStats(parent, object, type)) return;
868  if (object->IsFixedArrayExact()) {
869  FixedArray array = FixedArray::cast(object);
870  for (int i = 0; i < array->length(); i++) {
871  Object* entry = array->get(i);
872  if (!entry->IsHeapObject()) continue;
873  RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
874  array, HeapObject::cast(entry), type);
875  }
876  }
877 }
878 
879 void ObjectStatsCollectorImpl::RecordVirtualBytecodeArrayDetails(
880  BytecodeArray bytecode) {
881  RecordSimpleVirtualObjectStats(
882  bytecode, bytecode->constant_pool(),
883  ObjectStats::BYTECODE_ARRAY_CONSTANT_POOL_TYPE);
884  // FixedArrays on constant pool are used for holding descriptor information.
885  // They are shared with optimized code.
886  FixedArray constant_pool = FixedArray::cast(bytecode->constant_pool());
887  for (int i = 0; i < constant_pool->length(); i++) {
888  Object* entry = constant_pool->get(i);
889  if (entry->IsFixedArrayExact()) {
890  RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
891  constant_pool, HeapObject::cast(entry),
892  ObjectStats::EMBEDDED_OBJECT_TYPE);
893  }
894  }
895  RecordSimpleVirtualObjectStats(
896  bytecode, bytecode->handler_table(),
897  ObjectStats::BYTECODE_ARRAY_HANDLER_TABLE_TYPE);
898  RecordSimpleVirtualObjectStats(bytecode, bytecode->SourcePositionTable(),
899  ObjectStats::SOURCE_POSITION_TABLE_TYPE);
900 }
901 
902 namespace {
903 
904 ObjectStats::VirtualInstanceType CodeKindToVirtualInstanceType(
905  Code::Kind kind) {
906  switch (kind) {
907 #define CODE_KIND_CASE(type) \
908  case Code::type: \
909  return ObjectStats::type;
910  CODE_KIND_LIST(CODE_KIND_CASE)
911 #undef CODE_KIND_CASE
912  default:
913  UNREACHABLE();
914  }
915  UNREACHABLE();
916 }
917 
918 } // namespace
919 
void ObjectStatsCollectorImpl::RecordVirtualCodeDetails(Code code) {
  // Attribute the Code object itself to a per-code-kind virtual category.
  RecordSimpleVirtualObjectStats(nullptr, code,
                                 CodeKindToVirtualInstanceType(code->kind()));
  RecordSimpleVirtualObjectStats(code, code->deoptimization_data(),
                                 ObjectStats::DEOPTIMIZATION_DATA_TYPE);
  RecordSimpleVirtualObjectStats(code, code->relocation_info(),
                                 ObjectStats::RELOC_INFO_TYPE);
  // The source position table may be wrapped in a frame cache; in that case
  // record the inner table, otherwise record the table object directly.
  Object* source_position_table = code->source_position_table();
  if (source_position_table->IsSourcePositionTableWithFrameCache()) {
    RecordSimpleVirtualObjectStats(
        code,
        SourcePositionTableWithFrameCache::cast(source_position_table)
            ->source_position_table(),
        ObjectStats::SOURCE_POSITION_TABLE_TYPE);
  } else if (source_position_table->IsHeapObject()) {
    RecordSimpleVirtualObjectStats(code,
                                   HeapObject::cast(source_position_table),
                                   ObjectStats::SOURCE_POSITION_TABLE_TYPE);
  }
  if (code->kind() == Code::Kind::OPTIMIZED_FUNCTION) {
    // For optimized code, the literal array hanging off the deoptimization
    // data gets its own category.
    DeoptimizationData input_data =
        DeoptimizationData::cast(code->deoptimization_data());
    if (input_data->length() > 0) {
      RecordSimpleVirtualObjectStats(code->deoptimization_data(),
                                     input_data->LiteralArray(),
                                     ObjectStats::OPTIMIZED_CODE_LITERALS_TYPE);
    }
  }
  // Walk the embedded-object relocations and attribute embedded fixed arrays
  // (recursively, including their contents) to the code object.
  int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (mode == RelocInfo::EMBEDDED_OBJECT) {
      Object* target = it.rinfo()->target_object();
      if (target->IsFixedArrayExact()) {
        RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
            code, HeapObject::cast(target), ObjectStats::EMBEDDED_OBJECT_TYPE);
      }
    }
  }
}
960 
961 void ObjectStatsCollectorImpl::RecordVirtualContext(Context context) {
962  if (context->IsNativeContext()) {
963  RecordObjectStats(context, NATIVE_CONTEXT_TYPE, context->Size());
964  } else if (context->IsFunctionContext()) {
965  RecordObjectStats(context, FUNCTION_CONTEXT_TYPE, context->Size());
966  } else {
967  RecordSimpleVirtualObjectStats(nullptr, context,
968  ObjectStats::OTHER_CONTEXT_TYPE);
969  }
970 }
971 
973  public:
  // Dispatches each visited object to the live or the dead collector, using
  // mark bits from the mark-compactor's non-atomic marking state.
  ObjectStatsVisitor(Heap* heap, ObjectStatsCollectorImpl* live_collector,
                     ObjectStatsCollectorImpl* dead_collector,
                     ObjectStatsCollectorImpl::Phase phase)
      : live_collector_(live_collector),
        dead_collector_(dead_collector),
        marking_state_(
            heap->mark_compact_collector()->non_atomic_marking_state()),
        phase_(phase) {}
982 
983  bool Visit(HeapObject* obj, int size) {
984  if (marking_state_->IsBlack(obj)) {
985  live_collector_->CollectStatistics(
986  obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kYes);
987  } else {
988  DCHECK(!marking_state_->IsGrey(obj));
989  dead_collector_->CollectStatistics(
990  obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kNo);
991  }
992  return true;
993  }
994 
995  private:
996  ObjectStatsCollectorImpl* live_collector_;
997  ObjectStatsCollectorImpl* dead_collector_;
999  ObjectStatsCollectorImpl::Phase phase_;
1000 };
1001 
1002 namespace {
1003 
1004 void IterateHeap(Heap* heap, ObjectStatsVisitor* visitor) {
1005  SpaceIterator space_it(heap);
1006  HeapObject* obj = nullptr;
1007  while (space_it.has_next()) {
1008  std::unique_ptr<ObjectIterator> it(space_it.next()->GetObjectIterator());
1009  ObjectIterator* obj_it = it.get();
1010  while ((obj = obj_it->Next()) != nullptr) {
1011  visitor->Visit(obj, obj->Size());
1012  }
1013  }
1014 }
1015 
1016 } // namespace
1017 
1018 void ObjectStatsCollector::Collect() {
1019  ObjectStatsCollectorImpl live_collector(heap_, live_);
1020  ObjectStatsCollectorImpl dead_collector(heap_, dead_);
1021  live_collector.CollectGlobalStatistics();
1022  for (int i = 0; i < ObjectStatsCollectorImpl::kNumberOfPhases; i++) {
1023  ObjectStatsVisitor visitor(heap_, &live_collector, &dead_collector,
1024  static_cast<ObjectStatsCollectorImpl::Phase>(i));
1025  IterateHeap(heap_, &visitor);
1026  }
1027 }
1028 
1029 } // namespace internal
1030 } // namespace v8