V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
sampling-heap-profiler.cc
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/profiler/sampling-heap-profiler.h"

#include <stdint.h>
#include <memory>

#include "src/api-inl.h"
#include "src/base/ieee754.h"
#include "src/base/template-utils.h"
#include "src/base/utils/random-number-generator.h"
#include "src/frames-inl.h"
#include "src/heap/heap.h"
#include "src/isolate.h"
#include "src/profiler/strings-storage.h"

namespace v8 {
namespace internal {

// We sample with a Poisson process, with constant average sampling interval.
// This follows the exponential probability distribution with parameter
// λ = 1/rate where rate is the average number of bytes between samples.
//
// Let u be a uniformly distributed random number between 0 and 1, then
// next_sample = (- ln u) / λ
intptr_t SamplingAllocationObserver::GetNextSampleInterval(uint64_t rate) {
  if (FLAG_sampling_heap_profiler_suppress_randomness) {
    return static_cast<intptr_t>(rate);
  }
  double u = random_->NextDouble();
  double next = (-base::ieee754::log(u)) * rate;
  return next < kPointerSize
             ? kPointerSize
             : (next > INT_MAX ? INT_MAX : static_cast<intptr_t>(next));
}
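
// Illustration (not part of the original file): the line above is
// inverse-transform sampling of an exponential distribution, and it can be
// checked in isolation. A minimal self-contained sketch, assuming a fixed
// rate; the names kRate and kDraws are hypothetical:
//
//   #include <cmath>
//   #include <cstdio>
//   #include <random>
//
//   int main() {
//     const double kRate = 512 * 1024;  // average bytes between samples
//     std::mt19937_64 rng(42);
//     std::uniform_real_distribution<double> uniform(0.0, 1.0);
//     double sum = 0;
//     const int kDraws = 1000000;
//     for (int i = 0; i < kDraws; i++) {
//       double u = 1.0 - uniform(rng);  // in (0, 1], avoids log(0)
//       // next_sample = (-ln u) / lambda, with lambda = 1 / kRate.
//       sum += -std::log(u) * kRate;
//     }
//     // The empirical mean converges to kRate, the expected value of an
//     // exponential distribution with parameter lambda = 1 / kRate.
//     std::printf("mean: %.1f (expected %.1f)\n", sum / kDraws, kRate);
//     return 0;
//   }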

// Samples were collected according to a Poisson process. Since we have not
// recorded all allocations, we must approximate the shape of the underlying
// space of allocations based on the samples we have collected. Given that
// we sample at rate R, the probability that an allocation of size S will be
// sampled is 1-exp(-S/R). This function uses the above probability to
// approximate the true number of allocations with size *size* given that
// *count* samples were observed.
v8::AllocationProfile::Allocation SamplingHeapProfiler::ScaleSample(
    size_t size, unsigned int count) const {
  double scale = 1.0 / (1.0 - std::exp(-static_cast<double>(size) / rate_));
  // Round count instead of truncating.
  return {size, static_cast<unsigned int>(count * scale + 0.5)};
}
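
// Worked example (illustration only, not in the original source): with a
// sampling rate R = 65536 bytes and an allocation size S = 1024 bytes, the
// probability that any one such allocation is sampled is
//   1 - exp(-S/R) = 1 - exp(-1024/65536) ≈ 0.0155,
// so scale = 1 / 0.0155 ≈ 64.5. Observing count = 2 samples of that size
// therefore yields an estimate of round(2 * 64.5) = 129 allocations.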

SamplingHeapProfiler::SamplingHeapProfiler(
    Heap* heap, StringsStorage* names, uint64_t rate, int stack_depth,
    v8::HeapProfiler::SamplingFlags flags)
    : isolate_(heap->isolate()),
      heap_(heap),
      new_space_observer_(new SamplingAllocationObserver(
          heap_, static_cast<intptr_t>(rate), rate, this,
          heap->isolate()->random_number_generator())),
      other_spaces_observer_(new SamplingAllocationObserver(
          heap_, static_cast<intptr_t>(rate), rate, this,
          heap->isolate()->random_number_generator())),
      names_(names),
      profile_root_(nullptr, "(root)", v8::UnboundScript::kNoScriptId, 0,
                    next_node_id()),
      stack_depth_(stack_depth),
      rate_(rate),
      flags_(flags) {
  CHECK_GT(rate_, 0u);
  heap_->AddAllocationObserversToAllSpaces(other_spaces_observer_.get(),
                                           new_space_observer_.get());
}

SamplingHeapProfiler::~SamplingHeapProfiler() {
  heap_->RemoveAllocationObserversFromAllSpaces(other_spaces_observer_.get(),
                                                new_space_observer_.get());
}

void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
  DisallowHeapAllocation no_allocation;

  HandleScope scope(isolate_);
  HeapObject* heap_object = HeapObject::FromAddress(soon_object);
  Handle<Object> obj(heap_object, isolate_);

  // Mark the new block as FreeSpace to make sure the heap is iterable while we
  // are taking the sample.
  heap_->CreateFillerObjectAt(soon_object, static_cast<int>(size),
                              ClearRecordedSlots::kNo);

  Local<v8::Value> loc = v8::Utils::ToLocal(obj);

  AllocationNode* node = AddStack();
  node->allocations_[size]++;
  auto sample =
      base::make_unique<Sample>(size, node, loc, this, next_sample_id());
  sample->global.SetWeak(sample.get(), OnWeakCallback,
                         WeakCallbackType::kParameter);
#if __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated"
#endif
  // MarkIndependent is marked deprecated but we still rely on it here
  // temporarily.
  sample->global.MarkIndependent();
#if __clang__
#pragma clang diagnostic pop
#endif
  samples_.emplace(sample.get(), std::move(sample));
}

void SamplingHeapProfiler::OnWeakCallback(
    const WeakCallbackInfo<Sample>& data) {
  Sample* sample = data.GetParameter();
  AllocationNode* node = sample->owner;
  DCHECK_GT(node->allocations_[sample->size], 0);
  node->allocations_[sample->size]--;
  if (node->allocations_[sample->size] == 0) {
    node->allocations_.erase(sample->size);
    while (node->allocations_.empty() && node->children_.empty() &&
           node->parent_ && !node->parent_->pinned_) {
      AllocationNode* parent = node->parent_;
      AllocationNode::FunctionId id = AllocationNode::function_id(
          node->script_id_, node->script_position_, node->name_);
      parent->children_.erase(id);
      node = parent;
    }
  }
  sample->profiler->samples_.erase(sample);
  // sample is deleted because its unique ptr was erased from samples_.
}
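
// Worked trace (illustration only, with hypothetical function names): suppose
// the only live sample sat under the path (root) -> outer -> inner. When its
// object is reclaimed, inner's allocation map becomes empty, so the loop above
// erases inner from outer's children; outer is then empty too and is erased
// from (root)'s children on the next iteration. The walk stops at (root),
// which has no parent, and would also stop early at any node currently pinned
// by TranslateAllocationNode.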

SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::FindOrAddChildNode(
    AllocationNode* parent, const char* name, int script_id,
    int start_position) {
  AllocationNode::FunctionId id =
      AllocationNode::function_id(script_id, start_position, name);
  AllocationNode* child = parent->FindChildNode(id);
  if (child) {
    DCHECK_EQ(strcmp(child->name_, name), 0);
    return child;
  }
  auto new_child = base::make_unique<AllocationNode>(
      parent, name, script_id, start_position, next_node_id());
  return parent->AddChildNode(id, std::move(new_child));
}

SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::AddStack() {
  AllocationNode* node = &profile_root_;

  std::vector<SharedFunctionInfo*> stack;
  JavaScriptFrameIterator it(isolate_);
  int frames_captured = 0;
  bool found_arguments_marker_frames = false;
  while (!it.done() && frames_captured < stack_depth_) {
    JavaScriptFrame* frame = it.frame();
    // If we are materializing objects during deoptimization, inlined
    // closures may not yet be materialized, and this includes the
    // closure on the stack. Skip over any such frames (they'll be
    // in the top frames of the stack). The allocations made in this
    // sensitive moment belong to the formerly optimized frame anyway.
    if (frame->unchecked_function()->IsJSFunction()) {
      SharedFunctionInfo* shared = frame->function()->shared();
      stack.push_back(shared);
      frames_captured++;
    } else {
      found_arguments_marker_frames = true;
    }
    it.Advance();
  }

  if (frames_captured == 0) {
    const char* name = nullptr;
    switch (isolate_->current_vm_state()) {
      case GC:
        name = "(GC)";
        break;
      case PARSER:
        name = "(PARSER)";
        break;
      case COMPILER:
        name = "(COMPILER)";
        break;
      case BYTECODE_COMPILER:
        name = "(BYTECODE_COMPILER)";
        break;
      case OTHER:
        name = "(V8 API)";
        break;
      case EXTERNAL:
        name = "(EXTERNAL)";
        break;
      case IDLE:
        name = "(IDLE)";
        break;
      case JS:
        name = "(JS)";
        break;
    }
    return FindOrAddChildNode(node, name, v8::UnboundScript::kNoScriptId, 0);
  }

  // We need to process the stack in reverse order as the top of the stack is
  // the first element in the list.
  for (auto it = stack.rbegin(); it != stack.rend(); ++it) {
    SharedFunctionInfo* shared = *it;
    const char* name = this->names()->GetName(shared->DebugName());
    int script_id = v8::UnboundScript::kNoScriptId;
    if (shared->script()->IsScript()) {
      Script* script = Script::cast(shared->script());
      script_id = script->id();
    }
    node = FindOrAddChildNode(node, name, script_id, shared->StartPosition());
  }

  if (found_arguments_marker_frames) {
    node =
        FindOrAddChildNode(node, "(deopt)", v8::UnboundScript::kNoScriptId, 0);
  }

  return node;
}
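
// Example (illustration only, with hypothetical function names): for a JS
// stack in which outer() calls inner() and inner() allocates, the frame
// iterator yields inner first, so the reverse walk above produces the path
//   (root) -> outer -> inner
// and the sample is recorded on the inner node. If no frame was captured at
// all (e.g. the allocation happened during GC), a single synthetic child such
// as "(GC)" is used instead.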

v8::AllocationProfile::Node* SamplingHeapProfiler::TranslateAllocationNode(
    AllocationProfile* profile, SamplingHeapProfiler::AllocationNode* node,
    const std::map<int, Handle<Script>>& scripts) {
  // By pinning the node we make sure its children won't get disposed if
  // a GC kicks in during the tree retrieval.
  node->pinned_ = true;
  Local<v8::String> script_name =
      ToApiHandle<v8::String>(isolate_->factory()->InternalizeUtf8String(""));
  int line = v8::AllocationProfile::kNoLineNumberInfo;
  int column = v8::AllocationProfile::kNoColumnNumberInfo;
  std::vector<v8::AllocationProfile::Allocation> allocations;
  allocations.reserve(node->allocations_.size());
  if (node->script_id_ != v8::UnboundScript::kNoScriptId &&
      scripts.find(node->script_id_) != scripts.end()) {
    // Cannot use std::map<T>::at because it is not available on android.
    auto non_const_scripts =
        const_cast<std::map<int, Handle<Script>>&>(scripts);
    Handle<Script> script = non_const_scripts[node->script_id_];
    if (!script.is_null()) {
      if (script->name()->IsName()) {
        Name* name = Name::cast(script->name());
        script_name = ToApiHandle<v8::String>(
            isolate_->factory()->InternalizeUtf8String(names_->GetName(name)));
      }
      line = 1 + Script::GetLineNumber(script, node->script_position_);
      column = 1 + Script::GetColumnNumber(script, node->script_position_);
    }
  }
  for (auto alloc : node->allocations_) {
    allocations.push_back(ScaleSample(alloc.first, alloc.second));
  }

  profile->nodes_.push_back(v8::AllocationProfile::Node{
      ToApiHandle<v8::String>(
          isolate_->factory()->InternalizeUtf8String(node->name_)),
      script_name, node->script_id_, node->script_position_, line, column,
      node->id_, std::vector<v8::AllocationProfile::Node*>(), allocations});
  v8::AllocationProfile::Node* current = &profile->nodes_.back();
  // The |children_| map may have nodes inserted into it during translation
  // because the translation may allocate strings on the JS heap that have
  // the potential to be sampled. That's ok since map iterators are not
  // invalidated upon std::map insertion.
  for (const auto& it : node->children_) {
    current->children.push_back(
        TranslateAllocationNode(profile, it.second.get(), scripts));
  }
  node->pinned_ = false;
  return current;
}

v8::AllocationProfile* SamplingHeapProfiler::GetAllocationProfile() {
  if (flags_ & v8::HeapProfiler::kSamplingForceGC) {
    isolate_->heap()->CollectAllGarbage(
        Heap::kNoGCFlags, GarbageCollectionReason::kSamplingProfiler);
  }
  // To resolve positions to line/column numbers, we will need to look up
  // scripts. Build a map to allow fast mapping from script id to script.
  std::map<int, Handle<Script>> scripts;
  {
    Script::Iterator iterator(isolate_);
    while (Script* script = iterator.Next()) {
      scripts[script->id()] = handle(script, isolate_);
    }
  }
  auto profile = new v8::internal::AllocationProfile();
  TranslateAllocationNode(profile, &profile_root_, scripts);
  profile->samples_ = SamplingHeapProfiler::BuildSamples();

  return profile;
}
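
// Embedder-side usage sketch (illustration, not part of this file): the
// profiler defined here is driven through the public v8::HeapProfiler API,
// assuming `isolate` is a live v8::Isolate*.
//
//   v8::HeapProfiler* heap_profiler = isolate->GetHeapProfiler();
//   // Sample roughly every 64 KB of allocation, capturing up to 16 frames.
//   heap_profiler->StartSamplingHeapProfiler(64 * 1024, 16);
//   // ... run JavaScript ...
//   std::unique_ptr<v8::AllocationProfile> profile(
//       heap_profiler->GetAllocationProfile());  // ownership passes to caller
//   heap_profiler->StopSamplingHeapProfiler();
//   v8::AllocationProfile::Node* root = profile->GetRootNode();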

const std::vector<v8::AllocationProfile::Sample>
SamplingHeapProfiler::BuildSamples() const {
  std::vector<v8::AllocationProfile::Sample> samples;
  samples.reserve(samples_.size());
  for (const auto& it : samples_) {
    const Sample* sample = it.second.get();
    samples.emplace_back(v8::AllocationProfile::Sample{
        sample->owner->id_, sample->size, ScaleSample(sample->size, 1).count,
        sample->sample_id});
  }
  return samples;
}

}  // namespace internal
}  // namespace v8