V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
escape-analysis.cc
1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/compiler/escape-analysis.h"
6 
7 #include "src/bootstrapper.h"
8 #include "src/compiler/linkage.h"
9 #include "src/compiler/node-matchers.h"
10 #include "src/compiler/operator-properties.h"
11 #include "src/compiler/simplified-operator.h"
12 #include "src/handles-inl.h"
13 #include "src/objects/map-inl.h"
14 
15 #ifdef DEBUG
16 #define TRACE(...) \
17  do { \
18  if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
19  } while (false)
20 #else
21 #define TRACE(...)
22 #endif
23 
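[Editor's note] TRACE only prints in debug builds and is gated on FLAG_trace_turbo_escape, which corresponds to the --trace-turbo-escape command-line flag. A hypothetical way to watch the analysis at work, assuming a debug build of the d8 shell:

    d8 --trace-turbo-escape test.js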
24 namespace v8 {
25 namespace internal {
26 namespace compiler {
27 
28 template <class T>
29 class Sidetable {
30  public:
31  explicit Sidetable(Zone* zone) : map_(zone) {}
32  T& operator[](const Node* node) {
33  NodeId id = node->id();
34  if (id >= map_.size()) {
35  map_.resize(id + 1);
36  }
37  return map_[id];
38  }
39 
40  private:
41  ZoneVector<T> map_;
42 };
43 
44 template <class T>
45 class SparseSidetable {
46  public:
47  explicit SparseSidetable(Zone* zone, T def_value = T())
48  : def_value_(std::move(def_value)), map_(zone) {}
49  void Set(const Node* node, T value) {
50  auto iter = map_.find(node->id());
51  if (iter != map_.end()) {
52  iter->second = std::move(value);
53  } else if (value != def_value_) {
54  map_.insert(iter, std::make_pair(node->id(), std::move(value)));
55  }
56  }
57  const T& Get(const Node* node) const {
58  auto iter = map_.find(node->id());
59  return iter != map_.end() ? iter->second : def_value_;
60  }
61 
62  private:
63  T def_value_;
64  ZoneUnorderedMap<NodeId, T> map_;
65 };
66 
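[Editor's note] Both side tables above key per-node data by NodeId: Sidetable is a dense vector that grows on demand (suited to data most nodes carry), while SparseSidetable is a hash map that only materializes entries differing from a default value. A minimal usage sketch, assuming a Zone* zone and Node* node from the surrounding compiler context (not part of the original file):

    Sidetable<int> visit_count(zone);
    visit_count[node]++;                       // resizes up to node->id() on demand

    SparseSidetable<Node*> replacement(zone);  // default value is nullptr
    replacement.Set(node, rep);                // only stored if rep != nullptr
    Node* r = replacement.Get(node);           // yields nullptr if never set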
67 // Keeps track of the changes to the current node during reduction.
68 // Encapsulates the current state of the IR graph and the reducer state like
69 // side-tables. All access to the IR and the reducer state should happen through
70 // a ReduceScope to ensure that changes and dependencies are tracked and all
71 // necessary node revisitations happen.
72 class ReduceScope {
73  public:
74  typedef EffectGraphReducer::Reduction Reduction;
75  explicit ReduceScope(Node* node, Reduction* reduction)
76  : current_node_(node), reduction_(reduction) {}
77 
78  protected:
79  Node* current_node() const { return current_node_; }
80  Reduction* reduction() { return reduction_; }
81 
82  private:
83  Node* current_node_;
84  Reduction* reduction_;
85 };
86 
87 // A VariableTracker object keeps track of the values of variables at all points
88 // of the effect chain and introduces new phi nodes when necessary.
89 // Initially and by default, variables are mapped to nullptr, which means that
90 // the variable allocation point does not dominate the current point on the
91 // effect chain. We map variables that represent uninitialized memory to the
92 // Dead node to ensure it is not read.
93 // Unmapped values are impossible by construction; a PersistentMap that omits
94 // an element is indistinguishable from one mapping it to the default element.
95 class VariableTracker {
96  private:
97  // The state of all variables at one point in the effect chain.
98  class State {
99  typedef PersistentMap<Variable, Node*> Map;
100 
101  public:
102  explicit State(Zone* zone) : map_(zone) {}
103  Node* Get(Variable var) const {
104  CHECK(var != Variable::Invalid());
105  return map_.Get(var);
106  }
107  void Set(Variable var, Node* node) {
108  CHECK(var != Variable::Invalid());
109  return map_.Set(var, node);
110  }
111  Map::iterator begin() const { return map_.begin(); }
112  Map::iterator end() const { return map_.end(); }
113  bool operator!=(const State& other) const { return map_ != other.map_; }
114 
115  private:
116  Map map_;
117  };
118 
119  public:
120  VariableTracker(JSGraph* graph, EffectGraphReducer* reducer, Zone* zone);
121  Variable NewVariable() { return Variable(next_variable_++); }
122  Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
123  Zone* zone() { return zone_; }
124 
125  class Scope : public ReduceScope {
126  public:
127  Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
128  ~Scope();
129  Maybe<Node*> Get(Variable var) {
130  Node* node = current_state_.Get(var);
131  if (node && node->opcode() == IrOpcode::kDead) {
132  // TODO(tebbi): We use {Dead} as a sentinel for uninitialized memory.
133  // Reading uninitialized memory can only happen in unreachable code. In
134  // this case, we have to mark the object as escaping to avoid dead nodes
135  // in the graph. This is a workaround that should be removed once we can
136  // handle dead nodes everywhere.
137  return Nothing<Node*>();
138  }
139  return Just(node);
140  }
141  void Set(Variable var, Node* node) { current_state_.Set(var, node); }
142 
143  private:
144  VariableTracker* states_;
145  State current_state_;
146  };
147 
148  private:
149  State MergeInputs(Node* effect_phi);
150  Zone* zone_;
151  JSGraph* graph_;
152  SparseSidetable<State> table_;
153  ZoneVector<Node*> buffer_;
154  EffectGraphReducer* reducer_;
155  int next_variable_ = 0;
156 
157  DISALLOW_COPY_AND_ASSIGN(VariableTracker);
158 };
159 
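[Editor's note] A sketch of the intended use of VariableTracker (hypothetical nodes, not part of the original file): a Scope is opened for each node on the effect chain, starting from the state at the node's effect input; the reducer reads and writes variables through it, and the destructor stores the resulting state for the node, flagging an effect change when it differs from the previous one:

    VariableTracker::Scope scope(&tracker, store_node, &reduction);
    scope.Set(field_var, stored_value);     // field value after store_node
    Maybe<Node*> v = scope.Get(field_var);  // Nothing<Node*>() for uninitialized memory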
160 // Encapsulates the current state of the escape analysis reducer to preserve
161 // invariants regarding changes and re-visitation.
162 class EscapeAnalysisTracker : public ZoneObject {
163  public:
164  EscapeAnalysisTracker(JSGraph* jsgraph, EffectGraphReducer* reducer,
165  Zone* zone)
166  : virtual_objects_(zone),
167  replacements_(zone),
168  variable_states_(jsgraph, reducer, zone),
169  jsgraph_(jsgraph),
170  zone_(zone) {}
171 
172  class Scope : public VariableTracker::Scope {
173  public:
174  Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
175  Node* node, Reduction* reduction)
176  : VariableTracker::Scope(&tracker->variable_states_, node, reduction),
177  tracker_(tracker),
178  reducer_(reducer) {}
179  const VirtualObject* GetVirtualObject(Node* node) {
180  VirtualObject* vobject = tracker_->virtual_objects_.Get(node);
181  if (vobject) vobject->AddDependency(current_node());
182  return vobject;
183  }
184  // Create or retrieve a virtual object for the current node.
185  const VirtualObject* InitVirtualObject(int size) {
186  DCHECK_EQ(IrOpcode::kAllocate, current_node()->opcode());
187  VirtualObject* vobject = tracker_->virtual_objects_.Get(current_node());
188  if (vobject) {
189  CHECK(vobject->size() == size);
190  } else {
191  vobject = tracker_->NewVirtualObject(size);
192  }
193  if (vobject) vobject->AddDependency(current_node());
194  vobject_ = vobject;
195  return vobject;
196  }
197 
198  void SetVirtualObject(Node* object) {
199  vobject_ = tracker_->virtual_objects_.Get(object);
200  }
201 
202  void SetEscaped(Node* node) {
203  if (VirtualObject* object = tracker_->virtual_objects_.Get(node)) {
204  if (object->HasEscaped()) return;
205  TRACE("Setting %s#%d to escaped because of use by %s#%d\n",
206  node->op()->mnemonic(), node->id(),
207  current_node()->op()->mnemonic(), current_node()->id());
208  object->SetEscaped();
209  object->RevisitDependants(reducer_);
210  }
211  }
212  // The inputs of the current node have to be accessed through the scope to
213  // ensure that they respect the node replacements.
214  Node* ValueInput(int i) {
215  return tracker_->ResolveReplacement(
216  NodeProperties::GetValueInput(current_node(), i));
217  }
218  Node* ContextInput() {
219  return tracker_->ResolveReplacement(
220  NodeProperties::GetContextInput(current_node()));
221  }
222 
223  void SetReplacement(Node* replacement) {
224  replacement_ = replacement;
225  vobject_ =
226  replacement ? tracker_->virtual_objects_.Get(replacement) : nullptr;
227  if (replacement) {
228  TRACE("Set %s#%d as replacement.\n", replacement->op()->mnemonic(),
229  replacement->id());
230  } else {
231  TRACE("Set nullptr as replacement.\n");
232  }
233  }
234 
235  void MarkForDeletion() { SetReplacement(tracker_->jsgraph_->Dead()); }
236 
237  ~Scope() {
238  if (replacement_ != tracker_->replacements_[current_node()] ||
239  vobject_ != tracker_->virtual_objects_.Get(current_node())) {
240  reduction()->set_value_changed();
241  }
242  tracker_->replacements_[current_node()] = replacement_;
243  tracker_->virtual_objects_.Set(current_node(), vobject_);
244  }
245 
246  private:
247  EscapeAnalysisTracker* tracker_;
248  EffectGraphReducer* reducer_;
249  VirtualObject* vobject_ = nullptr;
250  Node* replacement_ = nullptr;
251  };
252 
253  Node* GetReplacementOf(Node* node) { return replacements_[node]; }
254  Node* ResolveReplacement(Node* node) {
255  if (Node* replacement = GetReplacementOf(node)) {
256  return replacement;
257  }
258  return node;
259  }
260 
261  private:
262  friend class EscapeAnalysisResult;
263  static const size_t kMaxTrackedObjects = 100;
264 
265  VirtualObject* NewVirtualObject(int size) {
266  if (next_object_id_ >= kMaxTrackedObjects) return nullptr;
267  return new (zone_)
268  VirtualObject(&variable_states_, next_object_id_++, size);
269  }
270 
271  SparseSidetable<VirtualObject*> virtual_objects_;
272  Sidetable<Node*> replacements_;
273  VariableTracker variable_states_;
274  VirtualObject::Id next_object_id_ = 0;
275  JSGraph* const jsgraph_;
276  Zone* const zone_;
277 
278  DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisTracker);
279 };
280 
281 EffectGraphReducer::EffectGraphReducer(
282  Graph* graph, std::function<void(Node*, Reduction*)> reduce, Zone* zone)
283  : graph_(graph),
284  state_(graph, kNumStates),
285  revisit_(zone),
286  stack_(zone),
287  reduce_(std::move(reduce)) {}
288 
289 void EffectGraphReducer::ReduceFrom(Node* node) {
290  // Perform DFS and eagerly trigger revisitation as soon as possible.
291  // A stack element {node, i} indicates that input i of node should be visited
292  // next.
293  DCHECK(stack_.empty());
294  stack_.push({node, 0});
295  while (!stack_.empty()) {
296  Node* current = stack_.top().node;
297  int& input_index = stack_.top().input_index;
298  if (input_index < current->InputCount()) {
299  Node* input = current->InputAt(input_index);
300  input_index++;
301  switch (state_.Get(input)) {
302  case State::kVisited:
303  // The input is already reduced.
304  break;
305  case State::kOnStack:
306  // The input is on the DFS stack right now, so it will be revisited
307  // later anyway.
308  break;
309  case State::kUnvisited:
310  case State::kRevisit: {
311  state_.Set(input, State::kOnStack);
312  stack_.push({input, 0});
313  break;
314  }
315  }
316  } else {
317  stack_.pop();
318  Reduction reduction;
319  reduce_(current, &reduction);
320  for (Edge edge : current->use_edges()) {
321  // Mark uses for revisitation.
322  Node* use = edge.from();
323  if (NodeProperties::IsEffectEdge(edge)) {
324  if (reduction.effect_changed()) Revisit(use);
325  } else {
326  if (reduction.value_changed()) Revisit(use);
327  }
328  }
329  state_.Set(current, State::kVisited);
330  // Process the revisitation buffer immediately. This improves performance
331  // of escape analysis. Using a stack for {revisit_} reverses the order in
332  // which the revisitation happens. This also seems to improve performance.
333  while (!revisit_.empty()) {
334  Node* revisit = revisit_.top();
335  if (state_.Get(revisit) == State::kRevisit) {
336  state_.Set(revisit, State::kOnStack);
337  stack_.push({revisit, 0});
338  }
339  revisit_.pop();
340  }
341  }
342  }
343 }
344 
345 void EffectGraphReducer::Revisit(Node* node) {
346  if (state_.Get(node) == State::kVisited) {
347  TRACE(" Queueing for revisit: %s#%d\n", node->op()->mnemonic(),
348  node->id());
349  state_.Set(node, State::kRevisit);
350  revisit_.push(node);
351  }
352 }
353 
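[Editor's note] ReduceFrom and Revisit together form a fixpoint loop: nodes are reduced in DFS post-order so inputs come first, and any reduction that changes a node's value or effect output pushes the affected uses back onto the stack until no reduction reports a change. A sketch of a callback honoring that contract (hypothetical rewrite logic, not part of the original file):

    EffectGraphReducer reducer(
        graph,
        [](Node* node, EffectGraphReducer::Reduction* reduction) {
          // ... rewrite {node} ...
          // if its value output changed:  reduction->set_value_changed();
          // if its effect output changed: reduction->set_effect_changed();
        },
        zone);
    reducer.ReduceFrom(graph->end());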
354 VariableTracker::VariableTracker(JSGraph* graph, EffectGraphReducer* reducer,
355  Zone* zone)
356  : zone_(zone),
357  graph_(graph),
358  table_(zone, State(zone)),
359  buffer_(zone),
360  reducer_(reducer) {}
361 
362 VariableTracker::Scope::Scope(VariableTracker* states, Node* node,
363  Reduction* reduction)
364  : ReduceScope(node, reduction),
365  states_(states),
366  current_state_(states->zone_) {
367  switch (node->opcode()) {
368  case IrOpcode::kEffectPhi:
369  current_state_ = states_->MergeInputs(node);
370  break;
371  default:
372  int effect_inputs = node->op()->EffectInputCount();
373  if (effect_inputs == 1) {
374  current_state_ =
375  states_->table_.Get(NodeProperties::GetEffectInput(node, 0));
376  } else {
377  DCHECK_EQ(0, effect_inputs);
378  }
379  }
380 }
381 
382 VariableTracker::Scope::~Scope() {
383  if (!reduction()->effect_changed() &&
384  states_->table_.Get(current_node()) != current_state_) {
385  reduction()->set_effect_changed();
386  }
387  states_->table_.Set(current_node(), current_state_);
388 }
389 
390 VariableTracker::State VariableTracker::MergeInputs(Node* effect_phi) {
391  // A variable that is mapped to [nullptr] was not assigned a value on every
392  // execution path to the current effect phi. Relying on the invariant that
393  // every variable is initialized (at least with a sentinel like the Dead
394  // node), this means that the variable initialization does not dominate the
395  // current point. So for loop effect phis, we can keep nullptr for a variable
396  // as long as the first input of the loop has nullptr for this variable. For
397  // non-loop effect phis, we can even keep it nullptr as long as any input has
398  // nullptr.
399  DCHECK_EQ(IrOpcode::kEffectPhi, effect_phi->opcode());
400  int arity = effect_phi->op()->EffectInputCount();
401  Node* control = NodeProperties::GetControlInput(effect_phi, 0);
402  TRACE("control: %s#%d\n", control->op()->mnemonic(), control->id());
403  bool is_loop = control->opcode() == IrOpcode::kLoop;
404  buffer_.reserve(arity + 1);
405 
406  State first_input = table_.Get(NodeProperties::GetEffectInput(effect_phi, 0));
407  State result = first_input;
408  for (std::pair<Variable, Node*> var_value : first_input) {
409  if (Node* value = var_value.second) {
410  Variable var = var_value.first;
411  TRACE("var %i:\n", var.id_);
412  buffer_.clear();
413  buffer_.push_back(value);
414  bool identical_inputs = true;
415  int num_defined_inputs = 1;
416  TRACE(" input 0: %s#%d\n", value->op()->mnemonic(), value->id());
417  for (int i = 1; i < arity; ++i) {
418  Node* next_value =
419  table_.Get(NodeProperties::GetEffectInput(effect_phi, i)).Get(var);
420  if (next_value != value) identical_inputs = false;
421  if (next_value != nullptr) {
422  num_defined_inputs++;
423  TRACE(" input %i: %s#%d\n", i, next_value->op()->mnemonic(),
424  next_value->id());
425  } else {
426  TRACE(" input %i: nullptr\n", i);
427  }
428  buffer_.push_back(next_value);
429  }
430 
431  Node* old_value = table_.Get(effect_phi).Get(var);
432  if (old_value) {
433  TRACE(" old: %s#%d\n", old_value->op()->mnemonic(), old_value->id());
434  } else {
435  TRACE(" old: nullptr\n");
436  }
437  // Reuse a previously created phi node if possible.
438  if (old_value && old_value->opcode() == IrOpcode::kPhi &&
439  NodeProperties::GetControlInput(old_value, 0) == control) {
440  // Since a phi node can never dominate its control node,
441  // [old_value] cannot originate from the inputs. Thus [old_value]
442  // must have been created by a previous reduction of this [effect_phi].
443  for (int i = 0; i < arity; ++i) {
444  NodeProperties::ReplaceValueInput(
445  old_value, buffer_[i] ? buffer_[i] : graph_->Dead(), i);
446  // This change cannot affect the rest of the reducer, so there is no
447  // need to trigger additional revisitations.
448  }
449  result.Set(var, old_value);
450  } else {
451  if (num_defined_inputs == 1 && is_loop) {
452  // For loop effect phis, the variable initialization dominates iff it
453  // dominates the first input.
454  DCHECK_EQ(2, arity);
455  DCHECK_EQ(value, buffer_[0]);
456  result.Set(var, value);
457  } else if (num_defined_inputs < arity) {
458  // If the variable is undefined on some input of this non-loop effect
459  // phi, then its initialization does not dominate this point.
460  result.Set(var, nullptr);
461  } else {
462  DCHECK_EQ(num_defined_inputs, arity);
463  // We only create a phi if the values are different.
464  if (identical_inputs) {
465  result.Set(var, value);
466  } else {
467  TRACE("Creating new phi\n");
468  buffer_.push_back(control);
469  Node* phi = graph_->graph()->NewNode(
470  graph_->common()->Phi(MachineRepresentation::kTagged, arity),
471  arity + 1, &buffer_.front());
472  // TODO(tebbi): Computing precise types here is tricky, because of
473  // the necessary revisitations. If we really need this, we should
474  // probably do it afterwards.
475  NodeProperties::SetType(phi, Type::Any());
476  reducer_->AddRoot(phi);
477  result.Set(var, phi);
478  }
479  }
480  }
481 #ifdef DEBUG
482  if (Node* result_node = result.Get(var)) {
483  TRACE(" result: %s#%d\n", result_node->op()->mnemonic(),
484  result_node->id());
485  } else {
486  TRACE(" result: nullptr\n");
487  }
488 #endif
489  }
490  }
491  return result;
492 }
493 
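[Editor's note] A worked example of the merge above, for a two-input non-loop effect phi (hypothetical values v1, v2):

    inputs {v1, v2}, v1 != v2  ->  Phi[Tagged](v1, v2, merge)  // new or reused phi
    inputs {v, v}              ->  v                           // identical, no phi needed
    inputs {v, nullptr}        ->  nullptr                     // initialization does not dominate

For a loop effect phi, only the first input decides: a variable defined before the loop stays defined even while the back-edge input is still nullptr.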
494 namespace {
495 
496 int OffsetOfFieldAccess(const Operator* op) {
497  DCHECK(op->opcode() == IrOpcode::kLoadField ||
498  op->opcode() == IrOpcode::kStoreField);
499  FieldAccess access = FieldAccessOf(op);
500  return access.offset;
501 }
502 
503 int OffsetOfElementAt(ElementAccess const& access, int index) {
504  DCHECK_GE(index, 0);
505  DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
506  kPointerSizeLog2);
507  return access.header_size +
508  (index << ElementSizeLog2Of(access.machine_type.representation()));
509 }
510 
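[Editor's note] The element offset is header_size plus the index scaled by the element size. For instance, for a tagged FixedArray access on a 64-bit target (kPointerSizeLog2 == 3, header_size == 16, i.e. map plus length):

    OffsetOfElementAt(access, 0) == 16 + (0 << 3) == 16
    OffsetOfElementAt(access, 2) == 16 + (2 << 3) == 32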
511 Maybe<int> OffsetOfElementsAccess(const Operator* op, Node* index_node) {
512  DCHECK(op->opcode() == IrOpcode::kLoadElement ||
513  op->opcode() == IrOpcode::kStoreElement);
514  Type index_type = NodeProperties::GetType(index_node);
515  if (!index_type.Is(Type::OrderedNumber())) return Nothing<int>();
516  double max = index_type.Max();
517  double min = index_type.Min();
518  int index = static_cast<int>(min);
519  if (index < 0 || index != min || index != max) return Nothing<int>();
520  return Just(OffsetOfElementAt(ElementAccessOf(op), index));
521 }
522 
523 Node* LowerCompareMapsWithoutLoad(Node* checked_map,
524  ZoneHandleSet<Map> const& checked_against,
525  JSGraph* jsgraph) {
526  Node* true_node = jsgraph->TrueConstant();
527  Node* false_node = jsgraph->FalseConstant();
528  Node* replacement = false_node;
529  for (Handle<Map> map : checked_against) {
530  Node* map_node = jsgraph->HeapConstant(map);
531  // We cannot create a HeapConstant type here as we are off-thread.
532  NodeProperties::SetType(map_node, Type::Internal());
533  Node* comparison = jsgraph->graph()->NewNode(
534  jsgraph->simplified()->ReferenceEqual(), checked_map, map_node);
535  NodeProperties::SetType(comparison, Type::Boolean());
536  if (replacement == false_node) {
537  replacement = comparison;
538  } else {
539  replacement = jsgraph->graph()->NewNode(
540  jsgraph->common()->Select(MachineRepresentation::kTaggedPointer),
541  comparison, true_node, replacement);
542  NodeProperties::SetType(replacement, Type::Boolean());
543  }
544  }
545  return replacement;
546 }
547 
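[Editor's note] For checked_against = {m1, m2}, the loop above builds, in effect:

    Select(ReferenceEqual(checked_map, m2), #true,
           ReferenceEqual(checked_map, m1))

i.e. a right-leaning chain of Selects that ORs the individual map comparisons, so the map check needs no load from the (possibly virtual) object.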
548 void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current,
549  JSGraph* jsgraph) {
550  switch (op->opcode()) {
551  case IrOpcode::kAllocate: {
552  NumberMatcher size(current->ValueInput(0));
553  if (!size.HasValue()) break;
554  int size_int = static_cast<int>(size.Value());
555  if (size_int != size.Value()) break;
556  if (const VirtualObject* vobject = current->InitVirtualObject(size_int)) {
557  // Initialize with dead nodes as a sentinel for uninitialized memory.
558  for (Variable field : *vobject) {
559  current->Set(field, jsgraph->Dead());
560  }
561  }
562  break;
563  }
564  case IrOpcode::kFinishRegion:
565  current->SetVirtualObject(current->ValueInput(0));
566  break;
567  case IrOpcode::kStoreField: {
568  Node* object = current->ValueInput(0);
569  Node* value = current->ValueInput(1);
570  const VirtualObject* vobject = current->GetVirtualObject(object);
571  Variable var;
572  if (vobject && !vobject->HasEscaped() &&
573  vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
574  current->Set(var, value);
575  current->MarkForDeletion();
576  } else {
577  current->SetEscaped(object);
578  current->SetEscaped(value);
579  }
580  break;
581  }
582  case IrOpcode::kStoreElement: {
583  Node* object = current->ValueInput(0);
584  Node* index = current->ValueInput(1);
585  Node* value = current->ValueInput(2);
586  const VirtualObject* vobject = current->GetVirtualObject(object);
587  int offset;
588  Variable var;
589  if (vobject && !vobject->HasEscaped() &&
590  OffsetOfElementsAccess(op, index).To(&offset) &&
591  vobject->FieldAt(offset).To(&var)) {
592  current->Set(var, value);
593  current->MarkForDeletion();
594  } else {
595  current->SetEscaped(value);
596  current->SetEscaped(object);
597  }
598  break;
599  }
600  case IrOpcode::kLoadField: {
601  Node* object = current->ValueInput(0);
602  const VirtualObject* vobject = current->GetVirtualObject(object);
603  Variable var;
604  Node* value;
605  if (vobject && !vobject->HasEscaped() &&
606  vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var) &&
607  current->Get(var).To(&value)) {
608  current->SetReplacement(value);
609  } else {
610  current->SetEscaped(object);
611  }
612  break;
613  }
614  case IrOpcode::kLoadElement: {
615  Node* object = current->ValueInput(0);
616  Node* index = current->ValueInput(1);
617  const VirtualObject* vobject = current->GetVirtualObject(object);
618  int offset;
619  Variable var;
620  Node* value;
621  if (vobject && !vobject->HasEscaped() &&
622  OffsetOfElementsAccess(op, index).To(&offset) &&
623  vobject->FieldAt(offset).To(&var) && current->Get(var).To(&value)) {
624  current->SetReplacement(value);
625  } else if (vobject && !vobject->HasEscaped()) {
626  // Compute the known length (aka the number of elements) of {object}
627  // based on the virtual object information.
628  ElementAccess const& access = ElementAccessOf(op);
629  int const length =
630  (vobject->size() - access.header_size) >>
631  ElementSizeLog2Of(access.machine_type.representation());
632  Variable var0, var1;
633  Node* value0;
634  Node* value1;
635  if (length == 1 &&
636  vobject->FieldAt(OffsetOfElementAt(access, 0)).To(&var) &&
637  current->Get(var).To(&value) &&
638  (value == nullptr ||
639  NodeProperties::GetType(value).Is(access.type))) {
640  // The {object} has exactly one element, and we know that the LoadElement
641  // {index} must be within bounds, thus it must always yield this
642  // one element of {object}.
643  current->SetReplacement(value);
644  break;
645  } else if (length == 2 &&
646  vobject->FieldAt(OffsetOfElementAt(access, 0)).To(&var0) &&
647  current->Get(var0).To(&value0) &&
648  (value0 == nullptr ||
649  NodeProperties::GetType(value0).Is(access.type)) &&
650  vobject->FieldAt(OffsetOfElementAt(access, 1)).To(&var1) &&
651  current->Get(var1).To(&value1) &&
652  (value1 == nullptr ||
653  NodeProperties::GetType(value1).Is(access.type))) {
654  if (value0 && value1) {
655  // The {object} has exactly two elements, so the LoadElement
656  // must return one of them (i.e. either the element at index
657  // 0 or the one at index 1). So we can turn the LoadElement
658  // into a Select operation instead (still allowing the {object}
659  // to be scalar replaced). We must however mark the elements
660  // of the {object} itself as escaping.
661  Node* check =
662  jsgraph->graph()->NewNode(jsgraph->simplified()->NumberEqual(),
663  index, jsgraph->ZeroConstant());
664  NodeProperties::SetType(check, Type::Boolean());
665  Node* select = jsgraph->graph()->NewNode(
666  jsgraph->common()->Select(access.machine_type.representation()),
667  check, value0, value1);
668  NodeProperties::SetType(select, access.type);
669  current->SetReplacement(select);
670  current->SetEscaped(value0);
671  current->SetEscaped(value1);
672  break;
673  } else {
674  // If the variables have no values, we have
675  // not reached the fixed-point yet.
676  break;
677  }
678  }
679  }
680  current->SetEscaped(object);
681  break;
682  }
683  case IrOpcode::kTypeGuard: {
684  current->SetVirtualObject(current->ValueInput(0));
685  break;
686  }
687  case IrOpcode::kReferenceEqual: {
688  Node* left = current->ValueInput(0);
689  Node* right = current->ValueInput(1);
690  const VirtualObject* left_object = current->GetVirtualObject(left);
691  const VirtualObject* right_object = current->GetVirtualObject(right);
692  Node* replacement = nullptr;
693  if (left_object && !left_object->HasEscaped()) {
694  if (right_object && !right_object->HasEscaped() &&
695  left_object->id() == right_object->id()) {
696  replacement = jsgraph->TrueConstant();
697  } else {
698  replacement = jsgraph->FalseConstant();
699  }
700  } else if (right_object && !right_object->HasEscaped()) {
701  replacement = jsgraph->FalseConstant();
702  }
703  if (replacement) {
704  // TODO(tebbi) This is a workaround for uninhabited types. If we
705  // replaced a value of uninhabited type with a constant, we would
706  // widen the type of the node. This could produce inconsistent
707  // types (which might confuse representation selection). We get
708  // around this by refusing to constant-fold and escape-analyze
709  // if the type is not inhabited.
710  if (!NodeProperties::GetType(left).IsNone() &&
711  !NodeProperties::GetType(right).IsNone()) {
712  current->SetReplacement(replacement);
713  } else {
714  current->SetEscaped(left);
715  current->SetEscaped(right);
716  }
717  }
718  break;
719  }
720  case IrOpcode::kCheckMaps: {
721  CheckMapsParameters params = CheckMapsParametersOf(op);
722  Node* checked = current->ValueInput(0);
723  const VirtualObject* vobject = current->GetVirtualObject(checked);
724  Variable map_field;
725  Node* map;
726  if (vobject && !vobject->HasEscaped() &&
727  vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
728  current->Get(map_field).To(&map)) {
729  if (map) {
730  Type const map_type = NodeProperties::GetType(map);
731  AllowHandleDereference handle_dereference;
732  if (map_type.IsHeapConstant() &&
733  params.maps().contains(
734  Handle<Map>::cast(map_type.AsHeapConstant()->Value()))) {
735  current->MarkForDeletion();
736  break;
737  }
738  } else {
739  // If the variable has no value, we have not reached the fixed-point
740  // yet.
741  break;
742  }
743  }
744  current->SetEscaped(checked);
745  break;
746  }
747  case IrOpcode::kCompareMaps: {
748  Node* object = current->ValueInput(0);
749  const VirtualObject* vobject = current->GetVirtualObject(object);
750  Variable map_field;
751  Node* object_map;
752  if (vobject && !vobject->HasEscaped() &&
753  vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
754  current->Get(map_field).To(&object_map)) {
755  if (object_map) {
756  current->SetReplacement(LowerCompareMapsWithoutLoad(
757  object_map, CompareMapsParametersOf(op).maps(), jsgraph));
758  break;
759  } else {
760  // If the variable has no value, we have not reached the fixed-point
761  // yet.
762  break;
763  }
764  }
765  current->SetEscaped(object);
766  break;
767  }
768  case IrOpcode::kCheckHeapObject: {
769  Node* checked = current->ValueInput(0);
770  switch (checked->opcode()) {
771  case IrOpcode::kAllocate:
772  case IrOpcode::kFinishRegion:
773  case IrOpcode::kHeapConstant:
774  current->SetReplacement(checked);
775  break;
776  default:
777  current->SetEscaped(checked);
778  break;
779  }
780  break;
781  }
782  case IrOpcode::kMapGuard: {
783  Node* object = current->ValueInput(0);
784  const VirtualObject* vobject = current->GetVirtualObject(object);
785  if (vobject && !vobject->HasEscaped()) {
786  current->MarkForDeletion();
787  }
788  break;
789  }
790  case IrOpcode::kStateValues:
791  case IrOpcode::kFrameState:
792  // These uses are always safe.
793  break;
794  default: {
795  // For unknown nodes, treat all value inputs as escaping.
796  int value_input_count = op->ValueInputCount();
797  for (int i = 0; i < value_input_count; ++i) {
798  Node* input = current->ValueInput(i);
799  current->SetEscaped(input);
800  }
801  if (OperatorProperties::HasContextInput(op)) {
802  current->SetEscaped(current->ContextInput());
803  }
804  break;
805  }
806  }
807 }
808 
809 } // namespace
810 
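[Editor's note] Putting ReduceNode together: for JavaScript like "function f() { var o = {x: 1}; return o.x; }", the Allocate node for {o} receives a VirtualObject, the StoreField to o.x writes a tracked variable instead of memory, and the LoadField is replaced by the stored value; once {o} provably never escapes, the allocation itself can be removed by the subsequent EscapeAnalysisReducer pass (scalar replacement). This summary is illustrative, not text from the original file.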
811 void EscapeAnalysis::Reduce(Node* node, Reduction* reduction) {
812  const Operator* op = node->op();
813  TRACE("Reducing %s#%d\n", op->mnemonic(), node->id());
814 
815  EscapeAnalysisTracker::Scope current(this, tracker_, node, reduction);
816  ReduceNode(op, &current, jsgraph());
817 }
818 
819 EscapeAnalysis::EscapeAnalysis(JSGraph* jsgraph, Zone* zone)
820  : EffectGraphReducer(
821  jsgraph->graph(),
822  [this](Node* node, Reduction* reduction) { Reduce(node, reduction); },
823  zone),
824  tracker_(new (zone) EscapeAnalysisTracker(jsgraph, this, zone)),
825  jsgraph_(jsgraph) {}
826 
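[Editor's note] A sketch of driving the analysis and consuming its result, assuming the ReduceGraph and analysis_result helpers declared in escape-analysis.h (hypothetical node names, not part of the original file):

    EscapeAnalysis analysis(jsgraph, zone);
    analysis.ReduceGraph();  // runs the fixpoint over the whole graph
    EscapeAnalysisResult result = analysis.analysis_result();
    if (Node* rep = result.GetReplacementOf(load_node)) {
      // {load_node} can be rewritten to {rep}, as EscapeAnalysisReducer does.
    }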
827 Node* EscapeAnalysisResult::GetReplacementOf(Node* node) {
828  Node* replacement = tracker_->GetReplacementOf(node);
829  // Replacements cannot have replacements. This is important to ensure
830  // re-visitation: If a replacement is replaced, then all nodes accessing
831  // the replacement have to be updated.
832  if (replacement) DCHECK_NULL(tracker_->GetReplacementOf(replacement));
833  return replacement;
834 }
835 
836 Node* EscapeAnalysisResult::GetVirtualObjectField(const VirtualObject* vobject,
837  int field, Node* effect) {
838  return tracker_->variable_states_.Get(vobject->FieldAt(field).FromJust(),
839  effect);
840 }
841 
842 const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
843  return tracker_->virtual_objects_.Get(node);
844 }
845 
846 VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
847  int size)
848  : Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
849  DCHECK_EQ(0, size % kPointerSize);
850  TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
851  int num_fields = size / kPointerSize;
852  fields_.reserve(num_fields);
853  for (int i = 0; i < num_fields; ++i) {
854  fields_.push_back(var_states->NewVariable());
855  }
856 }
857 
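[Editor's note] Each VirtualObject owns one Variable per pointer-sized field: e.g. an allocation of size 24 on a 64-bit target (kPointerSize == 8) yields 24 / 8 == 3 field variables, which the analysis addresses by byte offset through FieldAt().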
858 #undef TRACE
859 
860 } // namespace compiler
861 } // namespace internal
862 } // namespace v8