V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
store-store-elimination.cc
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <iterator>

#include "src/compiler/store-store-elimination.h"

#include "src/compiler/all-nodes.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/simplified-operator.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(fmt, ...)                                         \
  do {                                                          \
    if (FLAG_trace_store_elimination) {                         \
      PrintF("RedundantStoreFinder: " fmt "\n", ##__VA_ARGS__); \
    }                                                           \
  } while (false)

// CHECK_EXTRA is like CHECK, but takes two or more arguments: a boolean
// expression, a format string, and any number of extra arguments. The boolean
// expression is evaluated at runtime. If it evaluates to false, an error
// message is shown containing the condition, as well as the extra info
// formatted as with printf.
#define CHECK_EXTRA(condition, fmt, ...)                                      \
  do {                                                                        \
    if (V8_UNLIKELY(!(condition))) {                                          \
      FATAL("Check failed: %s. Extra info: " fmt, #condition, ##__VA_ARGS__); \
    }                                                                         \
  } while (false)

#ifdef DEBUG
#define DCHECK_EXTRA(condition, fmt, ...) \
  CHECK_EXTRA(condition, fmt, ##__VA_ARGS__)
#else
#define DCHECK_EXTRA(condition, fmt, ...) ((void)0)
#endif

// Store-store elimination.
//
// The aim of this optimization is to detect the following pattern in the
// effect graph:
//
// - StoreField[+24, kRepTagged](263, ...)
//
//   ... lots of nodes from which the field at offset 24 of the object
//       returned by node #263 cannot be observed ...
//
// - StoreField[+24, kRepTagged](263, ...)
//
// In such situations, the earlier StoreField cannot be observed and can be
// eliminated. The optimization works for any offset and input node, of
// course.
//
// The optimization also works across splits. It currently does not work for
// loops, because we tend to put a stack check in loops, and like deopts,
// stack checks can observe anything.

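// As a concrete illustration, consider this hypothetical JavaScript (the
// function and field names are invented for the example):
//
//   function f(o) {
//     o.x = 1;  // lowered to StoreField[+x_offset](o, 1)
//     o.x = 2;  // lowered to StoreField[+x_offset](o, 2)
//     return o;
//   }
//
// Nothing between the two stores can read o.x, so the first StoreField is
// unobservable and may be deleted from the effect chain.
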
// Assumption: every byte of a JS object is only ever accessed through one
// offset. For instance, byte 15 of a given object may be accessed using a
// two-byte read at offset 14, or a four-byte read at offset 12, but never
// both in the same program.
//
// This implementation needs all dead nodes removed from the graph, and the
// graph should be trimmed.

namespace {

typedef uint32_t StoreOffset;

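// A store destination, identified by the node id of the stored-to object and
// the field offset. Stores to the same destination can shadow each other;
// stores to different destinations cannot (by the assumption above).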
struct UnobservableStore {
  NodeId id_;
  StoreOffset offset_;

  bool operator==(const UnobservableStore) const;
  bool operator<(const UnobservableStore) const;
};

}  // namespace

namespace {

// Instances of UnobservablesSet are immutable. They represent either a set of
// UnobservableStores, or the "unvisited empty set".
//
// We apply some sharing to save memory. An UnobservablesSet is only a pointer
// wide, and copying one allocates no heap (or temp_zone) memory; operations
// that actually change the set, however, allocate a fresh backing set in the
// temp_zone.
class UnobservablesSet final {
 public:
  static UnobservablesSet Unvisited();
  static UnobservablesSet VisitedEmpty(Zone* zone);
  UnobservablesSet();  // unvisited
  UnobservablesSet(const UnobservablesSet& other) = default;

  UnobservablesSet Intersect(const UnobservablesSet& other, Zone* zone) const;
  UnobservablesSet Add(UnobservableStore obs, Zone* zone) const;
  UnobservablesSet RemoveSameOffset(StoreOffset off, Zone* zone) const;

  const ZoneSet<UnobservableStore>* set() const { return set_; }

  bool IsUnvisited() const { return set_ == nullptr; }
  bool IsEmpty() const { return set_ == nullptr || set_->empty(); }
  bool Contains(UnobservableStore obs) const {
    return set_ != nullptr && (set_->find(obs) != set_->end());
  }

  bool operator==(const UnobservablesSet&) const;
  bool operator!=(const UnobservablesSet&) const;

 private:
  explicit UnobservablesSet(const ZoneSet<UnobservableStore>* set)
      : set_(set) {}
  const ZoneSet<UnobservableStore>* set_;
};

}  // namespace
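
// A minimal usage sketch (hypothetical node id and offset), showing the
// persistent-set semantics: operations return a new set and never mutate
// the receiver.
//
//   UnobservablesSet a = UnobservablesSet::VisitedEmpty(zone);
//   UnobservablesSet b = a.Add({263, 24}, zone);
//   b.Contains({263, 24});  // true
//   a.Contains({263, 24});  // false; a is unchanged
//   UnobservablesSet c = b.RemoveSameOffset(24, zone);  // empty again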

namespace {

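// RedundantStoreFinder is, in essence, a backwards dataflow analysis over the
// effect graph: starting from End, each effectful node is assigned the set of
// stores that cannot be observed at that point. The set after a node is the
// intersection of the sets of its effect uses (RecomputeUseIntersection), the
// set before it is derived from that by RecomputeSet, and nodes whose set
// changes push their effect inputs back onto the revisit_ worklist until a
// fixpoint is reached. StoreFields found to be unobservable land in
// to_remove_.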
class RedundantStoreFinder final {
 public:
  RedundantStoreFinder(JSGraph* js_graph, Zone* temp_zone);

  void Find();

  const ZoneSet<Node*>& to_remove_const() { return to_remove_; }

  void Visit(Node* node);

 private:
  void VisitEffectfulNode(Node* node);
  UnobservablesSet RecomputeUseIntersection(Node* node);
  UnobservablesSet RecomputeSet(Node* node, const UnobservablesSet& uses);
  static bool CannotObserveStoreField(Node* node);

  void MarkForRevisit(Node* node);
  bool HasBeenVisited(Node* node);

  JSGraph* jsgraph() const { return jsgraph_; }
  Isolate* isolate() { return jsgraph()->isolate(); }
  Zone* temp_zone() const { return temp_zone_; }
  ZoneVector<UnobservablesSet>& unobservable() { return unobservable_; }
  UnobservablesSet& unobservable_for_id(NodeId id) {
    DCHECK_LT(id, unobservable().size());
    return unobservable()[id];
  }
  ZoneSet<Node*>& to_remove() { return to_remove_; }

  JSGraph* const jsgraph_;
  Zone* const temp_zone_;

  ZoneStack<Node*> revisit_;
  ZoneVector<bool> in_revisit_;
  // Maps node IDs to UnobservablesSets.
  ZoneVector<UnobservablesSet> unobservable_;
  ZoneSet<Node*> to_remove_;
  const UnobservablesSet unobservables_visited_empty_;
};

// To safely cast an offset from a FieldAccess, which has a potentially wider
// range (namely int).
StoreOffset ToOffset(int offset) {
  CHECK_LE(0, offset);
  return static_cast<StoreOffset>(offset);
}

StoreOffset ToOffset(const FieldAccess& access) {
  return ToOffset(access.offset);
}

unsigned int RepSizeOf(MachineRepresentation rep) {
  return 1u << ElementSizeLog2Of(rep);
}
unsigned int RepSizeOf(FieldAccess access) {
  return RepSizeOf(access.machine_type.representation());
}

bool AtMostTagged(FieldAccess access) {
  return RepSizeOf(access) <= RepSizeOf(MachineRepresentation::kTagged);
}

bool AtLeastTagged(FieldAccess access) {
  return RepSizeOf(access) >= RepSizeOf(MachineRepresentation::kTagged);
}
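
// For example, assuming a 64-bit build where kTagged is 8 bytes wide: a
// kWord32 field (4 bytes) is AtMostTagged but not AtLeastTagged, while a
// kFloat64 field (8 bytes) is both. RecomputeSet below only eliminates
// stores that are AtMostTagged and only records stores that are
// AtLeastTagged, so a small store is never used as evidence that a wider
// store is dead.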

}  // namespace

void RedundantStoreFinder::Find() {
  Visit(jsgraph()->graph()->end());

  while (!revisit_.empty()) {
    Node* next = revisit_.top();
    revisit_.pop();
    DCHECK_LT(next->id(), in_revisit_.size());
    in_revisit_[next->id()] = false;
    Visit(next);
  }

#ifdef DEBUG
  // Check that we visited all the StoreFields.
  AllNodes all(temp_zone(), jsgraph()->graph());
  for (Node* node : all.reachable) {
    if (node->op()->opcode() == IrOpcode::kStoreField) {
      DCHECK_EXTRA(HasBeenVisited(node), "#%d:%s", node->id(),
                   node->op()->mnemonic());
    }
  }
#endif
}

void RedundantStoreFinder::MarkForRevisit(Node* node) {
  DCHECK_LT(node->id(), in_revisit_.size());
  if (!in_revisit_[node->id()]) {
    revisit_.push(node);
    in_revisit_[node->id()] = true;
  }
}

bool RedundantStoreFinder::HasBeenVisited(Node* node) {
  return !unobservable_for_id(node->id()).IsUnvisited();
}

void StoreStoreElimination::Run(JSGraph* js_graph, Zone* temp_zone) {
  // Find superfluous nodes.
  RedundantStoreFinder finder(js_graph, temp_zone);
  finder.Find();

  // Remove superfluous nodes.
  for (Node* node : finder.to_remove_const()) {
    if (FLAG_trace_store_elimination) {
      PrintF("StoreStoreElimination::Run: Eliminating node #%d:%s\n",
             node->id(), node->op()->mnemonic());
    }
    Node* previous_effect = NodeProperties::GetEffectInput(node);
    NodeProperties::ReplaceUses(node, nullptr, previous_effect, nullptr,
                                nullptr);
    node->Kill();
  }
}
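
// Removal rewires the effect chain around the dead store. Schematically,
// with hypothetical node numbers:
//
//   before:  #10 ==effect==> #20:StoreField (dead) ==effect==> #30
//   after:   #10 =====================effect=================> #30
//
// ReplaceUses redirects all effect uses of the dead node to its former
// effect input; nullptr is passed for value and control uses, which a dead
// StoreField is not expected to have. Kill() then detaches the node.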

// Recomputes the unobservables set for a node, and marks superfluous
// StoreField nodes for removal.
UnobservablesSet RedundantStoreFinder::RecomputeSet(
    Node* node, const UnobservablesSet& uses) {
  switch (node->op()->opcode()) {
    case IrOpcode::kStoreField: {
      Node* stored_to = node->InputAt(0);
      const FieldAccess& access = FieldAccessOf(node->op());
      StoreOffset offset = ToOffset(access);

      UnobservableStore observation = {stored_to->id(), offset};
      bool isNotObservable = uses.Contains(observation);

      if (isNotObservable && AtMostTagged(access)) {
        TRACE("  #%d is StoreField[+%d,%s](#%d), unobservable", node->id(),
              offset,
              MachineReprToString(access.machine_type.representation()),
              stored_to->id());
        to_remove().insert(node);
        return uses;
      } else if (isNotObservable && !AtMostTagged(access)) {
        TRACE(
            "  #%d is StoreField[+%d,%s](#%d), repeated in future but too "
            "big to optimize away",
            node->id(), offset,
            MachineReprToString(access.machine_type.representation()),
            stored_to->id());
        return uses;
      } else if (!isNotObservable && AtLeastTagged(access)) {
        TRACE("  #%d is StoreField[+%d,%s](#%d), observable, recording in set",
              node->id(), offset,
              MachineReprToString(access.machine_type.representation()),
              stored_to->id());
        return uses.Add(observation, temp_zone());
      } else if (!isNotObservable && !AtLeastTagged(access)) {
        TRACE(
            "  #%d is StoreField[+%d,%s](#%d), observable but too small to "
            "record",
            node->id(), offset,
            MachineReprToString(access.machine_type.representation()),
            stored_to->id());
        return uses;
      } else {
        UNREACHABLE();
      }
      break;
    }
    case IrOpcode::kLoadField: {
      Node* loaded_from = node->InputAt(0);
      const FieldAccess& access = FieldAccessOf(node->op());
      StoreOffset offset = ToOffset(access);

      TRACE(
          "  #%d is LoadField[+%d,%s](#%d), removing all offsets [+%d] from "
          "set",
          node->id(), offset,
          MachineReprToString(access.machine_type.representation()),
          loaded_from->id(), offset);

      return uses.RemoveSameOffset(offset, temp_zone());
      break;
    }
    default:
      if (CannotObserveStoreField(node)) {
        TRACE("  #%d:%s can observe nothing, set stays unchanged", node->id(),
              node->op()->mnemonic());
        return uses;
      } else {
        TRACE("  #%d:%s might observe anything, recording empty set",
              node->id(), node->op()->mnemonic());
        return unobservables_visited_empty_;
      }
  }
  UNREACHABLE();
}
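
// The four StoreField cases above, summarized:
//
//   in uses set? | store size vs. kTagged | action
//   -------------+------------------------+-------------------------------
//   yes          | <= tagged              | mark for removal, keep set
//   yes          | >  tagged              | keep set (too big to remove)
//   no           | >= tagged              | record (object, offset) in set
//   no           | <  tagged              | keep set (too small to record)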
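
// Opcodes that are assumed never to observe the value written by a
// StoreField: element accesses, raw (untagged) Loads and Stores, effect
// phis, unsafe pointer arithmetic, and Retain do not read tagged fields.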
bool RedundantStoreFinder::CannotObserveStoreField(Node* node) {
  return node->opcode() == IrOpcode::kLoadElement ||
         node->opcode() == IrOpcode::kLoad ||
         node->opcode() == IrOpcode::kStore ||
         node->opcode() == IrOpcode::kEffectPhi ||
         node->opcode() == IrOpcode::kStoreElement ||
         node->opcode() == IrOpcode::kUnsafePointerAdd ||
         node->opcode() == IrOpcode::kRetain;
}

// Initialize unobservable_ with js_graph->graph()->NodeCount() unvisited
// sets.
RedundantStoreFinder::RedundantStoreFinder(JSGraph* js_graph, Zone* temp_zone)
    : jsgraph_(js_graph),
      temp_zone_(temp_zone),
      revisit_(temp_zone),
      in_revisit_(js_graph->graph()->NodeCount(), temp_zone),
      unobservable_(js_graph->graph()->NodeCount(),
                    UnobservablesSet::Unvisited(), temp_zone),
      to_remove_(temp_zone),
      unobservables_visited_empty_(UnobservablesSet::VisitedEmpty(temp_zone)) {}

void RedundantStoreFinder::Visit(Node* node) {
  // All effectful nodes should be reachable from End via a sequence of
  // control edges, then a sequence of effect edges. In VisitEffectfulNode we
  // mark all effect inputs for revisiting (if they might have stale state);
  // here we mark all control inputs at least once.

  if (!HasBeenVisited(node)) {
    for (int i = 0; i < node->op()->ControlInputCount(); i++) {
      Node* control_input = NodeProperties::GetControlInput(node, i);
      if (!HasBeenVisited(control_input)) {
        MarkForRevisit(control_input);
      }
    }
  }

  bool isEffectful = (node->op()->EffectInputCount() >= 1);
  if (isEffectful) {
    VisitEffectfulNode(node);
    DCHECK(HasBeenVisited(node));
  }

  if (!HasBeenVisited(node)) {
    // Mark as visited.
    unobservable_for_id(node->id()) = unobservables_visited_empty_;
  }
}

void RedundantStoreFinder::VisitEffectfulNode(Node* node) {
  if (HasBeenVisited(node)) {
    TRACE("- Revisiting: #%d:%s", node->id(), node->op()->mnemonic());
  }
  UnobservablesSet after_set = RecomputeUseIntersection(node);
  UnobservablesSet before_set = RecomputeSet(node, after_set);
  DCHECK(!before_set.IsUnvisited());

  UnobservablesSet stored_for_node = unobservable_for_id(node->id());
  bool cur_set_changed =
      (stored_for_node.IsUnvisited() || stored_for_node != before_set);
  if (!cur_set_changed) {
    // We will not be able to update the part of this chain above any more.
    // Exit.
    TRACE("+ No change: stabilized. Not visiting effect inputs.");
  } else {
    unobservable_for_id(node->id()) = before_set;

    // Mark effect inputs for visiting.
    for (int i = 0; i < node->op()->EffectInputCount(); i++) {
      Node* input = NodeProperties::GetEffectInput(node, i);
      TRACE("  marking #%d:%s for revisit", input->id(),
            input->op()->mnemonic());
      MarkForRevisit(input);
    }
  }
}

// Compute the intersection of the UnobservablesSets of all effect uses and
// return it. If {node} has no effect uses, it must be a node that ends the
// effect chain (such as Return or Throw), and the visited empty set is
// returned.
//
// The result UnobservablesSet will always be visited.
UnobservablesSet RedundantStoreFinder::RecomputeUseIntersection(Node* node) {
  // {first} == true indicates that we haven't looked at any elements yet.
  // {first} == false indicates that cur_set is the intersection of at least
  // one use's set.

  bool first = true;
  UnobservablesSet cur_set = UnobservablesSet::Unvisited();  // irrelevant

  for (Edge edge : node->use_edges()) {
    // Skip non-effect edges.
    if (!NodeProperties::IsEffectEdge(edge)) {
      continue;
    }

    Node* use = edge.from();
    UnobservablesSet new_set = unobservable_for_id(use->id());
    // Include new_set in the intersection.
    if (first) {
      // The intersection of a single set is that set itself.
      first = false;
      cur_set = new_set;
    } else {
      // Take the intersection of cur_set and new_set.
      cur_set = cur_set.Intersect(new_set, temp_zone());
    }
  }

  if (first) {
    // There were no effect uses.
    auto opcode = node->op()->opcode();
    // List of opcodes that may end this effect chain. The exact opcodes are
    // not important to the soundness of this optimization; this serves as a
    // general sanity check. Add opcodes to this list as needed.
    //
    // Everything is observable after these opcodes; return the empty set.
    DCHECK_EXTRA(
        opcode == IrOpcode::kReturn || opcode == IrOpcode::kTerminate ||
            opcode == IrOpcode::kDeoptimize || opcode == IrOpcode::kThrow,
        "for #%d:%s", node->id(), node->op()->mnemonic());
    USE(opcode);  // silence warning about unused variable in release mode

    return unobservables_visited_empty_;
  } else {
    if (cur_set.IsUnvisited()) {
      cur_set = unobservables_visited_empty_;
    }

    return cur_set;
  }
}
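
// When a node's effect chain splits (for instance after a Branch, with one
// effectful continuation per arm), the node has several effect uses, and the
// loop above intersects their sets: a store stays unobservable only if every
// continuation fails to observe it. A sketch with hypothetical sets:
//
//   arm 1 computed {(#263, +24)}; arm 2 computed {(#263, +24), (#263, +32)}
//   => after the split node: {(#263, +24)}
//
// The store at +32 might be observed along arm 1, so it may not be treated
// as unobservable before the split.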

UnobservablesSet UnobservablesSet::Unvisited() { return UnobservablesSet(); }

UnobservablesSet::UnobservablesSet() : set_(nullptr) {}

UnobservablesSet UnobservablesSet::VisitedEmpty(Zone* zone) {
  // Create a new empty UnobservablesSet. This allocates in the zone, and
  // can probably be optimized to use a global singleton.
  ZoneSet<UnobservableStore>* empty_set =
      new (zone->New(sizeof(ZoneSet<UnobservableStore>)))
          ZoneSet<UnobservableStore>(zone);
  return UnobservablesSet(empty_set);
}

// Computes the intersection of two UnobservablesSets. May return
// UnobservablesSet::Unvisited() instead of an empty UnobservablesSet for
// speed.
UnobservablesSet UnobservablesSet::Intersect(const UnobservablesSet& other,
                                             Zone* zone) const {
  if (IsEmpty() || other.IsEmpty()) {
    return Unvisited();
  } else {
    ZoneSet<UnobservableStore>* intersection =
        new (zone->New(sizeof(ZoneSet<UnobservableStore>)))
            ZoneSet<UnobservableStore>(zone);
    // Put the intersection of set() and other.set() in intersection.
    set_intersection(set()->begin(), set()->end(), other.set()->begin(),
                     other.set()->end(),
                     std::inserter(*intersection, intersection->end()));

    return UnobservablesSet(intersection);
  }
}

UnobservablesSet UnobservablesSet::Add(UnobservableStore obs,
                                       Zone* zone) const {
  bool present = (set()->find(obs) != set()->end());
  if (present) {
    return *this;
  } else {
    // Make a new empty set.
    ZoneSet<UnobservableStore>* new_set =
        new (zone->New(sizeof(ZoneSet<UnobservableStore>)))
            ZoneSet<UnobservableStore>(zone);
    // Copy the old elements over.
    *new_set = *set();
    // Add the new element.
    bool inserted = new_set->insert(obs).second;
    DCHECK(inserted);
    USE(inserted);  // silence warning about unused variable

    return UnobservablesSet(new_set);
  }
}

UnobservablesSet UnobservablesSet::RemoveSameOffset(StoreOffset offset,
                                                    Zone* zone) const {
  // Make a new empty set.
  ZoneSet<UnobservableStore>* new_set =
      new (zone->New(sizeof(ZoneSet<UnobservableStore>)))
          ZoneSet<UnobservableStore>(zone);
  // Copy over all elements that have a different offset.
  for (auto obs : *set()) {
    if (obs.offset_ != offset) {
      new_set->insert(obs);
    }
  }

  return UnobservablesSet(new_set);
}

// Used for debugging.
bool UnobservablesSet::operator==(const UnobservablesSet& other) const {
  if (IsUnvisited() || other.IsUnvisited()) {
    return IsEmpty() && other.IsEmpty();
  } else {
    // Both pointers are guaranteed not to be nullptr.
    return *set() == *other.set();
  }
}

bool UnobservablesSet::operator!=(const UnobservablesSet& other) const {
  return !(*this == other);
}

bool UnobservableStore::operator==(const UnobservableStore other) const {
  return (id_ == other.id_) && (offset_ == other.offset_);
}

bool UnobservableStore::operator<(const UnobservableStore other) const {
  return (id_ < other.id_) || (id_ == other.id_ && offset_ < other.offset_);
}
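
// The lexicographic (id_, offset_) ordering above is the strict weak
// ordering that ZoneSet<UnobservableStore> relies on; it also groups all
// entries for the same object node together, although RemoveSameOffset
// still scans the whole set.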

#undef TRACE
#undef CHECK_EXTRA
#undef DCHECK_EXTRA

}  // namespace compiler
}  // namespace internal
}  // namespace v8