V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
state-values-utils.cc
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/state-values-utils.h"

#include "src/bit-vector.h"

namespace v8 {
namespace internal {
namespace compiler {

StateValuesCache::StateValuesCache(JSGraph* js_graph)
    : js_graph_(js_graph),
      hash_map_(AreKeysEqual, ZoneHashMap::kDefaultHashMapCapacity,
                ZoneAllocationPolicy(zone())),
      working_space_(zone()),
      empty_state_values_(nullptr) {}

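// Note on the key scheme (a reading aid, inferred from the code below): the
// hash map stores already-built nodes wrapped in a NodeKey, while lookups
// probe with a StateValuesKey (whose node field is nullptr) describing the
// wanted inputs and sparse input mask. AreKeysEqual dispatches on which
// combination of the two it is comparing.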
// static
bool StateValuesCache::AreKeysEqual(void* key1, void* key2) {
  NodeKey* node_key1 = reinterpret_cast<NodeKey*>(key1);
  NodeKey* node_key2 = reinterpret_cast<NodeKey*>(key2);

  if (node_key1->node == nullptr) {
    if (node_key2->node == nullptr) {
      return AreValueKeysEqual(reinterpret_cast<StateValuesKey*>(key1),
                               reinterpret_cast<StateValuesKey*>(key2));
    } else {
      return IsKeysEqualToNode(reinterpret_cast<StateValuesKey*>(key1),
                               node_key2->node);
    }
  } else {
    if (node_key2->node == nullptr) {
      // If the nodes are already processed, they must be the same.
      return IsKeysEqualToNode(reinterpret_cast<StateValuesKey*>(key2),
                               node_key1->node);
    } else {
      return node_key1->node == node_key2->node;
    }
  }
  UNREACHABLE();
}


// static
bool StateValuesCache::IsKeysEqualToNode(StateValuesKey* key, Node* node) {
  if (key->count != static_cast<size_t>(node->InputCount())) {
    return false;
  }

  DCHECK_EQ(IrOpcode::kStateValues, node->opcode());
  SparseInputMask node_mask = SparseInputMaskOf(node->op());

  if (node_mask != key->mask) {
    return false;
  }

  // Comparing real inputs rather than sparse inputs, since we already know the
  // sparse input masks are the same.
  for (size_t i = 0; i < key->count; i++) {
    if (key->values[i] != node->InputAt(static_cast<int>(i))) {
      return false;
    }
  }
  return true;
}


// static
bool StateValuesCache::AreValueKeysEqual(StateValuesKey* key1,
                                         StateValuesKey* key2) {
  if (key1->count != key2->count) {
    return false;
  }
  if (key1->mask != key2->mask) {
    return false;
  }
  for (size_t i = 0; i < key1->count; i++) {
    if (key1->values[i] != key2->values[i]) {
      return false;
    }
  }
  return true;
}


Node* StateValuesCache::GetEmptyStateValues() {
  if (empty_state_values_ == nullptr) {
    empty_state_values_ =
        graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
  }
  return empty_state_values_;
}

StateValuesCache::WorkingBuffer* StateValuesCache::GetWorkingSpace(
    size_t level) {
  if (working_space_.size() <= level) {
    working_space_.resize(level + 1);
  }
  return &working_space_[level];
}

namespace {

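// Hash a candidate input array by mixing the node ids (a nullptr slot, i.e.
// an optimized-out value, hashes as 0); the result is masked to 31 bits so
// the cast to int stays non-negative.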
int StateValuesHashKey(Node** nodes, size_t count) {
  size_t hash = count;
  for (size_t i = 0; i < count; i++) {
    hash = hash * 23 + (nodes[i] == nullptr ? 0 : nodes[i]->id());
  }
  return static_cast<int>(hash & 0x7FFFFFFF);
}

}  // namespace

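// Return the cached StateValues node for the given inputs and mask, creating
// it on a cache miss. On a miss the freshly built node is memoized, and the
// entry's key is replaced by a zone-allocated NodeKey so that later
// comparisons can short-circuit on node identity.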
Node* StateValuesCache::GetValuesNodeFromCache(Node** nodes, size_t count,
                                               SparseInputMask mask) {
  StateValuesKey key(count, mask, nodes);
  int hash = StateValuesHashKey(nodes, count);
  ZoneHashMap::Entry* lookup =
      hash_map_.LookupOrInsert(&key, hash, ZoneAllocationPolicy(zone()));
  DCHECK_NOT_NULL(lookup);
  Node* node;
  if (lookup->value == nullptr) {
    int node_count = static_cast<int>(count);
    node = graph()->NewNode(common()->StateValues(node_count, mask), node_count,
                            nodes);
    NodeKey* new_key = new (zone()->New(sizeof(NodeKey))) NodeKey(node);
    lookup->key = new_key;
    lookup->value = node;
  } else {
    node = reinterpret_cast<Node*>(lookup->value);
  }
  return node;
}

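// Copy live values from |values| into |node_buffer|, building the sparse
// input mask as it goes: each live slot sets a 1 bit at its virtual
// (pre-compaction) position, optimized-out slots leave a 0 bit, and an end
// marker terminates the mask. Filling stops once the values, the buffer, or
// the mask bits are exhausted.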
SparseInputMask::BitMaskType StateValuesCache::FillBufferWithValues(
    WorkingBuffer* node_buffer, size_t* node_count, size_t* values_idx,
    Node** values, size_t count, const BitVector* liveness,
    int liveness_offset) {
  SparseInputMask::BitMaskType input_mask = 0;

  // Virtual nodes are the live nodes plus the implicit optimized out nodes,
  // which are implied by the liveness mask.
  size_t virtual_node_count = *node_count;

  while (*values_idx < count && *node_count < kMaxInputCount &&
         virtual_node_count < SparseInputMask::kMaxSparseInputs) {
    DCHECK_LE(*values_idx, static_cast<size_t>(INT_MAX));

    if (liveness == nullptr ||
        liveness->Contains(liveness_offset + static_cast<int>(*values_idx))) {
      input_mask |= 1 << (virtual_node_count);
      (*node_buffer)[(*node_count)++] = values[*values_idx];
    }
    virtual_node_count++;

    (*values_idx)++;
  }

  DCHECK_GE(StateValuesCache::kMaxInputCount, *node_count);
  DCHECK_GE(SparseInputMask::kMaxSparseInputs, virtual_node_count);

  // Add the end marker at the end of the mask.
  input_mask |= SparseInputMask::kEndMarker << virtual_node_count;

  return input_mask;
}

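// Recursively pack |values| into a tree of StateValues nodes at most |level|
// levels deep. Level 0 fills the node with (sparse) values directly; higher
// levels fill it with subtrees, except that a short tail of remaining values
// is inlined sparsely into the current node rather than spilling into yet
// another subtree.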
Node* StateValuesCache::BuildTree(size_t* values_idx, Node** values,
                                  size_t count, const BitVector* liveness,
                                  int liveness_offset, size_t level) {
  WorkingBuffer* node_buffer = GetWorkingSpace(level);
  size_t node_count = 0;
  SparseInputMask::BitMaskType input_mask = SparseInputMask::kDenseBitMask;

  if (level == 0) {
    input_mask = FillBufferWithValues(node_buffer, &node_count, values_idx,
                                      values, count, liveness, liveness_offset);
    // Make sure we returned a sparse input mask.
    DCHECK_NE(input_mask, SparseInputMask::kDenseBitMask);
  } else {
    while (*values_idx < count && node_count < kMaxInputCount) {
      if (count - *values_idx < kMaxInputCount - node_count) {
        // If we have fewer values remaining than inputs remaining, dump the
        // remaining values into this node.
        // TODO(leszeks): We could optimise this further by only counting
        // remaining live nodes.

        size_t previous_input_count = node_count;
        input_mask =
            FillBufferWithValues(node_buffer, &node_count, values_idx, values,
                                 count, liveness, liveness_offset);
        // Make sure we have exhausted our values.
        DCHECK_EQ(*values_idx, count);
        // Make sure we returned a sparse input mask.
        DCHECK_NE(input_mask, SparseInputMask::kDenseBitMask);

        // Make sure we haven't touched inputs below previous_input_count in the
        // mask.
        DCHECK_EQ(input_mask & ((1 << previous_input_count) - 1), 0u);
        // Mark all previous inputs as live.
        input_mask |= ((1 << previous_input_count) - 1);

        break;

      } else {
        // Otherwise, add the values to a subtree and add that as an input.
        Node* subtree = BuildTree(values_idx, values, count, liveness,
                                  liveness_offset, level - 1);
        (*node_buffer)[node_count++] = subtree;
        // Don't touch the bitmask, so that it stays dense.
      }
    }
  }

  if (node_count == 1 && input_mask == SparseInputMask::kDenseBitMask) {
    // Elide the StateValue node if there is only one, dense input. This will
    // only happen if we built a single subtree (as nodes with values are always
    // sparse), and so we can replace ourselves with it.
    DCHECK_EQ((*node_buffer)[0]->opcode(), IrOpcode::kStateValues);
    return (*node_buffer)[0];
  } else {
    return GetValuesNodeFromCache(node_buffer->data(), node_count,
                                  SparseInputMask(input_mask));
  }
}

#if DEBUG
namespace {

void CheckTreeContainsValues(Node* tree, Node** values, size_t count,
                             const BitVector* liveness, int liveness_offset) {
  DCHECK_EQ(count, StateValuesAccess(tree).size());

  int i;
  auto access = StateValuesAccess(tree);
  auto it = access.begin();
  auto itend = access.end();
  for (i = 0; it != itend; ++it, ++i) {
    if (liveness == nullptr || liveness->Contains(liveness_offset + i)) {
      DCHECK_EQ((*it).node, values[i]);
    } else {
      DCHECK_NULL((*it).node);
    }
  }
  DCHECK_EQ(static_cast<size_t>(i), count);
}

}  // namespace
#endif

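// Main entry point: return a (possibly cached, possibly tree-shaped)
// StateValues node covering |count| values, with dead values masked out
// according to |liveness|.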
Node* StateValuesCache::GetNodeForValues(Node** values, size_t count,
                                         const BitVector* liveness,
                                         int liveness_offset) {
#if DEBUG
  // Check that the values represent actual values, and not a tree of values.
  for (size_t i = 0; i < count; i++) {
    if (values[i] != nullptr) {
      DCHECK_NE(values[i]->opcode(), IrOpcode::kStateValues);
      DCHECK_NE(values[i]->opcode(), IrOpcode::kTypedStateValues);
    }
  }
  if (liveness != nullptr) {
    DCHECK_LE(liveness_offset + count, static_cast<size_t>(liveness->length()));

    for (size_t i = 0; i < count; i++) {
      if (liveness->Contains(liveness_offset + static_cast<int>(i))) {
        DCHECK_NOT_NULL(values[i]);
      }
    }
  }
#endif

  if (count == 0) {
    return GetEmptyStateValues();
  }

  // This is a worst-case tree height estimate, assuming that all values are
  // live. We could get a better estimate by counting zeroes in the liveness
  // vector, but there's no point -- any excess height in the tree will be
  // collapsed by the single-input elision at the end of BuildTree.
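  // For example, if kMaxInputCount were 8, one node could hold up to 8
  // values at height 0, up to 64 at height 1, up to 512 at height 2, and so
  // on (the constant's actual value is defined in the header).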
  size_t height = 0;
  size_t max_inputs = kMaxInputCount;
  while (count > max_inputs) {
    height++;
    max_inputs *= kMaxInputCount;
  }

  size_t values_idx = 0;
  Node* tree =
      BuildTree(&values_idx, values, count, liveness, liveness_offset, height);
  // The values should be exhausted by the end of BuildTree.
  DCHECK_EQ(values_idx, count);

  // The 'tree' must be rooted with a state value node.
  DCHECK_EQ(tree->opcode(), IrOpcode::kStateValues);

#if DEBUG
  CheckTreeContainsValues(tree, values, count, liveness, liveness_offset);
#endif

  return tree;
}

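// StateValuesAccess::iterator performs a depth-first walk over a StateValues
// tree, keeping a fixed-size stack of SparseInputMask input iterators. It
// yields one entry per virtual input: the real node for live slots, and
// nullptr for optimized-out ones.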
StateValuesAccess::iterator::iterator(Node* node) : current_depth_(0) {
  stack_[current_depth_] =
      SparseInputMaskOf(node->op()).IterateOverInputs(node);
  EnsureValid();
}

SparseInputMask::InputIterator* StateValuesAccess::iterator::Top() {
  DCHECK_LE(0, current_depth_);
  DCHECK_GT(kMaxInlineDepth, current_depth_);
  return &(stack_[current_depth_]);
}

void StateValuesAccess::iterator::Push(Node* node) {
  current_depth_++;
  CHECK_GT(kMaxInlineDepth, current_depth_);
  stack_[current_depth_] =
      SparseInputMaskOf(node->op()).IterateOverInputs(node);
}


void StateValuesAccess::iterator::Pop() {
  DCHECK_LE(0, current_depth_);
  current_depth_--;
}


bool StateValuesAccess::iterator::done() { return current_depth_ < 0; }


void StateValuesAccess::iterator::Advance() {
  Top()->Advance();
  EnsureValid();
}

void StateValuesAccess::iterator::EnsureValid() {
  while (true) {
    SparseInputMask::InputIterator* top = Top();

    if (top->IsEmpty()) {
      // We are on a valid (albeit optimized out) node.
      return;
    }

    if (top->IsEnd()) {
      // We have hit the end of this iterator. Pop the stack and move to the
      // next sibling iterator.
      Pop();
      if (done()) {
        // Stack is exhausted, we have reached the end.
        return;
      }
      Top()->Advance();
      continue;
    }

    // At this point the value is known to be live and within our input nodes.
    Node* value_node = top->GetReal();

    if (value_node->opcode() == IrOpcode::kStateValues ||
        value_node->opcode() == IrOpcode::kTypedStateValues) {
      // Nested state, we need to push to the stack.
      Push(value_node);
      continue;
    }

    // We are on a valid node, we can stop the iteration.
    return;
  }
}

Node* StateValuesAccess::iterator::node() { return Top()->Get(nullptr); }

MachineType StateValuesAccess::iterator::type() {
  Node* parent = Top()->parent();
  if (parent->opcode() == IrOpcode::kStateValues) {
    return MachineType::AnyTagged();
  } else {
    DCHECK_EQ(IrOpcode::kTypedStateValues, parent->opcode());

    if (Top()->IsEmpty()) {
      return MachineType::None();
    } else {
      ZoneVector<MachineType> const* types = MachineTypesOf(parent->op());
      return (*types)[Top()->real_index()];
    }
  }
}


bool StateValuesAccess::iterator::operator!=(iterator& other) {
  // We only allow comparison with end().
  CHECK(other.done());
  return !done();
}


StateValuesAccess::iterator& StateValuesAccess::iterator::operator++() {
  Advance();
  return *this;
}


StateValuesAccess::TypedNode StateValuesAccess::iterator::operator*() {
  return TypedNode(node(), type());
}

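// Count the virtual inputs of the tree: optimized-out slots count as one
// each, nested StateValues/TypedStateValues nodes are counted recursively,
// and any other input counts as a single value.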
size_t StateValuesAccess::size() {
  size_t count = 0;
  SparseInputMask mask = SparseInputMaskOf(node_->op());

  SparseInputMask::InputIterator iterator = mask.IterateOverInputs(node_);

  for (; !iterator.IsEnd(); iterator.Advance()) {
    if (iterator.IsEmpty()) {
      count++;
    } else {
      Node* value = iterator.GetReal();
      if (value->opcode() == IrOpcode::kStateValues ||
          value->opcode() == IrOpcode::kTypedStateValues) {
        count += StateValuesAccess(value).size();
      } else {
        count++;
      }
    }
  }

  return count;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8