V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
js-native-context-specialization.cc
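This file implements JSNativeContextSpecialization, an AdvancedReducer that TurboFan runs while the native context is known, folding context-dependent operations (global loads and stores, instanceof, promise resolution, and named/keyed property access) into cheaper graph nodes. The sketch below illustrates how a reducer with the constructor shown at lines 60-74 might be attached to a GraphReducer; the objects it uses (jsgraph, broker, dependencies, native_context, and the zones) are placeholders for values normally supplied by the compiler pipeline, so treat this as an illustrative sketch rather than the exact wiring used by V8 or Deno.

// Illustrative sketch only; all surrounding objects are assumed to be
// provided by the TurboFan pipeline.
GraphReducer graph_reducer(temp_zone, jsgraph->graph(), jsgraph->Dead());
JSNativeContextSpecialization native_context_specialization(
    &graph_reducer, jsgraph, broker, JSNativeContextSpecialization::kNoFlags,
    native_context, dependencies, temp_zone, shared_zone);
graph_reducer.AddReducer(&native_context_specialization);
graph_reducer.ReduceGraph();  // Runs Reduce() over the graph until a fixpoint.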
1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/compiler/js-native-context-specialization.h"
6 
7 #include "src/accessors.h"
8 #include "src/api-inl.h"
9 #include "src/code-factory.h"
10 #include "src/compiler/access-builder.h"
11 #include "src/compiler/access-info.h"
12 #include "src/compiler/allocation-builder.h"
13 #include "src/compiler/compilation-dependencies.h"
14 #include "src/compiler/js-graph.h"
15 #include "src/compiler/js-operator.h"
16 #include "src/compiler/linkage.h"
17 #include "src/compiler/node-matchers.h"
18 #include "src/compiler/property-access-builder.h"
19 #include "src/compiler/type-cache.h"
20 #include "src/dtoa.h"
21 #include "src/feedback-vector.h"
22 #include "src/field-index-inl.h"
23 #include "src/isolate-inl.h"
24 #include "src/objects/js-array-buffer-inl.h"
25 #include "src/objects/js-array-inl.h"
26 #include "src/objects/templates.h"
27 #include "src/string-constants.h"
28 #include "src/vector-slot-pair.h"
29 
30 namespace v8 {
31 namespace internal {
32 namespace compiler {
33 
34 // This is needed for gc_mole which will compile this file without the full set
35 // of GN defined macros.
36 #ifndef V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP
37 #define V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP 64
38 #endif
39 
40 namespace {
41 
42 bool HasNumberMaps(JSHeapBroker* broker, MapHandles const& maps) {
43  for (auto map : maps) {
44  MapRef map_ref(broker, map);
45  if (map_ref.IsHeapNumberMap()) return true;
46  }
47  return false;
48 }
49 
50 bool HasOnlyJSArrayMaps(JSHeapBroker* broker, MapHandles const& maps) {
51  for (auto map : maps) {
52  MapRef map_ref(broker, map);
53  if (!map_ref.IsJSArrayMap()) return false;
54  }
55  return true;
56 }
57 
58 } // namespace
59 
60 JSNativeContextSpecialization::JSNativeContextSpecialization(
61  Editor* editor, JSGraph* jsgraph, JSHeapBroker* broker, Flags flags,
62  Handle<Context> native_context, CompilationDependencies* dependencies,
63  Zone* zone, Zone* shared_zone)
64  : AdvancedReducer(editor),
65  jsgraph_(jsgraph),
66  broker_(broker),
67  flags_(flags),
68  global_object_(native_context->global_object(), jsgraph->isolate()),
69  global_proxy_(JSGlobalProxy::cast(native_context->global_proxy()),
70  jsgraph->isolate()),
71  dependencies_(dependencies),
72  zone_(zone),
73  shared_zone_(shared_zone),
74  type_cache_(TypeCache::Get()) {}
75 
76 Reduction JSNativeContextSpecialization::Reduce(Node* node) {
77  switch (node->opcode()) {
78  case IrOpcode::kJSAdd:
79  return ReduceJSAdd(node);
80  case IrOpcode::kJSAsyncFunctionEnter:
81  return ReduceJSAsyncFunctionEnter(node);
82  case IrOpcode::kJSAsyncFunctionReject:
83  return ReduceJSAsyncFunctionReject(node);
84  case IrOpcode::kJSAsyncFunctionResolve:
85  return ReduceJSAsyncFunctionResolve(node);
86  case IrOpcode::kJSGetSuperConstructor:
87  return ReduceJSGetSuperConstructor(node);
88  case IrOpcode::kJSInstanceOf:
89  return ReduceJSInstanceOf(node);
90  case IrOpcode::kJSHasInPrototypeChain:
91  return ReduceJSHasInPrototypeChain(node);
92  case IrOpcode::kJSOrdinaryHasInstance:
93  return ReduceJSOrdinaryHasInstance(node);
94  case IrOpcode::kJSPromiseResolve:
95  return ReduceJSPromiseResolve(node);
96  case IrOpcode::kJSResolvePromise:
97  return ReduceJSResolvePromise(node);
98  case IrOpcode::kJSLoadContext:
99  return ReduceJSLoadContext(node);
100  case IrOpcode::kJSLoadGlobal:
101  return ReduceJSLoadGlobal(node);
102  case IrOpcode::kJSStoreGlobal:
103  return ReduceJSStoreGlobal(node);
104  case IrOpcode::kJSLoadNamed:
105  return ReduceJSLoadNamed(node);
106  case IrOpcode::kJSStoreNamed:
107  return ReduceJSStoreNamed(node);
108  case IrOpcode::kJSLoadProperty:
109  return ReduceJSLoadProperty(node);
110  case IrOpcode::kJSStoreProperty:
111  return ReduceJSStoreProperty(node);
112  case IrOpcode::kJSStoreNamedOwn:
113  return ReduceJSStoreNamedOwn(node);
114  case IrOpcode::kJSStoreDataPropertyInLiteral:
115  return ReduceJSStoreDataPropertyInLiteral(node);
116  case IrOpcode::kJSStoreInArrayLiteral:
117  return ReduceJSStoreInArrayLiteral(node);
118  case IrOpcode::kJSToObject:
119  return ReduceJSToObject(node);
120  case IrOpcode::kJSToString:
121  return ReduceJSToString(node);
122  default:
123  break;
124  }
125  return NoChange();
126 }
127 
128 // static
129 base::Optional<size_t> JSNativeContextSpecialization::GetMaxStringLength(
130  JSHeapBroker* broker, Node* node) {
131  if (node->opcode() == IrOpcode::kDelayedStringConstant) {
132  return StringConstantBaseOf(node->op())->GetMaxStringConstantLength();
133  }
134 
135  HeapObjectMatcher matcher(node);
136  if (matcher.HasValue() && matcher.Ref(broker).IsString()) {
137  StringRef input = matcher.Ref(broker).AsString();
138  return input.length();
139  }
140 
141  NumberMatcher number_matcher(node);
142  if (number_matcher.HasValue()) {
143  return kBase10MaximalLength + 1;
144  }
145 
146  // We don't support objects with possibly monkey-patched prototype.toString
147  // as it might have side-effects, so we shouldn't attempt lowering them.
148  return base::nullopt;
149 }
150 
151 Reduction JSNativeContextSpecialization::ReduceJSToString(Node* node) {
152  DCHECK_EQ(IrOpcode::kJSToString, node->opcode());
153  Node* const input = node->InputAt(0);
154  Reduction reduction;
155 
156  HeapObjectMatcher matcher(input);
157  if (matcher.HasValue() && matcher.Ref(broker()).IsString()) {
158  reduction = Changed(input); // JSToString(x:string) => x
159  ReplaceWithValue(node, reduction.replacement());
160  return reduction;
161  }
162 
163  // TODO(turbofan): This optimization is weaker than what we used to have
164  // in js-typed-lowering for OrderedNumbers. We don't have types here though,
165  // so an alternative approach should be designed if this causes performance
166  // regressions and the stronger optimization should be re-implemented.
167  NumberMatcher number_matcher(input);
168  if (number_matcher.HasValue()) {
169  const StringConstantBase* base =
170  new (shared_zone()) NumberToStringConstant(number_matcher.Value());
171  reduction =
172  Replace(graph()->NewNode(common()->DelayedStringConstant(base)));
173  ReplaceWithValue(node, reduction.replacement());
174  return reduction;
175  }
176 
177  return NoChange();
178 }
179 
180 const StringConstantBase*
181 JSNativeContextSpecialization::CreateDelayedStringConstant(Node* node) {
182  if (node->opcode() == IrOpcode::kDelayedStringConstant) {
183  return StringConstantBaseOf(node->op());
184  } else {
185  NumberMatcher number_matcher(node);
186  if (number_matcher.HasValue()) {
187  return new (shared_zone()) NumberToStringConstant(number_matcher.Value());
188  } else {
189  HeapObjectMatcher matcher(node);
190  if (matcher.HasValue() && matcher.Ref(broker()).IsString()) {
191  StringRef s = matcher.Ref(broker()).AsString();
192  return new (shared_zone())
193  StringLiteral(s.object(), static_cast<size_t>(s.length()));
194  } else {
195  UNREACHABLE();
196  }
197  }
198  }
199 }
200 
201 namespace {
202 bool IsStringConstant(JSHeapBroker* broker, Node* node) {
203  if (node->opcode() == IrOpcode::kDelayedStringConstant) {
204  return true;
205  }
206 
207  HeapObjectMatcher matcher(node);
208  return matcher.HasValue() && matcher.Ref(broker).IsString();
209 }
210 }  // namespace
211 
212 Reduction JSNativeContextSpecialization::ReduceJSAsyncFunctionEnter(
213  Node* node) {
214  DCHECK_EQ(IrOpcode::kJSAsyncFunctionEnter, node->opcode());
215  Node* closure = NodeProperties::GetValueInput(node, 0);
216  Node* receiver = NodeProperties::GetValueInput(node, 1);
217  Node* context = NodeProperties::GetContextInput(node);
218  Node* frame_state = NodeProperties::GetFrameStateInput(node);
219  Node* effect = NodeProperties::GetEffectInput(node);
220  Node* control = NodeProperties::GetControlInput(node);
221  if (!isolate()->IsPromiseHookProtectorIntact()) return NoChange();
222 
223  // Install a code dependency on the promise hook protector cell.
224  dependencies()->DependOnProtector(
225  PropertyCellRef(broker(), factory()->promise_hook_protector()));
226 
227  // Create the promise for the async function.
228  Node* promise = effect =
229  graph()->NewNode(javascript()->CreatePromise(), context, effect);
230 
231  // Create the JSAsyncFunctionObject based on the SharedFunctionInfo
232  // extracted from the top-most frame in {frame_state}.
233  Handle<SharedFunctionInfo> shared =
234  FrameStateInfoOf(frame_state->op()).shared_info().ToHandleChecked();
235  DCHECK(shared->is_compiled());
236  int register_count = shared->internal_formal_parameter_count() +
237  shared->GetBytecodeArray()->register_count();
238  Node* value = effect =
239  graph()->NewNode(javascript()->CreateAsyncFunctionObject(register_count),
240  closure, receiver, promise, context, effect, control);
241  ReplaceWithValue(node, value, effect, control);
242  return Replace(value);
243 }
244 
245 Reduction JSNativeContextSpecialization::ReduceJSAsyncFunctionReject(
246  Node* node) {
247  DCHECK_EQ(IrOpcode::kJSAsyncFunctionReject, node->opcode());
248  Node* async_function_object = NodeProperties::GetValueInput(node, 0);
249  Node* reason = NodeProperties::GetValueInput(node, 1);
250  Node* context = NodeProperties::GetContextInput(node);
251  Node* frame_state = NodeProperties::GetFrameStateInput(node);
252  Node* effect = NodeProperties::GetEffectInput(node);
253  Node* control = NodeProperties::GetControlInput(node);
254  if (!isolate()->IsPromiseHookProtectorIntact()) return NoChange();
255 
256  // Install a code dependency on the promise hook protector cell.
257  dependencies()->DependOnProtector(
258  PropertyCellRef(broker(), factory()->promise_hook_protector()));
259 
260  // Load the promise from the {async_function_object}.
261  Node* promise = effect = graph()->NewNode(
262  simplified()->LoadField(AccessBuilder::ForJSAsyncFunctionObjectPromise()),
263  async_function_object, effect, control);
264 
265  // Create a nested frame state inside the current method's most-recent
266  // {frame_state} that will ensure that lazy deoptimizations at this
267  // point will still return the {promise} instead of the result of the
268  // JSRejectPromise operation (which yields undefined).
269  Node* parameters[] = {promise};
270  frame_state = CreateStubBuiltinContinuationFrameState(
271  jsgraph(), Builtins::kAsyncFunctionLazyDeoptContinuation, context,
272  parameters, arraysize(parameters), frame_state,
273  ContinuationFrameStateMode::LAZY);
274 
275  // Disable the additional debug event for the rejection since a
276  // debug event already happened for the exception that got us here.
277  Node* debug_event = jsgraph()->FalseConstant();
278  effect = graph()->NewNode(javascript()->RejectPromise(), promise, reason,
279  debug_event, context, frame_state, effect, control);
280  ReplaceWithValue(node, promise, effect, control);
281  return Replace(promise);
282 }
283 
284 Reduction JSNativeContextSpecialization::ReduceJSAsyncFunctionResolve(
285  Node* node) {
286  DCHECK_EQ(IrOpcode::kJSAsyncFunctionResolve, node->opcode());
287  Node* async_function_object = NodeProperties::GetValueInput(node, 0);
288  Node* value = NodeProperties::GetValueInput(node, 1);
289  Node* context = NodeProperties::GetContextInput(node);
290  Node* frame_state = NodeProperties::GetFrameStateInput(node);
291  Node* effect = NodeProperties::GetEffectInput(node);
292  Node* control = NodeProperties::GetControlInput(node);
293  if (!isolate()->IsPromiseHookProtectorIntact()) return NoChange();
294 
295  // Install a code dependency on the promise hook protector cell.
296  dependencies()->DependOnProtector(
297  PropertyCellRef(broker(), factory()->promise_hook_protector()));
298 
299  // Load the promise from the {async_function_object}.
300  Node* promise = effect = graph()->NewNode(
301  simplified()->LoadField(AccessBuilder::ForJSAsyncFunctionObjectPromise()),
302  async_function_object, effect, control);
303 
304  // Create a nested frame state inside the current method's most-recent
305  // {frame_state} that will ensure that lazy deoptimizations at this
306  // point will still return the {promise} instead of the result of the
307  // JSResolvePromise operation (which yields undefined).
308  Node* parameters[] = {promise};
309  frame_state = CreateStubBuiltinContinuationFrameState(
310  jsgraph(), Builtins::kAsyncFunctionLazyDeoptContinuation, context,
311  parameters, arraysize(parameters), frame_state,
312  ContinuationFrameStateMode::LAZY);
313 
314  effect = graph()->NewNode(javascript()->ResolvePromise(), promise, value,
315  context, frame_state, effect, control);
316  ReplaceWithValue(node, promise, effect, control);
317  return Replace(promise);
318 }
319 
320 Reduction JSNativeContextSpecialization::ReduceJSAdd(Node* node) {
321  // TODO(turbofan): This has to run together with the inlining and
322  // native context specialization to be able to leverage the string
323  // constant-folding for optimizing property access, but we should
324  // nevertheless find a better home for this at some point.
325  DCHECK_EQ(IrOpcode::kJSAdd, node->opcode());
326 
327  Node* const lhs = node->InputAt(0);
328  Node* const rhs = node->InputAt(1);
329 
330  base::Optional<size_t> lhs_len = GetMaxStringLength(broker(), lhs);
331  base::Optional<size_t> rhs_len = GetMaxStringLength(broker(), rhs);
332  if (!lhs_len || !rhs_len) {
333  return NoChange();
334  }
335 
336  // Fold into DelayedStringConstant if at least one of the parameters is a
337  // string constant and the addition won't throw due to too long result.
338  if (*lhs_len + *rhs_len <= String::kMaxLength &&
339  (IsStringConstant(broker(), lhs) || IsStringConstant(broker(), rhs))) {
340  const StringConstantBase* left = CreateDelayedStringConstant(lhs);
341  const StringConstantBase* right = CreateDelayedStringConstant(rhs);
342  const StringConstantBase* cons =
343  new (shared_zone()) StringCons(left, right);
344 
345  Node* reduced = graph()->NewNode(common()->DelayedStringConstant(cons));
346  ReplaceWithValue(node, reduced);
347  return Replace(reduced);
348  }
349 
350  return NoChange();
351 }
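// Illustration (not part of the original source): if lhs is the string
// constant "id-" and rhs is the number constant 42, the JSAdd above is
// replaced by a DelayedStringConstant wrapping
// StringCons(StringLiteral("id-"), NumberToStringConstant(42)); the
// concatenated value "id-42" is produced when that delayed constant is
// materialized later, instead of by a generic string addition at runtime.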
352 
353 Reduction JSNativeContextSpecialization::ReduceJSGetSuperConstructor(
354  Node* node) {
355  DCHECK_EQ(IrOpcode::kJSGetSuperConstructor, node->opcode());
356  Node* constructor = NodeProperties::GetValueInput(node, 0);
357 
358  // Check if the input is a known JSFunction.
359  HeapObjectMatcher m(constructor);
360  if (!m.HasValue()) return NoChange();
361  JSFunctionRef function = m.Ref(broker()).AsJSFunction();
362  MapRef function_map = function.map();
363  // TODO(neis): Remove SerializePrototype call once brokerization is complete.
364  function_map.SerializePrototype();
365  ObjectRef function_prototype = function_map.prototype();
366 
367  // We can constant-fold the super constructor access if the
368  // {function}s map is stable, i.e. we can use a code dependency
369  // to guard against [[Prototype]] changes of {function}.
370  if (function_map.is_stable() && function_prototype.IsHeapObject() &&
371  function_prototype.AsHeapObject().map().is_constructor()) {
372  dependencies()->DependOnStableMap(function_map);
373  Node* value = jsgraph()->Constant(function_prototype);
374  ReplaceWithValue(node, value);
375  return Replace(value);
376  }
377 
378  return NoChange();
379 }
380 
381 Reduction JSNativeContextSpecialization::ReduceJSInstanceOf(Node* node) {
382  DCHECK_EQ(IrOpcode::kJSInstanceOf, node->opcode());
383  FeedbackParameter const& p = FeedbackParameterOf(node->op());
384  Node* object = NodeProperties::GetValueInput(node, 0);
385  Node* constructor = NodeProperties::GetValueInput(node, 1);
386  Node* context = NodeProperties::GetContextInput(node);
387  Node* effect = NodeProperties::GetEffectInput(node);
388  Node* frame_state = NodeProperties::GetFrameStateInput(node);
389  Node* control = NodeProperties::GetControlInput(node);
390 
391  // Check if the right hand side is a known {receiver}, or
392  // we have feedback from the InstanceOfIC.
393  Handle<JSObject> receiver;
394  HeapObjectMatcher m(constructor);
395  if (m.HasValue() && m.Value()->IsJSObject()) {
396  receiver = Handle<JSObject>::cast(m.Value());
397  } else if (p.feedback().IsValid()) {
398  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
399  if (!nexus.GetConstructorFeedback().ToHandle(&receiver)) return NoChange();
400  } else {
401  return NoChange();
402  }
403  Handle<Map> receiver_map(receiver->map(), isolate());
404 
405  // Compute property access info for @@hasInstance on the constructor.
406  PropertyAccessInfo access_info;
407  AccessInfoFactory access_info_factory(
408  broker(), dependencies(), native_context().object(), graph()->zone());
409  if (!access_info_factory.ComputePropertyAccessInfo(
410  receiver_map, factory()->has_instance_symbol(), AccessMode::kLoad,
411  &access_info)) {
412  return NoChange();
413  }
414  DCHECK_EQ(access_info.receiver_maps().size(), 1);
415  DCHECK_EQ(access_info.receiver_maps()[0].address(), receiver_map.address());
416 
417  PropertyAccessBuilder access_builder(jsgraph(), broker(), dependencies());
418 
419  if (access_info.IsNotFound()) {
420  // If there's no @@hasInstance handler, the OrdinaryHasInstance operation
421  // takes over, but that requires the constructor to be callable.
422  if (!receiver_map->is_callable()) return NoChange();
423 
424  // Determine actual holder and perform prototype chain checks.
425  Handle<JSObject> holder;
426  if (access_info.holder().ToHandle(&holder)) {
427  dependencies()->DependOnStablePrototypeChains(
428  broker(), access_info.receiver_maps(), JSObjectRef(broker(), holder));
429  }
430 
431  // Monomorphic property access.
432  access_builder.BuildCheckMaps(constructor, &effect, control,
433  access_info.receiver_maps());
434 
435  // Lower to OrdinaryHasInstance(C, O).
436  NodeProperties::ReplaceValueInput(node, constructor, 0);
437  NodeProperties::ReplaceValueInput(node, object, 1);
438  NodeProperties::ReplaceEffectInput(node, effect);
439  NodeProperties::ChangeOp(node, javascript()->OrdinaryHasInstance());
440  Reduction const reduction = ReduceJSOrdinaryHasInstance(node);
441  return reduction.Changed() ? reduction : Changed(node);
442  }
443 
444  if (access_info.IsDataConstant() || access_info.IsDataConstantField()) {
445  // Determine actual holder.
446  Handle<JSObject> holder;
447  bool found_on_proto = access_info.holder().ToHandle(&holder);
448  if (!found_on_proto) holder = receiver;
449 
450  Handle<Object> constant;
451  if (access_info.IsDataConstant()) {
452  DCHECK(!FLAG_track_constant_fields);
453  constant = access_info.constant();
454  } else {
455  DCHECK(FLAG_track_constant_fields);
456  DCHECK(access_info.IsDataConstantField());
457  FieldIndex field_index = access_info.field_index();
458  constant = JSObject::FastPropertyAt(holder, Representation::Tagged(),
459  field_index);
460  if (!constant->IsCallable()) {
461  return NoChange();
462  }
463 
464  // Install dependency on constness. Unfortunately, access_info does not
465  // track descriptor index, so we have to search for it.
466  MapRef holder_map(broker(), handle(holder->map(), isolate()));
467  Handle<DescriptorArray> descriptors(
468  holder_map.object()->instance_descriptors(), isolate());
469  int descriptor_index = descriptors->Search(
470  *(factory()->has_instance_symbol()), *(holder_map.object()));
471  CHECK_NE(descriptor_index, DescriptorArray::kNotFound);
472  holder_map.SerializeOwnDescriptors();
473  dependencies()->DependOnFieldType(holder_map, descriptor_index);
474  }
475 
476  if (found_on_proto) {
477  dependencies()->DependOnStablePrototypeChains(
478  broker(), access_info.receiver_maps(), JSObjectRef(broker(), holder));
479  }
480 
481  DCHECK(constant->IsCallable());
482 
483  // Check that {constructor} is actually {receiver}.
484  constructor =
485  access_builder.BuildCheckValue(constructor, &effect, control, receiver);
486 
487  // Monomorphic property access.
488  access_builder.BuildCheckMaps(constructor, &effect, control,
489  access_info.receiver_maps());
490 
491  // Create a nested frame state inside the current method's most-recent frame
492  // state that will ensure that deopts that happen after this point will not
493  // fall back to the last Checkpoint--which would completely re-execute the
494  // instanceof logic--but rather create an activation of a version of the
495  // ToBoolean stub that finishes the remaining work of instanceof and returns
496  // to the caller without duplicating side-effects upon a lazy deopt.
497  Node* continuation_frame_state = CreateStubBuiltinContinuationFrameState(
498  jsgraph(), Builtins::kToBooleanLazyDeoptContinuation, context, nullptr,
499  0, frame_state, ContinuationFrameStateMode::LAZY);
500 
501  // Call the @@hasInstance handler.
502  Node* target = jsgraph()->Constant(constant);
503  node->InsertInput(graph()->zone(), 0, target);
504  node->ReplaceInput(1, constructor);
505  node->ReplaceInput(2, object);
506  node->ReplaceInput(4, continuation_frame_state);
507  node->ReplaceInput(5, effect);
508  NodeProperties::ChangeOp(
509  node, javascript()->Call(3, CallFrequency(), VectorSlotPair(),
510  ConvertReceiverMode::kNotNullOrUndefined));
511 
512  // Rewire the value uses of {node} to ToBoolean conversion of the result.
513  Node* value = graph()->NewNode(simplified()->ToBoolean(), node);
514  for (Edge edge : node->use_edges()) {
515  if (NodeProperties::IsValueEdge(edge) && edge.from() != value) {
516  edge.UpdateTo(value);
517  Revisit(edge.from());
518  }
519  }
520  return Changed(node);
521  }
522 
523  return NoChange();
524 }
525 
526 JSNativeContextSpecialization::InferHasInPrototypeChainResult
527 JSNativeContextSpecialization::InferHasInPrototypeChain(
528  Node* receiver, Node* effect, Handle<HeapObject> prototype) {
529  ZoneHandleSet<Map> receiver_maps;
530  NodeProperties::InferReceiverMapsResult result =
531  NodeProperties::InferReceiverMaps(broker(), receiver, effect,
532  &receiver_maps);
533  if (result == NodeProperties::kNoReceiverMaps) return kMayBeInPrototypeChain;
534 
535  // Check if either all or none of the {receiver_maps} have the given
536  // {prototype} in their prototype chain.
537  bool all = true;
538  bool none = true;
539  for (size_t i = 0; i < receiver_maps.size(); ++i) {
540  Handle<Map> receiver_map = receiver_maps[i];
541  if (receiver_map->instance_type() <= LAST_SPECIAL_RECEIVER_TYPE) {
542  return kMayBeInPrototypeChain;
543  }
544  if (result == NodeProperties::kUnreliableReceiverMaps) {
545  // In case of an unreliable {result} we need to ensure that all
546  // {receiver_maps} are stable, because otherwise we cannot trust
547  // the {receiver_maps} information, since arbitrary side-effects
548  // may have happened.
549  if (!receiver_map->is_stable()) {
550  return kMayBeInPrototypeChain;
551  }
552  }
553  for (PrototypeIterator j(isolate(), receiver_map);; j.Advance()) {
554  if (j.IsAtEnd()) {
555  all = false;
556  break;
557  }
558  Handle<HeapObject> const current =
559  PrototypeIterator::GetCurrent<HeapObject>(j);
560  if (current.is_identical_to(prototype)) {
561  none = false;
562  break;
563  }
564  if (!current->map()->is_stable() ||
565  current->map()->instance_type() <= LAST_SPECIAL_RECEIVER_TYPE) {
566  return kMayBeInPrototypeChain;
567  }
568  }
569  }
570  DCHECK_IMPLIES(all, !none);
571  DCHECK_IMPLIES(none, !all);
572 
573  if (all) return kIsInPrototypeChain;
574  if (none) return kIsNotInPrototypeChain;
575  return kMayBeInPrototypeChain;
576 }
577 
578 Reduction JSNativeContextSpecialization::ReduceJSHasInPrototypeChain(
579  Node* node) {
580  DCHECK_EQ(IrOpcode::kJSHasInPrototypeChain, node->opcode());
581  Node* value = NodeProperties::GetValueInput(node, 0);
582  Node* prototype = NodeProperties::GetValueInput(node, 1);
583  Node* effect = NodeProperties::GetEffectInput(node);
584 
585  // Check if we can constant-fold the prototype chain walk
586  // for the given {value} and the {prototype}.
587  HeapObjectMatcher m(prototype);
588  if (m.HasValue()) {
589  InferHasInPrototypeChainResult result =
590  InferHasInPrototypeChain(value, effect, m.Value());
591  if (result != kMayBeInPrototypeChain) {
592  Node* value = jsgraph()->BooleanConstant(result == kIsInPrototypeChain);
593  ReplaceWithValue(node, value);
594  return Replace(value);
595  }
596  }
597 
598  return NoChange();
599 }
600 
601 Reduction JSNativeContextSpecialization::ReduceJSOrdinaryHasInstance(
602  Node* node) {
603  DCHECK_EQ(IrOpcode::kJSOrdinaryHasInstance, node->opcode());
604  Node* constructor = NodeProperties::GetValueInput(node, 0);
605  Node* object = NodeProperties::GetValueInput(node, 1);
606 
607  // Check if the {constructor} is known at compile time.
608  HeapObjectMatcher m(constructor);
609  if (!m.HasValue()) return NoChange();
610 
611  // Check if the {constructor} is a JSBoundFunction.
612  if (m.Value()->IsJSBoundFunction()) {
613  // OrdinaryHasInstance on bound functions turns into a recursive
614  // invocation of the instanceof operator again.
615  // ES6 section 7.3.19 OrdinaryHasInstance (C, O) step 2.
616  Handle<JSBoundFunction> function = Handle<JSBoundFunction>::cast(m.Value());
617  Handle<JSReceiver> bound_target_function(function->bound_target_function(),
618  isolate());
619  NodeProperties::ReplaceValueInput(node, object, 0);
620  NodeProperties::ReplaceValueInput(
621  node, jsgraph()->HeapConstant(bound_target_function), 1);
622  NodeProperties::ChangeOp(node, javascript()->InstanceOf(VectorSlotPair()));
623  Reduction const reduction = ReduceJSInstanceOf(node);
624  return reduction.Changed() ? reduction : Changed(node);
625  }
626 
627  // Optimize if we currently know the "prototype" property.
628  if (m.Value()->IsJSFunction()) {
629  JSFunctionRef function = m.Ref(broker()).AsJSFunction();
630  // TODO(neis): This is a temporary hack needed because the copy reducer
631  // runs only after this pass.
632  function.Serialize();
633  // TODO(neis): Remove the has_prototype_slot condition once the broker is
634  // always enabled.
635  if (!function.map().has_prototype_slot() || !function.has_prototype() ||
636  function.PrototypeRequiresRuntimeLookup()) {
637  return NoChange();
638  }
639  ObjectRef prototype = dependencies()->DependOnPrototypeProperty(function);
640  Node* prototype_constant = jsgraph()->Constant(prototype);
641 
642  // Lower the {node} to JSHasInPrototypeChain.
643  NodeProperties::ReplaceValueInput(node, object, 0);
644  NodeProperties::ReplaceValueInput(node, prototype_constant, 1);
645  NodeProperties::ChangeOp(node, javascript()->HasInPrototypeChain());
646  Reduction const reduction = ReduceJSHasInPrototypeChain(node);
647  return reduction.Changed() ? reduction : Changed(node);
648  }
649 
650  return NoChange();
651 }
652 
653 // ES section #sec-promise-resolve
654 Reduction JSNativeContextSpecialization::ReduceJSPromiseResolve(Node* node) {
655  DCHECK_EQ(IrOpcode::kJSPromiseResolve, node->opcode());
656  Node* constructor = NodeProperties::GetValueInput(node, 0);
657  Node* value = NodeProperties::GetValueInput(node, 1);
658  Node* context = NodeProperties::GetContextInput(node);
659  Node* frame_state = NodeProperties::GetFrameStateInput(node);
660  Node* effect = NodeProperties::GetEffectInput(node);
661  Node* control = NodeProperties::GetControlInput(node);
662 
663  if (!isolate()->IsPromiseHookProtectorIntact()) {
664  return NoChange();
665  }
666 
667  // Check if the {constructor} is the %Promise% function.
668  HeapObjectMatcher m(constructor);
669  if (!m.HasValue() ||
670  !m.Ref(broker()).equals(broker()->native_context().promise_function())) {
671  return NoChange();
672  }
673 
674  // Check if we know something about the {value}.
675  ZoneHandleSet<Map> value_maps;
676  NodeProperties::InferReceiverMapsResult result =
677  NodeProperties::InferReceiverMaps(broker(), value, effect, &value_maps);
678  if (result == NodeProperties::kNoReceiverMaps) return NoChange();
679  DCHECK_NE(0, value_maps.size());
680 
681  // Check that the {value} cannot be a JSPromise.
682  for (Handle<Map> const value_map : value_maps) {
683  if (value_map->IsJSPromiseMap()) return NoChange();
684  }
685 
686  // Install a code dependency on the promise hook protector cell.
687  dependencies()->DependOnProtector(
688  PropertyCellRef(broker(), factory()->promise_hook_protector()));
689 
690  // Create a %Promise% instance and resolve it with {value}.
691  Node* promise = effect =
692  graph()->NewNode(javascript()->CreatePromise(), context, effect);
693  effect = graph()->NewNode(javascript()->ResolvePromise(), promise, value,
694  context, frame_state, effect, control);
695  ReplaceWithValue(node, promise, effect, control);
696  return Replace(promise);
697 }
698 
699 // ES section #sec-promise-resolve-functions
700 Reduction JSNativeContextSpecialization::ReduceJSResolvePromise(Node* node) {
701  DCHECK_EQ(IrOpcode::kJSResolvePromise, node->opcode());
702  Node* promise = NodeProperties::GetValueInput(node, 0);
703  Node* resolution = NodeProperties::GetValueInput(node, 1);
704  Node* context = NodeProperties::GetContextInput(node);
705  Node* effect = NodeProperties::GetEffectInput(node);
706  Node* control = NodeProperties::GetControlInput(node);
707 
708  // Check if we know something about the {resolution}.
709  ZoneHandleSet<Map> resolution_maps;
710  NodeProperties::InferReceiverMapsResult result =
711  NodeProperties::InferReceiverMaps(broker(), resolution, effect,
712  &resolution_maps);
713  if (result == NodeProperties::kNoReceiverMaps) return NoChange();
714  DCHECK_NE(0, resolution_maps.size());
715 
716  // When the {resolution_maps} information is unreliable, we can
717  // still optimize if all individual {resolution_maps} are stable.
718  if (result == NodeProperties::kUnreliableReceiverMaps) {
719  for (Handle<Map> resolution_map : resolution_maps) {
720  if (!resolution_map->is_stable()) return NoChange();
721  }
722  }
723 
724  // Compute property access info for "then" on {resolution}.
725  PropertyAccessInfo access_info;
726  AccessInfoFactory access_info_factory(
727  broker(), dependencies(), native_context().object(), graph()->zone());
728  if (!access_info_factory.ComputePropertyAccessInfo(
729  MapHandles(resolution_maps.begin(), resolution_maps.end()),
730  factory()->then_string(), AccessMode::kLoad, &access_info)) {
731  return NoChange();
732  }
733 
734  // We can further optimize the case where {resolution}
735  // definitely doesn't have a "then" property.
736  if (!access_info.IsNotFound()) return NoChange();
737  PropertyAccessBuilder access_builder(jsgraph(), broker(), dependencies());
738 
739  // Add proper dependencies on the {resolution}s [[Prototype]]s.
740  Handle<JSObject> holder;
741  if (access_info.holder().ToHandle(&holder)) {
742  dependencies()->DependOnStablePrototypeChains(
743  broker(), access_info.receiver_maps(), JSObjectRef(broker(), holder));
744  }
745 
746  // Add stability dependencies on the {resolution_maps}.
747  if (result == NodeProperties::kUnreliableReceiverMaps) {
748  for (Handle<Map> resolution_map : resolution_maps) {
749  dependencies()->DependOnStableMap(MapRef(broker(), resolution_map));
750  }
751  }
752 
753  // Simply fulfill the {promise} with the {resolution}.
754  Node* value = effect =
755  graph()->NewNode(javascript()->FulfillPromise(), promise, resolution,
756  context, effect, control);
757  ReplaceWithValue(node, value, effect, control);
758  return Replace(value);
759 }
760 
761 Reduction JSNativeContextSpecialization::ReduceJSLoadContext(Node* node) {
762  DCHECK_EQ(IrOpcode::kJSLoadContext, node->opcode());
763  ContextAccess const& access = ContextAccessOf(node->op());
764  // Specialize JSLoadContext(NATIVE_CONTEXT_INDEX) to the known native
765  // context (if any), so we can constant-fold those fields, which is
766  // safe, since the NATIVE_CONTEXT_INDEX slot is always immutable.
767  if (access.index() == Context::NATIVE_CONTEXT_INDEX) {
768  Node* value = jsgraph()->Constant(native_context());
769  ReplaceWithValue(node, value);
770  return Replace(value);
771  }
772  return NoChange();
773 }
774 
775 namespace {
776 
777 FieldAccess ForPropertyCellValue(MachineRepresentation representation,
778  Type type, MaybeHandle<Map> map,
779  Handle<Name> name) {
780  WriteBarrierKind kind = kFullWriteBarrier;
781  if (representation == MachineRepresentation::kTaggedSigned) {
782  kind = kNoWriteBarrier;
783  } else if (representation == MachineRepresentation::kTaggedPointer) {
784  kind = kPointerWriteBarrier;
785  }
786  MachineType r = MachineType::TypeForRepresentation(representation);
787  FieldAccess access = {
788  kTaggedBase, PropertyCell::kValueOffset, name, map, type, r, kind};
789  return access;
790 }
791 
792 } // namespace
793 
794 Reduction JSNativeContextSpecialization::ReduceGlobalAccess(
795  Node* node, Node* receiver, Node* value, Handle<Name> name,
796  AccessMode access_mode, Node* index) {
797  Node* effect = NodeProperties::GetEffectInput(node);
798  Node* control = NodeProperties::GetControlInput(node);
799 
800  // Lookup on the global object. We only deal with own data properties
801  // of the global object here (represented as PropertyCell).
802  LookupIterator it(isolate(), global_object(), name, LookupIterator::OWN);
803  it.TryLookupCachedProperty();
804  if (it.state() != LookupIterator::DATA) return NoChange();
805  if (!it.GetHolder<JSObject>()->IsJSGlobalObject()) return NoChange();
806  Handle<PropertyCell> property_cell = it.GetPropertyCell();
807  PropertyDetails property_details = property_cell->property_details();
808  Handle<Object> property_cell_value(property_cell->value(), isolate());
809  PropertyCellType property_cell_type = property_details.cell_type();
810 
811  // We have additional constraints for stores.
812  if (access_mode == AccessMode::kStore) {
813  if (property_details.IsReadOnly()) {
814  // Don't even bother trying to lower stores to read-only data properties.
815  return NoChange();
816  } else if (property_cell_type == PropertyCellType::kUndefined) {
817  // There's no fast-path for dealing with undefined property cells.
818  return NoChange();
819  } else if (property_cell_type == PropertyCellType::kConstantType) {
820  // There's also no fast-path to store to a global cell which pretended
821  // to be stable, but is no longer stable now.
822  if (property_cell_value->IsHeapObject() &&
823  !Handle<HeapObject>::cast(property_cell_value)->map()->is_stable()) {
824  return NoChange();
825  }
826  }
827  }
828 
829  // Ensure that {index} matches the specified {name} (if {index} is given).
830  if (index != nullptr) {
831  effect = BuildCheckEqualsName(name, index, effect, control);
832  }
833 
834  // Check if we have a {receiver} to validate. If so, we need to check that
835  // the {receiver} is actually the JSGlobalProxy for the native context that
836  // we are specializing to.
837  if (receiver != nullptr) {
838  Node* check = graph()->NewNode(simplified()->ReferenceEqual(), receiver,
839  jsgraph()->HeapConstant(global_proxy()));
840  effect = graph()->NewNode(
841  simplified()->CheckIf(DeoptimizeReason::kReceiverNotAGlobalProxy),
842  check, effect, control);
843  }
844 
845  if (access_mode == AccessMode::kLoad) {
846  // Load from non-configurable, read-only data property on the global
847  // object can be constant-folded, even without deoptimization support.
848  if (!property_details.IsConfigurable() && property_details.IsReadOnly()) {
849  value = jsgraph()->Constant(property_cell_value);
850  } else {
851  // Record a code dependency on the cell if we can benefit from the
852  // additional feedback, or the global property is configurable (i.e.
853  // can be deleted or reconfigured to an accessor property).
854  if (property_details.cell_type() != PropertyCellType::kMutable ||
855  property_details.IsConfigurable()) {
856  dependencies()->DependOnGlobalProperty(
857  PropertyCellRef(broker(), property_cell));
858  }
859 
860  // Load from constant/undefined global property can be constant-folded.
861  if (property_details.cell_type() == PropertyCellType::kConstant ||
862  property_details.cell_type() == PropertyCellType::kUndefined) {
863  value = jsgraph()->Constant(property_cell_value);
864  CHECK(
865  !property_cell_value.is_identical_to(factory()->the_hole_value()));
866  } else {
867  // Load from constant type cell can benefit from type feedback.
868  MaybeHandle<Map> map;
869  Type property_cell_value_type = Type::NonInternal();
870  MachineRepresentation representation = MachineRepresentation::kTagged;
871  if (property_details.cell_type() == PropertyCellType::kConstantType) {
872  // Compute proper type based on the current value in the cell.
873  if (property_cell_value->IsSmi()) {
874  property_cell_value_type = Type::SignedSmall();
875  representation = MachineRepresentation::kTaggedSigned;
876  } else if (property_cell_value->IsNumber()) {
877  property_cell_value_type = Type::Number();
878  representation = MachineRepresentation::kTaggedPointer;
879  } else {
880  MapRef property_cell_value_map(
881  broker(), handle(HeapObject::cast(*property_cell_value)->map(),
882  isolate()));
883  property_cell_value_type = Type::For(property_cell_value_map);
884  representation = MachineRepresentation::kTaggedPointer;
885 
886  // We can only use the property cell value map for map check
887  // elimination if it's stable, i.e. the HeapObject wasn't
888  // mutated without the cell state being updated.
889  if (property_cell_value_map.is_stable()) {
890  dependencies()->DependOnStableMap(property_cell_value_map);
891  map = property_cell_value_map.object();
892  }
893  }
894  }
895  value = effect = graph()->NewNode(
896  simplified()->LoadField(ForPropertyCellValue(
897  representation, property_cell_value_type, map, name)),
898  jsgraph()->HeapConstant(property_cell), effect, control);
899  }
900  }
901  } else {
902  DCHECK_EQ(AccessMode::kStore, access_mode);
903  DCHECK(!property_details.IsReadOnly());
904  switch (property_details.cell_type()) {
905  case PropertyCellType::kUndefined: {
906  UNREACHABLE();
907  break;
908  }
909  case PropertyCellType::kConstant: {
910  // Record a code dependency on the cell, and just deoptimize if the new
911  // value doesn't match the previous value stored inside the cell.
912  dependencies()->DependOnGlobalProperty(
913  PropertyCellRef(broker(), property_cell));
914  Node* check =
915  graph()->NewNode(simplified()->ReferenceEqual(), value,
916  jsgraph()->Constant(property_cell_value));
917  effect = graph()->NewNode(
918  simplified()->CheckIf(DeoptimizeReason::kValueMismatch), check,
919  effect, control);
920  break;
921  }
922  case PropertyCellType::kConstantType: {
923  // Record a code dependency on the cell, and just deoptimize if the new
924  // values' type doesn't match the type of the previous value in the
925  // cell.
926  dependencies()->DependOnGlobalProperty(
927  PropertyCellRef(broker(), property_cell));
928  Type property_cell_value_type;
929  MachineRepresentation representation = MachineRepresentation::kTagged;
930  if (property_cell_value->IsHeapObject()) {
931  // We cannot do anything if the {property_cell_value}s map is no
932  // longer stable.
933  Handle<Map> property_cell_value_map(
934  Handle<HeapObject>::cast(property_cell_value)->map(), isolate());
935  DCHECK(property_cell_value_map->is_stable());
936  dependencies()->DependOnStableMap(
937  MapRef(broker(), property_cell_value_map));
938 
939  // Check that the {value} is a HeapObject.
940  value = effect = graph()->NewNode(simplified()->CheckHeapObject(),
941  value, effect, control);
942 
943  // Check {value} map against the {property_cell} map.
944  effect =
945  graph()->NewNode(simplified()->CheckMaps(
946  CheckMapsFlag::kNone,
947  ZoneHandleSet<Map>(property_cell_value_map)),
948  value, effect, control);
949  property_cell_value_type = Type::OtherInternal();
950  representation = MachineRepresentation::kTaggedPointer;
951  } else {
952  // Check that the {value} is a Smi.
953  value = effect = graph()->NewNode(
954  simplified()->CheckSmi(VectorSlotPair()), value, effect, control);
955  property_cell_value_type = Type::SignedSmall();
956  representation = MachineRepresentation::kTaggedSigned;
957  }
958  effect = graph()->NewNode(simplified()->StoreField(ForPropertyCellValue(
959  representation, property_cell_value_type,
960  MaybeHandle<Map>(), name)),
961  jsgraph()->HeapConstant(property_cell), value,
962  effect, control);
963  break;
964  }
965  case PropertyCellType::kMutable: {
966  // Record a code dependency on the cell, and just deoptimize if the
967  // property ever becomes read-only.
968  dependencies()->DependOnGlobalProperty(
969  PropertyCellRef(broker(), property_cell));
970  effect = graph()->NewNode(
971  simplified()->StoreField(ForPropertyCellValue(
972  MachineRepresentation::kTagged, Type::NonInternal(),
973  MaybeHandle<Map>(), name)),
974  jsgraph()->HeapConstant(property_cell), value, effect, control);
975  break;
976  }
977  }
978  }
979 
980  ReplaceWithValue(node, value, effect, control);
981  return Replace(value);
982 }
983 
984 Reduction JSNativeContextSpecialization::ReduceJSLoadGlobal(Node* node) {
985  DCHECK_EQ(IrOpcode::kJSLoadGlobal, node->opcode());
986  NameRef name(broker(), LoadGlobalParametersOf(node->op()).name());
987  Node* effect = NodeProperties::GetEffectInput(node);
988 
989  // Try to look up the name on the script context table first (lexical scoping).
990  base::Optional<ScriptContextTableRef::LookupResult> result =
991  native_context().script_context_table().lookup(name);
992  if (result) {
993  ObjectRef contents = result->context.get(result->index);
994  if (contents.IsHeapObject() &&
995  contents.AsHeapObject().map().oddball_type() == OddballType::kHole) {
996  return NoChange();
997  }
998  Node* context = jsgraph()->Constant(result->context);
999  Node* value = effect = graph()->NewNode(
1000  javascript()->LoadContext(0, result->index, result->immutable), context,
1001  effect);
1002  ReplaceWithValue(node, value, effect);
1003  return Replace(value);
1004  }
1005 
1006  // Lookup the {name} on the global object instead.
1007  return ReduceGlobalAccess(node, nullptr, nullptr, name.object(),
1008  AccessMode::kLoad);
1009 }
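// Illustration (not part of the original source): a top-level lexical binding
// (a script-level `let` or `const`) is found in the script context table above
// and the load becomes a JSLoadContext on the known script context, while an
// ordinary global property misses that lookup and falls through to
// ReduceGlobalAccess, which operates on the global object's PropertyCell.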
1010 
1011 Reduction JSNativeContextSpecialization::ReduceJSStoreGlobal(Node* node) {
1012  DCHECK_EQ(IrOpcode::kJSStoreGlobal, node->opcode());
1013  NameRef name(broker(), StoreGlobalParametersOf(node->op()).name());
1014  Node* value = NodeProperties::GetValueInput(node, 0);
1015  Node* effect = NodeProperties::GetEffectInput(node);
1016  Node* control = NodeProperties::GetControlInput(node);
1017 
1018  // Try to look up the name on the script context table first (lexical scoping).
1019  base::Optional<ScriptContextTableRef::LookupResult> result =
1020  native_context().script_context_table().lookup(name);
1021  if (result) {
1022  ObjectRef contents = result->context.get(result->index);
1023  if ((contents.IsHeapObject() &&
1024  contents.AsHeapObject().map().oddball_type() == OddballType::kHole) ||
1025  result->immutable) {
1026  return NoChange();
1027  }
1028  Node* context = jsgraph()->Constant(result->context);
1029  effect = graph()->NewNode(javascript()->StoreContext(0, result->index),
1030  value, context, effect, control);
1031  ReplaceWithValue(node, value, effect, control);
1032  return Replace(value);
1033  }
1034 
1035  // Lookup the {name} on the global object instead.
1036  return ReduceGlobalAccess(node, nullptr, value, name.object(),
1037  AccessMode::kStore);
1038 }
1039 
1040 Reduction JSNativeContextSpecialization::ReduceNamedAccess(
1041  Node* node, Node* value, MapHandles const& receiver_maps, Handle<Name> name,
1042  AccessMode access_mode, Node* index) {
1043  DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
1044  node->opcode() == IrOpcode::kJSStoreNamed ||
1045  node->opcode() == IrOpcode::kJSLoadProperty ||
1046  node->opcode() == IrOpcode::kJSStoreProperty ||
1047  node->opcode() == IrOpcode::kJSStoreNamedOwn);
1048  Node* receiver = NodeProperties::GetValueInput(node, 0);
1049  Node* context = NodeProperties::GetContextInput(node);
1050  Node* frame_state = NodeProperties::GetFrameStateInput(node);
1051  Node* effect = NodeProperties::GetEffectInput(node);
1052  Node* control = NodeProperties::GetControlInput(node);
1053 
1054  // Check if we have an access o.x or o.x=v where o is the current
1055  // native context's global proxy, and turn that into a direct access
1056  // to the current native context's global object instead.
1057  if (receiver_maps.size() == 1) {
1058  Handle<Map> receiver_map = receiver_maps.front();
1059  if (receiver_map->IsJSGlobalProxyMap()) {
1060  Object* maybe_constructor = receiver_map->GetConstructor();
1061  // Detached global proxies have |null| as their constructor.
1062  if (maybe_constructor->IsJSFunction() &&
1063  JSFunction::cast(maybe_constructor)->native_context() ==
1064  *native_context().object()) {
1065  return ReduceGlobalAccess(node, receiver, value, name, access_mode,
1066  index);
1067  }
1068  }
1069  }
1070 
1071  // Compute property access infos for the receiver maps.
1072  AccessInfoFactory access_info_factory(
1073  broker(), dependencies(), native_context().object(), graph()->zone());
1074  ZoneVector<PropertyAccessInfo> access_infos(zone());
1075  if (!access_info_factory.ComputePropertyAccessInfos(
1076  receiver_maps, name, access_mode, &access_infos)) {
1077  return NoChange();
1078  }
1079 
1080  // Nothing to do if we have no non-deprecated maps.
1081  if (access_infos.empty()) {
1082  return ReduceSoftDeoptimize(
1083  node, DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
1084  }
1085 
1086  // Ensure that {index} matches the specified {name} (if {index} is given).
1087  if (index != nullptr) {
1088  effect = BuildCheckEqualsName(name, index, effect, control);
1089  }
1090 
1091  // Collect call nodes to rewire exception edges.
1092  ZoneVector<Node*> if_exception_nodes(zone());
1093  ZoneVector<Node*>* if_exceptions = nullptr;
1094  Node* if_exception = nullptr;
1095  if (NodeProperties::IsExceptionalCall(node, &if_exception)) {
1096  if_exceptions = &if_exception_nodes;
1097  }
1098 
1099  PropertyAccessBuilder access_builder(jsgraph(), broker(), dependencies());
1100 
1101  // Check for the monomorphic cases.
1102  if (access_infos.size() == 1) {
1103  PropertyAccessInfo access_info = access_infos.front();
1104  // Try to build string check or number check if possible.
1105  // Otherwise build a map check.
1106  if (!access_builder.TryBuildStringCheck(access_info.receiver_maps(),
1107  &receiver, &effect, control) &&
1108  !access_builder.TryBuildNumberCheck(access_info.receiver_maps(),
1109  &receiver, &effect, control)) {
1110  if (HasNumberMaps(broker(), access_info.receiver_maps())) {
1111  // We need to also let Smi {receiver}s through in this case, so
1112  // we construct a diamond, guarded by the Sminess of the {receiver}
1113  // and if {receiver} is not a Smi just emit a sequence of map checks.
1114  Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
1115  Node* branch = graph()->NewNode(common()->Branch(), check, control);
1116 
1117  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
1118  Node* etrue = effect;
1119 
1120  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
1121  Node* efalse = effect;
1122  {
1123  access_builder.BuildCheckMaps(receiver, &efalse, if_false,
1124  access_info.receiver_maps());
1125  }
1126 
1127  control = graph()->NewNode(common()->Merge(2), if_true, if_false);
1128  effect =
1129  graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
1130  } else {
1131  receiver =
1132  access_builder.BuildCheckHeapObject(receiver, &effect, control);
1133  access_builder.BuildCheckMaps(receiver, &effect, control,
1134  access_info.receiver_maps());
1135  }
1136  }
1137 
1138  // Generate the actual property access.
1139  ValueEffectControl continuation = BuildPropertyAccess(
1140  receiver, value, context, frame_state, effect, control, name,
1141  if_exceptions, access_info, access_mode);
1142  value = continuation.value();
1143  effect = continuation.effect();
1144  control = continuation.control();
1145  } else {
1146  // The final states for every polymorphic branch. We join them with
1147  // Merge+Phi+EffectPhi at the bottom.
1148  ZoneVector<Node*> values(zone());
1149  ZoneVector<Node*> effects(zone());
1150  ZoneVector<Node*> controls(zone());
1151 
1152  // Check if {receiver} may be a number.
1153  bool receiverissmi_possible = false;
1154  for (PropertyAccessInfo const& access_info : access_infos) {
1155  if (HasNumberMaps(broker(), access_info.receiver_maps())) {
1156  receiverissmi_possible = true;
1157  break;
1158  }
1159  }
1160 
1161  // Ensure that {receiver} is a heap object.
1162  Node* receiverissmi_control = nullptr;
1163  Node* receiverissmi_effect = effect;
1164  if (receiverissmi_possible) {
1165  Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
1166  Node* branch = graph()->NewNode(common()->Branch(), check, control);
1167  control = graph()->NewNode(common()->IfFalse(), branch);
1168  receiverissmi_control = graph()->NewNode(common()->IfTrue(), branch);
1169  receiverissmi_effect = effect;
1170  } else {
1171  receiver =
1172  access_builder.BuildCheckHeapObject(receiver, &effect, control);
1173  }
1174 
1175  // Generate code for the various different property access patterns.
1176  Node* fallthrough_control = control;
1177  for (size_t j = 0; j < access_infos.size(); ++j) {
1178  PropertyAccessInfo const& access_info = access_infos[j];
1179  Node* this_value = value;
1180  Node* this_receiver = receiver;
1181  Node* this_effect = effect;
1182  Node* this_control = fallthrough_control;
1183 
1184  // Perform map check on {receiver}.
1185  MapHandles const& receiver_maps = access_info.receiver_maps();
1186  {
1187  // Whether to insert a dedicated MapGuard node into the
1188  // effect to be able to learn from the control flow.
1189  bool insert_map_guard = true;
1190 
1191  // Check maps for the {receiver}s.
1192  if (j == access_infos.size() - 1) {
1193  // Last map check on the fallthrough control path, do a
1194  // conditional eager deoptimization exit here.
1195  access_builder.BuildCheckMaps(receiver, &this_effect, this_control,
1196  receiver_maps);
1197  fallthrough_control = nullptr;
1198 
1199  // Don't insert a MapGuard in this case, as the CheckMaps
1200  // node already gives you all the information you need
1201  // along the effect chain.
1202  insert_map_guard = false;
1203  } else {
1204  // Explicitly branch on the {receiver_maps}.
1205  ZoneHandleSet<Map> maps;
1206  for (Handle<Map> map : receiver_maps) {
1207  maps.insert(map, graph()->zone());
1208  }
1209  Node* check = this_effect =
1210  graph()->NewNode(simplified()->CompareMaps(maps), receiver,
1211  this_effect, this_control);
1212  Node* branch =
1213  graph()->NewNode(common()->Branch(), check, this_control);
1214  fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
1215  this_control = graph()->NewNode(common()->IfTrue(), branch);
1216  }
1217 
1218  // The Number case requires special treatment to also deal with Smis.
1219  if (HasNumberMaps(broker(), receiver_maps)) {
1220  // Join this check with the "receiver is smi" check above.
1221  DCHECK_NOT_NULL(receiverissmi_effect);
1222  DCHECK_NOT_NULL(receiverissmi_control);
1223  this_control = graph()->NewNode(common()->Merge(2), this_control,
1224  receiverissmi_control);
1225  this_effect = graph()->NewNode(common()->EffectPhi(2), this_effect,
1226  receiverissmi_effect, this_control);
1227  receiverissmi_effect = receiverissmi_control = nullptr;
1228 
1229  // The {receiver} can also be a Smi in this case, so
1230  // a MapGuard doesn't make sense for this at all.
1231  insert_map_guard = false;
1232  }
1233 
1234  // Introduce a MapGuard to learn from this on the effect chain.
1235  if (insert_map_guard) {
1236  ZoneHandleSet<Map> maps;
1237  for (auto receiver_map : receiver_maps) {
1238  maps.insert(receiver_map, graph()->zone());
1239  }
1240  this_effect = graph()->NewNode(simplified()->MapGuard(maps), receiver,
1241  this_effect, this_control);
1242  }
1243 
1244  // If all {receiver_maps} are Strings we also need to rename the
1245  // {receiver} here to make sure that TurboFan knows that along this
1246  // path the {this_receiver} is a String. This is because we want
1247  // strict checking of types, for example for StringLength operators.
1248  if (HasOnlyStringMaps(receiver_maps)) {
1249  this_receiver = this_effect =
1250  graph()->NewNode(common()->TypeGuard(Type::String()), receiver,
1251  this_effect, this_control);
1252  }
1253  }
1254 
1255  // Generate the actual property access.
1256  ValueEffectControl continuation = BuildPropertyAccess(
1257  this_receiver, this_value, context, frame_state, this_effect,
1258  this_control, name, if_exceptions, access_info, access_mode);
1259  values.push_back(continuation.value());
1260  effects.push_back(continuation.effect());
1261  controls.push_back(continuation.control());
1262  }
1263 
1264  DCHECK_NULL(fallthrough_control);
1265 
1266  // Generate the final merge point for all (polymorphic) branches.
1267  int const control_count = static_cast<int>(controls.size());
1268  if (control_count == 0) {
1269  value = effect = control = jsgraph()->Dead();
1270  } else if (control_count == 1) {
1271  value = values.front();
1272  effect = effects.front();
1273  control = controls.front();
1274  } else {
1275  control = graph()->NewNode(common()->Merge(control_count), control_count,
1276  &controls.front());
1277  values.push_back(control);
1278  value = graph()->NewNode(
1279  common()->Phi(MachineRepresentation::kTagged, control_count),
1280  control_count + 1, &values.front());
1281  effects.push_back(control);
1282  effect = graph()->NewNode(common()->EffectPhi(control_count),
1283  control_count + 1, &effects.front());
1284  }
1285  }
1286 
1287  // Properly rewire IfException edges if {node} is inside a try-block.
1288  if (!if_exception_nodes.empty()) {
1289  DCHECK_NOT_NULL(if_exception);
1290  DCHECK_EQ(if_exceptions, &if_exception_nodes);
1291  int const if_exception_count = static_cast<int>(if_exceptions->size());
1292  Node* merge = graph()->NewNode(common()->Merge(if_exception_count),
1293  if_exception_count, &if_exceptions->front());
1294  if_exceptions->push_back(merge);
1295  Node* ephi =
1296  graph()->NewNode(common()->EffectPhi(if_exception_count),
1297  if_exception_count + 1, &if_exceptions->front());
1298  Node* phi = graph()->NewNode(
1299  common()->Phi(MachineRepresentation::kTagged, if_exception_count),
1300  if_exception_count + 1, &if_exceptions->front());
1301  ReplaceWithValue(if_exception, phi, ephi, merge);
1302  }
1303 
1304  ReplaceWithValue(node, value, effect, control);
1305  return Replace(value);
1306 }
1307 
1308 Reduction JSNativeContextSpecialization::ReduceNamedAccessFromNexus(
1309  Node* node, Node* value, FeedbackNexus const& nexus, Handle<Name> name,
1310  AccessMode access_mode) {
1311  DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
1312  node->opcode() == IrOpcode::kJSStoreNamed ||
1313  node->opcode() == IrOpcode::kJSStoreNamedOwn);
1314  Node* const receiver = NodeProperties::GetValueInput(node, 0);
1315  Node* const effect = NodeProperties::GetEffectInput(node);
1316 
1317  // Check if we are accessing the current native context's global proxy.
1318  HeapObjectMatcher m(receiver);
1319  if (m.HasValue() && m.Value().is_identical_to(global_proxy())) {
1320  // Optimize accesses to the current native context's global proxy.
1321  return ReduceGlobalAccess(node, nullptr, value, name, access_mode);
1322  }
1323 
1324  // Extract receiver maps from the IC using the {nexus}.
1325  MapHandles receiver_maps;
1326  if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
1327  return NoChange();
1328  } else if (receiver_maps.empty()) {
1329  if (flags() & kBailoutOnUninitialized) {
1330  return ReduceSoftDeoptimize(
1331  node,
1332  DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
1333  }
1334  return NoChange();
1335  }
1336  DCHECK(!nexus.IsUninitialized());
1337 
1338  // Try to lower the named access based on the {receiver_maps}.
1339  return ReduceNamedAccess(node, value, receiver_maps, name, access_mode);
1340 }
1341 
1342 Reduction JSNativeContextSpecialization::ReduceJSLoadNamed(Node* node) {
1343  DCHECK_EQ(IrOpcode::kJSLoadNamed, node->opcode());
1344  NamedAccess const& p = NamedAccessOf(node->op());
1345  Node* const receiver = NodeProperties::GetValueInput(node, 0);
1346  Node* const value = jsgraph()->Dead();
1347 
1348  // Check if we have a constant receiver.
1349  HeapObjectMatcher m(receiver);
1350  if (m.HasValue()) {
1351  if (m.Value()->IsJSFunction() &&
1352  p.name().is_identical_to(factory()->prototype_string())) {
1353  // Optimize "prototype" property of functions.
1354  JSFunctionRef function = m.Ref(broker()).AsJSFunction();
1355  // TODO(neis): This is a temporary hack needed because the copy reducer
1356  // runs only after this pass.
1357  function.Serialize();
1358  // TODO(neis): Remove the has_prototype_slot condition once the broker is
1359  // always enabled.
1360  if (!function.map().has_prototype_slot() || !function.has_prototype() ||
1361  function.PrototypeRequiresRuntimeLookup()) {
1362  return NoChange();
1363  }
1364  ObjectRef prototype = dependencies()->DependOnPrototypeProperty(function);
1365  Node* value = jsgraph()->Constant(prototype);
1366  ReplaceWithValue(node, value);
1367  return Replace(value);
1368  } else if (m.Value()->IsString() &&
1369  p.name().is_identical_to(factory()->length_string())) {
1370  // Constant-fold "length" property on constant strings.
1371  Handle<String> string = Handle<String>::cast(m.Value());
1372  Node* value = jsgraph()->Constant(string->length());
1373  ReplaceWithValue(node, value);
1374  return Replace(value);
1375  }
1376  }
1377 
1378  // Extract receiver maps from the load IC using the FeedbackNexus.
1379  if (!p.feedback().IsValid()) return NoChange();
1380  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
1381 
1382  // Try to lower the named access based on the {receiver_maps}.
1383  return ReduceNamedAccessFromNexus(node, value, nexus, p.name(),
1384  AccessMode::kLoad);
1385 }
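// [Editor's note] Illustrative sketch, not part of the original source: the
// constant folds above target JavaScript such as
//
//   function f() {}
//   f.prototype;        // folded to the prototype object, guarded by a
//                       // DependOnPrototypeProperty code dependency
//   "turbofan".length;  // folded to the Smi constant 8
//
// Anything else falls through to the feedback-driven path via
// ReduceNamedAccessFromNexus.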
1386 
1387 
1388 Reduction JSNativeContextSpecialization::ReduceJSStoreNamed(Node* node) {
1389  DCHECK_EQ(IrOpcode::kJSStoreNamed, node->opcode());
1390  NamedAccess const& p = NamedAccessOf(node->op());
1391  Node* const value = NodeProperties::GetValueInput(node, 1);
1392 
1393  // Extract receiver maps from the store IC using the FeedbackNexus.
1394  if (!p.feedback().IsValid()) return NoChange();
1395  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
1396 
1397  // Try to lower the named access based on the {receiver_maps}.
1398  return ReduceNamedAccessFromNexus(node, value, nexus, p.name(),
1399  AccessMode::kStore);
1400 }
1401 
1402 Reduction JSNativeContextSpecialization::ReduceJSStoreNamedOwn(Node* node) {
1403  DCHECK_EQ(IrOpcode::kJSStoreNamedOwn, node->opcode());
1404  StoreNamedOwnParameters const& p = StoreNamedOwnParametersOf(node->op());
1405  Node* const value = NodeProperties::GetValueInput(node, 1);
1406 
1407  // Extract receiver maps from the IC using the FeedbackNexus.
1408  if (!p.feedback().IsValid()) return NoChange();
1409  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
1410 
1411  // Try to lower the creation of a named property based on the {receiver_maps}.
1412  return ReduceNamedAccessFromNexus(node, value, nexus, p.name(),
1413  AccessMode::kStoreInLiteral);
1414 }
1415 
1416 Reduction JSNativeContextSpecialization::ReduceElementAccess(
1417  Node* node, Node* index, Node* value, MapHandles const& receiver_maps,
1418  AccessMode access_mode, KeyedAccessLoadMode load_mode,
1419  KeyedAccessStoreMode store_mode) {
1420  DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
1421  node->opcode() == IrOpcode::kJSStoreProperty ||
1422  node->opcode() == IrOpcode::kJSStoreInArrayLiteral);
1423  Node* receiver = NodeProperties::GetValueInput(node, 0);
1424  Node* effect = NodeProperties::GetEffectInput(node);
1425  Node* control = NodeProperties::GetControlInput(node);
1426  Node* frame_state = NodeProperties::FindFrameStateBefore(node);
1427 
1428  // Check for keyed access to strings.
1429  if (HasOnlyStringMaps(receiver_maps)) {
1430  // Strings are immutable in JavaScript.
1431  if (access_mode == AccessMode::kStore) return NoChange();
1432 
1433  // Ensure that the {receiver} is actually a String.
1434  receiver = effect = graph()->NewNode(
1435  simplified()->CheckString(VectorSlotPair()), receiver, effect, control);
1436 
1437  // Determine the {receiver} length.
1438  Node* length = graph()->NewNode(simplified()->StringLength(), receiver);
1439 
1440  // Load the single character string from {receiver} or yield undefined
1441  // if the {index} is out of bounds (depending on the {load_mode}).
1442  value = BuildIndexedStringLoad(receiver, index, length, &effect, &control,
1443  load_mode);
1444  } else {
1446  // Compute element access infos for the receiver maps.
1447  AccessInfoFactory access_info_factory(
1448  broker(), dependencies(), native_context().object(), graph()->zone());
1449  ZoneVector<ElementAccessInfo> access_infos(zone());
1450  if (!access_info_factory.ComputeElementAccessInfos(
1451  receiver_maps, access_mode, &access_infos)) {
1452  return NoChange();
1453  }
1454 
1455  // Nothing to do if we have no non-deprecated maps.
1456  if (access_infos.empty()) {
1457  return ReduceSoftDeoptimize(
1458  node,
1459  DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
1460  }
1461 
1462  // For holey stores or growing stores, we need to check that the prototype
1463  // chain contains no setters for elements, and we need to guard those checks
1464  // via code dependencies on the relevant prototype maps.
1465  if (access_mode == AccessMode::kStore) {
1466  // TODO(turbofan): We could have a fast path here that checks for the
1467  // common case of Array or Object prototype only and therefore avoids
1468  // the zone allocation of this vector.
1469  ZoneVector<Handle<Map>> prototype_maps(zone());
1470  for (ElementAccessInfo const& access_info : access_infos) {
1471  for (Handle<Map> receiver_map : access_info.receiver_maps()) {
1472  // If the {receiver_map} has a prototype and its elements backing
1473  // store is either holey, or we have a potentially growing store,
1474  // then we need to check that all prototypes have stable maps with
1475  // fast elements (and we need to guard against changes to that below).
1476  if (IsHoleyOrDictionaryElementsKind(receiver_map->elements_kind()) ||
1477  IsGrowStoreMode(store_mode)) {
1478  // Make sure all prototypes are stable and have fast elements.
1479  for (Handle<Map> map = receiver_map;;) {
1480  Handle<Object> map_prototype(map->prototype(), isolate());
1481  if (map_prototype->IsNull(isolate())) break;
1482  if (!map_prototype->IsJSObject()) return NoChange();
1483  map = handle(Handle<JSObject>::cast(map_prototype)->map(),
1484  isolate());
1485  if (!map->is_stable()) return NoChange();
1486  if (!IsFastElementsKind(map->elements_kind())) return NoChange();
1487  prototype_maps.push_back(map);
1488  }
1489  }
1490  }
1491  }
1492 
1493  // Install dependencies on the relevant prototype maps.
1494  for (Handle<Map> prototype_map : prototype_maps) {
1495  dependencies()->DependOnStableMap(MapRef(broker(), prototype_map));
1496  }
1497  }
1498 
1499  // Ensure that {receiver} is a heap object.
1500  PropertyAccessBuilder access_builder(jsgraph(), broker(), dependencies());
1501  receiver = access_builder.BuildCheckHeapObject(receiver, &effect, control);
1502 
1503  // Check for the monomorphic case.
1504  if (access_infos.size() == 1) {
1505  ElementAccessInfo access_info = access_infos.front();
1506 
1507  // Perform possible elements kind transitions.
1508  for (auto transition : access_info.transitions()) {
1509  Handle<Map> const transition_source = transition.first;
1510  Handle<Map> const transition_target = transition.second;
1511  effect = graph()->NewNode(
1512  simplified()->TransitionElementsKind(ElementsTransition(
1513  IsSimpleMapChangeTransition(transition_source->elements_kind(),
1514  transition_target->elements_kind())
1515  ? ElementsTransition::kFastTransition
1516  : ElementsTransition::kSlowTransition,
1517  transition_source, transition_target)),
1518  receiver, effect, control);
1519  }
1520 
1521  // TODO(turbofan): The effect/control linearization will not find a
1522  // FrameState after the StoreField or Call that is generated for the
1523  // elements kind transition above. This is because those operators
1524  // don't have the kNoWrite flag on them, even though they are not
1525  // observable by JavaScript.
1526  effect = graph()->NewNode(common()->Checkpoint(), frame_state, effect,
1527  control);
1528 
1529  // Perform map check on the {receiver}.
1530  access_builder.BuildCheckMaps(receiver, &effect, control,
1531  access_info.receiver_maps());
1532 
1533  // Access the actual element.
1534  ValueEffectControl continuation =
1535  BuildElementAccess(receiver, index, value, effect, control,
1536  access_info, access_mode, load_mode, store_mode);
1537  value = continuation.value();
1538  effect = continuation.effect();
1539  control = continuation.control();
1540  } else {
1541  // The final states for every polymorphic branch. We join them with
1542  // Merge+Phi+EffectPhi at the bottom.
1543  ZoneVector<Node*> values(zone());
1544  ZoneVector<Node*> effects(zone());
1545  ZoneVector<Node*> controls(zone());
1546 
1547  // Generate code for the various different element access patterns.
1548  Node* fallthrough_control = control;
1549  for (size_t j = 0; j < access_infos.size(); ++j) {
1550  ElementAccessInfo const& access_info = access_infos[j];
1551  Node* this_receiver = receiver;
1552  Node* this_value = value;
1553  Node* this_index = index;
1554  Node* this_effect = effect;
1555  Node* this_control = fallthrough_control;
1556 
1557  // Perform possible elements kind transitions.
1558  for (auto transition : access_info.transitions()) {
1559  Handle<Map> const transition_source = transition.first;
1560  Handle<Map> const transition_target = transition.second;
1561  this_effect = graph()->NewNode(
1562  simplified()->TransitionElementsKind(
1563  ElementsTransition(IsSimpleMapChangeTransition(
1564  transition_source->elements_kind(),
1565  transition_target->elements_kind())
1566  ? ElementsTransition::kFastTransition
1567  : ElementsTransition::kSlowTransition,
1568  transition_source, transition_target)),
1569  receiver, this_effect, this_control);
1570  }
1571 
1572  // Perform map check(s) on {receiver}.
1573  MapHandles const& receiver_maps = access_info.receiver_maps();
1574  if (j == access_infos.size() - 1) {
1575  // Last map check on the fallthrough control path, so do a
1576  // conditional eager deoptimization exit here.
1577  access_builder.BuildCheckMaps(receiver, &this_effect, this_control,
1578  receiver_maps);
1579  fallthrough_control = nullptr;
1580  } else {
1581  // Explicitly branch on the {receiver_maps}.
1582  ZoneHandleSet<Map> maps;
1583  for (Handle<Map> map : receiver_maps) {
1584  maps.insert(map, graph()->zone());
1585  }
1586  Node* check = this_effect =
1587  graph()->NewNode(simplified()->CompareMaps(maps), receiver,
1588  this_effect, fallthrough_control);
1589  Node* branch =
1590  graph()->NewNode(common()->Branch(), check, fallthrough_control);
1591  fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
1592  this_control = graph()->NewNode(common()->IfTrue(), branch);
1593 
1594  // Introduce a MapGuard to learn from this on the effect chain.
1595  this_effect = graph()->NewNode(simplified()->MapGuard(maps), receiver,
1596  this_effect, this_control);
1597  }
1598 
1599  // Access the actual element.
1600  ValueEffectControl continuation = BuildElementAccess(
1601  this_receiver, this_index, this_value, this_effect, this_control,
1602  access_info, access_mode, load_mode, store_mode);
1603  values.push_back(continuation.value());
1604  effects.push_back(continuation.effect());
1605  controls.push_back(continuation.control());
1606  }
1607 
1608  DCHECK_NULL(fallthrough_control);
1609 
1610  // Generate the final merge point for all (polymorphic) branches.
1611  int const control_count = static_cast<int>(controls.size());
1612  if (control_count == 0) {
1613  value = effect = control = jsgraph()->Dead();
1614  } else if (control_count == 1) {
1615  value = values.front();
1616  effect = effects.front();
1617  control = controls.front();
1618  } else {
1619  control = graph()->NewNode(common()->Merge(control_count),
1620  control_count, &controls.front());
1621  values.push_back(control);
1622  value = graph()->NewNode(
1623  common()->Phi(MachineRepresentation::kTagged, control_count),
1624  control_count + 1, &values.front());
1625  effects.push_back(control);
1626  effect = graph()->NewNode(common()->EffectPhi(control_count),
1627  control_count + 1, &effects.front());
1628  }
1629  }
1630  }
1631 
1632  ReplaceWithValue(node, value, effect, control);
1633  return Replace(value);
1634 }
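// [Editor's note] Descriptive note, not part of the original source: when the
// feedback yields more than one element access info (e.g. a keyed load that
// has seen receivers with unrelated elements kinds), the polymorphic path
// above guards every access info except the last with CompareMaps/Branch plus
// a MapGuard on the effect chain, and turns the last one into a BuildCheckMaps
// conditional deopt exit on the fallthrough path.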
1635 
1636 Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
1637  Node* node, Node* index, Node* value, FeedbackNexus const& nexus,
1638  AccessMode access_mode, KeyedAccessLoadMode load_mode,
1639  KeyedAccessStoreMode store_mode) {
1640  DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
1641  node->opcode() == IrOpcode::kJSStoreProperty);
1642  Node* receiver = NodeProperties::GetValueInput(node, 0);
1643  Node* effect = NodeProperties::GetEffectInput(node);
1644  Node* control = NodeProperties::GetControlInput(node);
1645 
1646  // Optimize the case where we load from a constant {receiver}.
1647  if (access_mode == AccessMode::kLoad) {
1648  HeapObjectMatcher mreceiver(receiver);
1649  if (mreceiver.HasValue() && !mreceiver.Value()->IsTheHole(isolate()) &&
1650  !mreceiver.Value()->IsNullOrUndefined(isolate())) {
1651  // Check whether we're accessing a known element on the {receiver}
1652  // that is non-configurable, non-writable (i.e. the {receiver} was
1653  // frozen using Object.freeze).
1654  NumberMatcher mindex(index);
1655  if (mindex.IsInteger() && mindex.IsInRange(0.0, kMaxUInt32 - 1.0)) {
1656  LookupIterator it(isolate(), mreceiver.Value(),
1657  static_cast<uint32_t>(mindex.Value()),
1658  LookupIterator::OWN);
1659  if (it.state() == LookupIterator::DATA) {
1660  if (it.IsReadOnly() && !it.IsConfigurable()) {
1661  // We can safely constant-fold the {index} access to {receiver},
1662  // since the element is non-configurable, non-writable and thus
1663  // cannot change anymore.
1664  value = jsgraph()->Constant(it.GetDataValue());
1665  ReplaceWithValue(node, value, effect, control);
1666  return Replace(value);
1667  }
1668 
1669  // Check if the {receiver} is a known constant with a copy-on-write
1670  // backing store, and whether {index} is within the appropriate
1671  // bounds. In that case we can constant-fold the access and only
1672  // check that the {elements} didn't change. This is sufficient as
1673  // the backing store of a copy-on-write JSArray is defensively copied
1674  // whenever the length or the elements (might) change.
1675  //
1676  // What's interesting here is that we don't need to map check the
1677  // {receiver}, since JSArrays will always have their elements in
1678  // the backing store.
1679  if (mreceiver.Value()->IsJSArray()) {
1680  Handle<JSArray> array = Handle<JSArray>::cast(mreceiver.Value());
1681  if (array->elements()->IsCowArray()) {
1682  Node* elements = effect = graph()->NewNode(
1683  simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
1684  receiver, effect, control);
1685  Handle<FixedArray> array_elements(
1686  FixedArray::cast(array->elements()), isolate());
1687  Node* check =
1688  graph()->NewNode(simplified()->ReferenceEqual(), elements,
1689  jsgraph()->HeapConstant(array_elements));
1690  effect = graph()->NewNode(
1691  simplified()->CheckIf(
1692  DeoptimizeReason::kCowArrayElementsChanged),
1693  check, effect, control);
1694  value = jsgraph()->Constant(it.GetDataValue());
1695  ReplaceWithValue(node, value, effect, control);
1696  return Replace(value);
1697  }
1698  }
1699  }
1700  }
1701 
1702  // For constant Strings we can eagerly strength-reduce the keyed
1703  // accesses using the known length, which doesn't change.
1704  if (mreceiver.Value()->IsString()) {
1705  Handle<String> string = Handle<String>::cast(mreceiver.Value());
1706 
1707  // We can only assume that the {index} is a valid array index if the IC
1708  // is in element access mode and not MEGAMORPHIC, otherwise there's no
1709  // guard for the bounds check below.
1710  if (nexus.ic_state() != MEGAMORPHIC && nexus.GetKeyType() == ELEMENT) {
1711  // Ensure that {index} is less than {receiver} length.
1712  Node* length = jsgraph()->Constant(string->length());
1713 
1714  // Load the single character string from {receiver} or yield undefined
1715  // if the {index} is out of bounds (depending on the {load_mode}).
1716  value = BuildIndexedStringLoad(receiver, index, length, &effect,
1717  &control, load_mode);
1718  ReplaceWithValue(node, value, effect, control);
1719  return Replace(value);
1720  }
1721  }
1722  }
1723  }
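// [Editor's note] Illustrative examples, not part of the original source, of
// constant-{receiver} loads that the block above can fold:
//
//   const frozen = Object.freeze([1, 2, 3]);
//   frozen[0];      // element is non-writable/non-configurable -> constant 1
//   "turbofan"[3];  // constant string -> BuildIndexedStringLoad against the
//                   // known length 8 (given ELEMENT, non-MEGAMORPHIC feedback)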
1724 
1725  // Extract receiver maps from the {nexus}.
1726  MapHandles receiver_maps;
1727  if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
1728  return NoChange();
1729  } else if (receiver_maps.empty()) {
1730  if (flags() & kBailoutOnUninitialized) {
1731  return ReduceSoftDeoptimize(
1732  node,
1733  DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
1734  }
1735  return NoChange();
1736  }
1737  DCHECK(!nexus.IsUninitialized());
1738 
1739  // Optimize access for constant {index}.
1740  HeapObjectMatcher mindex(index);
1741  if (mindex.HasValue() && mindex.Value()->IsPrimitive()) {
1742  // Keyed access requires a ToPropertyKey on the {index} before
1743  // looking up the property on the object (see ES6 section 12.3.2.1).
1744  // We can only do this for non-observable ToPropertyKey invocations,
1745  // so we limit the constant indices to primitives at this point.
1746  Handle<Name> name;
1747  if (Object::ToName(isolate(), mindex.Value()).ToHandle(&name)) {
1748  uint32_t array_index;
1749  if (name->AsArrayIndex(&array_index)) {
1750  // Use the constant array index.
1751  index = jsgraph()->Constant(static_cast<double>(array_index));
1752  } else {
1753  name = factory()->InternalizeName(name);
1754  return ReduceNamedAccess(node, value, receiver_maps, name, access_mode);
1755  }
1756  }
1757  }
1758 
1759  // Check if we have feedback for a named access.
1760  Name name = nexus.FindFirstName();
1761  if (!name.is_null()) {
1762  return ReduceNamedAccess(node, value, receiver_maps,
1763  handle(name, isolate()), access_mode, index);
1764  } else if (nexus.GetKeyType() != ELEMENT) {
1765  // The KeyedLoad/StoreIC has seen non-element accesses, so we cannot assume
1766  // that the {index} is a valid array index, thus we just let the IC continue
1767  // to deal with this load/store.
1768  return NoChange();
1769  } else if (nexus.ic_state() == MEGAMORPHIC) {
1770  // The KeyedLoad/StoreIC uses the MEGAMORPHIC state to guard the assumption
1771  // that a numeric {index} is within the valid bounds for {receiver}, i.e.
1772  // it transitions to MEGAMORPHIC once it sees an out-of-bounds access. Thus
1773  // we cannot continue here if the IC state is MEGAMORPHIC.
1774  return NoChange();
1775  }
1776 
1777  // Try to lower the element access based on the {receiver_maps}.
1778  return ReduceElementAccess(node, index, value, receiver_maps, access_mode,
1779  load_mode, store_mode);
1780 }
1781 
1782 Reduction JSNativeContextSpecialization::ReduceSoftDeoptimize(
1783  Node* node, DeoptimizeReason reason) {
1784  Node* effect = NodeProperties::GetEffectInput(node);
1785  Node* control = NodeProperties::GetControlInput(node);
1786  Node* frame_state = NodeProperties::FindFrameStateBefore(node);
1787  Node* deoptimize = graph()->NewNode(
1788  common()->Deoptimize(DeoptimizeKind::kSoft, reason, VectorSlotPair()),
1789  frame_state, effect, control);
1790  // TODO(bmeurer): This should be on the AdvancedReducer somehow.
1791  NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
1792  Revisit(graph()->end());
1793  node->TrimInputCount(0);
1794  NodeProperties::ChangeOp(node, common()->Dead());
1795  return Changed(node);
1796 }
1797 
1798 Reduction JSNativeContextSpecialization::ReduceJSLoadProperty(Node* node) {
1799  DCHECK_EQ(IrOpcode::kJSLoadProperty, node->opcode());
1800  PropertyAccess const& p = PropertyAccessOf(node->op());
1801  Node* receiver = NodeProperties::GetValueInput(node, 0);
1802  Node* name = NodeProperties::GetValueInput(node, 1);
1803  Node* value = jsgraph()->Dead();
1804  Node* effect = NodeProperties::GetEffectInput(node);
1805  Node* control = NodeProperties::GetControlInput(node);
1806 
1807  // We can optimize a property load if it's being used inside a for..in
1808  // loop, i.e. for code like this:
1809  //
1810  // for (name in receiver) {
1811  // value = receiver[name];
1812  // ...
1813  // }
1814  //
1815  // If the for..in is in fast-mode, we know that the {receiver} has {name}
1816  // as an own property; otherwise the enumeration wouldn't include it. The graph
1817  // constructed by the BytecodeGraphBuilder in this case looks like this:
1818 
1819  // receiver
1820  // ^ ^
1821  // | |
1822  // | +-+
1823  // | |
1824  // | JSToObject
1825  // | ^
1826  // | |
1827  // | |
1828  // | JSForInNext
1829  // | ^
1830  // | |
1831  // +----+ |
1832  // | |
1833  // | |
1834  // JSLoadProperty
1835 
1836  // If the for..in has only seen maps with an enum cache consisting of keys
1837  // and indices so far, we can turn the {JSLoadProperty} into a map check
1838  // on the {receiver} and then just load the field value dynamically via
1839  // the {LoadFieldByIndex} operator. The map check is only necessary when
1840  // TurboFan cannot prove that there is no observable side effect between
1841  // the {JSForInNext} and the {JSLoadProperty} node.
1842  //
1843  // Also note that it's safe to look through the {JSToObject}, since the
1844  // [[Get]] operation does an implicit ToObject anyway, and these operations
1845  // are not observable.
1846  if (name->opcode() == IrOpcode::kJSForInNext) {
1847  ForInMode const mode = ForInModeOf(name->op());
1848  if (mode == ForInMode::kUseEnumCacheKeysAndIndices) {
1849  Node* object = NodeProperties::GetValueInput(name, 0);
1850  Node* enumerator = NodeProperties::GetValueInput(name, 2);
1851  Node* index = NodeProperties::GetValueInput(name, 3);
1852  if (object->opcode() == IrOpcode::kJSToObject) {
1853  object = NodeProperties::GetValueInput(object, 0);
1854  }
1855  if (object == receiver) {
1856  // No need to repeat the map check if we can prove that there's no
1857  // observable side effect between {effect} and {name}.
1858  if (!NodeProperties::NoObservableSideEffectBetween(effect, name)) {
1859  // Check that the {receiver} map is still valid.
1860  Node* receiver_map = effect =
1861  graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
1862  receiver, effect, control);
1863  Node* check = graph()->NewNode(simplified()->ReferenceEqual(),
1864  receiver_map, enumerator);
1865  effect = graph()->NewNode(
1866  simplified()->CheckIf(DeoptimizeReason::kWrongMap), check, effect,
1867  control);
1868  }
1869 
1870  // Load the enum cache indices from the {enumerator} map.
1871  Node* descriptor_array = effect = graph()->NewNode(
1872  simplified()->LoadField(AccessBuilder::ForMapDescriptors()),
1873  enumerator, effect, control);
1874  Node* enum_cache = effect =
1875  graph()->NewNode(simplified()->LoadField(
1876  AccessBuilder::ForDescriptorArrayEnumCache()),
1877  descriptor_array, effect, control);
1878  Node* enum_indices = effect = graph()->NewNode(
1879  simplified()->LoadField(AccessBuilder::ForEnumCacheIndices()),
1880  enum_cache, effect, control);
1881 
1882  // Ensure that the {enum_indices} are valid.
1883  Node* check = graph()->NewNode(
1884  simplified()->BooleanNot(),
1885  graph()->NewNode(simplified()->ReferenceEqual(), enum_indices,
1886  jsgraph()->EmptyFixedArrayConstant()));
1887  effect = graph()->NewNode(
1888  simplified()->CheckIf(DeoptimizeReason::kWrongEnumIndices), check,
1889  effect, control);
1890 
1891  // Determine the index from the {enum_indices}.
1892  index = effect = graph()->NewNode(
1893  simplified()->LoadElement(
1894  AccessBuilder::ForFixedArrayElement(PACKED_SMI_ELEMENTS)),
1895  enum_indices, index, effect, control);
1896 
1897  // Load the actual field value.
1898  Node* value = effect = graph()->NewNode(
1899  simplified()->LoadFieldByIndex(), receiver, index, effect, control);
1900  ReplaceWithValue(node, value, effect, control);
1901  return Replace(value);
1902  }
1903  }
1904  }
1905 
1906  // Extract receiver maps from the keyed load IC using the FeedbackNexus.
1907  if (!p.feedback().IsValid()) return NoChange();
1908  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
1909 
1910  // Extract the keyed access load mode from the keyed load IC.
1911  KeyedAccessLoadMode load_mode = nexus.GetKeyedAccessLoadMode();
1912 
1913  // Try to lower the keyed access based on the {nexus}.
1914  return ReduceKeyedAccess(node, name, value, nexus, AccessMode::kLoad,
1915  load_mode, STANDARD_STORE);
1916 }
1917 
1918 Reduction JSNativeContextSpecialization::ReduceJSStoreProperty(Node* node) {
1919  DCHECK_EQ(IrOpcode::kJSStoreProperty, node->opcode());
1920  PropertyAccess const& p = PropertyAccessOf(node->op());
1921  Node* const index = NodeProperties::GetValueInput(node, 1);
1922  Node* const value = NodeProperties::GetValueInput(node, 2);
1923 
1924  // Extract receiver maps from the keyed store IC using the FeedbackNexus.
1925  if (!p.feedback().IsValid()) return NoChange();
1926  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
1927 
1928  // Extract the keyed access store mode from the keyed store IC.
1929  KeyedAccessStoreMode store_mode = nexus.GetKeyedAccessStoreMode();
1930 
1931  // Try to lower the keyed access based on the {nexus}.
1932  return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kStore,
1933  STANDARD_LOAD, store_mode);
1934 }
1935 
1936 Node* JSNativeContextSpecialization::InlinePropertyGetterCall(
1937  Node* receiver, Node* context, Node* frame_state, Node** effect,
1938  Node** control, ZoneVector<Node*>* if_exceptions,
1939  PropertyAccessInfo const& access_info) {
1940  Node* target = jsgraph()->Constant(access_info.constant());
1941  FrameStateInfo const& frame_info = FrameStateInfoOf(frame_state->op());
1942  Handle<SharedFunctionInfo> shared_info =
1943  frame_info.shared_info().ToHandleChecked();
1944  // Introduce the call to the getter function.
1945  Node* value;
1946  if (access_info.constant()->IsJSFunction()) {
1947  value = *effect = *control = graph()->NewNode(
1948  jsgraph()->javascript()->Call(2, CallFrequency(), VectorSlotPair(),
1949  ConvertReceiverMode::kNotNullOrUndefined),
1950  target, receiver, context, frame_state, *effect, *control);
1951  } else {
1952  DCHECK(access_info.constant()->IsFunctionTemplateInfo());
1953  Handle<FunctionTemplateInfo> function_template_info(
1954  Handle<FunctionTemplateInfo>::cast(access_info.constant()));
1955  DCHECK(!function_template_info->call_code()->IsUndefined(isolate()));
1956  Node* holder =
1957  access_info.holder().is_null()
1958  ? receiver
1959  : jsgraph()->Constant(access_info.holder().ToHandleChecked());
1960  value = InlineApiCall(receiver, holder, frame_state, nullptr, effect,
1961  control, shared_info, function_template_info);
1962  }
1963  // Remember to rewire the IfException edge if this is inside a try-block.
1964  if (if_exceptions != nullptr) {
1965  // Create the appropriate IfException/IfSuccess projections.
1966  Node* const if_exception =
1967  graph()->NewNode(common()->IfException(), *control, *effect);
1968  Node* const if_success = graph()->NewNode(common()->IfSuccess(), *control);
1969  if_exceptions->push_back(if_exception);
1970  *control = if_success;
1971  }
1972  return value;
1973 }
1974 
1975 void JSNativeContextSpecialization::InlinePropertySetterCall(
1976  Node* receiver, Node* value, Node* context, Node* frame_state,
1977  Node** effect, Node** control, ZoneVector<Node*>* if_exceptions,
1978  PropertyAccessInfo const& access_info) {
1979  Node* target = jsgraph()->Constant(access_info.constant());
1980  FrameStateInfo const& frame_info = FrameStateInfoOf(frame_state->op());
1981  Handle<SharedFunctionInfo> shared_info =
1982  frame_info.shared_info().ToHandleChecked();
1983  // Introduce the call to the setter function.
1984  if (access_info.constant()->IsJSFunction()) {
1985  *effect = *control = graph()->NewNode(
1986  jsgraph()->javascript()->Call(3, CallFrequency(), VectorSlotPair(),
1987  ConvertReceiverMode::kNotNullOrUndefined),
1988  target, receiver, value, context, frame_state, *effect, *control);
1989  } else {
1990  DCHECK(access_info.constant()->IsFunctionTemplateInfo());
1991  Handle<FunctionTemplateInfo> function_template_info(
1992  Handle<FunctionTemplateInfo>::cast(access_info.constant()));
1993  DCHECK(!function_template_info->call_code()->IsUndefined(isolate()));
1994  Node* holder =
1995  access_info.holder().is_null()
1996  ? receiver
1997  : jsgraph()->Constant(access_info.holder().ToHandleChecked());
1998  InlineApiCall(receiver, holder, frame_state, value, effect, control,
1999  shared_info, function_template_info);
2000  }
2001  // Remember to rewire the IfException edge if this is inside a try-block.
2002  if (if_exceptions != nullptr) {
2003  // Create the appropriate IfException/IfSuccess projections.
2004  Node* const if_exception =
2005  graph()->NewNode(common()->IfException(), *control, *effect);
2006  Node* const if_success = graph()->NewNode(common()->IfSuccess(), *control);
2007  if_exceptions->push_back(if_exception);
2008  *control = if_success;
2009  }
2010 }
2011 
2012 Node* JSNativeContextSpecialization::InlineApiCall(
2013  Node* receiver, Node* holder, Node* frame_state, Node* value, Node** effect,
2014  Node** control, Handle<SharedFunctionInfo> shared_info,
2015  Handle<FunctionTemplateInfo> function_template_info) {
2016  Handle<CallHandlerInfo> call_handler_info = handle(
2017  CallHandlerInfo::cast(function_template_info->call_code()), isolate());
2018  Handle<Object> call_data_object(call_handler_info->data(), isolate());
2019 
2020  // Only setters have a value.
2021  int const argc = value == nullptr ? 0 : 1;
2022  // The stub always expects the receiver as the first param on the stack.
2023  Callable call_api_callback = CodeFactory::CallApiCallback(isolate(), argc);
2024  CallInterfaceDescriptor call_interface_descriptor =
2025  call_api_callback.descriptor();
2026  auto call_descriptor = Linkage::GetStubCallDescriptor(
2027  graph()->zone(), call_interface_descriptor,
2028  call_interface_descriptor.GetStackParameterCount() + argc +
2029  1 /* implicit receiver */,
2030  CallDescriptor::kNeedsFrameState);
2031 
2032  Node* data = jsgraph()->Constant(call_data_object);
2033  ApiFunction function(v8::ToCData<Address>(call_handler_info->callback()));
2034  Node* function_reference =
2035  graph()->NewNode(common()->ExternalConstant(ExternalReference::Create(
2036  &function, ExternalReference::DIRECT_API_CALL)));
2037  Node* code = jsgraph()->HeapConstant(call_api_callback.code());
2038 
2039  // Add CallApiCallbackStub's register argument as well.
2040  Node* context = jsgraph()->Constant(native_context());
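// [Editor's note] Descriptive comment, not part of the original source. The
// call inputs assembled below are laid out as:
//   inputs[0..5] = code, context, data, holder, function_reference, receiver
//   getters (argc == 0): inputs[6..8] = frame_state, effect, control
//   setters (argc == 1): inputs[6] = value (assigned last, see the crbug note
//                        below), inputs[7..9] = frame_state, effect, control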
2041  Node* inputs[10] = {code, context, data, holder, function_reference,
2042  receiver};
2043  int index = 6 + argc;
2044  inputs[index++] = frame_state;
2045  inputs[index++] = *effect;
2046  inputs[index++] = *control;
2047  // This needs to stay here because of the edge case described in
2048  // http://crbug.com/675648.
2049  if (value != nullptr) {
2050  inputs[6] = value;
2051  }
2052 
2053  return *effect = *control =
2054  graph()->NewNode(common()->Call(call_descriptor), index, inputs);
2055 }
2056 
2057 JSNativeContextSpecialization::ValueEffectControl
2058 JSNativeContextSpecialization::BuildPropertyLoad(
2059  Node* receiver, Node* context, Node* frame_state, Node* effect,
2060  Node* control, Handle<Name> name, ZoneVector<Node*>* if_exceptions,
2061  PropertyAccessInfo const& access_info) {
2062  // Determine actual holder and perform prototype chain checks.
2063  Handle<JSObject> holder;
2064  PropertyAccessBuilder access_builder(jsgraph(), broker(), dependencies());
2065  if (access_info.holder().ToHandle(&holder)) {
2066  dependencies()->DependOnStablePrototypeChains(
2067  broker(), access_info.receiver_maps(), JSObjectRef(broker(), holder));
2068  }
2069 
2070  // Generate the actual property access.
2071  Node* value;
2072  if (access_info.IsNotFound()) {
2073  value = jsgraph()->UndefinedConstant();
2074  } else if (access_info.IsDataConstant()) {
2075  DCHECK(!FLAG_track_constant_fields);
2076  value = jsgraph()->Constant(access_info.constant());
2077  } else if (access_info.IsAccessorConstant()) {
2078  value = InlinePropertyGetterCall(receiver, context, frame_state, &effect,
2079  &control, if_exceptions, access_info);
2080  } else if (access_info.IsModuleExport()) {
2081  Node* cell = jsgraph()->Constant(access_info.export_cell());
2082  value = effect =
2083  graph()->NewNode(simplified()->LoadField(AccessBuilder::ForCellValue()),
2084  cell, effect, control);
2085  } else if (access_info.IsStringLength()) {
2086  value = graph()->NewNode(simplified()->StringLength(), receiver);
2087  } else {
2088  DCHECK(access_info.IsDataField() || access_info.IsDataConstantField());
2089  value = access_builder.BuildLoadDataField(name, access_info, receiver,
2090  &effect, &control);
2091  }
2092 
2093  return ValueEffectControl(value, effect, control);
2094 }
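// [Editor's note] Illustrative summary, not part of the original source, of
// how BuildPropertyLoad lowers each kind of PropertyAccessInfo:
//
//   IsNotFound()         -> UndefinedConstant
//   IsDataConstant()     -> Constant(access_info.constant())
//   IsAccessorConstant() -> InlinePropertyGetterCall (JS or API getter)
//   IsModuleExport()     -> LoadField of the export cell's value
//   IsStringLength()     -> StringLength(receiver)
//   IsDataField() / IsDataConstantField() -> BuildLoadDataField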
2095 
2096 JSNativeContextSpecialization::ValueEffectControl
2097 JSNativeContextSpecialization::BuildPropertyAccess(
2098  Node* receiver, Node* value, Node* context, Node* frame_state, Node* effect,
2099  Node* control, Handle<Name> name, ZoneVector<Node*>* if_exceptions,
2100  PropertyAccessInfo const& access_info, AccessMode access_mode) {
2101  switch (access_mode) {
2102  case AccessMode::kLoad:
2103  return BuildPropertyLoad(receiver, context, frame_state, effect, control,
2104  name, if_exceptions, access_info);
2105  case AccessMode::kStore:
2106  case AccessMode::kStoreInLiteral:
2107  return BuildPropertyStore(receiver, value, context, frame_state, effect,
2108  control, name, if_exceptions, access_info,
2109  access_mode);
2110  }
2111  UNREACHABLE();
2112  return ValueEffectControl();
2113 }
2114 
2115 JSNativeContextSpecialization::ValueEffectControl
2116 JSNativeContextSpecialization::BuildPropertyStore(
2117  Node* receiver, Node* value, Node* context, Node* frame_state, Node* effect,
2118  Node* control, Handle<Name> name, ZoneVector<Node*>* if_exceptions,
2119  PropertyAccessInfo const& access_info, AccessMode access_mode) {
2120  // Determine actual holder and perform prototype chain checks.
2121  Handle<JSObject> holder;
2122  PropertyAccessBuilder access_builder(jsgraph(), broker(), dependencies());
2123  if (access_info.holder().ToHandle(&holder)) {
2124  DCHECK_NE(AccessMode::kStoreInLiteral, access_mode);
2125  dependencies()->DependOnStablePrototypeChains(
2126  broker(), access_info.receiver_maps(), JSObjectRef(broker(), holder));
2127  }
2128 
2129  DCHECK(!access_info.IsNotFound());
2130 
2131  // Generate the actual property access.
2132  if (access_info.IsDataConstant()) {
2133  DCHECK(!FLAG_track_constant_fields);
2134  Node* constant_value = jsgraph()->Constant(access_info.constant());
2135  Node* check =
2136  graph()->NewNode(simplified()->ReferenceEqual(), value, constant_value);
2137  effect =
2138  graph()->NewNode(simplified()->CheckIf(DeoptimizeReason::kWrongValue),
2139  check, effect, control);
2140  value = constant_value;
2141  } else if (access_info.IsAccessorConstant()) {
2142  InlinePropertySetterCall(receiver, value, context, frame_state, &effect,
2143  &control, if_exceptions, access_info);
2144  } else {
2145  DCHECK(access_info.IsDataField() || access_info.IsDataConstantField());
2146  FieldIndex const field_index = access_info.field_index();
2147  Type const field_type = access_info.field_type();
2148  MachineRepresentation const field_representation =
2149  access_info.field_representation();
2150  Node* storage = receiver;
2151  if (!field_index.is_inobject()) {
2152  storage = effect = graph()->NewNode(
2153  simplified()->LoadField(AccessBuilder::ForJSObjectPropertiesOrHash()),
2154  storage, effect, control);
2155  }
2156  FieldAccess field_access = {
2157  kTaggedBase,
2158  field_index.offset(),
2159  name,
2160  MaybeHandle<Map>(),
2161  field_type,
2162  MachineType::TypeForRepresentation(field_representation),
2163  kFullWriteBarrier};
2164  bool store_to_constant_field = FLAG_track_constant_fields &&
2165  (access_mode == AccessMode::kStore) &&
2166  access_info.IsDataConstantField();
2167 
2168  DCHECK(access_mode == AccessMode::kStore ||
2169  access_mode == AccessMode::kStoreInLiteral);
2170  switch (field_representation) {
2171  case MachineRepresentation::kFloat64: {
2172  value = effect =
2173  graph()->NewNode(simplified()->CheckNumber(VectorSlotPair()), value,
2174  effect, control);
2175  if (!field_index.is_inobject() || field_index.is_hidden_field() ||
2176  !FLAG_unbox_double_fields) {
2177  if (access_info.HasTransitionMap()) {
2178  // Allocate a MutableHeapNumber for the new property.
2179  AllocationBuilder a(jsgraph(), effect, control);
2180  a.Allocate(HeapNumber::kSize, NOT_TENURED, Type::OtherInternal());
2181  a.Store(AccessBuilder::ForMap(),
2182  factory()->mutable_heap_number_map());
2183  a.Store(AccessBuilder::ForHeapNumberValue(), value);
2184  value = effect = a.Finish();
2185 
2186  field_access.type = Type::Any();
2187  field_access.machine_type = MachineType::TaggedPointer();
2188  field_access.write_barrier_kind = kPointerWriteBarrier;
2189  } else {
2190  // We just store directly to the MutableHeapNumber.
2191  FieldAccess const storage_access = {kTaggedBase,
2192  field_index.offset(),
2193  name,
2194  MaybeHandle<Map>(),
2195  Type::OtherInternal(),
2196  MachineType::TaggedPointer(),
2197  kPointerWriteBarrier};
2198  storage = effect =
2199  graph()->NewNode(simplified()->LoadField(storage_access),
2200  storage, effect, control);
2201  field_access.offset = HeapNumber::kValueOffset;
2202  field_access.name = MaybeHandle<Name>();
2203  field_access.machine_type = MachineType::Float64();
2204  }
2205  }
2206  if (store_to_constant_field) {
2207  DCHECK(!access_info.HasTransitionMap());
2208  // If the field is constant, check that the value we are going
2209  // to store matches the current value.
2210  Node* current_value = effect = graph()->NewNode(
2211  simplified()->LoadField(field_access), storage, effect, control);
2212 
2213  Node* check = graph()->NewNode(simplified()->NumberEqual(),
2214  current_value, value);
2215  effect = graph()->NewNode(
2216  simplified()->CheckIf(DeoptimizeReason::kWrongValue), check,
2217  effect, control);
2218  return ValueEffectControl(value, effect, control);
2219  }
2220  break;
2221  }
2222  case MachineRepresentation::kTaggedSigned:
2223  case MachineRepresentation::kTaggedPointer:
2224  case MachineRepresentation::kTagged:
2225  if (store_to_constant_field) {
2226  DCHECK(!access_info.HasTransitionMap());
2227  // If the field is constant, check that the value we are going
2228  // to store matches the current value.
2229  Node* current_value = effect = graph()->NewNode(
2230  simplified()->LoadField(field_access), storage, effect, control);
2231 
2232  Node* check = graph()->NewNode(simplified()->ReferenceEqual(),
2233  current_value, value);
2234  effect = graph()->NewNode(
2235  simplified()->CheckIf(DeoptimizeReason::kWrongValue), check,
2236  effect, control);
2237  return ValueEffectControl(value, effect, control);
2238  }
2239 
2240  if (field_representation == MachineRepresentation::kTaggedSigned) {
2241  value = effect = graph()->NewNode(
2242  simplified()->CheckSmi(VectorSlotPair()), value, effect, control);
2243  field_access.write_barrier_kind = kNoWriteBarrier;
2244 
2245  } else if (field_representation ==
2246  MachineRepresentation::kTaggedPointer) {
2247  // Ensure that {value} is a HeapObject.
2248  value = access_builder.BuildCheckHeapObject(value, &effect, control);
2249  Handle<Map> field_map;
2250  if (access_info.field_map().ToHandle(&field_map)) {
2251  // Emit a map check for the value.
2252  effect = graph()->NewNode(
2253  simplified()->CheckMaps(CheckMapsFlag::kNone,
2254  ZoneHandleSet<Map>(field_map)),
2255  value, effect, control);
2256  }
2257  field_access.write_barrier_kind = kPointerWriteBarrier;
2258 
2259  } else {
2260  DCHECK_EQ(MachineRepresentation::kTagged, field_representation);
2261  }
2262  break;
2263  case MachineRepresentation::kNone:
2264  case MachineRepresentation::kBit:
2265  case MachineRepresentation::kWord8:
2266  case MachineRepresentation::kWord16:
2267  case MachineRepresentation::kWord32:
2268  case MachineRepresentation::kWord64:
2269  case MachineRepresentation::kFloat32:
2270  case MachineRepresentation::kSimd128:
2271  UNREACHABLE();
2272  break;
2273  }
2274  // Check if we need to perform a transitioning store.
2275  Handle<Map> transition_map;
2276  if (access_info.transition_map().ToHandle(&transition_map)) {
2277  // Check if we need to grow the properties backing store
2278  // with this transitioning store.
2279  Handle<Map> original_map(Map::cast(transition_map->GetBackPointer()),
2280  isolate());
2281  if (original_map->UnusedPropertyFields() == 0) {
2282  DCHECK(!field_index.is_inobject());
2283 
2284  // Reallocate the properties {storage}.
2285  storage = effect = BuildExtendPropertiesBackingStore(
2286  original_map, storage, effect, control);
2287 
2288  // Perform the actual store.
2289  effect = graph()->NewNode(simplified()->StoreField(field_access),
2290  storage, value, effect, control);
2291 
2292  // Atomically switch to the new properties below.
2293  field_access = AccessBuilder::ForJSObjectPropertiesOrHash();
2294  value = storage;
2295  storage = receiver;
2296  }
2297  effect = graph()->NewNode(
2298  common()->BeginRegion(RegionObservability::kObservable), effect);
2299  effect = graph()->NewNode(
2300  simplified()->StoreField(AccessBuilder::ForMap()), receiver,
2301  jsgraph()->Constant(transition_map), effect, control);
2302  effect = graph()->NewNode(simplified()->StoreField(field_access), storage,
2303  value, effect, control);
2304  effect = graph()->NewNode(common()->FinishRegion(),
2305  jsgraph()->UndefinedConstant(), effect);
2306  } else {
2307  // Regular non-transitioning field store.
2308  effect = graph()->NewNode(simplified()->StoreField(field_access), storage,
2309  value, effect, control);
2310  }
2311  }
2312 
2313  return ValueEffectControl(value, effect, control);
2314 }
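// [Editor's note] Illustrative example, not part of the original source
// ({newProp} is a hypothetical property name): a transitioning store such as
//
//   function init(o) { o.newProp = 1; }
//
// takes the transition_map branch above: if the old map has no unused
// property fields the out-of-object backing store is reallocated first, and
// the new map plus the field value are then written inside a single
// BeginRegion/FinishRegion block so the transition appears atomic.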
2315 
2316 Reduction JSNativeContextSpecialization::ReduceJSStoreDataPropertyInLiteral(
2317  Node* node) {
2318  DCHECK_EQ(IrOpcode::kJSStoreDataPropertyInLiteral, node->opcode());
2319 
2320  FeedbackParameter const& p = FeedbackParameterOf(node->op());
2321 
2322  if (!p.feedback().IsValid()) return NoChange();
2323 
2324  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
2325  if (nexus.IsUninitialized()) {
2326  return NoChange();
2327  }
2328 
2329  if (nexus.ic_state() == MEGAMORPHIC) {
2330  return NoChange();
2331  }
2332 
2333  DCHECK_EQ(MONOMORPHIC, nexus.ic_state());
2334 
2335  Map map = nexus.FindFirstMap();
2336  if (map.is_null()) {
2337  // Maps are weakly held in the type feedback vector, so we may not have one.
2338  return NoChange();
2339  }
2340 
2341  Handle<Map> receiver_map(map, isolate());
2342  if (!Map::TryUpdate(isolate(), receiver_map).ToHandle(&receiver_map))
2343  return NoChange();
2344 
2345  Handle<Name> cached_name =
2346  handle(Name::cast(nexus.GetFeedbackExtra()->GetHeapObjectAssumeStrong()),
2347  isolate());
2348 
2349  PropertyAccessInfo access_info;
2350  AccessInfoFactory access_info_factory(
2351  broker(), dependencies(), native_context().object(), graph()->zone());
2352  if (!access_info_factory.ComputePropertyAccessInfo(
2353  receiver_map, cached_name, AccessMode::kStoreInLiteral,
2354  &access_info)) {
2355  return NoChange();
2356  }
2357 
2358  Node* receiver = NodeProperties::GetValueInput(node, 0);
2359  Node* effect = NodeProperties::GetEffectInput(node);
2360  Node* control = NodeProperties::GetControlInput(node);
2361 
2362  // Monomorphic property access.
2363  PropertyAccessBuilder access_builder(jsgraph(), broker(), dependencies());
2364  receiver = access_builder.BuildCheckHeapObject(receiver, &effect, control);
2365  access_builder.BuildCheckMaps(receiver, &effect, control,
2366  access_info.receiver_maps());
2367 
2368  // Ensure that {name} matches the cached name.
2369  Node* name = NodeProperties::GetValueInput(node, 1);
2370  Node* check = graph()->NewNode(simplified()->ReferenceEqual(), name,
2371  jsgraph()->HeapConstant(cached_name));
2372  effect = graph()->NewNode(simplified()->CheckIf(DeoptimizeReason::kWrongName),
2373  check, effect, control);
2374 
2375  Node* value = NodeProperties::GetValueInput(node, 2);
2376  Node* context = NodeProperties::GetContextInput(node);
2377  Node* frame_state_lazy = NodeProperties::GetFrameStateInput(node);
2378 
2379  // Generate the actual property access.
2380  ValueEffectControl continuation = BuildPropertyAccess(
2381  receiver, value, context, frame_state_lazy, effect, control, cached_name,
2382  nullptr, access_info, AccessMode::kStoreInLiteral);
2383  value = continuation.value();
2384  effect = continuation.effect();
2385  control = continuation.control();
2386 
2387  ReplaceWithValue(node, value, effect, control);
2388  return Replace(value);
2389 }
2390 
2391 Reduction JSNativeContextSpecialization::ReduceJSStoreInArrayLiteral(
2392  Node* node) {
2393  DCHECK_EQ(IrOpcode::kJSStoreInArrayLiteral, node->opcode());
2394  FeedbackParameter const& p = FeedbackParameterOf(node->op());
2395  Node* const receiver = NodeProperties::GetValueInput(node, 0);
2396  Node* const index = NodeProperties::GetValueInput(node, 1);
2397  Node* const value = NodeProperties::GetValueInput(node, 2);
2398  Node* const effect = NodeProperties::GetEffectInput(node);
2399 
2400  // Extract receiver maps from the keyed store IC using the FeedbackNexus.
2401  if (!p.feedback().IsValid()) return NoChange();
2402  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
2403 
2404  // Extract the keyed access store mode from the keyed store IC.
2405  KeyedAccessStoreMode store_mode = nexus.GetKeyedAccessStoreMode();
2406 
2407  // Extract receiver maps from the {nexus}.
2408  MapHandles receiver_maps;
2409  if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
2410  return NoChange();
2411  } else if (receiver_maps.empty()) {
2412  if (flags() & kBailoutOnUninitialized) {
2413  return ReduceSoftDeoptimize(
2414  node,
2415  DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
2416  }
2417  return NoChange();
2418  }
2419  DCHECK(!nexus.IsUninitialized());
2420  DCHECK_EQ(ELEMENT, nexus.GetKeyType());
2421 
2422  if (nexus.ic_state() == MEGAMORPHIC) return NoChange();
2423 
2424  // Try to lower the element access based on the {receiver_maps}.
2425  return ReduceElementAccess(node, index, value, receiver_maps,
2426  AccessMode::kStoreInLiteral, STANDARD_LOAD,
2427  store_mode);
2428 }
2429 
2430 Reduction JSNativeContextSpecialization::ReduceJSToObject(Node* node) {
2431  DCHECK_EQ(IrOpcode::kJSToObject, node->opcode());
2432  Node* receiver = NodeProperties::GetValueInput(node, 0);
2433  Node* effect = NodeProperties::GetEffectInput(node);
2434 
2435  ZoneHandleSet<Map> receiver_maps;
2436  NodeProperties::InferReceiverMapsResult result =
2437  NodeProperties::InferReceiverMaps(broker(), receiver, effect,
2438  &receiver_maps);
2439  if (result == NodeProperties::kNoReceiverMaps) return NoChange();
2440 
2441  for (size_t i = 0; i < receiver_maps.size(); ++i) {
2442  if (!receiver_maps[i]->IsJSReceiverMap()) return NoChange();
2443  }
2444 
2445  ReplaceWithValue(node, receiver, effect);
2446  return Replace(receiver);
2447 }
2448 
2449 namespace {
2450 
2451 ExternalArrayType GetArrayTypeFromElementsKind(ElementsKind kind) {
2452  switch (kind) {
2453 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
2454  case TYPE##_ELEMENTS: \
2455  return kExternal##Type##Array;
2456  TYPED_ARRAYS(TYPED_ARRAY_CASE)
2457 #undef TYPED_ARRAY_CASE
2458  default:
2459  break;
2460  }
2461  UNREACHABLE();
2462 }
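// [Editor's note] Descriptive comment, not part of the original source: the
// TYPED_ARRAYS(TYPED_ARRAY_CASE) expansion above emits one case per
// typed-array type, for example
//
//   case INT8_ELEMENTS:   return kExternalInt8Array;
//   case UINT32_ELEMENTS: return kExternalUint32Array;
//
// mapping every fixed typed-array elements kind onto its ExternalArrayType.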
2463 
2464 base::Optional<JSTypedArrayRef> GetTypedArrayConstant(JSHeapBroker* broker,
2465  Node* receiver) {
2466  HeapObjectMatcher m(receiver);
2467  if (!m.HasValue()) return base::nullopt;
2468  ObjectRef object = m.Ref(broker);
2469  if (!object.IsJSTypedArray()) return base::nullopt;
2470  JSTypedArrayRef typed_array = object.AsJSTypedArray();
2471  if (typed_array.is_on_heap()) return base::nullopt;
2472  return typed_array;
2473 }
2474 
2475 } // namespace
2476 
2477 JSNativeContextSpecialization::ValueEffectControl
2478 JSNativeContextSpecialization::BuildElementAccess(
2479  Node* receiver, Node* index, Node* value, Node* effect, Node* control,
2480  ElementAccessInfo const& access_info, AccessMode access_mode,
2481  KeyedAccessLoadMode load_mode, KeyedAccessStoreMode store_mode) {
2482 
2483  // TODO(bmeurer): We currently specialize based on elements kind. We should
2484  // also be able to properly support strings and other JSObjects here.
2485  ElementsKind elements_kind = access_info.elements_kind();
2486  MapHandles const& receiver_maps = access_info.receiver_maps();
2487 
2488  if (IsFixedTypedArrayElementsKind(elements_kind)) {
2489  Node* buffer;
2490  Node* length;
2491  Node* base_pointer;
2492  Node* external_pointer;
2493 
2494  // Check if we can constant-fold information about the {receiver} (i.e.
2495  // for asm.js-like code patterns).
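// [Editor's note] Illustrative example, not part of the original source
// ({HEAP32} and {buffer} are hypothetical names): this covers asm.js-style
// code where the typed array is a compile-time constant with an off-heap
// backing store, e.g.
//
//   var HEAP32 = new Int32Array(buffer);
//   function load(i) { return HEAP32[i | 0]; }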
2496  base::Optional<JSTypedArrayRef> typed_array =
2497  GetTypedArrayConstant(broker(), receiver);
2498  if (typed_array.has_value()) {
2499  typed_array->Serialize();
2500  buffer = jsgraph()->Constant(typed_array->buffer());
2501  length =
2502  jsgraph()->Constant(static_cast<double>(typed_array->length_value()));
2503 
2504  // Load the (known) base and external pointer for the {receiver}. The
2505  // {external_pointer} might be invalid if the {buffer} was neutered, so
2506  // we need to make sure that any access is properly guarded.
2507  base_pointer = jsgraph()->ZeroConstant();
2508  external_pointer =
2509  jsgraph()->PointerConstant(typed_array->elements_external_pointer());
2510  } else {
2511  // Load the {receiver}s length.
2512  length = effect = graph()->NewNode(
2513  simplified()->LoadField(AccessBuilder::ForJSTypedArrayLength()),
2514  receiver, effect, control);
2515 
2516  // Load the buffer for the {receiver}.
2517  buffer = effect = graph()->NewNode(
2518  simplified()->LoadField(AccessBuilder::ForJSArrayBufferViewBuffer()),
2519  receiver, effect, control);
2520 
2521  // Load the elements for the {receiver}.
2522  Node* elements = effect = graph()->NewNode(
2523  simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
2524  receiver, effect, control);
2525 
2526  // Load the base pointer for the {receiver}. This will always be Smi
2527  // zero unless we allow on-heap TypedArrays, which is only the case
2528  // for Chrome. Node and Electron both set this limit to 0. Setting
2529  // the base to Smi zero here allows the EffectControlLinearizer to
2530  // optimize away the tricky part of the access later.
2531  if (V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP == 0) {
2532  base_pointer = jsgraph()->ZeroConstant();
2533  } else {
2534  base_pointer = effect = graph()->NewNode(
2535  simplified()->LoadField(
2536  AccessBuilder::ForFixedTypedArrayBaseBasePointer()),
2537  elements, effect, control);
2538  }
2539 
2540  // Load the external pointer for the {receiver}s {elements}.
2541  external_pointer = effect = graph()->NewNode(
2542  simplified()->LoadField(
2543  AccessBuilder::ForFixedTypedArrayBaseExternalPointer()),
2544  elements, effect, control);
2545  }
2546 
2547  // See if we can skip the neutering check.
2548  if (isolate()->IsArrayBufferNeuteringIntact()) {
2549  // Add a code dependency so we are deoptimized in case an ArrayBuffer
2550  // gets neutered.
2551  dependencies()->DependOnProtector(PropertyCellRef(
2552  broker(), factory()->array_buffer_neutering_protector()));
2553  } else {
2554  // Deopt if the {buffer} was neutered.
2555  // Note: A neutered buffer leads to megamorphic feedback.
2556  Node* buffer_bit_field = effect = graph()->NewNode(
2557  simplified()->LoadField(AccessBuilder::ForJSArrayBufferBitField()),
2558  buffer, effect, control);
2559  Node* check = graph()->NewNode(
2560  simplified()->NumberEqual(),
2561  graph()->NewNode(
2562  simplified()->NumberBitwiseAnd(), buffer_bit_field,
2563  jsgraph()->Constant(JSArrayBuffer::WasNeuteredBit::kMask)),
2564  jsgraph()->ZeroConstant());
2565  effect = graph()->NewNode(
2566  simplified()->CheckIf(DeoptimizeReason::kArrayBufferWasNeutered),
2567  check, effect, control);
2568  }
2569 
2570  if (load_mode == LOAD_IGNORE_OUT_OF_BOUNDS ||
2571  store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2572  // Only check that the {index} is in SignedSmall range. We do the actual
2573  // bounds check below and just skip the property access if it's out of
2574  // bounds for the {receiver}.
2575  index = effect = graph()->NewNode(
2576  simplified()->CheckSmi(VectorSlotPair()), index, effect, control);
2577 
2578  // Cast the {index} to Unsigned32 range, so that the bounds checks
2579  // below are performed on unsigned values, which means that all the
2580  // Negative32 values are treated as out-of-bounds.
2581  index = graph()->NewNode(simplified()->NumberToUint32(), index);
2582  } else {
2583  // Check that the {index} is in the valid range for the {receiver}.
2584  index = effect =
2585  graph()->NewNode(simplified()->CheckBounds(VectorSlotPair()), index,
2586  length, effect, control);
2587  }
2588 
2589  // Access the actual element.
2590  ExternalArrayType external_array_type =
2591  GetArrayTypeFromElementsKind(elements_kind);
2592  switch (access_mode) {
2593  case AccessMode::kLoad: {
2594  // Check if we can return undefined for out-of-bounds loads.
2595  if (load_mode == LOAD_IGNORE_OUT_OF_BOUNDS) {
2596  Node* check =
2597  graph()->NewNode(simplified()->NumberLessThan(), index, length);
2598  Node* branch = graph()->NewNode(
2599  common()->Branch(BranchHint::kTrue,
2600  IsSafetyCheck::kCriticalSafetyCheck),
2601  check, control);
2602 
2603  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
2604  Node* etrue = effect;
2605  Node* vtrue;
2606  {
2607  // Perform the actual load
2608  vtrue = etrue = graph()->NewNode(
2609  simplified()->LoadTypedElement(external_array_type), buffer,
2610  base_pointer, external_pointer, index, etrue, if_true);
2611  }
2612 
2613  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
2614  Node* efalse = effect;
2615  Node* vfalse;
2616  {
2617  // Materialize undefined for out-of-bounds loads.
2618  vfalse = jsgraph()->UndefinedConstant();
2619  }
2620 
2621  control = graph()->NewNode(common()->Merge(2), if_true, if_false);
2622  effect =
2623  graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
2624  value =
2625  graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
2626  vtrue, vfalse, control);
2627  } else {
2628  // Perform the actual load.
2629  value = effect = graph()->NewNode(
2630  simplified()->LoadTypedElement(external_array_type), buffer,
2631  base_pointer, external_pointer, index, effect, control);
2632  }
2633  break;
2634  }
2635  case AccessMode::kStoreInLiteral:
2636  UNREACHABLE();
2637  break;
2638  case AccessMode::kStore: {
2639  // Ensure that the {value} is actually a Number or an Oddball,
2640  // and truncate it to a Number appropriately.
2641  value = effect = graph()->NewNode(
2642  simplified()->SpeculativeToNumber(
2643  NumberOperationHint::kNumberOrOddball, VectorSlotPair()),
2644  value, effect, control);
2645 
2646  // Introduce the appropriate truncation for {value}. Currently we
2647  // only need to do this for Uint8ClampedArray {receiver}s, as the
2648  // other truncations are implicit in the StoreTypedElement, but we
2649  // might want to change that at some point.
2650  if (external_array_type == kExternalUint8ClampedArray) {
2651  value = graph()->NewNode(simplified()->NumberToUint8Clamped(), value);
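          // NumberToUint8Clamped clamps {value} into the [0, 255] range expected
          // by Uint8ClampedArray stores.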
2652  }
2653 
2654  // Check if we can skip the out-of-bounds store.
2655  if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2656  Node* check =
2657  graph()->NewNode(simplified()->NumberLessThan(), index, length);
2658  Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
2659  check, control);
2660 
2661  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
2662  Node* etrue = effect;
2663  {
2664  // Perform the actual store.
2665  etrue = graph()->NewNode(
2666  simplified()->StoreTypedElement(external_array_type), buffer,
2667  base_pointer, external_pointer, index, value, etrue, if_true);
2668  }
2669 
2670  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
2671  Node* efalse = effect;
2672  {
2673  // Just ignore the out-of-bounds write.
2674  }
2675 
2676  control = graph()->NewNode(common()->Merge(2), if_true, if_false);
2677  effect =
2678  graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
2679  } else {
2680  // Perform the actual store.
2681  effect = graph()->NewNode(
2682  simplified()->StoreTypedElement(external_array_type), buffer,
2683  base_pointer, external_pointer, index, value, effect, control);
2684  }
2685  break;
2686  }
2687  }
2688  } else {
2689  // Load the elements for the {receiver}.
2690  Node* elements = effect = graph()->NewNode(
2691  simplified()->LoadField(AccessBuilder::ForJSObjectElements()), receiver,
2692  effect, control);
2693 
2694  // Don't try to store to a copy-on-write backing store (unless supported by
2695  // the store mode).
2696  if (access_mode == AccessMode::kStore &&
2697  IsSmiOrObjectElementsKind(elements_kind) &&
2698  !IsCOWHandlingStoreMode(store_mode)) {
2699  effect = graph()->NewNode(
2700  simplified()->CheckMaps(
2701  CheckMapsFlag::kNone,
2702  ZoneHandleSet<Map>(factory()->fixed_array_map())),
2703  elements, effect, control);
2704  }
2705 
2706  // Check if the {receiver} is a JSArray.
2707  bool receiver_is_jsarray = HasOnlyJSArrayMaps(broker(), receiver_maps);
2708 
2709  // Load the length of the {receiver}.
2710  Node* length = effect =
2711  receiver_is_jsarray
2712  ? graph()->NewNode(
2713  simplified()->LoadField(
2714  AccessBuilder::ForJSArrayLength(elements_kind)),
2715  receiver, effect, control)
2716  : graph()->NewNode(
2717  simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
2718  elements, effect, control);
2719 
2720  // Check if we might need to grow the {elements} backing store.
2721  if (IsGrowStoreMode(store_mode)) {
2722  // For growing stores we validate the {index} below.
2723  DCHECK(access_mode == AccessMode::kStore ||
2724  access_mode == AccessMode::kStoreInLiteral);
2725  } else if (load_mode == LOAD_IGNORE_OUT_OF_BOUNDS &&
2726  CanTreatHoleAsUndefined(receiver_maps)) {
2727  // Check that the {index} is a valid array index; we do the actual
2728  // bounds check below and just skip the store if it's out of
2729  // bounds for the {receiver}.
2730  index = effect = graph()->NewNode(
2731  simplified()->CheckBounds(VectorSlotPair()), index,
2732  jsgraph()->Constant(Smi::kMaxValue), effect, control);
2733  } else {
2734  // Check that the {index} is in the valid range for the {receiver}.
2735  index = effect =
2736  graph()->NewNode(simplified()->CheckBounds(VectorSlotPair()), index,
2737  length, effect, control);
2738  }
2739 
2740  // Compute the element access.
2741  Type element_type = Type::NonInternal();
2742  MachineType element_machine_type = MachineType::AnyTagged();
2743  if (IsDoubleElementsKind(elements_kind)) {
2744  element_type = Type::Number();
2745  element_machine_type = MachineType::Float64();
2746  } else if (IsSmiElementsKind(elements_kind)) {
2747  element_type = Type::SignedSmall();
2748  element_machine_type = MachineType::TaggedSigned();
2749  }
2750  ElementAccess element_access = {
2751  kTaggedBase, FixedArray::kHeaderSize,
2752  element_type, element_machine_type,
2753  kFullWriteBarrier, LoadSensitivity::kCritical};
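    // LoadSensitivity::kCritical requests index poisoning for these element
    // accesses when V8's untrusted-code (Spectre) mitigations are enabled.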
2754 
2755  // Access the actual element.
2756  if (access_mode == AccessMode::kLoad) {
2757  // Compute the real element access type, which includes the hole in case
2758  // of holey backing stores.
2759  if (IsHoleyElementsKind(elements_kind)) {
2760  element_access.type =
2761  Type::Union(element_type, Type::Hole(), graph()->zone());
2762  }
2763  if (elements_kind == HOLEY_ELEMENTS ||
2764  elements_kind == HOLEY_SMI_ELEMENTS) {
2765  element_access.machine_type = MachineType::AnyTagged();
2766  }
2767 
2768  // Check if we can return undefined for out-of-bounds loads.
2769  if (load_mode == LOAD_IGNORE_OUT_OF_BOUNDS &&
2770  CanTreatHoleAsUndefined(receiver_maps)) {
2771  Node* check =
2772  graph()->NewNode(simplified()->NumberLessThan(), index, length);
2773  Node* branch = graph()->NewNode(
2774  common()->Branch(BranchHint::kTrue,
2775  IsSafetyCheck::kCriticalSafetyCheck),
2776  check, control);
2777 
2778  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
2779  Node* etrue = effect;
2780  Node* vtrue;
2781  {
2782  // Perform the actual load.
2783  vtrue = etrue =
2784  graph()->NewNode(simplified()->LoadElement(element_access),
2785  elements, index, etrue, if_true);
2786 
2787  // Handle loading from holey backing stores correctly, by either
2788  // mapping the hole to undefined if possible, or deoptimizing
2789  // otherwise.
2790  if (elements_kind == HOLEY_ELEMENTS ||
2791  elements_kind == HOLEY_SMI_ELEMENTS) {
2792  // Turn the hole into undefined.
2793  vtrue = graph()->NewNode(
2794  simplified()->ConvertTaggedHoleToUndefined(), vtrue);
2795  } else if (elements_kind == HOLEY_DOUBLE_ELEMENTS) {
2796  // Return the signaling NaN hole directly if all uses are
2797  // truncating.
2798  vtrue = etrue = graph()->NewNode(
2799  simplified()->CheckFloat64Hole(
2800  CheckFloat64HoleMode::kAllowReturnHole, VectorSlotPair()),
2801  vtrue, etrue, if_true);
2802  }
2803  }
2804 
2805  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
2806  Node* efalse = effect;
2807  Node* vfalse;
2808  {
2809  // Materialize undefined for out-of-bounds loads.
2810  vfalse = jsgraph()->UndefinedConstant();
2811  }
2812 
2813  control = graph()->NewNode(common()->Merge(2), if_true, if_false);
2814  effect =
2815  graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
2816  value =
2817  graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
2818  vtrue, vfalse, control);
2819  } else {
2820  // Perform the actual load.
2821  value = effect =
2822  graph()->NewNode(simplified()->LoadElement(element_access),
2823  elements, index, effect, control);
2824 
2825  // Handle loading from holey backing stores correctly, by either mapping
2826  // the hole to undefined if possible, or deoptimizing otherwise.
2827  if (elements_kind == HOLEY_ELEMENTS ||
2828  elements_kind == HOLEY_SMI_ELEMENTS) {
2829  // Check if we are allowed to turn the hole into undefined.
2830  if (CanTreatHoleAsUndefined(receiver_maps)) {
2831  // Turn the hole into undefined.
2832  value = graph()->NewNode(
2833  simplified()->ConvertTaggedHoleToUndefined(), value);
2834  } else {
2835  // Bailout if we see the hole.
2836  value = effect = graph()->NewNode(
2837  simplified()->CheckNotTaggedHole(), value, effect, control);
2838  }
2839  } else if (elements_kind == HOLEY_DOUBLE_ELEMENTS) {
2840  // Perform the hole check on the result.
2841  CheckFloat64HoleMode mode = CheckFloat64HoleMode::kNeverReturnHole;
2842  // Check if we are allowed to return the hole directly.
2843  if (CanTreatHoleAsUndefined(receiver_maps)) {
2844  // Return the signaling NaN hole directly if all uses are
2845  // truncating.
2846  mode = CheckFloat64HoleMode::kAllowReturnHole;
2847  }
2848  value = effect = graph()->NewNode(
2849  simplified()->CheckFloat64Hole(mode, VectorSlotPair()), value,
2850  effect, control);
2851  }
2852  }
2853  } else {
2854  DCHECK(access_mode == AccessMode::kStore ||
2855  access_mode == AccessMode::kStoreInLiteral);
2856  if (IsSmiElementsKind(elements_kind)) {
2857  value = effect = graph()->NewNode(
2858  simplified()->CheckSmi(VectorSlotPair()), value, effect, control);
2859  } else if (IsDoubleElementsKind(elements_kind)) {
2860  value = effect =
2861  graph()->NewNode(simplified()->CheckNumber(VectorSlotPair()), value,
2862  effect, control);
2863  // Make sure we do not store signalling NaNs into double arrays.
2864  value = graph()->NewNode(simplified()->NumberSilenceNaN(), value);
2865  }
2866 
2867  // Ensure that copy-on-write backing store is writable.
2868  if (IsSmiOrObjectElementsKind(elements_kind) &&
2869  store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
2870  elements = effect =
2871  graph()->NewNode(simplified()->EnsureWritableFastElements(),
2872  receiver, elements, effect, control);
2873  } else if (IsGrowStoreMode(store_mode)) {
2874  // Determine the length of the {elements} backing store.
2875  Node* elements_length = effect = graph()->NewNode(
2876  simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
2877  elements, effect, control);
2878 
2879  // Validate the {index} depending on holeyness:
2880  //
2881  // For HOLEY_*_ELEMENTS the {index} must not exceed the {elements}
2882  // backing store capacity plus the maximum allowed gap, as otherwise
2883  // the (potential) backing store growth would normalize and thus
2884  // the elements kind of the {receiver} would change to slow mode.
2885  //
2886  // For PACKED_*_ELEMENTS the {index} must be within the range
2887  // [0,length+1[ to be valid. In case {index} equals {length},
2888  // the {receiver} will be extended, but kept packed.
2889  Node* limit =
2890  IsHoleyElementsKind(elements_kind)
2891  ? graph()->NewNode(simplified()->NumberAdd(), elements_length,
2892  jsgraph()->Constant(JSObject::kMaxGap))
2893  : graph()->NewNode(simplified()->NumberAdd(), length,
2894  jsgraph()->OneConstant());
2895  index = effect =
2896  graph()->NewNode(simplified()->CheckBounds(VectorSlotPair()), index,
2897  limit, effect, control);
2898 
2899  // Grow {elements} backing store if necessary.
2900  GrowFastElementsMode mode =
2901  IsDoubleElementsKind(elements_kind)
2902  ? GrowFastElementsMode::kDoubleElements
2903  : GrowFastElementsMode::kSmiOrObjectElements;
2904  elements = effect = graph()->NewNode(
2905  simplified()->MaybeGrowFastElements(mode, VectorSlotPair()),
2906  receiver, elements, index, elements_length, effect, control);
2907 
2908  // If we didn't grow {elements}, it might still be COW, in which case we
2909  // copy it now.
2910  if (IsSmiOrObjectElementsKind(elements_kind) &&
2911  store_mode == STORE_AND_GROW_NO_TRANSITION_HANDLE_COW) {
2912  elements = effect =
2913  graph()->NewNode(simplified()->EnsureWritableFastElements(),
2914  receiver, elements, effect, control);
2915  }
2916 
2917  // Also update the "length" property if {receiver} is a JSArray.
2918  if (receiver_is_jsarray) {
2919  Node* check =
2920  graph()->NewNode(simplified()->NumberLessThan(), index, length);
2921  Node* branch = graph()->NewNode(common()->Branch(), check, control);
2922 
2923  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
2924  Node* etrue = effect;
2925  {
2926  // We don't need to do anything; the {index} is within
2927  // the valid bounds for the JSArray {receiver}.
2928  }
2929 
2930  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
2931  Node* efalse = effect;
2932  {
2933  // Update the JSArray::length field. Since this is observable,
2934  // there must be no other check after this.
2935  Node* new_length = graph()->NewNode(
2936  simplified()->NumberAdd(), index, jsgraph()->OneConstant());
2937  efalse = graph()->NewNode(
2938  simplified()->StoreField(
2939  AccessBuilder::ForJSArrayLength(elements_kind)),
2940  receiver, new_length, efalse, if_false);
2941  }
2942 
2943  control = graph()->NewNode(common()->Merge(2), if_true, if_false);
2944  effect =
2945  graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
2946  }
2947  }
2948 
2949  // Perform the actual element access.
2950  effect = graph()->NewNode(simplified()->StoreElement(element_access),
2951  elements, index, value, effect, control);
2952  }
2953  }
2954 
2955  return ValueEffectControl(value, effect, control);
2956 }
2957 
2958 Node* JSNativeContextSpecialization::BuildIndexedStringLoad(
2959  Node* receiver, Node* index, Node* length, Node** effect, Node** control,
2960  KeyedAccessLoadMode load_mode) {
2961  if (load_mode == LOAD_IGNORE_OUT_OF_BOUNDS &&
2962  isolate()->IsNoElementsProtectorIntact()) {
2963  dependencies()->DependOnProtector(
2964  PropertyCellRef(broker(), factory()->no_elements_protector()));
2965 
2966  // Ensure that the {index} is within the valid String length range.
2967  index = *effect = graph()->NewNode(
2968  simplified()->CheckBounds(VectorSlotPair()), index,
2969  jsgraph()->Constant(String::kMaxLength), *effect, *control);
2970 
2971  // Load the single character string from {receiver} or yield
2972  // undefined if the {index} is not within the valid bounds.
2973  Node* check =
2974  graph()->NewNode(simplified()->NumberLessThan(), index, length);
2975  Node* branch =
2976  graph()->NewNode(common()->Branch(BranchHint::kTrue,
2977  IsSafetyCheck::kCriticalSafetyCheck),
2978  check, *control);
2979 
2980  Node* masked_index = graph()->NewNode(simplified()->PoisonIndex(), index);
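  // PoisonIndex masks {index} with the speculation poison, so that a
  // mispredicted bounds check cannot be exploited to read out of bounds.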
2981 
2982  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
2983  Node* etrue;
2984  Node* vtrue = etrue =
2985  graph()->NewNode(simplified()->StringCharCodeAt(), receiver,
2986  masked_index, *effect, if_true);
2987  vtrue = graph()->NewNode(simplified()->StringFromSingleCharCode(), vtrue);
2988 
2989  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
2990  Node* vfalse = jsgraph()->UndefinedConstant();
2991 
2992  *control = graph()->NewNode(common()->Merge(2), if_true, if_false);
2993  *effect =
2994  graph()->NewNode(common()->EffectPhi(2), etrue, *effect, *control);
2995  return graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
2996  vtrue, vfalse, *control);
2997  } else {
2998  // Ensure that {index} is less than {receiver} length.
2999  index = *effect =
3000  graph()->NewNode(simplified()->CheckBounds(VectorSlotPair()), index,
3001  length, *effect, *control);
3002 
3003  Node* masked_index = graph()->NewNode(simplified()->PoisonIndex(), index);
3004 
3005  // Return the character from the {receiver} as a single-character string.
3006  Node* value = *effect =
3007  graph()->NewNode(simplified()->StringCharCodeAt(), receiver,
3008  masked_index, *effect, *control);
3009  value = graph()->NewNode(simplified()->StringFromSingleCharCode(), value);
3010  return value;
3011  }
3012 }
3013 
3014 Node* JSNativeContextSpecialization::BuildExtendPropertiesBackingStore(
3015  Handle<Map> map, Node* properties, Node* effect, Node* control) {
3016  // TODO(bmeurer/jkummerow): Property deletions can undo map transitions
3017  // while keeping the backing store around, meaning that even though the
3018  // map might believe that objects have no unused property fields, there
3019  // might actually be some. It would be nice to not create a new backing
3020  // store in that case (i.e. when properties->length() >= new_length).
3021  // However, introducing branches and Phi nodes here would make it more
3022  // difficult for escape analysis to get rid of the backing stores used
3023  // for intermediate states of chains of property additions. That makes
3024  // it unclear what the best approach is here.
3025  DCHECK_EQ(0, map->UnusedPropertyFields());
3026  // Compute the length of the old {properties} and the new properties.
3027  int length = map->NextFreePropertyIndex() - map->GetInObjectProperties();
3028  int new_length = length + JSObject::kFieldsAdded;
3029  // Collect the field values from the {properties}.
3030  ZoneVector<Node*> values(zone());
3031  values.reserve(new_length);
3032  for (int i = 0; i < length; ++i) {
3033  Node* value = effect = graph()->NewNode(
3034  simplified()->LoadField(AccessBuilder::ForFixedArraySlot(i)),
3035  properties, effect, control);
3036  values.push_back(value);
3037  }
3038  // Initialize the new fields to undefined.
3039  for (int i = 0; i < JSObject::kFieldsAdded; ++i) {
3040  values.push_back(jsgraph()->UndefinedConstant());
3041  }
3042 
3043  // Compute new length and hash.
3044  Node* hash;
3045  if (length == 0) {
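    // With no out-of-object properties yet, {properties} is either the identity
    // hash stored as a Smi or the empty fixed array; pick the hash if present,
    // otherwise the no-hash sentinel.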
3046  hash = graph()->NewNode(
3047  common()->Select(MachineRepresentation::kTaggedSigned),
3048  graph()->NewNode(simplified()->ObjectIsSmi(), properties), properties,
3049  jsgraph()->SmiConstant(PropertyArray::kNoHashSentinel));
3050  hash = effect = graph()->NewNode(common()->TypeGuard(Type::SignedSmall()),
3051  hash, effect, control);
3052  hash =
3053  graph()->NewNode(simplified()->NumberShiftLeft(), hash,
3054  jsgraph()->Constant(PropertyArray::HashField::kShift));
3055  } else {
3056  hash = effect = graph()->NewNode(
3057  simplified()->LoadField(AccessBuilder::ForPropertyArrayLengthAndHash()),
3058  properties, effect, control);
3059  hash =
3060  graph()->NewNode(simplified()->NumberBitwiseAnd(), hash,
3061  jsgraph()->Constant(PropertyArray::HashField::kMask));
3062  }
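  // PropertyArray keeps the length in the low bits and the hash in the upper
  // bits of a single Smi-encoded field, so the new length can simply be OR-ed
  // into the preserved hash bits below.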
3063  Node* new_length_and_hash = graph()->NewNode(
3064  simplified()->NumberBitwiseOr(), jsgraph()->Constant(new_length), hash);
3065  // TODO(jarin): Fix the typer to infer a tighter bound for NumberBitwiseOr.
3066  new_length_and_hash = effect =
3067  graph()->NewNode(common()->TypeGuard(Type::SignedSmall()),
3068  new_length_and_hash, effect, control);
3069 
3070  // Allocate and initialize the new properties.
3071  AllocationBuilder a(jsgraph(), effect, control);
3072  a.Allocate(PropertyArray::SizeFor(new_length), NOT_TENURED,
3073  Type::OtherInternal());
3074  a.Store(AccessBuilder::ForMap(), jsgraph()->PropertyArrayMapConstant());
3075  a.Store(AccessBuilder::ForPropertyArrayLengthAndHash(), new_length_and_hash);
3076  for (int i = 0; i < new_length; ++i) {
3077  a.Store(AccessBuilder::ForFixedArraySlot(i), values[i]);
3078  }
3079  return a.Finish();
3080 }
3081 
3082 Node* JSNativeContextSpecialization::BuildCheckEqualsName(Handle<Name> name,
3083  Node* value,
3084  Node* effect,
3085  Node* control) {
3086  DCHECK(name->IsUniqueName());
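  // Unique names (symbols and internalized strings) can be compared by
  // identity, so a single equality check against the expected {name} suffices.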
3087  Operator const* const op =
3088  name->IsSymbol() ? simplified()->CheckEqualsSymbol()
3089  : simplified()->CheckEqualsInternalizedString();
3090  return graph()->NewNode(op, jsgraph()->HeapConstant(name), value, effect,
3091  control);
3092 }
3093 
3094 bool JSNativeContextSpecialization::CanTreatHoleAsUndefined(
3095  MapHandles const& receiver_maps) {
3096  // Check if all {receiver_maps} have one of the initial Array.prototype or
3097  // Object.prototype objects as their prototype (in any of the current
3098  // native contexts, as the global Array protector works isolate-wide).
3099  for (Handle<Map> map : receiver_maps) {
3100  MapRef receiver_map(broker(), map);
3101  // TODO(neis): Remove SerializePrototype call once brokerization is
3102  // complete.
3103  receiver_map.SerializePrototype();
3104  ObjectRef receiver_prototype = receiver_map.prototype();
3105  if (!receiver_prototype.IsJSObject() ||
3106  !broker()->IsArrayOrObjectPrototype(receiver_prototype.AsJSObject())) {
3107  return false;
3108  }
3109  }
3110 
3111  // Check if the array prototype chain is intact.
3112  if (!isolate()->IsNoElementsProtectorIntact()) return false;
3113 
3114  dependencies()->DependOnProtector(
3115  PropertyCellRef(broker(), factory()->no_elements_protector()));
3116  return true;
3117 }
3118 
3119 bool JSNativeContextSpecialization::ExtractReceiverMaps(
3120  Node* receiver, Node* effect, FeedbackNexus const& nexus,
3121  MapHandles* receiver_maps) {
3122  DCHECK_EQ(0, receiver_maps->size());
3123  if (nexus.IsUninitialized()) return true;
3124 
3125  // See if we can infer a concrete type for the {receiver}. Solely relying on
3126  // the inference is not safe for keyed stores, because we would potentially
3127  // miss out on transitions that need to be performed.
3128  {
3129  FeedbackSlotKind kind = nexus.kind();
3130  bool use_inference =
3131  !IsKeyedStoreICKind(kind) && !IsStoreInArrayLiteralICKind(kind);
3132  if (use_inference && InferReceiverMaps(receiver, effect, receiver_maps)) {
3133  // We can assume that {receiver} still has the inferred {receiver_maps}.
3134  return true;
3135  }
3136  }
3137 
3138  // Try to extract some maps from the {nexus}.
3139  if (nexus.ExtractMaps(receiver_maps) != 0) {
3140  // Try to filter impossible candidates based on inferred root map.
3141  Handle<Map> receiver_map;
3142  if (InferReceiverRootMap(receiver).ToHandle(&receiver_map)) {
3143  DCHECK(!receiver_map->is_abandoned_prototype_map());
3144  Isolate* isolate = this->isolate();
3145  receiver_maps->erase(
3146  std::remove_if(receiver_maps->begin(), receiver_maps->end(),
3147  [receiver_map, isolate](const Handle<Map>& map) {
3148  return map->is_abandoned_prototype_map() ||
3149  map->FindRootMap(isolate) != *receiver_map;
3150  }),
3151  receiver_maps->end());
3152  }
3153  return true;
3154  }
3155 
3156  return false;
3157 }
3158 
3159 bool JSNativeContextSpecialization::InferReceiverMaps(
3160  Node* receiver, Node* effect, MapHandles* receiver_maps) {
3161  ZoneHandleSet<Map> maps;
3162  NodeProperties::InferReceiverMapsResult result =
3163  NodeProperties::InferReceiverMaps(broker(), receiver, effect, &maps);
3164  if (result == NodeProperties::kReliableReceiverMaps) {
3165  for (size_t i = 0; i < maps.size(); ++i) {
3166  receiver_maps->push_back(maps[i]);
3167  }
3168  return true;
3169  } else if (result == NodeProperties::kUnreliableReceiverMaps) {
3170  // For untrusted receiver maps, we can still use the information
3171  // if the maps are stable.
3172  for (size_t i = 0; i < maps.size(); ++i) {
3173  MapRef map(broker(), maps[i]);
3174  if (!map.is_stable()) return false;
3175  }
3176  for (size_t i = 0; i < maps.size(); ++i) {
3177  receiver_maps->push_back(maps[i]);
3178  }
3179  return true;
3180  }
3181  return false;
3182 }
3183 
3184 MaybeHandle<Map> JSNativeContextSpecialization::InferReceiverRootMap(
3185  Node* receiver) {
3186  HeapObjectMatcher m(receiver);
3187  if (m.HasValue()) {
3188  return handle(m.Value()->map()->FindRootMap(isolate()), isolate());
3189  } else if (m.IsJSCreate()) {
3190  HeapObjectMatcher mtarget(m.InputAt(0));
3191  HeapObjectMatcher mnewtarget(m.InputAt(1));
3192  if (mtarget.HasValue() && mnewtarget.HasValue()) {
3193  Handle<JSFunction> constructor =
3194  Handle<JSFunction>::cast(mtarget.Value());
3195  if (constructor->has_initial_map()) {
3196  Handle<Map> initial_map(constructor->initial_map(), isolate());
3197  if (initial_map->constructor_or_backpointer() == *mnewtarget.Value()) {
3198  DCHECK_EQ(*initial_map, initial_map->FindRootMap(isolate()));
3199  return initial_map;
3200  }
3201  }
3202  }
3203  }
3204  return MaybeHandle<Map>();
3205 }
3206 
3207 Graph* JSNativeContextSpecialization::graph() const {
3208  return jsgraph()->graph();
3209 }
3210 
3211 Isolate* JSNativeContextSpecialization::isolate() const {
3212  return jsgraph()->isolate();
3213 }
3214 
3215 Factory* JSNativeContextSpecialization::factory() const {
3216  return isolate()->factory();
3217 }
3218 
3219 CommonOperatorBuilder* JSNativeContextSpecialization::common() const {
3220  return jsgraph()->common();
3221 }
3222 
3223 JSOperatorBuilder* JSNativeContextSpecialization::javascript() const {
3224  return jsgraph()->javascript();
3225 }
3226 
3227 SimplifiedOperatorBuilder* JSNativeContextSpecialization::simplified() const {
3228  return jsgraph()->simplified();
3229 }
3230 
3231 #undef V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP
3232 
3233 } // namespace compiler
3234 } // namespace internal
3235 } // namespace v8