V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
code-stub-assembler.cc
1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/code-stub-assembler.h"
6 
7 #include "src/code-factory.h"
8 #include "src/counters.h"
9 #include "src/frames-inl.h"
10 #include "src/frames.h"
11 #include "src/objects/api-callbacks.h"
12 #include "src/objects/descriptor-array.h"
13 #include "src/objects/ordered-hash-table-inl.h"
14 #include "src/wasm/wasm-objects.h"
15 
16 namespace v8 {
17 namespace internal {
18 
19 using compiler::Node;
20 template <class T>
21 using TNode = compiler::TNode<T>;
22 template <class T>
23 using SloppyTNode = compiler::SloppyTNode<T>;
24 
25 CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
26  : compiler::CodeAssembler(state), BaseBuiltinsFromDSLAssembler(state) {
27  if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
28  HandleBreakOnNode();
29  }
30 }
31 
32 void CodeStubAssembler::HandleBreakOnNode() {
33  // FLAG_csa_trap_on_node should be of the form "STUB,NODE", where STUB is a
34  // string specifying the name of a stub and NODE is a number specifying the node id.
35  const char* name = state()->name();
36  size_t name_length = strlen(name);
37  if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
38  // Different name.
39  return;
40  }
41  size_t option_length = strlen(FLAG_csa_trap_on_node);
42  if (option_length < name_length + 2 ||
43  FLAG_csa_trap_on_node[name_length] != ',') {
44  // Option is too short.
45  return;
46  }
47  const char* start = &FLAG_csa_trap_on_node[name_length + 1];
48  char* end;
49  int node_id = static_cast<int>(strtol(start, &end, 10));
50  if (start == end) {
51  // Bad node id.
52  return;
53  }
54  BreakOnNode(node_id);
55 }
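
// Editor's note (illustrative sketch, not part of the original source): with
// a hypothetical invocation --csa-trap-on-node="KeyedLoadIC,42", the parser
// above matches the stub name "KeyedLoadIC", requires the ',' separator,
// reads "42" via strtol, and calls BreakOnNode(42), which makes the assembler
// emit a debugger trap when node #42 of that stub is created.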
56 
57 void CodeStubAssembler::Assert(const BranchGenerator& branch,
58  const char* message, const char* file, int line,
59  Node* extra_node1, const char* extra_node1_name,
60  Node* extra_node2, const char* extra_node2_name,
61  Node* extra_node3, const char* extra_node3_name,
62  Node* extra_node4, const char* extra_node4_name,
63  Node* extra_node5,
64  const char* extra_node5_name) {
65 #if defined(DEBUG)
66  if (FLAG_debug_code) {
67  Check(branch, message, file, line, extra_node1, extra_node1_name,
68  extra_node2, extra_node2_name, extra_node3, extra_node3_name,
69  extra_node4, extra_node4_name, extra_node5, extra_node5_name);
70  }
71 #endif
72 }
73 
74 void CodeStubAssembler::Assert(const NodeGenerator& condition_body,
75  const char* message, const char* file, int line,
76  Node* extra_node1, const char* extra_node1_name,
77  Node* extra_node2, const char* extra_node2_name,
78  Node* extra_node3, const char* extra_node3_name,
79  Node* extra_node4, const char* extra_node4_name,
80  Node* extra_node5,
81  const char* extra_node5_name) {
82 #if defined(DEBUG)
83  if (FLAG_debug_code) {
84  Check(condition_body, message, file, line, extra_node1, extra_node1_name,
85  extra_node2, extra_node2_name, extra_node3, extra_node3_name,
86  extra_node4, extra_node4_name, extra_node5, extra_node5_name);
87  }
88 #endif
89 }
90 
91 #ifdef DEBUG
92 namespace {
93 void MaybePrintNodeWithName(CodeStubAssembler* csa, Node* node,
94  const char* node_name) {
95  if (node != nullptr) {
96  csa->CallRuntime(Runtime::kPrintWithNameForAssert, csa->SmiConstant(0),
97  csa->StringConstant(node_name), node);
98  }
99 }
100 } // namespace
101 #endif
102 
103 void CodeStubAssembler::Check(const BranchGenerator& branch,
104  const char* message, const char* file, int line,
105  Node* extra_node1, const char* extra_node1_name,
106  Node* extra_node2, const char* extra_node2_name,
107  Node* extra_node3, const char* extra_node3_name,
108  Node* extra_node4, const char* extra_node4_name,
109  Node* extra_node5, const char* extra_node5_name) {
110  Label ok(this);
111  Label not_ok(this, Label::kDeferred);
112  if (message != nullptr && FLAG_code_comments) {
113  Comment("[ Assert: %s", message);
114  } else {
115  Comment("[ Assert");
116  }
117  branch(&ok, &not_ok);
118 
119  BIND(&not_ok);
120  FailAssert(message, file, line, extra_node1, extra_node1_name, extra_node2,
121  extra_node2_name, extra_node3, extra_node3_name, extra_node4,
122  extra_node4_name, extra_node5, extra_node5_name);
123 
124  BIND(&ok);
125  Comment("] Assert");
126 }
127 
128 void CodeStubAssembler::Check(const NodeGenerator& condition_body,
129  const char* message, const char* file, int line,
130  Node* extra_node1, const char* extra_node1_name,
131  Node* extra_node2, const char* extra_node2_name,
132  Node* extra_node3, const char* extra_node3_name,
133  Node* extra_node4, const char* extra_node4_name,
134  Node* extra_node5, const char* extra_node5_name) {
135  BranchGenerator branch = [=](Label* ok, Label* not_ok) {
136  Node* condition = condition_body();
137  DCHECK_NOT_NULL(condition);
138  Branch(condition, ok, not_ok);
139  };
140 
141  Check(branch, message, file, line, extra_node1, extra_node1_name, extra_node2,
142  extra_node2_name, extra_node3, extra_node3_name, extra_node4,
143  extra_node4_name, extra_node5, extra_node5_name);
144 }
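
// Editor's note (editorial): Assert (above) only forwards to Check under
// #if defined(DEBUG) and the FLAG_debug_code runtime flag, so assertion code
// is emitted solely into debug builds; Check itself always generates the
// branch plus a deferred FailAssert path that prints the message and any
// extra nodes before aborting.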
145 
146 void CodeStubAssembler::FastCheck(TNode<BoolT> condition) {
147  Label ok(this);
148  GotoIf(condition, &ok);
149  DebugBreak();
150  Goto(&ok);
151  BIND(&ok);
152 }
153 
154 void CodeStubAssembler::FailAssert(
155  const char* message, const char* file, int line, Node* extra_node1,
156  const char* extra_node1_name, Node* extra_node2,
157  const char* extra_node2_name, Node* extra_node3,
158  const char* extra_node3_name, Node* extra_node4,
159  const char* extra_node4_name, Node* extra_node5,
160  const char* extra_node5_name) {
161  DCHECK_NOT_NULL(message);
162  char chars[1024];
163  Vector<char> buffer(chars);
164  if (file != nullptr) {
165  SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
166  } else {
167  SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
168  }
169  Node* message_node = StringConstant(&(buffer[0]));
170 
171 #ifdef DEBUG
172  // Only print the extra nodes in debug builds.
173  MaybePrintNodeWithName(this, extra_node1, extra_node1_name);
174  MaybePrintNodeWithName(this, extra_node2, extra_node2_name);
175  MaybePrintNodeWithName(this, extra_node3, extra_node3_name);
176  MaybePrintNodeWithName(this, extra_node4, extra_node4_name);
177  MaybePrintNodeWithName(this, extra_node5, extra_node5_name);
178 #endif
179 
180  DebugAbort(message_node);
181  Unreachable();
182 }
183 
184 Node* CodeStubAssembler::SelectImpl(TNode<BoolT> condition,
185  const NodeGenerator& true_body,
186  const NodeGenerator& false_body,
187  MachineRepresentation rep) {
188  VARIABLE(value, rep);
189  Label vtrue(this), vfalse(this), end(this);
190  Branch(condition, &vtrue, &vfalse);
191 
192  BIND(&vtrue);
193  {
194  value.Bind(true_body());
195  Goto(&end);
196  }
197  BIND(&vfalse);
198  {
199  value.Bind(false_body());
200  Goto(&end);
201  }
202 
203  BIND(&end);
204  return value.value();
205 }
206 
207 TNode<Int32T> CodeStubAssembler::SelectInt32Constant(
208  SloppyTNode<BoolT> condition, int true_value, int false_value) {
209  return SelectConstant<Int32T>(condition, Int32Constant(true_value),
210  Int32Constant(false_value));
211 }
212 
213 TNode<IntPtrT> CodeStubAssembler::SelectIntPtrConstant(
214  SloppyTNode<BoolT> condition, int true_value, int false_value) {
215  return SelectConstant<IntPtrT>(condition, IntPtrConstant(true_value),
216  IntPtrConstant(false_value));
217 }
218 
219 TNode<Oddball> CodeStubAssembler::SelectBooleanConstant(
220  SloppyTNode<BoolT> condition) {
221  return SelectConstant<Oddball>(condition, TrueConstant(), FalseConstant());
222 }
223 
224 TNode<Smi> CodeStubAssembler::SelectSmiConstant(SloppyTNode<BoolT> condition,
225  Smi true_value,
226  Smi false_value) {
227  return SelectConstant<Smi>(condition, SmiConstant(true_value),
228  SmiConstant(false_value));
229 }
230 
231 TNode<Object> CodeStubAssembler::NoContextConstant() {
232  return SmiConstant(Context::kNoContext);
233 }
234 
235 #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
236  compiler::TNode<std::remove_pointer<std::remove_reference<decltype( \
237  std::declval<Heap>().rootAccessorName())>::type>::type> \
238  CodeStubAssembler::name##Constant() { \
239  return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
240  std::declval<Heap>().rootAccessorName())>::type>::type>( \
241  LoadRoot(RootIndex::k##rootIndexName)); \
242  }
243 HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR);
244 #undef HEAP_CONSTANT_ACCESSOR
245 
246 #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
247  compiler::TNode<std::remove_pointer<std::remove_reference<decltype( \
248  std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type> \
249  CodeStubAssembler::name##Constant() { \
250  return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
251  std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>( \
252  LoadRoot(RootIndex::k##rootIndexName)); \
253  }
254 HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR);
255 #undef HEAP_CONSTANT_ACCESSOR
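
// Editor's note (editorial sketch of the macro expansion; the actual list
// entries live in the HEAP_*_OBJECT_LIST macros, which are defined elsewhere):
// for a hypothetical entry (UndefinedValue, undefined_value, Undefined), the
// ReadOnlyRoots variant above would expand to roughly
//
//   compiler::TNode<Oddball> CodeStubAssembler::UndefinedConstant() {
//     return UncheckedCast<Oddball>(LoadRoot(RootIndex::kUndefinedValue));
//   }
//
// where decltype(std::declval<ReadOnlyRoots>().undefined_value()) is
// Oddball*, and remove_reference/remove_pointer recover Oddball for TNode.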
256 
257 #define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name) \
258  compiler::TNode<BoolT> CodeStubAssembler::Is##name( \
259  SloppyTNode<Object> value) { \
260  return WordEqual(value, name##Constant()); \
261  } \
262  compiler::TNode<BoolT> CodeStubAssembler::IsNot##name( \
263  SloppyTNode<Object> value) { \
264  return WordNotEqual(value, name##Constant()); \
265  }
266 HEAP_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_TEST);
267 #undef HEAP_CONSTANT_TEST
268 
269 Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
270  if (mode == SMI_PARAMETERS) {
271  return SmiConstant(value);
272  } else {
273  DCHECK_EQ(INTPTR_PARAMETERS, mode);
274  return IntPtrConstant(value);
275  }
276 }
277 
278 bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
279  ParameterMode mode) {
280  int32_t constant_test;
281  Smi smi_test;
282  if (mode == INTPTR_PARAMETERS) {
283  if (ToInt32Constant(test, constant_test) && constant_test == 0) {
284  return true;
285  }
286  } else {
287  DCHECK_EQ(mode, SMI_PARAMETERS);
288  if (ToSmiConstant(test, &smi_test) && smi_test->value() == 0) {
289  return true;
290  }
291  }
292  return false;
293 }
294 
295 bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
296  int* value,
297  ParameterMode mode) {
298  int32_t int32_constant;
299  if (mode == INTPTR_PARAMETERS) {
300  if (ToInt32Constant(maybe_constant, int32_constant)) {
301  *value = int32_constant;
302  return true;
303  }
304  } else {
305  DCHECK_EQ(mode, SMI_PARAMETERS);
306  Smi smi_constant;
307  if (ToSmiConstant(maybe_constant, &smi_constant)) {
308  *value = Smi::ToInt(smi_constant);
309  return true;
310  }
311  }
312  return false;
313 }
314 
315 TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
316  TNode<IntPtrT> value) {
317  Comment("IntPtrRoundUpToPowerOfTwo32");
318  CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
319  value = Signed(IntPtrSub(value, IntPtrConstant(1)));
320  for (int i = 1; i <= 16; i *= 2) {
321  value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
322  }
323  return Signed(IntPtrAdd(value, IntPtrConstant(1)));
324 }
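
// Editor's note (worked example, not in the original source): the loop above
// is the classic bit-smearing trick. For value = 37:
//   37 - 1 = 36 = 0b100100
//   OR-ing in right shifts by 1, 2, 4, 8 and 16 smears the highest set bit
//   downward, giving 0b111111 = 63
//   63 + 1 = 64, the next power of two.
// Subtracting 1 first makes exact powers of two map to themselves.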
325 
326 Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
327  if (mode == SMI_PARAMETERS) {
328  return TaggedIsSmi(value);
329  } else {
330  return Int32Constant(1);
331  }
332 }
333 
334 TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
335  // value && !(value & (value - 1))
336  return WordEqual(
337  Select<IntPtrT>(
338  WordEqual(value, IntPtrConstant(0)),
339  [=] { return IntPtrConstant(1); },
340  [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }),
341  IntPtrConstant(0));
342 }
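
// Editor's note (editorial): a power of two has exactly one set bit, so
// value & (value - 1) clears it and yields 0 (e.g. 8 & 7 == 0, but
// 6 & 5 == 4). The Select above substitutes the non-zero witness 1 for
// value == 0, which would otherwise pass the mask test, so the final
// comparison against 0 correctly reports that 0 is not a power of two.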
343 
344 TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
345  Node* one = Float64Constant(1.0);
346  Node* one_half = Float64Constant(0.5);
347 
348  Label return_x(this);
349 
350  // Round up {x} towards Infinity.
351  VARIABLE(var_x, MachineRepresentation::kFloat64, Float64Ceil(x));
352 
353  GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
354  &return_x);
355  var_x.Bind(Float64Sub(var_x.value(), one));
356  Goto(&return_x);
357 
358  BIND(&return_x);
359  return TNode<Float64T>::UncheckedCast(var_x.value());
360 }
361 
362 TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
363  if (IsFloat64RoundUpSupported()) {
364  return Float64RoundUp(x);
365  }
366 
367  Node* one = Float64Constant(1.0);
368  Node* zero = Float64Constant(0.0);
369  Node* two_52 = Float64Constant(4503599627370496.0E0);
370  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
371 
372  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
373  Label return_x(this), return_minus_x(this);
374 
375  // Check if {x} is greater than zero.
376  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
377  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
378  &if_xnotgreaterthanzero);
379 
380  BIND(&if_xgreaterthanzero);
381  {
382  // Just return {x} unless it's in the range ]0,2^52[.
383  GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
384 
385  // Round positive {x} towards Infinity.
386  var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
387  GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
388  var_x.Bind(Float64Add(var_x.value(), one));
389  Goto(&return_x);
390  }
391 
392  BIND(&if_xnotgreaterthanzero);
393  {
394  // Just return {x} unless it's in the range ]-2^52,0[.
395  GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
396  GotoIfNot(Float64LessThan(x, zero), &return_x);
397 
398  // Round negated {x} towards Infinity and return the result negated.
399  Node* minus_x = Float64Neg(x);
400  var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
401  GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
402  var_x.Bind(Float64Sub(var_x.value(), one));
403  Goto(&return_minus_x);
404  }
405 
406  BIND(&return_minus_x);
407  var_x.Bind(Float64Neg(var_x.value()));
408  Goto(&return_x);
409 
410  BIND(&return_x);
411  return TNode<Float64T>::UncheckedCast(var_x.value());
412 }
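
// Editor's note (why the 2^52 trick above works; editorial): for a positive
// double x < 2^52, (2^52 + x) - 2^52 forces x to an integer because at
// magnitude 2^52 a double's ulp is exactly 1.0. The addition rounds
// ties-to-even, so the intermediate result can land below x (e.g. x = 0.5
// gives 0.0); the Float64LessThan fix-up then adds 1.0 to finish rounding
// toward +Infinity. Values >= 2^52 are already integral and returned as-is.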
413 
414 TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
415  if (IsFloat64RoundDownSupported()) {
416  return Float64RoundDown(x);
417  }
418 
419  Node* one = Float64Constant(1.0);
420  Node* zero = Float64Constant(0.0);
421  Node* two_52 = Float64Constant(4503599627370496.0E0);
422  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
423 
424  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
425  Label return_x(this), return_minus_x(this);
426 
427  // Check if {x} is greater than zero.
428  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
429  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
430  &if_xnotgreaterthanzero);
431 
432  BIND(&if_xgreaterthanzero);
433  {
434  // Just return {x} unless it's in the range ]0,2^52[.
435  GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
436 
437  // Round positive {x} towards -Infinity.
438  var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
439  GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
440  var_x.Bind(Float64Sub(var_x.value(), one));
441  Goto(&return_x);
442  }
443 
444  BIND(&if_xnotgreaterthanzero);
445  {
446  // Just return {x} unless it's in the range ]-2^52,0[.
447  GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
448  GotoIfNot(Float64LessThan(x, zero), &return_x);
449 
450  // Round negated {x} towards -Infinity and return the result negated.
451  Node* minus_x = Float64Neg(x);
452  var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
453  GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
454  var_x.Bind(Float64Add(var_x.value(), one));
455  Goto(&return_minus_x);
456  }
457 
458  BIND(&return_minus_x);
459  var_x.Bind(Float64Neg(var_x.value()));
460  Goto(&return_x);
461 
462  BIND(&return_x);
463  return TNode<Float64T>::UncheckedCast(var_x.value());
464 }
465 
466 TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
467  if (IsFloat64RoundTiesEvenSupported()) {
468  return Float64RoundTiesEven(x);
469  }
470  // See ES#sec-touint8clamp for details.
471  Node* f = Float64Floor(x);
472  Node* f_and_half = Float64Add(f, Float64Constant(0.5));
473 
474  VARIABLE(var_result, MachineRepresentation::kFloat64);
475  Label return_f(this), return_f_plus_one(this), done(this);
476 
477  GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
478  GotoIf(Float64LessThan(x, f_and_half), &return_f);
479  {
480  Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
481  Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
482  &return_f_plus_one);
483  }
484 
485  BIND(&return_f);
486  var_result.Bind(f);
487  Goto(&done);
488 
489  BIND(&return_f_plus_one);
490  var_result.Bind(Float64Add(f, Float64Constant(1.0)));
491  Goto(&done);
492 
493  BIND(&done);
494  return TNode<Float64T>::UncheckedCast(var_result.value());
495 }
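
// Editor's note (worked examples, editorial): with f = floor(x), the code
// above returns f when x < f + 0.5 and f + 1 when x > f + 0.5; on an exact
// tie it consults f mod 2 to pick the even neighbor, so 2.5 rounds to 2.0
// and 3.5 rounds to 4.0, the ties-to-even behavior required by
// ES#sec-touint8clamp.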
496 
497 TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
498  if (IsFloat64RoundTruncateSupported()) {
499  return Float64RoundTruncate(x);
500  }
501 
502  Node* one = Float64Constant(1.0);
503  Node* zero = Float64Constant(0.0);
504  Node* two_52 = Float64Constant(4503599627370496.0E0);
505  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
506 
507  VARIABLE(var_x, MachineRepresentation::kFloat64, x);
508  Label return_x(this), return_minus_x(this);
509 
510  // Check if {x} is greater than 0.
511  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
512  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
513  &if_xnotgreaterthanzero);
514 
515  BIND(&if_xgreaterthanzero);
516  {
517  if (IsFloat64RoundDownSupported()) {
518  var_x.Bind(Float64RoundDown(x));
519  } else {
520  // Just return {x} unless it's in the range ]0,2^52[.
521  GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
522 
523  // Round positive {x} towards -Infinity.
524  var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
525  GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
526  var_x.Bind(Float64Sub(var_x.value(), one));
527  }
528  Goto(&return_x);
529  }
530 
531  BIND(&if_xnotgreaterthanzero);
532  {
533  if (IsFloat64RoundUpSupported()) {
534  var_x.Bind(Float64RoundUp(x));
535  Goto(&return_x);
536  } else {
537  // Just return {x} unless it's in the range ]-2^52,0[.
538  GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
539  GotoIfNot(Float64LessThan(x, zero), &return_x);
540 
541  // Round negated {x} towards -Infinity and return result negated.
542  Node* minus_x = Float64Neg(x);
543  var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
544  GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
545  var_x.Bind(Float64Sub(var_x.value(), one));
546  Goto(&return_minus_x);
547  }
548  }
549 
550  BIND(&return_minus_x);
551  var_x.Bind(Float64Neg(var_x.value()));
552  Goto(&return_x);
553 
554  BIND(&return_x);
555  return TNode<Float64T>::UncheckedCast(var_x.value());
556 }
557 
558 TNode<BoolT> CodeStubAssembler::IsValidSmi(TNode<Smi> smi) {
559  if (SmiValuesAre31Bits() && kPointerSize == kInt64Size) {
560  // Check that the Smi value is properly sign-extended.
561  TNode<IntPtrT> value = Signed(BitcastTaggedToWord(smi));
562  return WordEqual(value, ChangeInt32ToIntPtr(TruncateIntPtrToInt32(value)));
563  }
564  return Int32TrueConstant();
565 }
566 
567 Node* CodeStubAssembler::SmiShiftBitsConstant() {
568  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
569 }
570 
571 TNode<Smi> CodeStubAssembler::SmiFromInt32(SloppyTNode<Int32T> value) {
572  TNode<IntPtrT> value_intptr = ChangeInt32ToIntPtr(value);
573  TNode<Smi> smi =
574  BitcastWordToTaggedSigned(WordShl(value_intptr, SmiShiftBitsConstant()));
575  return smi;
576 }
577 
578 TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
579  intptr_t constant_value;
580  if (ToIntPtrConstant(value, constant_value)) {
581  return (static_cast<uintptr_t>(constant_value) <=
582  static_cast<uintptr_t>(Smi::kMaxValue))
583  ? Int32TrueConstant()
584  : Int32FalseConstant();
585  }
586 
587  return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
588 }
589 
590 TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
591  int32_t constant_value;
592  if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
593  return SmiConstant(constant_value);
594  }
595  TNode<Smi> smi =
596  BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
597  return smi;
598 }
599 
600 TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
601  intptr_t constant_value;
602  if (ToIntPtrConstant(value, constant_value)) {
603  return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
604  }
605  return Signed(WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()));
606 }
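
// Editor's note (editorial summary of the Smi encoding used above): a Smi
// keeps its integer payload in the upper bits of a tagged word, with the low
// tag bit(s) zero. On a 64-bit target with 32-bit Smi values,
// SmiShiftBitsConstant() is 32, so SmiTag(5) yields the word 5 << 32 and
// SmiUntag is the matching arithmetic right shift; on 32-bit targets the
// shift is the single tag bit. The constant-value fast paths above simply
// fold the same arithmetic at compile time.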
607 
608 TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
609  TNode<IntPtrT> result = SmiUntag(value);
610  return TruncateIntPtrToInt32(result);
611 }
612 
613 TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
614  return ChangeInt32ToFloat64(SmiToInt32(value));
615 }
616 
617 TNode<Smi> CodeStubAssembler::SmiMax(TNode<Smi> a, TNode<Smi> b) {
618  return SelectConstant<Smi>(SmiLessThan(a, b), b, a);
619 }
620 
621 TNode<Smi> CodeStubAssembler::SmiMin(TNode<Smi> a, TNode<Smi> b) {
622  return SelectConstant<Smi>(SmiLessThan(a, b), a, b);
623 }
624 
625 TNode<IntPtrT> CodeStubAssembler::TryIntPtrAdd(TNode<IntPtrT> a,
626  TNode<IntPtrT> b,
627  Label* if_overflow) {
628  TNode<PairT<IntPtrT, BoolT>> pair = IntPtrAddWithOverflow(a, b);
629  TNode<BoolT> overflow = Projection<1>(pair);
630  GotoIf(overflow, if_overflow);
631  return Projection<0>(pair);
632 }
633 
634 TNode<Smi> CodeStubAssembler::TrySmiAdd(TNode<Smi> lhs, TNode<Smi> rhs,
635  Label* if_overflow) {
636  if (SmiValuesAre32Bits()) {
637  return BitcastWordToTaggedSigned(TryIntPtrAdd(
638  BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs), if_overflow));
639  } else {
640  DCHECK(SmiValuesAre31Bits());
641  TNode<PairT<Int32T, BoolT>> pair =
642  Int32AddWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
643  TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
644  TNode<BoolT> overflow = Projection<1>(pair);
645  GotoIf(overflow, if_overflow);
646  TNode<Int32T> result = Projection<0>(pair);
647  return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
648  }
649 }
650 
651 TNode<Smi> CodeStubAssembler::TrySmiSub(TNode<Smi> lhs, TNode<Smi> rhs,
652  Label* if_overflow) {
653  if (SmiValuesAre32Bits()) {
654  TNode<PairT<IntPtrT, BoolT>> pair = IntPtrSubWithOverflow(
655  BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs));
656  TNode<BoolT> overflow = Projection<1>(pair);
657  GotoIf(overflow, if_overflow);
658  TNode<IntPtrT> result = Projection<0>(pair);
659  return BitcastWordToTaggedSigned(result);
660  } else {
661  DCHECK(SmiValuesAre31Bits());
662  TNode<PairT<Int32T, BoolT>> pair =
663  Int32SubWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
664  TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
665  TNode<BoolT> overflow = Projection<1>(pair);
666  GotoIf(overflow, if_overflow);
667  TNode<Int32T> result = Projection<0>(pair);
668  return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
669  }
670 }
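
// Editor's note (editorial): both helpers above lean on the Smi encoding.
// With 32-bit payloads the tagged words can be added or subtracted directly
// (the payload occupies the upper half and the lower half is all zero tag
// bits), so IntPtrAddWithOverflow on the bit-cast words suffices. With
// 31-bit Smis the tagged values fit in 32 bits, so they are truncated,
// combined with Int32AddWithOverflow / Int32SubWithOverflow, and
// sign-extended back; in both cases the overflow projection routes control
// to {if_overflow} instead of producing a wrong Smi.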
671 
672 TNode<Number> CodeStubAssembler::NumberMax(SloppyTNode<Number> a,
673  SloppyTNode<Number> b) {
674  // TODO(danno): This could be optimized by specifically handling smi cases.
675  TVARIABLE(Number, result);
676  Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
677  GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
678  GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
679  result = NanConstant();
680  Goto(&done);
681  BIND(&greater_than_equal_a);
682  result = a;
683  Goto(&done);
684  BIND(&greater_than_equal_b);
685  result = b;
686  Goto(&done);
687  BIND(&done);
688  return result.value();
689 }
690 
691 TNode<Number> CodeStubAssembler::NumberMin(SloppyTNode<Number> a,
692  SloppyTNode<Number> b) {
693  // TODO(danno): This could be optimized by specifically handling smi cases.
694  TVARIABLE(Number, result);
695  Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
696  GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
697  GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
698  result = NanConstant();
699  Goto(&done);
700  BIND(&greater_than_equal_a);
701  result = b;
702  Goto(&done);
703  BIND(&greater_than_equal_b);
704  result = a;
705  Goto(&done);
706  BIND(&done);
707  return result.value();
708 }
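
// Editor's note (editorial): in both helpers above, if either operand is
// NaN then neither GotoIfNumberGreaterThanOrEqual branch is taken and
// control falls through to the NanConstant() assignment, matching the
// Math.max/Math.min requirement that a NaN argument poisons the result.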
709 
710 TNode<IntPtrT> CodeStubAssembler::ConvertToRelativeIndex(
711  TNode<Context> context, TNode<Object> index, TNode<IntPtrT> length) {
712  TVARIABLE(IntPtrT, result);
713 
714  TNode<Number> const index_int =
715  ToInteger_Inline(context, index, CodeStubAssembler::kTruncateMinusZero);
716  TNode<IntPtrT> zero = IntPtrConstant(0);
717 
718  Label done(this);
719  Label if_issmi(this), if_isheapnumber(this, Label::kDeferred);
720  Branch(TaggedIsSmi(index_int), &if_issmi, &if_isheapnumber);
721 
722  BIND(&if_issmi);
723  {
724  TNode<Smi> const index_smi = CAST(index_int);
725  result = Select<IntPtrT>(
726  IntPtrLessThan(SmiUntag(index_smi), zero),
727  [=] { return IntPtrMax(IntPtrAdd(length, SmiUntag(index_smi)), zero); },
728  [=] { return IntPtrMin(SmiUntag(index_smi), length); });
729  Goto(&done);
730  }
731 
732  BIND(&if_isheapnumber);
733  {
734  // If {index} is a heap number, it is definitely out of bounds. If it is
735  // negative, {index} = max({length} + {index}, 0) = 0. If it is positive,
736  // set {index} to {length}.
737  TNode<HeapNumber> const index_hn = CAST(index_int);
738  TNode<Float64T> const float_zero = Float64Constant(0.);
739  TNode<Float64T> const index_float = LoadHeapNumberValue(index_hn);
740  result = SelectConstant<IntPtrT>(Float64LessThan(index_float, float_zero),
741  zero, length);
742  Goto(&done);
743  }
744  BIND(&done);
745  return result.value();
746 }
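
// Editor's note (worked examples, editorial): this is the relative-index
// clamping used by methods in the style of Array.prototype.slice. With
// length = 10: index -3 yields max(10 + (-3), 0) = 7, index -15 clamps to 0,
// and index 12 clamps to 10. An index too large in magnitude to be a Smi
// takes the heap-number path and clamps directly to 0 or {length} depending
// on its sign.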
747 
748 TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
749  TVARIABLE(Number, var_result);
750  Label return_result(this, &var_result),
751  return_minuszero(this, Label::kDeferred),
752  return_nan(this, Label::kDeferred);
753 
754  // Untag {a} and {b}.
755  TNode<Int32T> int_a = SmiToInt32(a);
756  TNode<Int32T> int_b = SmiToInt32(b);
757 
758  // Return NaN if {b} is zero.
759  GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);
760 
761  // Check if {a} is non-negative.
762  Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
763  Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
764  &if_aisnegative);
765 
766  BIND(&if_aisnotnegative);
767  {
768  // Fast case, don't need to check any other edge cases.
769  TNode<Int32T> r = Int32Mod(int_a, int_b);
770  var_result = SmiFromInt32(r);
771  Goto(&return_result);
772  }
773 
774  BIND(&if_aisnegative);
775  {
776  if (SmiValuesAre32Bits()) {
777  // Check if {a} is kMinInt and {b} is -1 (only relevant if the
778  // kMinInt is actually representable as a Smi).
779  Label join(this);
780  GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
781  GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
782  Goto(&join);
783  BIND(&join);
784  }
785 
786  // Perform the integer modulus operation.
787  TNode<Int32T> r = Int32Mod(int_a, int_b);
788 
789  // Check if {r} is zero, and if so return -0, because we have to
790  // take the sign of the left hand side {a}, which is negative.
791  GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);
792 
793  // The remainder {r} can be outside the valid Smi range on 32bit
794  // architectures, so we cannot just say SmiFromInt32(r) here.
795  var_result = ChangeInt32ToTagged(r);
796  Goto(&return_result);
797  }
798 
799  BIND(&return_minuszero);
800  var_result = MinusZeroConstant();
801  Goto(&return_result);
802 
803  BIND(&return_nan);
804  var_result = NanConstant();
805  Goto(&return_result);
806 
807  BIND(&return_result);
808  return var_result.value();
809 }
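
// Editor's note (edge cases, editorial): SmiMod returns NaN when {b} is 0,
// and -0 both when a negative {a} divides evenly (e.g. -4 % 2) and in the
// kMinInt % -1 case, where the machine-level Int32Mod cannot be used safely.
// JavaScript's % takes the sign of the left operand, which is why a zero
// remainder with negative {a} must come back as minus zero.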
810 
811 TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
812  TVARIABLE(Number, var_result);
813  VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64);
814  VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64);
815  Label return_result(this, &var_result);
816 
817  // Both {a} and {b} are Smis. Convert them to integers and multiply.
818  Node* lhs32 = SmiToInt32(a);
819  Node* rhs32 = SmiToInt32(b);
820  Node* pair = Int32MulWithOverflow(lhs32, rhs32);
821 
822  Node* overflow = Projection(1, pair);
823 
824  // Check if the multiplication overflowed.
825  Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
826  Branch(overflow, &if_overflow, &if_notoverflow);
827  BIND(&if_notoverflow);
828  {
829  // If the answer is zero, we may need to return -0.0, depending on the
830  // input.
831  Label answer_zero(this), answer_not_zero(this);
832  Node* answer = Projection(0, pair);
833  Node* zero = Int32Constant(0);
834  Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
835  BIND(&answer_not_zero);
836  {
837  var_result = ChangeInt32ToTagged(answer);
838  Goto(&return_result);
839  }
840  BIND(&answer_zero);
841  {
842  Node* or_result = Word32Or(lhs32, rhs32);
843  Label if_should_be_negative_zero(this), if_should_be_zero(this);
844  Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
845  &if_should_be_zero);
846  BIND(&if_should_be_negative_zero);
847  {
848  var_result = MinusZeroConstant();
849  Goto(&return_result);
850  }
851  BIND(&if_should_be_zero);
852  {
853  var_result = SmiConstant(0);
854  Goto(&return_result);
855  }
856  }
857  }
858  BIND(&if_overflow);
859  {
860  var_lhs_float64.Bind(SmiToFloat64(a));
861  var_rhs_float64.Bind(SmiToFloat64(b));
862  Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
863  var_result = AllocateHeapNumberWithValue(value);
864  Goto(&return_result);
865  }
866 
867  BIND(&return_result);
868  return var_result.value();
869 }
870 
871 TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
872  Label* bailout) {
873  // Both {dividend} and {divisor} are Smis. Bail out to floating point
874  // division if {divisor} is zero.
875  GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);
876 
877  // Do floating point division if {dividend} is zero and {divisor} is
878  // negative.
879  Label dividend_is_zero(this), dividend_is_not_zero(this);
880  Branch(WordEqual(dividend, SmiConstant(0)), &dividend_is_zero,
881  &dividend_is_not_zero);
882 
883  BIND(&dividend_is_zero);
884  {
885  GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
886  Goto(&dividend_is_not_zero);
887  }
888  BIND(&dividend_is_not_zero);
889 
890  TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
891  TNode<Int32T> untagged_dividend = SmiToInt32(dividend);
892 
893  // Do floating point division if {dividend} is kMinInt (or kMinInt >> 1
894  // if the Smi size is 31) and {divisor} is -1.
895  Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
896  Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
897  &divisor_is_minus_one, &divisor_is_not_minus_one);
898 
899  BIND(&divisor_is_minus_one);
900  {
901  GotoIf(Word32Equal(
902  untagged_dividend,
903  Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
904  bailout);
905  Goto(&divisor_is_not_minus_one);
906  }
907  BIND(&divisor_is_not_minus_one);
908 
909  TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
910  TNode<Int32T> truncated = Signed(Int32Mul(untagged_result, untagged_divisor));
911 
912  // Do floating point division if the remainder is not 0.
913  GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);
914 
915  return SmiFromInt32(untagged_result);
916 }
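
// Editor's note (editorial): TrySmiDiv succeeds only when the quotient is
// itself a Smi. It jumps to {bailout} (typically a floating point division
// path) for: a zero {divisor} (Infinity or NaN result), 0 divided by a
// negative number (result -0), the kMinInt / -1 overflow case, and any
// division with a non-zero remainder, which is detected above by
// re-multiplying the truncated quotient.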
917 
918 TNode<Smi> CodeStubAssembler::SmiLexicographicCompare(TNode<Smi> x,
919  TNode<Smi> y) {
920  TNode<ExternalReference> smi_lexicographic_compare =
921  ExternalConstant(ExternalReference::smi_lexicographic_compare_function());
922  TNode<ExternalReference> isolate_ptr =
923  ExternalConstant(ExternalReference::isolate_address(isolate()));
924  return CAST(CallCFunction3(MachineType::AnyTagged(), MachineType::Pointer(),
925  MachineType::AnyTagged(), MachineType::AnyTagged(),
926  smi_lexicographic_compare, isolate_ptr, x, y));
927 }
928 
929 TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(
930  SloppyTNode<IntPtrT> value) {
931  if (Is64()) {
932  return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
933  }
934  return ReinterpretCast<Int32T>(value);
935 }
936 
937 TNode<BoolT> CodeStubAssembler::TaggedIsSmi(SloppyTNode<Object> a) {
938  return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
939  IntPtrConstant(0));
940 }
941 
942 TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
943  return WordEqual(
944  WordAnd(BitcastMaybeObjectToWord(a), IntPtrConstant(kSmiTagMask)),
945  IntPtrConstant(0));
946 }
947 
948 TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(SloppyTNode<Object> a) {
949  return WordNotEqual(
950  WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
951  IntPtrConstant(0));
952 }
953 
954 TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(SloppyTNode<Object> a) {
955  return WordEqual(WordAnd(BitcastTaggedToWord(a),
956  IntPtrConstant(kSmiTagMask | kSmiSignMask)),
957  IntPtrConstant(0));
958 }
959 
960 TNode<BoolT> CodeStubAssembler::WordIsWordAligned(SloppyTNode<WordT> word) {
961  return WordEqual(IntPtrConstant(0),
962  WordAnd(word, IntPtrConstant(kPointerSize - 1)));
963 }
964 
965 #if DEBUG
966 void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
967  CodeAssembler::Bind(label, debug_info);
968 }
969 #endif // DEBUG
970 
971 void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
972 
973 TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
974  TNode<FixedDoubleArray> array, TNode<Smi> index, Label* if_hole) {
975  return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
976  SMI_PARAMETERS, if_hole);
977 }
978 
979 TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
980  TNode<FixedDoubleArray> array, TNode<IntPtrT> index, Label* if_hole) {
981  return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
982  INTPTR_PARAMETERS, if_hole);
983 }
984 
985 void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
986  Node* receiver_map, Label* definitely_no_elements,
987  Label* possibly_elements) {
988  CSA_SLOW_ASSERT(this, IsMap(receiver_map));
989  VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
990  Label loop_body(this, &var_map);
991  Node* empty_fixed_array = LoadRoot(RootIndex::kEmptyFixedArray);
992  Node* empty_slow_element_dictionary =
993  LoadRoot(RootIndex::kEmptySlowElementDictionary);
994  Goto(&loop_body);
995 
996  BIND(&loop_body);
997  {
998  Node* map = var_map.value();
999  Node* prototype = LoadMapPrototype(map);
1000  GotoIf(IsNull(prototype), definitely_no_elements);
1001  Node* prototype_map = LoadMap(prototype);
1002  TNode<Int32T> prototype_instance_type = LoadMapInstanceType(prototype_map);
1003 
1004  // Pessimistically assume elements if a Proxy, Special API Object,
1005  // or JSValue wrapper is found on the prototype chain. After this
1006  // instance type check, it's not necessary to check for interceptors or
1007  // access checks.
1008  Label if_custom(this, Label::kDeferred), if_notcustom(this);
1009  Branch(IsCustomElementsReceiverInstanceType(prototype_instance_type),
1010  &if_custom, &if_notcustom);
1011 
1012  BIND(&if_custom);
1013  {
1014  // For string JSValue wrappers we still support the checks as long
1015  // as they wrap the empty string.
1016  GotoIfNot(InstanceTypeEqual(prototype_instance_type, JS_VALUE_TYPE),
1017  possibly_elements);
1018  Node* prototype_value = LoadJSValueValue(prototype);
1019  Branch(IsEmptyString(prototype_value), &if_notcustom, possibly_elements);
1020  }
1021 
1022  BIND(&if_notcustom);
1023  {
1024  Node* prototype_elements = LoadElements(prototype);
1025  var_map.Bind(prototype_map);
1026  GotoIf(WordEqual(prototype_elements, empty_fixed_array), &loop_body);
1027  Branch(WordEqual(prototype_elements, empty_slow_element_dictionary),
1028  &loop_body, possibly_elements);
1029  }
1030  }
1031 }
1032 
1033 void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
1034  Label* if_false) {
1035  GotoIf(TaggedIsSmi(object), if_false);
1036  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1037  Branch(IsJSReceiver(object), if_true, if_false);
1038 }
1039 
1040 void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
1041 #ifdef V8_ENABLE_FORCE_SLOW_PATH
1042  Node* const force_slow_path_addr =
1043  ExternalConstant(ExternalReference::force_slow_path(isolate()));
1044  Node* const force_slow = Load(MachineType::Uint8(), force_slow_path_addr);
1045 
1046  GotoIf(force_slow, if_true);
1047 #endif
1048 }
1049 
1050 void CodeStubAssembler::GotoIfDebugExecutionModeChecksSideEffects(
1051  Label* if_true) {
1052  STATIC_ASSERT(sizeof(DebugInfo::ExecutionMode) >= sizeof(int32_t));
1053 
1054  TNode<ExternalReference> execution_mode_address = ExternalConstant(
1055  ExternalReference::debug_execution_mode_address(isolate()));
1056  TNode<Int32T> execution_mode =
1057  UncheckedCast<Int32T>(Load(MachineType::Int32(), execution_mode_address));
1058 
1059  GotoIf(Word32Equal(execution_mode, Int32Constant(DebugInfo::kSideEffects)),
1060  if_true);
1061 }
1062 
1063 TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
1064  AllocationFlags flags,
1065  TNode<RawPtrT> top_address,
1066  TNode<RawPtrT> limit_address) {
1067  // TODO(jgruber, chromium:848672): Call FatalProcessOutOfMemory if this fails.
1068  {
1069  intptr_t constant_value;
1070  if (ToIntPtrConstant(size_in_bytes, constant_value)) {
1071  CHECK(Internals::IsValidSmi(constant_value));
1072  CHECK_GT(constant_value, 0);
1073  } else {
1074  CSA_CHECK(this, IsValidPositiveSmi(size_in_bytes));
1075  }
1076  }
1077 
1078  TNode<RawPtrT> top =
1079  UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), top_address));
1080  TNode<RawPtrT> limit =
1081  UncheckedCast<RawPtrT>(Load(MachineType::Pointer(), limit_address));
1082 
1083  // If there's not enough space, call the runtime.
1084  TVARIABLE(Object, result);
1085  Label runtime_call(this, Label::kDeferred), no_runtime_call(this), out(this);
1086 
1087  bool needs_double_alignment = flags & kDoubleAlignment;
1088 
1089  if (flags & kAllowLargeObjectAllocation) {
1090  Label next(this);
1091  GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
1092 
1093  TNode<Smi> runtime_flags = SmiConstant(
1094  Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
1095  AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
1096  result = CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
1097  SmiTag(size_in_bytes), runtime_flags);
1098  Goto(&out);
1099 
1100  BIND(&next);
1101  }
1102 
1103  TVARIABLE(IntPtrT, adjusted_size, size_in_bytes);
1104 
1105  if (needs_double_alignment) {
1106  Label next(this);
1107  GotoIfNot(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &next);
1108 
1109  adjusted_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
1110  Goto(&next);
1111 
1112  BIND(&next);
1113  }
1114 
1115  TNode<IntPtrT> new_top =
1116  IntPtrAdd(UncheckedCast<IntPtrT>(top), adjusted_size.value());
1117 
1118  Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
1119  &no_runtime_call);
1120 
1121  BIND(&runtime_call);
1122  {
1123  if (flags & kPretenured) {
1124  TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
1125  AllocateDoubleAlignFlag::encode(needs_double_alignment) |
1126  AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
1127  result = CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
1128  SmiTag(size_in_bytes), runtime_flags);
1129  } else {
1130  result = CallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
1131  SmiTag(size_in_bytes));
1132  }
1133  Goto(&out);
1134  }
1135 
1136  // When there is enough space, return {top} and bump it up.
1137  BIND(&no_runtime_call);
1138  {
1139  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
1140  new_top);
1141 
1142  TVARIABLE(IntPtrT, address, UncheckedCast<IntPtrT>(top));
1143 
1144  if (needs_double_alignment) {
1145  Label next(this);
1146  GotoIf(IntPtrEqual(adjusted_size.value(), size_in_bytes), &next);
1147 
1148  // Store a filler and increase the address by kPointerSize.
1149  StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
1150  LoadRoot(RootIndex::kOnePointerFillerMap));
1151  address = IntPtrAdd(UncheckedCast<IntPtrT>(top), IntPtrConstant(4));
1152  Goto(&next);
1153 
1154  BIND(&next);
1155  }
1156 
1157  result = BitcastWordToTagged(
1158  IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
1159  Goto(&out);
1160  }
1161 
1162  BIND(&out);
1163  return UncheckedCast<HeapObject>(result.value());
1164 }
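
// Editor's note (editorial summary of the bump-pointer scheme above): {top}
// and {limit} delimit the current linear allocation buffer. On the fast path
// the new top is stored back through {top_address} and the old top, tagged
// with kHeapObjectTag, is returned; the runtime is called only when the bump
// would cross {limit}, when the object is large, or when kPretenured is set.
// Double alignment reserves 4 extra bytes and plugs the gap with a
// one-pointer filler map whenever {top} is misaligned.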
1165 
1166 TNode<HeapObject> CodeStubAssembler::AllocateRawUnaligned(
1167  TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1168  TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1169  DCHECK_EQ(flags & kDoubleAlignment, 0);
1170  return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
1171 }
1172 
1173 TNode<HeapObject> CodeStubAssembler::AllocateRawDoubleAligned(
1174  TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1175  TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1176 #if defined(V8_HOST_ARCH_32_BIT)
1177  return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
1178  limit_address);
1179 #elif defined(V8_HOST_ARCH_64_BIT)
1180  // Allocation on a 64-bit machine is naturally double-aligned.
1181  return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
1182  limit_address);
1183 #else
1184 #error Architecture not supported
1185 #endif
1186 }
1187 
1188 TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(
1189  TNode<IntPtrT> size_in_bytes, AllocationFlags flags) {
1190  DCHECK(flags == kNone || flags == kDoubleAlignment);
1191  CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
1192  return Allocate(size_in_bytes, flags);
1193 }
1194 
1195 TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
1196  AllocationFlags flags) {
1197  Comment("Allocate");
1198  bool const new_space = !(flags & kPretenured);
1199  TNode<ExternalReference> top_address = ExternalConstant(
1200  new_space
1201  ? ExternalReference::new_space_allocation_top_address(isolate())
1202  : ExternalReference::old_space_allocation_top_address(isolate()));
1203  DCHECK_EQ(kPointerSize,
1204  ExternalReference::new_space_allocation_limit_address(isolate())
1205  .address() -
1206  ExternalReference::new_space_allocation_top_address(isolate())
1207  .address());
1208  DCHECK_EQ(kPointerSize,
1209  ExternalReference::old_space_allocation_limit_address(isolate())
1210  .address() -
1211  ExternalReference::old_space_allocation_top_address(isolate())
1212  .address());
1213  TNode<IntPtrT> limit_address = IntPtrAdd(
1214  ReinterpretCast<IntPtrT>(top_address), IntPtrConstant(kPointerSize));
1215 
1216  if (flags & kDoubleAlignment) {
1217  return AllocateRawDoubleAligned(size_in_bytes, flags,
1218  ReinterpretCast<RawPtrT>(top_address),
1219  ReinterpretCast<RawPtrT>(limit_address));
1220  } else {
1221  return AllocateRawUnaligned(size_in_bytes, flags,
1222  ReinterpretCast<RawPtrT>(top_address),
1223  ReinterpretCast<RawPtrT>(limit_address));
1224  }
1225 }
1226 
1227 TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
1228  AllocationFlags flags) {
1229  CHECK(flags == kNone || flags == kDoubleAlignment);
1230  DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
1231  return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1232 }
1233 
1234 TNode<HeapObject> CodeStubAssembler::Allocate(int size_in_bytes,
1235  AllocationFlags flags) {
1236  return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1237 }
1238 
1239 TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1240  TNode<IntPtrT> offset) {
1241  return UncheckedCast<HeapObject>(
1242  BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset)));
1243 }
1244 
1245 TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1246  int offset) {
1247  return InnerAllocate(previous, IntPtrConstant(offset));
1248 }
1249 
1250 TNode<BoolT> CodeStubAssembler::IsRegularHeapObjectSize(TNode<IntPtrT> size) {
1251  return UintPtrLessThanOrEqual(size,
1252  IntPtrConstant(kMaxRegularHeapObjectSize));
1253 }
1254 
1255 void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
1256  Label* if_false) {
1257  Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
1258  if_bigint(this, Label::kDeferred);
1259  // Rule out false {value}.
1260  GotoIf(WordEqual(value, FalseConstant()), if_false);
1261 
1262  // Check if {value} is a Smi or a HeapObject.
1263  Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);
1264 
1265  BIND(&if_smi);
1266  {
1267  // The {value} is a Smi, only need to check against zero.
1268  BranchIfSmiEqual(CAST(value), SmiConstant(0), if_false, if_true);
1269  }
1270 
1271  BIND(&if_notsmi);
1272  {
1273  // Check if {value} is the empty string.
1274  GotoIf(IsEmptyString(value), if_false);
1275 
1276  // The {value} is a HeapObject, load its map.
1277  Node* value_map = LoadMap(value);
1278 
1279  // Only null, undefined and document.all have the undetectable bit set,
1280  // so we can return false immediately when that bit is set.
1281  GotoIf(IsUndetectableMap(value_map), if_false);
1282 
1283  // We still need to handle numbers specially, but all other {value}s
1284  // that make it here yield true.
1285  GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
1286  Branch(IsBigInt(value), &if_bigint, if_true);
1287 
1288  BIND(&if_heapnumber);
1289  {
1290  // Load the floating point value of {value}.
1291  Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
1292  MachineType::Float64());
1293 
1294  // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
1295  Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
1296  if_true, if_false);
1297  }
1298 
1299  BIND(&if_bigint);
1300  {
1301  Node* result =
1302  CallRuntime(Runtime::kBigIntToBoolean, NoContextConstant(), value);
1303  CSA_ASSERT(this, IsBoolean(result));
1304  Branch(WordEqual(result, TrueConstant()), if_true, if_false);
1305  }
1306  }
1307 }
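
// Editor's note (editorial): the tests above mirror the ES ToBoolean
// algorithm: false, Smi 0, the empty string, undetectable maps (null,
// undefined and document.all per the comment above), HeapNumbers equal to
// 0.0, -0.0 or NaN, and BigInt zero all reach {if_false}; every other value,
// including the string "0" and empty objects or arrays, reaches {if_true}.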
1308 
1309 Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
1310  Node* frame_pointer = LoadFramePointer();
1311  return Load(rep, frame_pointer, IntPtrConstant(offset));
1312 }
1313 
1314 Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
1315  Node* frame_pointer = LoadParentFramePointer();
1316  return Load(rep, frame_pointer, IntPtrConstant(offset));
1317 }
1318 
1319 TNode<JSFunction> CodeStubAssembler::LoadTargetFromFrame() {
1320  DCHECK(IsJSFunctionCall());
1321  return CAST(LoadFromFrame(StandardFrameConstants::kFunctionOffset,
1322  MachineType::TaggedPointer()));
1323 }
1324 
1325 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
1326  MachineType rep) {
1327  return Load(rep, buffer, IntPtrConstant(offset));
1328 }
1329 
1330 Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1331  int offset, MachineType rep) {
1332  CSA_ASSERT(this, IsStrong(object));
1333  return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
1334 }
1335 
1336 Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1337  SloppyTNode<IntPtrT> offset,
1338  MachineType rep) {
1339  CSA_ASSERT(this, IsStrong(object));
1340  return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
1341 }
1342 
1343 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
1344  SloppyTNode<HeapObject> object, int offset) {
1345  if (SmiValuesAre32Bits()) {
1346 #if V8_TARGET_LITTLE_ENDIAN
1347  offset += kPointerSize / 2;
1348 #endif
1349  return ChangeInt32ToIntPtr(
1350  LoadObjectField(object, offset, MachineType::Int32()));
1351  } else {
1352  return SmiToIntPtr(
1353  LoadObjectField(object, offset, MachineType::AnyTagged()));
1354  }
1355 }
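
// Editor's note (editorial): when Smi payloads are 32 bits wide the tagged
// field is a 64-bit word whose low half is all zero tag bits, so the helper
// above loads just the 32-bit payload half (the high half on little-endian
// targets, hence the kPointerSize / 2 offset bump) and skips the shift that
// SmiUntag would otherwise perform.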
1356 
1357 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
1358  int offset) {
1359  if (SmiValuesAre32Bits()) {
1360 #if V8_TARGET_LITTLE_ENDIAN
1361  offset += kPointerSize / 2;
1362 #endif
1363  return UncheckedCast<Int32T>(
1364  LoadObjectField(object, offset, MachineType::Int32()));
1365  } else {
1366  return SmiToInt32(
1367  LoadObjectField(object, offset, MachineType::AnyTagged()));
1368  }
1369 }
1370 
1371 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
1372  if (SmiValuesAre32Bits()) {
1373 #if V8_TARGET_LITTLE_ENDIAN
1374  index += kPointerSize / 2;
1375 #endif
1376  return ChangeInt32ToIntPtr(
1377  Load(MachineType::Int32(), base, IntPtrConstant(index)));
1378  } else {
1379  return SmiToIntPtr(
1380  Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
1381  }
1382 }
1383 
1384 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32Root(
1385  RootIndex root_index) {
1386  Node* isolate_root =
1387  ExternalConstant(ExternalReference::isolate_root(isolate()));
1388  int offset = IsolateData::root_slot_offset(root_index);
1389  if (SmiValuesAre32Bits()) {
1390 #if V8_TARGET_LITTLE_ENDIAN
1391  offset += kPointerSize / 2;
1392 #endif
1393  return UncheckedCast<Int32T>(
1394  Load(MachineType::Int32(), isolate_root, IntPtrConstant(offset)));
1395  } else {
1396  return SmiToInt32(
1397  Load(MachineType::AnyTagged(), isolate_root, IntPtrConstant(offset)));
1398  }
1399 }
1400 
1401 Node* CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
1402  if (SmiValuesAre32Bits()) {
1403  int zero_offset = offset + kPointerSize / 2;
1404  int payload_offset = offset;
1405 #if V8_TARGET_LITTLE_ENDIAN
1406  std::swap(zero_offset, payload_offset);
1407 #endif
1408  StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
1409  IntPtrConstant(zero_offset), Int32Constant(0));
1410  return StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
1411  IntPtrConstant(payload_offset),
1412  TruncateInt64ToInt32(value));
1413  } else {
1414  return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
1415  IntPtrConstant(offset), SmiTag(value));
1416  }
1417 }
1418 
1419 TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
1420  SloppyTNode<HeapNumber> object) {
1421  return TNode<Float64T>::UncheckedCast(LoadObjectField(
1422  object, HeapNumber::kValueOffset, MachineType::Float64()));
1423 }
1424 
1425 TNode<Map> CodeStubAssembler::LoadMap(SloppyTNode<HeapObject> object) {
1426  return UncheckedCast<Map>(LoadObjectField(object, HeapObject::kMapOffset));
1427 }
1428 
1429 TNode<Int32T> CodeStubAssembler::LoadInstanceType(
1430  SloppyTNode<HeapObject> object) {
1431  return LoadMapInstanceType(LoadMap(object));
1432 }
1433 
1434 TNode<BoolT> CodeStubAssembler::HasInstanceType(SloppyTNode<HeapObject> object,
1435  InstanceType instance_type) {
1436  return InstanceTypeEqual(LoadInstanceType(object), instance_type);
1437 }
1438 
1439 TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
1440  SloppyTNode<HeapObject> object, InstanceType instance_type) {
1441  return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1442 }
1443 
1444 TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
1445  SloppyTNode<HeapObject> any_tagged, InstanceType type) {
1446  /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
1447  TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
1448  return Select<BoolT>(
1449  tagged_is_smi, [=]() { return tagged_is_smi; },
1450  [=]() { return DoesntHaveInstanceType(any_tagged, type); });
1451 }
1452 
1453 TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
1454  SloppyTNode<JSObject> object) {
1455  CSA_SLOW_ASSERT(this, Word32BinaryNot(IsDictionaryMap(LoadMap(object))));
1456  TNode<Object> properties =
1457  LoadObjectField(object, JSObject::kPropertiesOrHashOffset);
1458  return Select<HeapObject>(TaggedIsSmi(properties),
1459  [=] { return EmptyFixedArrayConstant(); },
1460  [=] { return CAST(properties); });
1461 }
1462 
1463 TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
1464  SloppyTNode<JSObject> object) {
1465  CSA_SLOW_ASSERT(this, IsDictionaryMap(LoadMap(object)));
1466  TNode<Object> properties =
1467  LoadObjectField(object, JSObject::kPropertiesOrHashOffset);
1468  return Select<HeapObject>(TaggedIsSmi(properties),
1469  [=] { return EmptyPropertyDictionaryConstant(); },
1470  [=] { return CAST(properties); });
1471 }
1472 
1473 TNode<FixedArrayBase> CodeStubAssembler::LoadElements(
1474  SloppyTNode<JSObject> object) {
1475  return CAST(LoadObjectField(object, JSObject::kElementsOffset));
1476 }
1477 
1478 TNode<Number> CodeStubAssembler::LoadJSArrayLength(SloppyTNode<JSArray> array) {
1479  CSA_ASSERT(this, IsJSArray(array));
1480  return CAST(LoadObjectField(array, JSArray::kLengthOffset));
1481 }
1482 
1483 TNode<Object> CodeStubAssembler::LoadJSArgumentsObjectWithLength(
1484  SloppyTNode<JSArgumentsObjectWithLength> array) {
1485  return LoadObjectField(array, JSArgumentsObjectWithLength::kLengthOffset);
1486 }
1487 
1488 TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(
1489  SloppyTNode<JSArray> array) {
1490  TNode<Object> length = LoadJSArrayLength(array);
1491  CSA_ASSERT(this, IsFastElementsKind(LoadElementsKind(array)));
1492  // JSArray length is always a positive Smi for fast arrays.
1493  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
1494  return UncheckedCast<Smi>(length);
1495 }
1496 
1497 TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
1498  SloppyTNode<FixedArrayBase> array) {
1499  CSA_SLOW_ASSERT(this, IsNotWeakFixedArraySubclass(array));
1500  return CAST(LoadObjectField(array, FixedArrayBase::kLengthOffset));
1501 }
1502 
1503 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
1504  SloppyTNode<FixedArrayBase> array) {
1505  return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
1506 }
1507 
1508 TNode<IntPtrT> CodeStubAssembler::LoadFeedbackVectorLength(
1509  TNode<FeedbackVector> vector) {
1510  return ChangeInt32ToIntPtr(
1511  LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset));
1512 }
1513 
1514 TNode<Smi> CodeStubAssembler::LoadWeakFixedArrayLength(
1515  TNode<WeakFixedArray> array) {
1516  return CAST(LoadObjectField(array, WeakFixedArray::kLengthOffset));
1517 }
1518 
1519 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagWeakFixedArrayLength(
1520  SloppyTNode<WeakFixedArray> array) {
1521  return LoadAndUntagObjectField(array, WeakFixedArray::kLengthOffset);
1522 }
1523 
1524 TNode<Int32T> CodeStubAssembler::LoadNumberOfDescriptors(
1525  TNode<DescriptorArray> array) {
1526  return UncheckedCast<Int32T>(
1527  LoadObjectField(array, DescriptorArray::kNumberOfDescriptorsOffset,
1528  MachineType::Int16()));
1529 }
1530 
1531 TNode<Int32T> CodeStubAssembler::LoadMapBitField(SloppyTNode<Map> map) {
1532  CSA_SLOW_ASSERT(this, IsMap(map));
1533  return UncheckedCast<Int32T>(
1534  LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8()));
1535 }
1536 
1537 TNode<Int32T> CodeStubAssembler::LoadMapBitField2(SloppyTNode<Map> map) {
1538  CSA_SLOW_ASSERT(this, IsMap(map));
1539  return UncheckedCast<Int32T>(
1540  LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()));
1541 }
1542 
1543 TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(SloppyTNode<Map> map) {
1544  CSA_SLOW_ASSERT(this, IsMap(map));
1545  return UncheckedCast<Uint32T>(
1546  LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()));
1547 }
1548 
1549 TNode<Int32T> CodeStubAssembler::LoadMapInstanceType(SloppyTNode<Map> map) {
1550  return UncheckedCast<Int32T>(
1551  LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint16()));
1552 }
1553 
1554 TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(SloppyTNode<Map> map) {
1555  CSA_SLOW_ASSERT(this, IsMap(map));
1556  Node* bit_field2 = LoadMapBitField2(map);
1557  return Signed(DecodeWord32<Map::ElementsKindBits>(bit_field2));
1558 }
1559 
1560 TNode<Int32T> CodeStubAssembler::LoadElementsKind(
1561  SloppyTNode<HeapObject> object) {
1562  return LoadMapElementsKind(LoadMap(object));
1563 }
1564 
1565 TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(
1566  SloppyTNode<Map> map) {
1567  CSA_SLOW_ASSERT(this, IsMap(map));
1568  return CAST(LoadObjectField(map, Map::kDescriptorsOffset));
1569 }
1570 
1571 TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(SloppyTNode<Map> map) {
1572  CSA_SLOW_ASSERT(this, IsMap(map));
1573  return CAST(LoadObjectField(map, Map::kPrototypeOffset));
1574 }
1575 
1576 TNode<PrototypeInfo> CodeStubAssembler::LoadMapPrototypeInfo(
1577  SloppyTNode<Map> map, Label* if_no_proto_info) {
1578  Label if_strong_heap_object(this);
1579  CSA_ASSERT(this, IsMap(map));
1580  TNode<MaybeObject> maybe_prototype_info =
1581  LoadMaybeWeakObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
1582  TVARIABLE(Object, prototype_info);
1583  DispatchMaybeObject(maybe_prototype_info, if_no_proto_info, if_no_proto_info,
1584  if_no_proto_info, &if_strong_heap_object,
1585  &prototype_info);
1586 
1587  BIND(&if_strong_heap_object);
1588  GotoIfNot(WordEqual(LoadMap(CAST(prototype_info.value())),
1589  LoadRoot(RootIndex::kPrototypeInfoMap)),
1590  if_no_proto_info);
1591  return CAST(prototype_info.value());
1592 }
1593 
1594 TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSizeInWords(
1595  SloppyTNode<Map> map) {
1596  CSA_SLOW_ASSERT(this, IsMap(map));
1597  return ChangeInt32ToIntPtr(LoadObjectField(
1598  map, Map::kInstanceSizeInWordsOffset, MachineType::Uint8()));
1599 }
1600 
1601 TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectPropertiesStartInWords(
1602  SloppyTNode<Map> map) {
1603  CSA_SLOW_ASSERT(this, IsMap(map));
1604  // See Map::GetInObjectPropertiesStartInWords() for details.
1605  CSA_ASSERT(this, IsJSObjectMap(map));
1606  return ChangeInt32ToIntPtr(LoadObjectField(
1607  map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1608  MachineType::Uint8()));
1609 }
1610 
1611 TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
1612  SloppyTNode<Map> map) {
1613  CSA_SLOW_ASSERT(this, IsMap(map));
1614  // See Map::GetConstructorFunctionIndex() for details.
1615  CSA_ASSERT(this, IsPrimitiveInstanceType(LoadMapInstanceType(map)));
1616  return ChangeInt32ToIntPtr(LoadObjectField(
1617  map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1618  MachineType::Uint8()));
1619 }
1620 
1621 TNode<Object> CodeStubAssembler::LoadMapConstructor(SloppyTNode<Map> map) {
1622  CSA_SLOW_ASSERT(this, IsMap(map));
1623  TVARIABLE(Object, result,
1624  LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1625 
1626  Label done(this), loop(this, &result);
1627  Goto(&loop);
1628  BIND(&loop);
1629  {
1630  GotoIf(TaggedIsSmi(result.value()), &done);
1631  Node* is_map_type =
1632  InstanceTypeEqual(LoadInstanceType(CAST(result.value())), MAP_TYPE);
1633  GotoIfNot(is_map_type, &done);
1634  result = LoadObjectField(CAST(result.value()),
1635  Map::kConstructorOrBackPointerOffset);
1636  Goto(&loop);
1637  }
1638  BIND(&done);
1639  return result.value();
1640 }
1641 
1642 Node* CodeStubAssembler::LoadMapEnumLength(SloppyTNode<Map> map) {
1643  CSA_SLOW_ASSERT(this, IsMap(map));
1644  Node* bit_field3 = LoadMapBitField3(map);
1645  return DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3);
1646 }
1647 
1648 TNode<Object> CodeStubAssembler::LoadMapBackPointer(SloppyTNode<Map> map) {
1649  TNode<HeapObject> object =
1650  CAST(LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1651  return Select<Object>(IsMap(object), [=] { return object; },
1652  [=] { return UndefinedConstant(); });
1653 }
1654 
1655 TNode<Uint32T> CodeStubAssembler::EnsureOnlyHasSimpleProperties(
1656  TNode<Map> map, TNode<Int32T> instance_type, Label* bailout) {
1657  // This check can have false positives, since it applies to any JSValueType.
1658  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), bailout);
1659 
1660  TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
1661  GotoIf(IsSetWord32(bit_field3, Map::IsDictionaryMapBit::kMask |
1662  Map::HasHiddenPrototypeBit::kMask),
1663  bailout);
1664 
1665  return bit_field3;
1666 }
1667 
1668 TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
1669  SloppyTNode<Object> receiver, Label* if_no_hash) {
1670  TVARIABLE(IntPtrT, var_hash);
1671  Label done(this), if_smi(this), if_property_array(this),
1672  if_property_dictionary(this), if_fixed_array(this);
1673 
1674  TNode<Object> properties_or_hash =
1675  LoadObjectField(TNode<HeapObject>::UncheckedCast(receiver),
1676  JSReceiver::kPropertiesOrHashOffset);
1677  GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);
1678 
1679  TNode<HeapObject> properties =
1680  TNode<HeapObject>::UncheckedCast(properties_or_hash);
1681  TNode<Int32T> properties_instance_type = LoadInstanceType(properties);
1682 
1683  GotoIf(InstanceTypeEqual(properties_instance_type, PROPERTY_ARRAY_TYPE),
1684  &if_property_array);
1685  Branch(InstanceTypeEqual(properties_instance_type, NAME_DICTIONARY_TYPE),
1686  &if_property_dictionary, &if_fixed_array);
1687 
1688  BIND(&if_fixed_array);
1689  {
1690  var_hash = IntPtrConstant(PropertyArray::kNoHashSentinel);
1691  Goto(&done);
1692  }
1693 
1694  BIND(&if_smi);
1695  {
1696  var_hash = SmiUntag(TNode<Smi>::UncheckedCast(properties_or_hash));
1697  Goto(&done);
1698  }
1699 
1700  BIND(&if_property_array);
1701  {
1702  TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
1703  properties, PropertyArray::kLengthAndHashOffset);
1704  var_hash = TNode<IntPtrT>::UncheckedCast(
1705  DecodeWord<PropertyArray::HashField>(length_and_hash));
1706  Goto(&done);
1707  }
1708 
1709  BIND(&if_property_dictionary);
1710  {
1711  var_hash = SmiUntag(CAST(LoadFixedArrayElement(
1712  CAST(properties), NameDictionary::kObjectHashIndex)));
1713  Goto(&done);
1714  }
1715 
1716  BIND(&done);
1717  if (if_no_hash != nullptr) {
1718  GotoIf(IntPtrEqual(var_hash.value(),
1719  IntPtrConstant(PropertyArray::kNoHashSentinel)),
1720  if_no_hash);
1721  }
1722  return var_hash.value();
1723 }
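// The dispatch above covers the four possible states of
// JSReceiver::kPropertiesOrHashOffset:
//   Smi              -> the identity hash itself,
//   PropertyArray    -> hash packed into the kLengthAndHashOffset bitfield,
//   NameDictionary   -> hash stored at kObjectHashIndex,
//   empty FixedArray -> no hash assigned yet (kNoHashSentinel).
// Usage sketch (hypothetical fast path falling back to the runtime):
//   Label runtime(this, Label::kDeferred);
//   TNode<IntPtrT> hash = LoadJSReceiverIdentityHash(receiver, &runtime);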
1724 
1725 TNode<Uint32T> CodeStubAssembler::LoadNameHashField(SloppyTNode<Name> name) {
1726  CSA_ASSERT(this, IsName(name));
1727  return LoadObjectField<Uint32T>(name, Name::kHashFieldOffset);
1728 }
1729 
1730 TNode<Uint32T> CodeStubAssembler::LoadNameHash(SloppyTNode<Name> name,
1731  Label* if_hash_not_computed) {
1732  TNode<Uint32T> hash_field = LoadNameHashField(name);
1733  if (if_hash_not_computed != nullptr) {
1734  GotoIf(IsSetWord32(hash_field, Name::kHashNotComputedMask),
1735  if_hash_not_computed);
1736  }
1737  return Unsigned(Word32Shr(hash_field, Int32Constant(Name::kHashShift)));
1738 }
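// Name::kHashFieldOffset keeps the hash in the bits above Name::kHashShift,
// with flag bits below; Name::kHashNotComputedMask marks names whose hash
// has not been computed yet. Sketch of the common pattern, assuming a
// deferred slow path that computes the hash in the runtime:
//   Label if_slow(this, Label::kDeferred);
//   TNode<Uint32T> hash = LoadNameHash(name, &if_slow);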
1739 
1740 TNode<Smi> CodeStubAssembler::LoadStringLengthAsSmi(
1741  SloppyTNode<String> string) {
1742  return SmiFromIntPtr(LoadStringLengthAsWord(string));
1743 }
1744 
1745 TNode<IntPtrT> CodeStubAssembler::LoadStringLengthAsWord(
1746  SloppyTNode<String> string) {
1747  return Signed(ChangeUint32ToWord(LoadStringLengthAsWord32(string)));
1748 }
1749 
1750 TNode<Uint32T> CodeStubAssembler::LoadStringLengthAsWord32(
1751  SloppyTNode<String> string) {
1752  CSA_ASSERT(this, IsString(string));
1753  return LoadObjectField<Uint32T>(string, String::kLengthOffset);
1754 }
1755 
1756 Node* CodeStubAssembler::PointerToSeqStringData(Node* seq_string) {
1757  CSA_ASSERT(this, IsString(seq_string));
1758  CSA_ASSERT(this,
1759  IsSequentialStringInstanceType(LoadInstanceType(seq_string)));
1760  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
1761  return IntPtrAdd(
1762  BitcastTaggedToWord(seq_string),
1763  IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag));
1764 }
1765 
1766 Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
1767  CSA_ASSERT(this, IsJSValue(object));
1768  return LoadObjectField(object, JSValue::kValueOffset);
1769 }
1770 
1771 void CodeStubAssembler::DispatchMaybeObject(TNode<MaybeObject> maybe_object,
1772  Label* if_smi, Label* if_cleared,
1773  Label* if_weak, Label* if_strong,
1774  TVariable<Object>* extracted) {
1775  Label inner_if_smi(this), inner_if_strong(this);
1776 
1777  GotoIf(TaggedIsSmi(maybe_object), &inner_if_smi);
1778 
1779  GotoIf(IsCleared(maybe_object), if_cleared);
1780 
1781  GotoIf(Word32Equal(Word32And(TruncateIntPtrToInt32(
1782  BitcastMaybeObjectToWord(maybe_object)),
1783  Int32Constant(kHeapObjectTagMask)),
1784  Int32Constant(kHeapObjectTag)),
1785  &inner_if_strong);
1786 
1787  *extracted =
1788  BitcastWordToTagged(WordAnd(BitcastMaybeObjectToWord(maybe_object),
1789  IntPtrConstant(~kWeakHeapObjectMask)));
1790  Goto(if_weak);
1791 
1792  BIND(&inner_if_smi);
1793  *extracted = CAST(maybe_object);
1794  Goto(if_smi);
1795 
1796  BIND(&inner_if_strong);
1797  *extracted = CAST(maybe_object);
1798  Goto(if_strong);
1799 }
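// MaybeObject words are discriminated by their low tag bits, which is what
// the checks above test (values for the standard V8 pointer tagging):
//   ...0  Smi                        -> if_smi
//   ..01  strong HeapObject pointer  -> if_strong
//   ..11  weak HeapObject pointer    -> if_weak, payload = word & ~mask
// with the cleared weak reference a dedicated constant
// (kClearedWeakHeapObjectLower32) -> if_cleared.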
1800 
1801 TNode<BoolT> CodeStubAssembler::IsStrong(TNode<MaybeObject> value) {
1802  return WordEqual(WordAnd(BitcastMaybeObjectToWord(value),
1803  IntPtrConstant(kHeapObjectTagMask)),
1804  IntPtrConstant(kHeapObjectTag));
1805 }
1806 
1807 TNode<HeapObject> CodeStubAssembler::GetHeapObjectIfStrong(
1808  TNode<MaybeObject> value, Label* if_not_strong) {
1809  GotoIfNot(IsStrong(value), if_not_strong);
1810  return CAST(value);
1811 }
1812 
1813 TNode<BoolT> CodeStubAssembler::IsWeakOrCleared(TNode<MaybeObject> value) {
1814  return Word32Equal(
1815  Word32And(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1816  Int32Constant(kHeapObjectTagMask)),
1817  Int32Constant(kWeakHeapObjectTag));
1818 }
1819 
1820 TNode<BoolT> CodeStubAssembler::IsCleared(TNode<MaybeObject> value) {
1821  return Word32Equal(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1822  Int32Constant(kClearedWeakHeapObjectLower32));
1823 }
1824 
1825 TNode<BoolT> CodeStubAssembler::IsNotCleared(TNode<MaybeObject> value) {
1826  return Word32NotEqual(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1827  Int32Constant(kClearedWeakHeapObjectLower32));
1828 }
1829 
1830 TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
1831  TNode<MaybeObject> value) {
1832  CSA_ASSERT(this, IsWeakOrCleared(value));
1833  CSA_ASSERT(this, IsNotCleared(value));
1834  return UncheckedCast<HeapObject>(BitcastWordToTagged(WordAnd(
1835  BitcastMaybeObjectToWord(value), IntPtrConstant(~kWeakHeapObjectMask))));
1836 }
1837 
1838 TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
1839  TNode<MaybeObject> value, Label* if_cleared) {
1840  GotoIf(IsCleared(value), if_cleared);
1841  return GetHeapObjectAssumeWeak(value);
1842 }
1843 
1844 TNode<BoolT> CodeStubAssembler::IsWeakReferenceTo(TNode<MaybeObject> object,
1845  TNode<Object> value) {
1846  return WordEqual(WordAnd(BitcastMaybeObjectToWord(object),
1847  IntPtrConstant(~kWeakHeapObjectMask)),
1848  BitcastTaggedToWord(value));
1849 }
1850 
1851 TNode<BoolT> CodeStubAssembler::IsStrongReferenceTo(TNode<MaybeObject> object,
1852  TNode<Object> value) {
1853  return WordEqual(BitcastMaybeObjectToWord(object),
1854  BitcastTaggedToWord(value));
1855 }
1856 
1857 TNode<BoolT> CodeStubAssembler::IsNotWeakReferenceTo(TNode<MaybeObject> object,
1858  TNode<Object> value) {
1859  return WordNotEqual(WordAnd(BitcastMaybeObjectToWord(object),
1860  IntPtrConstant(~kWeakHeapObjectMask)),
1861  BitcastTaggedToWord(value));
1862 }
1863 
1864 TNode<MaybeObject> CodeStubAssembler::MakeWeak(TNode<HeapObject> value) {
1865  return ReinterpretCast<MaybeObject>(BitcastWordToTagged(
1866  WordOr(BitcastTaggedToWord(value), IntPtrConstant(kWeakHeapObjectTag))));
1867 }
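// Round-trip sketch: MakeWeak ors in the weak tag bit, and
// GetHeapObjectAssumeWeak masks it back off, so the following holds for any
// heap object `object`:
//   TNode<MaybeObject> weak_ref = MakeWeak(object);
//   CSA_ASSERT(this, IsWeakReferenceTo(weak_ref, object));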
1868 
1869 template <>
1870 TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<FixedArray> array) {
1871  return LoadAndUntagFixedArrayBaseLength(array);
1872 }
1873 
1874 template <>
1875 TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<WeakFixedArray> array) {
1876  return LoadAndUntagWeakFixedArrayLength(array);
1877 }
1878 
1879 template <>
1880 TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<PropertyArray> array) {
1881  return LoadPropertyArrayLength(array);
1882 }
1883 
1884 template <>
1885 TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
1886  TNode<DescriptorArray> array) {
1887  return IntPtrMul(ChangeInt32ToIntPtr(LoadNumberOfDescriptors(array)),
1888  IntPtrConstant(DescriptorArray::kEntrySize));
1889 }
1890 
1891 template <>
1892 TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
1893  TNode<TransitionArray> array) {
1894  return LoadAndUntagWeakFixedArrayLength(array);
1895 }
1896 
1897 template <typename Array>
1898 TNode<MaybeObject> CodeStubAssembler::LoadArrayElement(
1899  TNode<Array> array, int array_header_size, Node* index_node,
1900  int additional_offset, ParameterMode parameter_mode,
1901  LoadSensitivity needs_poisoning) {
1902  CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
1903  ParameterToIntPtr(index_node, parameter_mode),
1904  IntPtrConstant(0)));
1905  DCHECK_EQ(additional_offset % kPointerSize, 0);
1906  int32_t header_size = array_header_size + additional_offset - kHeapObjectTag;
1907  TNode<IntPtrT> offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
1908  parameter_mode, header_size);
1909  CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(array),
1910  array_header_size));
1911  return UncheckedCast<MaybeObject>(
1912  Load(MachineType::AnyTagged(), array, offset, needs_poisoning));
1913 }
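// Address arithmetic sketch: for element i of a FixedArray in
// INTPTR_PARAMETERS mode, the (untagged) offset computed above is
//   FixedArray::kHeaderSize - kHeapObjectTag + i * kPointerSize,
// with ElementOffsetFromIndex folding the scaling for the HOLEY_ELEMENTS
// kind. `needs_poisoning` is threaded through to Load so callers can
// request Spectre poisoning for untrusted indices.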
1914 
1915 template TNode<MaybeObject>
1916 CodeStubAssembler::LoadArrayElement<TransitionArray>(TNode<TransitionArray>,
1917  int, Node*, int,
1918  ParameterMode,
1919  LoadSensitivity);
1920 
1921 template TNode<MaybeObject>
1922 CodeStubAssembler::LoadArrayElement<DescriptorArray>(TNode<DescriptorArray>,
1923  int, Node*, int,
1924  ParameterMode,
1925  LoadSensitivity);
1926 
1927 void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
1928  Node* index,
1929  int additional_offset,
1930  ParameterMode parameter_mode) {
1931  if (!FLAG_fixed_array_bounds_checks) return;
1932  DCHECK_EQ(0, additional_offset % kPointerSize);
1933  if (parameter_mode == ParameterMode::SMI_PARAMETERS) {
1934  TNode<Smi> effective_index;
1935  Smi constant_index;
1936  bool index_is_constant = ToSmiConstant(index, &constant_index);
1937  if (index_is_constant) {
1938  effective_index = SmiConstant(Smi::ToInt(constant_index) +
1939  additional_offset / kPointerSize);
1940  } else if (additional_offset != 0) {
1941  effective_index =
1942  SmiAdd(CAST(index), SmiConstant(additional_offset / kPointerSize));
1943  } else {
1944  effective_index = CAST(index);
1945  }
1946  CSA_CHECK(this, SmiBelow(effective_index, LoadFixedArrayBaseLength(array)));
1947  } else {
1948  // IntPtrAdd does constant-folding automatically.
1949  TNode<IntPtrT> effective_index =
1950  IntPtrAdd(UncheckedCast<IntPtrT>(index),
1951  IntPtrConstant(additional_offset / kPointerSize));
1952  CSA_CHECK(this, UintPtrLessThan(effective_index,
1953  LoadAndUntagFixedArrayBaseLength(array)));
1954  }
1955 }
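// Two representations, one invariant: in SMI_PARAMETERS mode the check is a
// single SmiBelow (valid lengths are non-negative Smis, so an unsigned
// compare suffices); in INTPTR_PARAMETERS mode UintPtrLessThan makes a
// negative index wrap around to a huge unsigned value and fail the bounds
// check.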
1956 
1957 TNode<Object> CodeStubAssembler::LoadFixedArrayElement(
1958  TNode<FixedArray> object, Node* index_node, int additional_offset,
1959  ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
1960  CSA_ASSERT(this, IsFixedArraySubclass(object));
1961  CSA_ASSERT(this, IsNotWeakFixedArraySubclass(object));
1962  FixedArrayBoundsCheck(object, index_node, additional_offset, parameter_mode);
1963  TNode<MaybeObject> element =
1964  LoadArrayElement(object, FixedArray::kHeaderSize, index_node,
1965  additional_offset, parameter_mode, needs_poisoning);
1966  return CAST(element);
1967 }
1968 
1969 TNode<Object> CodeStubAssembler::LoadPropertyArrayElement(
1970  TNode<PropertyArray> object, SloppyTNode<IntPtrT> index) {
1971  int additional_offset = 0;
1972  ParameterMode parameter_mode = INTPTR_PARAMETERS;
1973  LoadSensitivity needs_poisoning = LoadSensitivity::kSafe;
1974  return CAST(LoadArrayElement(object, PropertyArray::kHeaderSize, index,
1975  additional_offset, parameter_mode,
1976  needs_poisoning));
1977 }
1978 
1979 TNode<IntPtrT> CodeStubAssembler::LoadPropertyArrayLength(
1980  TNode<PropertyArray> object) {
1981  TNode<IntPtrT> value =
1982  LoadAndUntagObjectField(object, PropertyArray::kLengthAndHashOffset);
1983  return Signed(DecodeWord<PropertyArray::LengthField>(value));
1984 }
1985 
1986 TNode<RawPtrT> CodeStubAssembler::LoadFixedTypedArrayBackingStore(
1987  TNode<FixedTypedArrayBase> typed_array) {
1988  // Backing store = external_pointer + base_pointer.
1989  Node* external_pointer =
1990  LoadObjectField(typed_array, FixedTypedArrayBase::kExternalPointerOffset,
1991  MachineType::Pointer());
1992  Node* base_pointer =
1993  LoadObjectField(typed_array, FixedTypedArrayBase::kBasePointerOffset);
1994  return UncheckedCast<RawPtrT>(
1995  IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer)));
1996 }
1997 
1998 Node* CodeStubAssembler::LoadFixedBigInt64ArrayElementAsTagged(
1999  Node* data_pointer, Node* offset) {
2000  if (Is64()) {
2001  TNode<IntPtrT> value = UncheckedCast<IntPtrT>(
2002  Load(MachineType::IntPtr(), data_pointer, offset));
2003  return BigIntFromInt64(value);
2004  } else {
2005  DCHECK(!Is64());
2006 #if defined(V8_TARGET_BIG_ENDIAN)
2007  TNode<IntPtrT> high = UncheckedCast<IntPtrT>(
2008  Load(MachineType::UintPtr(), data_pointer, offset));
2009  TNode<IntPtrT> low = UncheckedCast<IntPtrT>(
2010  Load(MachineType::UintPtr(), data_pointer,
2011  Int32Add(offset, Int32Constant(kPointerSize))));
2012 #else
2013  TNode<IntPtrT> low = UncheckedCast<IntPtrT>(
2014  Load(MachineType::UintPtr(), data_pointer, offset));
2015  TNode<IntPtrT> high = UncheckedCast<IntPtrT>(
2016  Load(MachineType::UintPtr(), data_pointer,
2017  Int32Add(offset, Int32Constant(kPointerSize))));
2018 #endif
2019  return BigIntFromInt32Pair(low, high);
2020  }
2021 }
2022 
2023 TNode<BigInt> CodeStubAssembler::BigIntFromInt32Pair(TNode<IntPtrT> low,
2024  TNode<IntPtrT> high) {
2025  DCHECK(!Is64());
2026  TVARIABLE(BigInt, var_result);
2027  TVARIABLE(WordT, var_sign, IntPtrConstant(BigInt::SignBits::encode(false)));
2028  TVARIABLE(IntPtrT, var_high, high);
2029  TVARIABLE(IntPtrT, var_low, low);
2030  Label high_zero(this), negative(this), allocate_one_digit(this),
2031  allocate_two_digits(this), if_zero(this), done(this);
2032 
2033  GotoIf(WordEqual(var_high.value(), IntPtrConstant(0)), &high_zero);
2034  Branch(IntPtrLessThan(var_high.value(), IntPtrConstant(0)), &negative,
2035  &allocate_two_digits);
2036 
2037  BIND(&high_zero);
2038  Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &if_zero,
2039  &allocate_one_digit);
2040 
2041  BIND(&negative);
2042  {
2043  var_sign = IntPtrConstant(BigInt::SignBits::encode(true));
2044  // We must negate the value by computing "0 - (high|low)", performing
2045  // both parts of the subtraction separately and manually taking care
2046  // of the carry bit (which is 1 iff low != 0).
2047  var_high = IntPtrSub(IntPtrConstant(0), var_high.value());
2048  Label carry(this), no_carry(this);
2049  Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &no_carry, &carry);
2050  BIND(&carry);
2051  var_high = IntPtrSub(var_high.value(), IntPtrConstant(1));
2052  Goto(&no_carry);
2053  BIND(&no_carry);
2054  var_low = IntPtrSub(IntPtrConstant(0), var_low.value());
2055  // var_high was non-zero going into this block, but subtracting the
2056  // carry bit from it could bring us back onto the "one digit" path.
2057  Branch(WordEqual(var_high.value(), IntPtrConstant(0)), &allocate_one_digit,
2058  &allocate_two_digits);
2059  }
2060 
2061  BIND(&allocate_one_digit);
2062  {
2063  var_result = AllocateRawBigInt(IntPtrConstant(1));
2064  StoreBigIntBitfield(var_result.value(),
2065  WordOr(var_sign.value(),
2066  IntPtrConstant(BigInt::LengthBits::encode(1))));
2067  StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2068  Goto(&done);
2069  }
2070 
2071  BIND(&allocate_two_digits);
2072  {
2073  var_result = AllocateRawBigInt(IntPtrConstant(2));
2074  StoreBigIntBitfield(var_result.value(),
2075  WordOr(var_sign.value(),
2076  IntPtrConstant(BigInt::LengthBits::encode(2))));
2077  StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2078  StoreBigIntDigit(var_result.value(), 1, Unsigned(var_high.value()));
2079  Goto(&done);
2080  }
2081 
2082  BIND(&if_zero);
2083  var_result = AllocateBigInt(IntPtrConstant(0));
2084  Goto(&done);
2085 
2086  BIND(&done);
2087  return var_result.value();
2088 }
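// Worked example for the `negative` block (32-bit target): the int64 value
// -1 arrives as low = 0xFFFFFFFF, high = 0xFFFFFFFF. Negation computes
// high' = (0 - high) - 1 = 0 (carry taken, since low != 0) and
// low' = 0 - low = 1, so the magnitude 1 fits in one digit and the sign bit
// is set: the canonical one-digit BigInt for -1.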
2089 
2090 TNode<BigInt> CodeStubAssembler::BigIntFromInt64(TNode<IntPtrT> value) {
2091  DCHECK(Is64());
2092  TVARIABLE(BigInt, var_result);
2093  Label done(this), if_positive(this), if_negative(this), if_zero(this);
2094  GotoIf(WordEqual(value, IntPtrConstant(0)), &if_zero);
2095  var_result = AllocateRawBigInt(IntPtrConstant(1));
2096  Branch(IntPtrGreaterThan(value, IntPtrConstant(0)), &if_positive,
2097  &if_negative);
2098 
2099  BIND(&if_positive);
2100  {
2101  StoreBigIntBitfield(var_result.value(),
2102  IntPtrConstant(BigInt::SignBits::encode(false) |
2103  BigInt::LengthBits::encode(1)));
2104  StoreBigIntDigit(var_result.value(), 0, Unsigned(value));
2105  Goto(&done);
2106  }
2107 
2108  BIND(&if_negative);
2109  {
2110  StoreBigIntBitfield(var_result.value(),
2111  IntPtrConstant(BigInt::SignBits::encode(true) |
2112  BigInt::LengthBits::encode(1)));
2113  StoreBigIntDigit(var_result.value(), 0,
2114  Unsigned(IntPtrSub(IntPtrConstant(0), value)));
2115  Goto(&done);
2116  }
2117 
2118  BIND(&if_zero);
2119  {
2120  var_result = AllocateBigInt(IntPtrConstant(0));
2121  Goto(&done);
2122  }
2123 
2124  BIND(&done);
2125  return var_result.value();
2126 }
2127 
2128 Node* CodeStubAssembler::LoadFixedBigUint64ArrayElementAsTagged(
2129  Node* data_pointer, Node* offset) {
2130  Label if_zero(this), done(this);
2131  if (Is64()) {
2132  TNode<UintPtrT> value = UncheckedCast<UintPtrT>(
2133  Load(MachineType::UintPtr(), data_pointer, offset));
2134  return BigIntFromUint64(value);
2135  } else {
2136  DCHECK(!Is64());
2137 #if defined(V8_TARGET_BIG_ENDIAN)
2138  TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2139  Load(MachineType::UintPtr(), data_pointer, offset));
2140  TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2141  Load(MachineType::UintPtr(), data_pointer,
2142  Int32Add(offset, Int32Constant(kPointerSize))));
2143 #else
2144  TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2145  Load(MachineType::UintPtr(), data_pointer, offset));
2146  TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2147  Load(MachineType::UintPtr(), data_pointer,
2148  Int32Add(offset, Int32Constant(kPointerSize))));
2149 #endif
2150  return BigIntFromUint32Pair(low, high);
2151  }
2152 }
2153 
2154 TNode<BigInt> CodeStubAssembler::BigIntFromUint32Pair(TNode<UintPtrT> low,
2155  TNode<UintPtrT> high) {
2156  DCHECK(!Is64());
2157  TVARIABLE(BigInt, var_result);
2158  Label high_zero(this), if_zero(this), done(this);
2159 
2160  GotoIf(WordEqual(high, IntPtrConstant(0)), &high_zero);
2161  var_result = AllocateBigInt(IntPtrConstant(2));
2162  StoreBigIntDigit(var_result.value(), 0, low);
2163  StoreBigIntDigit(var_result.value(), 1, high);
2164  Goto(&done);
2165 
2166  BIND(&high_zero);
2167  GotoIf(WordEqual(low, IntPtrConstant(0)), &if_zero);
2168  var_result = AllocateBigInt(IntPtrConstant(1));
2169  StoreBigIntDigit(var_result.value(), 0, low);
2170  Goto(&done);
2171 
2172  BIND(&if_zero);
2173  var_result = AllocateBigInt(IntPtrConstant(0));
2174  Goto(&done);
2175 
2176  BIND(&done);
2177  return var_result.value();
2178 }
2179 
2180 TNode<BigInt> CodeStubAssembler::BigIntFromUint64(TNode<UintPtrT> value) {
2181  DCHECK(Is64());
2182  TVARIABLE(BigInt, var_result);
2183  Label done(this), if_zero(this);
2184  GotoIf(WordEqual(value, IntPtrConstant(0)), &if_zero);
2185  var_result = AllocateBigInt(IntPtrConstant(1));
2186  StoreBigIntDigit(var_result.value(), 0, value);
2187  Goto(&done);
2188 
2189  BIND(&if_zero);
2190  var_result = AllocateBigInt(IntPtrConstant(0));
2191  Goto(&done);
2192  BIND(&done);
2193  return var_result.value();
2194 }
2195 
2196 Node* CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2197  Node* data_pointer, Node* index_node, ElementsKind elements_kind,
2198  ParameterMode parameter_mode) {
2199  Node* offset =
2200  ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
2201  switch (elements_kind) {
2202  case UINT8_ELEMENTS: /* fall through */
2203  case UINT8_CLAMPED_ELEMENTS:
2204  return SmiFromInt32(Load(MachineType::Uint8(), data_pointer, offset));
2205  case INT8_ELEMENTS:
2206  return SmiFromInt32(Load(MachineType::Int8(), data_pointer, offset));
2207  case UINT16_ELEMENTS:
2208  return SmiFromInt32(Load(MachineType::Uint16(), data_pointer, offset));
2209  case INT16_ELEMENTS:
2210  return SmiFromInt32(Load(MachineType::Int16(), data_pointer, offset));
2211  case UINT32_ELEMENTS:
2212  return ChangeUint32ToTagged(
2213  Load(MachineType::Uint32(), data_pointer, offset));
2214  case INT32_ELEMENTS:
2215  return ChangeInt32ToTagged(
2216  Load(MachineType::Int32(), data_pointer, offset));
2217  case FLOAT32_ELEMENTS:
2218  return AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(
2219  Load(MachineType::Float32(), data_pointer, offset)));
2220  case FLOAT64_ELEMENTS:
2221  return AllocateHeapNumberWithValue(
2222  Load(MachineType::Float64(), data_pointer, offset));
2223  case BIGINT64_ELEMENTS:
2224  return LoadFixedBigInt64ArrayElementAsTagged(data_pointer, offset);
2225  case BIGUINT64_ELEMENTS:
2226  return LoadFixedBigUint64ArrayElementAsTagged(data_pointer, offset);
2227  default:
2228  UNREACHABLE();
2229  }
2230 }
2231 
2232 TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2233  TNode<WordT> data_pointer, TNode<Smi> index, TNode<Int32T> elements_kind) {
2234  TVARIABLE(Numeric, var_result);
2235  Label done(this), if_unknown_type(this, Label::kDeferred);
2236  int32_t elements_kinds[] = {
2237 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) TYPE##_ELEMENTS,
2238  TYPED_ARRAYS(TYPED_ARRAY_CASE)
2239 #undef TYPED_ARRAY_CASE
2240  };
2241 
2242 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) Label if_##type##array(this);
2243  TYPED_ARRAYS(TYPED_ARRAY_CASE)
2244 #undef TYPED_ARRAY_CASE
2245 
2246  Label* elements_kind_labels[] = {
2247 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) &if_##type##array,
2248  TYPED_ARRAYS(TYPED_ARRAY_CASE)
2249 #undef TYPED_ARRAY_CASE
2250  };
2251  STATIC_ASSERT(arraysize(elements_kinds) == arraysize(elements_kind_labels));
2252 
2253  Switch(elements_kind, &if_unknown_type, elements_kinds, elements_kind_labels,
2254  arraysize(elements_kinds));
2255 
2256  BIND(&if_unknown_type);
2257  Unreachable();
2258 
2259 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
2260  BIND(&if_##type##array); \
2261  { \
2262  var_result = CAST(LoadFixedTypedArrayElementAsTagged( \
2263  data_pointer, index, TYPE##_ELEMENTS, SMI_PARAMETERS)); \
2264  Goto(&done); \
2265  }
2266  TYPED_ARRAYS(TYPED_ARRAY_CASE)
2267 #undef TYPED_ARRAY_CASE
2268 
2269  BIND(&done);
2270  return var_result.value();
2271 }
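// The TYPED_ARRAYS macro expands one elements kind, one label, and one BIND
// per typed-array type, so the Switch above becomes a dense dispatch on the
// dynamic elements_kind; every arm simply re-enters the static-kind
// overload with SMI_PARAMETERS and CASTs the result to Numeric.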
2272 
2273 void CodeStubAssembler::StoreFixedTypedArrayElementFromTagged(
2274  TNode<Context> context, TNode<FixedTypedArrayBase> elements,
2275  TNode<Object> index_node, TNode<Object> value, ElementsKind elements_kind,
2276  ParameterMode parameter_mode) {
2277  TNode<RawPtrT> data_pointer = LoadFixedTypedArrayBackingStore(elements);
2278  switch (elements_kind) {
2279  case UINT8_ELEMENTS:
2280  case UINT8_CLAMPED_ELEMENTS:
2281  case INT8_ELEMENTS:
2282  case UINT16_ELEMENTS:
2283  case INT16_ELEMENTS:
2284  StoreElement(data_pointer, elements_kind, index_node,
2285  SmiToInt32(CAST(value)), parameter_mode);
2286  break;
2287  case UINT32_ELEMENTS:
2288  case INT32_ELEMENTS:
2289  StoreElement(data_pointer, elements_kind, index_node,
2290  TruncateTaggedToWord32(context, value), parameter_mode);
2291  break;
2292  case FLOAT32_ELEMENTS:
2293  StoreElement(data_pointer, elements_kind, index_node,
2294  TruncateFloat64ToFloat32(LoadHeapNumberValue(CAST(value))),
2295  parameter_mode);
2296  break;
2297  case FLOAT64_ELEMENTS:
2298  StoreElement(data_pointer, elements_kind, index_node,
2299  LoadHeapNumberValue(CAST(value)), parameter_mode);
2300  break;
2301  case BIGUINT64_ELEMENTS:
2302  case BIGINT64_ELEMENTS: {
2303  TNode<IntPtrT> offset =
2304  ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
2305  EmitBigTypedArrayElementStore(elements, data_pointer, offset,
2306  CAST(value));
2307  break;
2308  }
2309  default:
2310  UNREACHABLE();
2311  }
2312 }
2313 
2314 TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2315  Node* object, Node* slot_index_node, int additional_offset,
2316  ParameterMode parameter_mode) {
2317  CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
2318  CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
2319  int32_t header_size =
2320  FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
2321  Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
2322  parameter_mode, header_size);
2323  CSA_SLOW_ASSERT(
2324  this, IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
2325  FeedbackVector::kHeaderSize));
2326  return UncheckedCast<MaybeObject>(
2327  Load(MachineType::AnyTagged(), object, offset));
2328 }
2329 
2330 template <typename Array>
2331 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ArrayElement(
2332  TNode<Array> object, int array_header_size, Node* index_node,
2333  int additional_offset, ParameterMode parameter_mode) {
2334  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2335  DCHECK_EQ(additional_offset % kPointerSize, 0);
2336  int endian_correction = 0;
2337 #if V8_TARGET_LITTLE_ENDIAN
2338  if (SmiValuesAre32Bits()) endian_correction = kPointerSize / 2;
2339 #endif
2340  int32_t header_size = array_header_size + additional_offset - kHeapObjectTag +
2341  endian_correction;
2342  Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
2343  parameter_mode, header_size);
2344  CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(object),
2345  array_header_size + endian_correction));
2346  if (SmiValuesAre32Bits()) {
2347  return UncheckedCast<Int32T>(Load(MachineType::Int32(), object, offset));
2348  } else {
2349  return SmiToInt32(Load(MachineType::AnyTagged(), object, offset));
2350  }
2351 }
2352 
2353 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
2354  TNode<FixedArray> object, Node* index_node, int additional_offset,
2355  ParameterMode parameter_mode) {
2356  CSA_SLOW_ASSERT(this, IsFixedArraySubclass(object));
2357  return LoadAndUntagToWord32ArrayElement(object, FixedArray::kHeaderSize,
2358  index_node, additional_offset,
2359  parameter_mode);
2360 }
2361 
2362 TNode<MaybeObject> CodeStubAssembler::LoadWeakFixedArrayElement(
2363  TNode<WeakFixedArray> object, Node* index, int additional_offset,
2364  ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
2365  return LoadArrayElement(object, WeakFixedArray::kHeaderSize, index,
2366  additional_offset, parameter_mode, needs_poisoning);
2367 }
2368 
2369 TNode<Float64T> CodeStubAssembler::LoadFixedDoubleArrayElement(
2370  SloppyTNode<FixedDoubleArray> object, Node* index_node,
2371  MachineType machine_type, int additional_offset,
2372  ParameterMode parameter_mode, Label* if_hole) {
2373  CSA_ASSERT(this, IsFixedDoubleArray(object));
2374  DCHECK_EQ(additional_offset % kPointerSize, 0);
2375  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2376  int32_t header_size =
2377  FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
2378  TNode<IntPtrT> offset = ElementOffsetFromIndex(
2379  index_node, HOLEY_DOUBLE_ELEMENTS, parameter_mode, header_size);
2380  CSA_ASSERT(this, IsOffsetInBounds(
2381  offset, LoadAndUntagFixedArrayBaseLength(object),
2382  FixedDoubleArray::kHeaderSize, HOLEY_DOUBLE_ELEMENTS));
2383  return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
2384 }
2385 
2386 TNode<Object> CodeStubAssembler::LoadFixedArrayBaseElementAsTagged(
2387  TNode<FixedArrayBase> elements, TNode<IntPtrT> index,
2388  TNode<Int32T> elements_kind, Label* if_accessor, Label* if_hole) {
2389  TVARIABLE(Object, var_result);
2390  Label done(this), if_packed(this), if_holey(this), if_packed_double(this),
2391  if_holey_double(this), if_dictionary(this, Label::kDeferred);
2392 
2393  int32_t kinds[] = {// Handled by if_packed.
2394  PACKED_SMI_ELEMENTS, PACKED_ELEMENTS,
2395  // Handled by if_holey.
2396  HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS,
2397  // Handled by if_packed_double.
2398  PACKED_DOUBLE_ELEMENTS,
2399  // Handled by if_holey_double.
2400  HOLEY_DOUBLE_ELEMENTS};
2401  Label* labels[] = {// PACKED_{SMI,}_ELEMENTS
2402  &if_packed, &if_packed,
2403  // HOLEY_{SMI,}_ELEMENTS
2404  &if_holey, &if_holey,
2405  // PACKED_DOUBLE_ELEMENTS
2406  &if_packed_double,
2407  // HOLEY_DOUBLE_ELEMENTS
2408  &if_holey_double};
2409  Switch(elements_kind, &if_dictionary, kinds, labels, arraysize(kinds));
2410 
2411  BIND(&if_packed);
2412  {
2413  var_result = LoadFixedArrayElement(CAST(elements), index, 0);
2414  Goto(&done);
2415  }
2416 
2417  BIND(&if_holey);
2418  {
2419  var_result = LoadFixedArrayElement(CAST(elements), index);
2420  Branch(WordEqual(var_result.value(), TheHoleConstant()), if_hole, &done);
2421  }
2422 
2423  BIND(&if_packed_double);
2424  {
2425  var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
2426  CAST(elements), index, MachineType::Float64()));
2427  Goto(&done);
2428  }
2429 
2430  BIND(&if_holey_double);
2431  {
2432  var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
2433  CAST(elements), index, MachineType::Float64(), 0, INTPTR_PARAMETERS,
2434  if_hole));
2435  Goto(&done);
2436  }
2437 
2438  BIND(&if_dictionary);
2439  {
2440  CSA_ASSERT(this, IsDictionaryElementsKind(elements_kind));
2441  var_result = BasicLoadNumberDictionaryElement(CAST(elements), index,
2442  if_accessor, if_hole);
2443  Goto(&done);
2444  }
2445 
2446  BIND(&done);
2447  return var_result.value();
2448 }
2449 
2450 TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
2451  SloppyTNode<Object> base, SloppyTNode<IntPtrT> offset, Label* if_hole,
2452  MachineType machine_type) {
2453  if (if_hole) {
2454  // TODO(ishell): Compare only the upper part for the hole once the
2455  // compiler is able to fold addition of already complex |offset| with
2456  // |kIeeeDoubleExponentWordOffset| into one addressing mode.
2457  if (Is64()) {
2458  Node* element = Load(MachineType::Uint64(), base, offset);
2459  GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
2460  } else {
2461  Node* element_upper = Load(
2462  MachineType::Uint32(), base,
2463  IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
2464  GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
2465  if_hole);
2466  }
2467  }
2468  if (machine_type.IsNone()) {
2469  // This means the actual value is not needed.
2470  return TNode<Float64T>();
2471  }
2472  return UncheckedCast<Float64T>(Load(machine_type, base, offset));
2473 }
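// Holes in double arrays are stored as one specific NaN bit pattern
// (kHoleNanInt64, upper half kHoleNanUpper32). The comparison above is done
// on raw integer words on purpose: a floating-point compare cannot tell the
// hole NaN apart from any other NaN. Passing MachineType::None() skips the
// value load entirely when only the hole check is needed.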
2474 
2475 TNode<Object> CodeStubAssembler::LoadContextElement(
2476  SloppyTNode<Context> context, int slot_index) {
2477  int offset = Context::SlotOffset(slot_index);
2478  return UncheckedCast<Object>(
2479  Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)));
2480 }
2481 
2482 TNode<Object> CodeStubAssembler::LoadContextElement(
2483  SloppyTNode<Context> context, SloppyTNode<IntPtrT> slot_index) {
2484  Node* offset = ElementOffsetFromIndex(
2485  slot_index, PACKED_ELEMENTS, INTPTR_PARAMETERS, Context::SlotOffset(0));
2486  return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2487 }
2488 
2489 TNode<Object> CodeStubAssembler::LoadContextElement(TNode<Context> context,
2490  TNode<Smi> slot_index) {
2491  Node* offset = ElementOffsetFromIndex(slot_index, PACKED_ELEMENTS,
2492  SMI_PARAMETERS, Context::SlotOffset(0));
2493  return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2494 }
2495 
2496 void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2497  int slot_index,
2498  SloppyTNode<Object> value) {
2499  int offset = Context::SlotOffset(slot_index);
2500  Store(context, IntPtrConstant(offset), value);
2501 }
2502 
2503 void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2504  SloppyTNode<IntPtrT> slot_index,
2505  SloppyTNode<Object> value) {
2506  Node* offset = IntPtrAdd(TimesPointerSize(slot_index),
2507  IntPtrConstant(Context::SlotOffset(0)));
2508  Store(context, offset, value);
2509 }
2510 
2511 void CodeStubAssembler::StoreContextElementNoWriteBarrier(
2512  SloppyTNode<Context> context, int slot_index, SloppyTNode<Object> value) {
2513  int offset = Context::SlotOffset(slot_index);
2514  StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
2515  IntPtrConstant(offset), value);
2516 }
2517 
2518 TNode<Context> CodeStubAssembler::LoadNativeContext(
2519  SloppyTNode<Context> context) {
2520  return UncheckedCast<Context>(
2521  LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX));
2522 }
2523 
2524 TNode<Context> CodeStubAssembler::LoadModuleContext(
2525  SloppyTNode<Context> context) {
2526  Node* module_map = LoadRoot(RootIndex::kModuleContextMap);
2527  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
2528  cur_context.Bind(context);
2529 
2530  Label context_found(this);
2531 
2532  Variable* context_search_loop_variables[1] = {&cur_context};
2533  Label context_search(this, 1, context_search_loop_variables);
2534 
2535  // Loop until cur_context->map() is module_map.
2536  Goto(&context_search);
2537  BIND(&context_search);
2538  {
2539  CSA_ASSERT(this, Word32BinaryNot(IsNativeContext(cur_context.value())));
2540  GotoIf(WordEqual(LoadMap(cur_context.value()), module_map), &context_found);
2541 
2542  cur_context.Bind(
2543  LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
2544  Goto(&context_search);
2545  }
2546 
2547  BIND(&context_found);
2548  return UncheckedCast<Context>(cur_context.value());
2549 }
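// Equivalent walk in plain C++ (illustrative sketch only):
//   for (Context* c = context;; c = c->previous()) {
//     if (c->map() == module_context_map) return c;
//   }
// The CSA_ASSERT inside the loop guards the invariant that a module context
// is reached before the chain hits the native context.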
2550 
2551 TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2552  SloppyTNode<Int32T> kind, SloppyTNode<Context> native_context) {
2553  CSA_ASSERT(this, IsFastElementsKind(kind));
2554  CSA_ASSERT(this, IsNativeContext(native_context));
2555  Node* offset = IntPtrAdd(IntPtrConstant(Context::FIRST_JS_ARRAY_MAP_SLOT),
2556  ChangeInt32ToIntPtr(kind));
2557  return UncheckedCast<Map>(LoadContextElement(native_context, offset));
2558 }
2559 
2560 TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2561  ElementsKind kind, SloppyTNode<Context> native_context) {
2562  CSA_ASSERT(this, IsNativeContext(native_context));
2563  return UncheckedCast<Map>(
2564  LoadContextElement(native_context, Context::ArrayMapIndex(kind)));
2565 }
2566 
2567 TNode<BoolT> CodeStubAssembler::IsGeneratorFunction(
2568  TNode<JSFunction> function) {
2569  TNode<SharedFunctionInfo> const shared_function_info =
2570  CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
2571 
2572  TNode<Uint32T> const function_kind =
2573  DecodeWord32<SharedFunctionInfo::FunctionKindBits>(LoadObjectField(
2574  shared_function_info, SharedFunctionInfo::kFlagsOffset,
2575  MachineType::Uint32()));
2576 
2577  return TNode<BoolT>::UncheckedCast(Word32Or(
2578  Word32Or(
2579  Word32Or(
2580  Word32Equal(function_kind,
2581  Int32Constant(FunctionKind::kAsyncGeneratorFunction)),
2582  Word32Equal(
2583  function_kind,
2584  Int32Constant(FunctionKind::kAsyncConciseGeneratorMethod))),
2585  Word32Equal(function_kind,
2586  Int32Constant(FunctionKind::kGeneratorFunction))),
2587  Word32Equal(function_kind,
2588  Int32Constant(FunctionKind::kConciseGeneratorMethod))));
2589 }
2590 
2591 TNode<BoolT> CodeStubAssembler::HasPrototypeProperty(TNode<JSFunction> function,
2592  TNode<Map> map) {
2593  // (has_prototype_slot() && IsConstructor()) ||
2594  // IsGeneratorFunction(shared()->kind())
2595  uint32_t mask =
2596  Map::HasPrototypeSlotBit::kMask | Map::IsConstructorBit::kMask;
2597  return TNode<BoolT>::UncheckedCast(
2598  Word32Or(IsAllSetWord32(LoadMapBitField(map), mask),
2599  IsGeneratorFunction(function)));
2600 }
2601 
2602 void CodeStubAssembler::GotoIfPrototypeRequiresRuntimeLookup(
2603  TNode<JSFunction> function, TNode<Map> map, Label* runtime) {
2604  // !has_prototype_property() || has_non_instance_prototype()
2605  GotoIfNot(HasPrototypeProperty(function, map), runtime);
2606  GotoIf(IsSetWord32<Map::HasNonInstancePrototypeBit>(LoadMapBitField(map)),
2607  runtime);
2608 }
2609 
2610 Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function,
2611  Label* if_bailout) {
2612  CSA_ASSERT(this, TaggedIsNotSmi(function));
2613  CSA_ASSERT(this, IsJSFunction(function));
2614  CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)));
2615  CSA_ASSERT(this, IsClearWord32<Map::HasNonInstancePrototypeBit>(
2616  LoadMapBitField(LoadMap(function))));
2617  Node* proto_or_map =
2618  LoadObjectField(function, JSFunction::kPrototypeOrInitialMapOffset);
2619  GotoIf(IsTheHole(proto_or_map), if_bailout);
2620 
2621  VARIABLE(var_result, MachineRepresentation::kTagged, proto_or_map);
2622  Label done(this, &var_result);
2623  GotoIfNot(IsMap(proto_or_map), &done);
2624 
2625  var_result.Bind(LoadMapPrototype(proto_or_map));
2626  Goto(&done);
2627 
2628  BIND(&done);
2629  return var_result.value();
2630 }
2631 
2632 TNode<BytecodeArray> CodeStubAssembler::LoadSharedFunctionInfoBytecodeArray(
2633  SloppyTNode<SharedFunctionInfo> shared) {
2634  Node* function_data =
2635  LoadObjectField(shared, SharedFunctionInfo::kFunctionDataOffset);
2636 
2637  VARIABLE(var_result, MachineRepresentation::kTagged, function_data);
2638  Label done(this, &var_result);
2639 
2640  GotoIfNot(HasInstanceType(function_data, INTERPRETER_DATA_TYPE), &done);
2641  Node* bytecode_array =
2642  LoadObjectField(function_data, InterpreterData::kBytecodeArrayOffset);
2643  var_result.Bind(bytecode_array);
2644  Goto(&done);
2645 
2646  BIND(&done);
2647  return CAST(var_result.value());
2648 }
2649 
2650 void CodeStubAssembler::StoreObjectByteNoWriteBarrier(TNode<HeapObject> object,
2651  int offset,
2652  TNode<Word32T> value) {
2653  StoreNoWriteBarrier(MachineRepresentation::kWord8, object,
2654  IntPtrConstant(offset - kHeapObjectTag), value);
2655 }
2656 
2657 void CodeStubAssembler::StoreHeapNumberValue(SloppyTNode<HeapNumber> object,
2658  SloppyTNode<Float64T> value) {
2659  StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
2660  MachineRepresentation::kFloat64);
2661 }
2662 
2663 void CodeStubAssembler::StoreMutableHeapNumberValue(
2664  SloppyTNode<MutableHeapNumber> object, SloppyTNode<Float64T> value) {
2665  StoreObjectFieldNoWriteBarrier(object, MutableHeapNumber::kValueOffset, value,
2666  MachineRepresentation::kFloat64);
2667 }
2668 
2669 Node* CodeStubAssembler::StoreObjectField(Node* object, int offset,
2670  Node* value) {
2671  DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead.
2672  return Store(object, IntPtrConstant(offset - kHeapObjectTag), value);
2673 }
2674 
2675 Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
2676  Node* value) {
2677  int const_offset;
2678  if (ToInt32Constant(offset, const_offset)) {
2679  return StoreObjectField(object, const_offset, value);
2680  }
2681  return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)),
2682  value);
2683 }
2684 
2685 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2686  Node* object, int offset, Node* value, MachineRepresentation rep) {
2687  return StoreNoWriteBarrier(rep, object,
2688  IntPtrConstant(offset - kHeapObjectTag), value);
2689 }
2690 
2691 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2692  Node* object, Node* offset, Node* value, MachineRepresentation rep) {
2693  int const_offset;
2694  if (ToInt32Constant(offset, const_offset)) {
2695  return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
2696  }
2697  return StoreNoWriteBarrier(
2698  rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
2699 }
2700 
2701 Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
2702  CSA_SLOW_ASSERT(this, IsMap(map));
2703  return StoreWithMapWriteBarrier(
2704  object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
2705 }
2706 
2707 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object,
2708  RootIndex map_root_index) {
2709  return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
2710 }
2711 
2712 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
2713  CSA_SLOW_ASSERT(this, IsMap(map));
2714  return StoreNoWriteBarrier(
2715  MachineRepresentation::kTagged, object,
2716  IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
2717 }
2718 
2719 Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
2720  RootIndex root_index) {
2721  if (RootsTable::IsImmortalImmovable(root_index)) {
2722  return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
2723  } else {
2724  return StoreObjectField(object, offset, LoadRoot(root_index));
2725  }
2726 }
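// Immortal immovable roots can never be freed or moved by the GC, so the
// write barrier is provably unnecessary for them; every other root takes
// the barriered StoreObjectField path.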
2727 
2728 Node* CodeStubAssembler::StoreJSArrayLength(TNode<JSArray> array,
2729  TNode<Smi> length) {
2730  return StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2731 }
2732 
2733 Node* CodeStubAssembler::StoreElements(TNode<Object> object,
2734  TNode<FixedArrayBase> elements) {
2735  return StoreObjectField(object, JSObject::kElementsOffset, elements);
2736 }
2737 
2738 void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
2739  Node* object, Node* index_node, Node* value, WriteBarrierMode barrier_mode,
2740  int additional_offset, ParameterMode parameter_mode) {
2741  CSA_SLOW_ASSERT(
2742  this, Word32Or(IsFixedArraySubclass(object), IsPropertyArray(object)));
2743  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2744  DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2745  barrier_mode == UPDATE_WRITE_BARRIER);
2746  DCHECK_EQ(additional_offset % kPointerSize, 0);
2747  STATIC_ASSERT(FixedArray::kHeaderSize == PropertyArray::kHeaderSize);
2748  int header_size =
2749  FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
2750  Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
2751  parameter_mode, header_size);
2752  STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
2753  static_cast<int>(WeakFixedArray::kLengthOffset));
2754  STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
2755  static_cast<int>(PropertyArray::kLengthAndHashOffset));
2756  // Check that index_node + additional_offset < object.length.
2757  // TODO(cbruni): Use proper LoadXXLength helpers
2758  CSA_ASSERT(
2759  this,
2760  IsOffsetInBounds(
2761  offset,
2762  Select<IntPtrT>(
2763  IsPropertyArray(object),
2764  [=] {
2765  TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
2766  object, PropertyArray::kLengthAndHashOffset);
2767  return TNode<IntPtrT>::UncheckedCast(
2768  DecodeWord<PropertyArray::LengthField>(length_and_hash));
2769  },
2770  [=] {
2771  return LoadAndUntagObjectField(object,
2772  FixedArrayBase::kLengthOffset);
2773  }),
2774  FixedArray::kHeaderSize));
2775  if (barrier_mode == SKIP_WRITE_BARRIER) {
2776  StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
2777  } else {
2778  Store(object, offset, value);
2779  }
2780 }
2781 
2782 void CodeStubAssembler::StoreFixedDoubleArrayElement(
2783  TNode<FixedDoubleArray> object, Node* index_node, TNode<Float64T> value,
2784  ParameterMode parameter_mode) {
2785  CSA_ASSERT(this, IsFixedDoubleArray(object));
2786  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2787  FixedArrayBoundsCheck(object, index_node, 0, parameter_mode);
2788  Node* offset =
2789  ElementOffsetFromIndex(index_node, PACKED_DOUBLE_ELEMENTS, parameter_mode,
2790  FixedArray::kHeaderSize - kHeapObjectTag);
2791  MachineRepresentation rep = MachineRepresentation::kFloat64;
2792  StoreNoWriteBarrier(rep, object, offset, value);
2793 }
2794 
2795 Node* CodeStubAssembler::StoreFeedbackVectorSlot(Node* object,
2796  Node* slot_index_node,
2797  Node* value,
2798  WriteBarrierMode barrier_mode,
2799  int additional_offset,
2800  ParameterMode parameter_mode) {
2801  CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
2802  CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
2803  DCHECK_EQ(additional_offset % kPointerSize, 0);
2804  DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2805  barrier_mode == UPDATE_WRITE_BARRIER);
2806  int header_size =
2807  FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
2808  Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
2809  parameter_mode, header_size);
2810  // Check that slot_index_node < object.length.
2811  CSA_ASSERT(this,
2812  IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
2813  FeedbackVector::kHeaderSize));
2814  if (barrier_mode == SKIP_WRITE_BARRIER) {
2815  return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
2816  value);
2817  } else {
2818  return Store(object, offset, value);
2819  }
2820 }
2821 
2822 void CodeStubAssembler::EnsureArrayLengthWritable(TNode<Map> map,
2823  Label* bailout) {
2824  // Don't support arrays in dictionary named property mode.
2825  GotoIf(IsDictionaryMap(map), bailout);
2826 
2827  // Check whether the length property is writable. The length property is the
2828  // only default named property on arrays. It's nonconfigurable, hence is
2829  // guaranteed to stay the first property.
2830  TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
2831 
2832  int length_index = JSArray::kLengthDescriptorIndex;
2833 #ifdef DEBUG
2834  TNode<Name> maybe_length =
2835  LoadKeyByDescriptorEntry(descriptors, length_index);
2836  CSA_ASSERT(this,
2837  WordEqual(maybe_length, LoadRoot(RootIndex::klength_string)));
2838 #endif
2839 
2840  TNode<Uint32T> details =
2841  LoadDetailsByDescriptorEntry(descriptors, length_index);
2842  GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
2843  bailout);
2844 }
2845 
2846 TNode<Int32T> CodeStubAssembler::EnsureArrayPushable(TNode<Map> map,
2847  Label* bailout) {
2848  // Disallow pushing onto prototypes. It might be the JSArray prototype.
2849  // Disallow pushing onto non-extensible objects.
2850  Comment("Disallow pushing onto prototypes");
2851  Node* bit_field2 = LoadMapBitField2(map);
2852  int mask = Map::IsPrototypeMapBit::kMask | Map::IsExtensibleBit::kMask;
2853  Node* test = Word32And(bit_field2, Int32Constant(mask));
2854  GotoIf(Word32NotEqual(test, Int32Constant(Map::IsExtensibleBit::kMask)),
2855  bailout);
2856 
2857  EnsureArrayLengthWritable(map, bailout);
2858 
2859  TNode<Uint32T> kind = DecodeWord32<Map::ElementsKindBits>(bit_field2);
2860  return Signed(kind);
2861 }
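// The single masked compare above encodes two conditions at once: with
// mask = IsPrototypeMapBit | IsExtensibleBit, only the combination
// "extensible and not a prototype map" produces exactly
// Map::IsExtensibleBit::kMask, so both disallowed states branch to bailout.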
2862 
2863 void CodeStubAssembler::PossiblyGrowElementsCapacity(
2864  ParameterMode mode, ElementsKind kind, Node* array, Node* length,
2865  Variable* var_elements, Node* growth, Label* bailout) {
2866  Label fits(this, var_elements);
2867  Node* capacity =
2868  TaggedToParameter(LoadFixedArrayBaseLength(var_elements->value()), mode);
2869  // length and growth nodes are already in a ParameterMode appropriate
2870  // representation.
2871  Node* new_length = IntPtrOrSmiAdd(growth, length, mode);
2872  GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
2873  Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
2874  var_elements->Bind(GrowElementsCapacity(array, var_elements->value(), kind,
2875  kind, capacity, new_capacity, mode,
2876  bailout));
2877  Goto(&fits);
2878  BIND(&fits);
2879 }
2880 
2881 TNode<Smi> CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
2882  SloppyTNode<JSArray> array,
2883  CodeStubArguments* args,
2884  TVariable<IntPtrT>* arg_index,
2885  Label* bailout) {
2886  CSA_SLOW_ASSERT(this, IsJSArray(array));
2887  Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
2888  Label pre_bailout(this);
2889  Label success(this);
2890  TVARIABLE(Smi, var_tagged_length);
2891  ParameterMode mode = OptimalParameterMode();
2892  VARIABLE(var_length, OptimalParameterRepresentation(),
2893  TaggedToParameter(LoadFastJSArrayLength(array), mode));
2894  VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
2895 
2896  // Resize the capacity of the fixed array if it doesn't fit.
2897  TNode<IntPtrT> first = arg_index->value();
2898  Node* growth = IntPtrToParameter(
2899  IntPtrSub(UncheckedCast<IntPtrT>(args->GetLength(INTPTR_PARAMETERS)),
2900  first),
2901  mode);
2902  PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
2903  &var_elements, growth, &pre_bailout);
2904 
2905  // Push each argument onto the end of the array now that there is enough
2906  // capacity.
2907  CodeStubAssembler::VariableList push_vars({&var_length}, zone());
2908  Node* elements = var_elements.value();
2909  args->ForEach(
2910  push_vars,
2911  [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
2912  TryStoreArrayElement(kind, mode, &pre_bailout, elements,
2913  var_length.value(), arg);
2914  Increment(&var_length, 1, mode);
2915  },
2916  first, nullptr);
2917  {
2918  TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
2919  var_tagged_length = length;
2920  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2921  Goto(&success);
2922  }
2923 
2924  BIND(&pre_bailout);
2925  {
2926  TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
2927  var_tagged_length = length;
2928  Node* diff = SmiSub(length, LoadFastJSArrayLength(array));
2929  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2930  *arg_index = IntPtrAdd(arg_index->value(), SmiUntag(diff));
2931  Goto(bailout);
2932  }
2933 
2934  BIND(&success);
2935  return var_tagged_length.value();
2936 }
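// Bailout protocol sketch: `pre_bailout` does not undo partial progress.
// It commits the length covering the elements already pushed and advances
// *arg_index by the number consumed (SmiUntag(diff)), so a caller's slow
// path can resume with precisely the remaining arguments.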
2937 
2938 void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind,
2939  ParameterMode mode, Label* bailout,
2940  Node* elements, Node* index,
2941  Node* value) {
2942  if (IsSmiElementsKind(kind)) {
2943  GotoIf(TaggedIsNotSmi(value), bailout);
2944  } else if (IsDoubleElementsKind(kind)) {
2945  GotoIfNotNumber(value, bailout);
2946  }
2947  if (IsDoubleElementsKind(kind)) value = ChangeNumberToFloat64(value);
2948  StoreElement(elements, kind, index, value, mode);
2949 }
2950 
2951 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
2952  Node* value, Label* bailout) {
2953  CSA_SLOW_ASSERT(this, IsJSArray(array));
2954  Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
2955  ParameterMode mode = OptimalParameterMode();
2956  VARIABLE(var_length, OptimalParameterRepresentation(),
2957  TaggedToParameter(LoadFastJSArrayLength(array), mode));
2958  VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
2959 
2960  // Resize the capacity of the fixed array if it doesn't fit.
2961  Node* growth = IntPtrOrSmiConstant(1, mode);
2962  PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
2963  &var_elements, growth, bailout);
2964 
2965  // Push each argument onto the end of the array now that there is enough
2966  // capacity.
2967  TryStoreArrayElement(kind, mode, bailout, var_elements.value(),
2968  var_length.value(), value);
2969  Increment(&var_length, 1, mode);
2970 
2971  Node* length = ParameterToTagged(var_length.value(), mode);
2972  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2973 }
2974 
2975 Node* CodeStubAssembler::AllocateCellWithValue(Node* value,
2976  WriteBarrierMode mode) {
2977  Node* result = Allocate(Cell::kSize, kNone);
2978  StoreMapNoWriteBarrier(result, RootIndex::kCellMap);
2979  StoreCellValue(result, value, mode);
2980  return result;
2981 }
2982 
2983 Node* CodeStubAssembler::LoadCellValue(Node* cell) {
2984  CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
2985  return LoadObjectField(cell, Cell::kValueOffset);
2986 }
2987 
2988 Node* CodeStubAssembler::StoreCellValue(Node* cell, Node* value,
2989  WriteBarrierMode mode) {
2990  CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
2991  DCHECK(mode == SKIP_WRITE_BARRIER || mode == UPDATE_WRITE_BARRIER);
2992 
2993  if (mode == UPDATE_WRITE_BARRIER) {
2994  return StoreObjectField(cell, Cell::kValueOffset, value);
2995  } else {
2996  return StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
2997  }
2998 }
2999 
3000 TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber() {
3001  Node* result = Allocate(HeapNumber::kSize, kNone);
3002  RootIndex heap_map_index = RootIndex::kHeapNumberMap;
3003  StoreMapNoWriteBarrier(result, heap_map_index);
3004  return UncheckedCast<HeapNumber>(result);
3005 }
3006 
3007 TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
3008  SloppyTNode<Float64T> value) {
3009  TNode<HeapNumber> result = AllocateHeapNumber();
3010  StoreHeapNumberValue(result, value);
3011  return result;
3012 }
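// Usage sketch: boxing an untagged float64 into a fresh HeapNumber.
//   TNode<HeapNumber> boxed =
//       AllocateHeapNumberWithValue(Float64Constant(0.5));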
3013 
3014 TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumber() {
3015  Node* result = Allocate(MutableHeapNumber::kSize, kNone);
3016  RootIndex heap_map_index = RootIndex::kMutableHeapNumberMap;
3017  StoreMapNoWriteBarrier(result, heap_map_index);
3018  return UncheckedCast<MutableHeapNumber>(result);
3019 }
3020 
3021 TNode<Object> CodeStubAssembler::CloneIfMutablePrimitive(TNode<Object> object) {
3022  TVARIABLE(Object, result, object);
3023  Label done(this);
3024 
3025  GotoIf(TaggedIsSmi(object), &done);
3026  GotoIfNot(IsMutableHeapNumber(UncheckedCast<HeapObject>(object)), &done);
3027  {
3028  // Mutable heap number found; allocate a clone.
3029  TNode<Float64T> value =
3030  LoadHeapNumberValue(UncheckedCast<HeapNumber>(object));
3031  result = AllocateMutableHeapNumberWithValue(value);
3032  Goto(&done);
3033  }
3034 
3035  BIND(&done);
3036  return result.value();
3037 }
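// CloneIfMutablePrimitive matters when a value loaded from one holder is
// about to be stored into another: sharing a MutableHeapNumber between two
// holders would let an in-place write through one become visible through the
// other.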
3038 
3039 TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumberWithValue(
3040  SloppyTNode<Float64T> value) {
3041  TNode<MutableHeapNumber> result = AllocateMutableHeapNumber();
3042  StoreMutableHeapNumberValue(result, value);
3043  return result;
3044 }
3045 
3046 TNode<BigInt> CodeStubAssembler::AllocateBigInt(TNode<IntPtrT> length) {
3047  TNode<BigInt> result = AllocateRawBigInt(length);
3048  StoreBigIntBitfield(result, WordShl(length, BigInt::LengthBits::kShift));
3049  return result;
3050 }
3051 
3052 TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
3053  // This is currently used only for 64-bit wide BigInts. If more general
3054  // applicability is required, a large-object check must be added.
3055  CSA_ASSERT(this, UintPtrLessThan(length, IntPtrConstant(3)));
3056 
3057  TNode<IntPtrT> size = IntPtrAdd(IntPtrConstant(BigInt::kHeaderSize),
3058  Signed(WordShl(length, kPointerSizeLog2)));
3059  Node* raw_result = Allocate(size, kNone);
3060  StoreMapNoWriteBarrier(raw_result, RootIndex::kBigIntMap);
3061  return UncheckedCast<BigInt>(raw_result);
3062 }
3063 
3064 void CodeStubAssembler::StoreBigIntBitfield(TNode<BigInt> bigint,
3065  TNode<WordT> bitfield) {
3066  StoreObjectFieldNoWriteBarrier(bigint, BigInt::kBitfieldOffset, bitfield,
3067  MachineType::PointerRepresentation());
3068 }
3069 
3070 void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint, int digit_index,
3071  TNode<UintPtrT> digit) {
3072  StoreObjectFieldNoWriteBarrier(
3073  bigint, BigInt::kDigitsOffset + digit_index * kPointerSize, digit,
3074  UintPtrT::kMachineRepresentation);
3075 }
3076 
3077 TNode<WordT> CodeStubAssembler::LoadBigIntBitfield(TNode<BigInt> bigint) {
3078  return UncheckedCast<WordT>(
3079  LoadObjectField(bigint, BigInt::kBitfieldOffset, MachineType::UintPtr()));
3080 }
3081 
3082 TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
3083  int digit_index) {
3084  return UncheckedCast<UintPtrT>(LoadObjectField(
3085  bigint, BigInt::kDigitsOffset + digit_index * kPointerSize,
3086  MachineType::UintPtr()));
3087 }
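// Digits are untagged machine words stored directly after the bitfield.
// A sketch of building and reading back a one-digit BigInt (the value 42 is
// chosen arbitrarily):
//   TNode<BigInt> big = AllocateBigInt(IntPtrConstant(1));
//   StoreBigIntDigit(big, 0, Unsigned(IntPtrConstant(42)));
//   TNode<UintPtrT> digit = LoadBigIntDigit(big, 0);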
3088 
3089 TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3090  uint32_t length, AllocationFlags flags) {
3091  Comment("AllocateSeqOneByteString");
3092  if (length == 0) {
3093  return CAST(LoadRoot(RootIndex::kempty_string));
3094  }
3095  Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
3096  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
3097  StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
3098  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3099  Uint32Constant(length),
3100  MachineRepresentation::kWord32);
3101  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
3102  Int32Constant(String::kEmptyHashField),
3103  MachineRepresentation::kWord32);
3104  return CAST(result);
3105 }
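// Usage sketch: a fresh 5-character one-byte string; the characters are left
// uninitialized and must be written by the caller.
//   TNode<String> str = AllocateSeqOneByteString(5, kNone);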
3106 
3107 TNode<BoolT> CodeStubAssembler::IsZeroOrContext(SloppyTNode<Object> object) {
3108  return Select<BoolT>(WordEqual(object, SmiConstant(0)),
3109  [=] { return Int32TrueConstant(); },
3110  [=] { return IsContext(CAST(object)); });
3111 }
3112 
3113 TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3114  Node* context, TNode<Uint32T> length, AllocationFlags flags) {
3115  Comment("AllocateSeqOneByteString");
3116  CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
3117  VARIABLE(var_result, MachineRepresentation::kTagged);
3118 
3119  // Compute the SeqOneByteString size and check if it fits into new space.
3120  Label if_lengthiszero(this), if_sizeissmall(this),
3121  if_notsizeissmall(this, Label::kDeferred), if_join(this);
3122  GotoIf(Word32Equal(length, Uint32Constant(0)), &if_lengthiszero);
3123 
3124  Node* raw_size = GetArrayAllocationSize(
3125  Signed(ChangeUint32ToWord(length)), UINT8_ELEMENTS, INTPTR_PARAMETERS,
3126  SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
3127  TNode<WordT> size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3128  Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3129  &if_sizeissmall, &if_notsizeissmall);
3130 
3131  BIND(&if_sizeissmall);
3132  {
3133  // Just allocate the SeqOneByteString in new space.
3134  TNode<Object> result =
3135  AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
3136  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
3137  StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
3138  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3139  length, MachineRepresentation::kWord32);
3140  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
3141  Int32Constant(String::kEmptyHashField),
3142  MachineRepresentation::kWord32);
3143  var_result.Bind(result);
3144  Goto(&if_join);
3145  }
3146 
3147  BIND(&if_notsizeissmall);
3148  {
3149  // We might need to allocate in large object space; go to the runtime.
3150  Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
3151  ChangeUint32ToTagged(length));
3152  var_result.Bind(result);
3153  Goto(&if_join);
3154  }
3155 
3156  BIND(&if_lengthiszero);
3157  {
3158  var_result.Bind(LoadRoot(RootIndex::kempty_string));
3159  Goto(&if_join);
3160  }
3161 
3162  BIND(&if_join);
3163  return CAST(var_result.value());
3164 }
3165 
3166 TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3167  uint32_t length, AllocationFlags flags) {
3168  Comment("AllocateSeqTwoByteString");
3169  if (length == 0) {
3170  return CAST(LoadRoot(RootIndex::kempty_string));
3171  }
3172  Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
3173  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
3174  StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3175  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3176  Uint32Constant(length),
3177  MachineRepresentation::kWord32);
3178  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
3179  Int32Constant(String::kEmptyHashField),
3180  MachineRepresentation::kWord32);
3181  return CAST(result);
3182 }
3183 
3184 TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3185  Node* context, TNode<Uint32T> length, AllocationFlags flags) {
3186  CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
3187  Comment("AllocateSeqTwoByteString");
3188  VARIABLE(var_result, MachineRepresentation::kTagged);
3189 
3190  // Compute the SeqTwoByteString size and check if it fits into new space.
3191  Label if_lengthiszero(this), if_sizeissmall(this),
3192  if_notsizeissmall(this, Label::kDeferred), if_join(this);
3193  GotoIf(Word32Equal(length, Uint32Constant(0)), &if_lengthiszero);
3194 
3195  Node* raw_size = GetArrayAllocationSize(
3196  Signed(ChangeUint32ToWord(length)), UINT16_ELEMENTS, INTPTR_PARAMETERS,
3197  SeqTwoByteString::kHeaderSize + kObjectAlignmentMask);
3198  TNode<WordT> size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3199  Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3200  &if_sizeissmall, &if_notsizeissmall);
3201 
3202  BIND(&if_sizeissmall);
3203  {
3204  // Just allocate the SeqTwoByteString in new space.
3205  TNode<Object> result =
3206  AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
3207  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
3208  StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3209  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3210  length, MachineRepresentation::kWord32);
3211  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
3212  Int32Constant(String::kEmptyHashField),
3213  MachineRepresentation::kWord32);
3214  var_result.Bind(result);
3215  Goto(&if_join);
3216  }
3217 
3218  BIND(&if_notsizeissmall);
3219  {
3220  // We might need to allocate in large object space; go to the runtime.
3221  Node* result = CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
3222  ChangeUint32ToTagged(length));
3223  var_result.Bind(result);
3224  Goto(&if_join);
3225  }
3226 
3227  BIND(&if_lengthiszero);
3228  {
3229  var_result.Bind(LoadRoot(RootIndex::kempty_string));
3230  Goto(&if_join);
3231  }
3232 
3233  BIND(&if_join);
3234  return CAST(var_result.value());
3235 }
3236 
3237 TNode<String> CodeStubAssembler::AllocateSlicedString(RootIndex map_root_index,
3238  TNode<Uint32T> length,
3239  TNode<String> parent,
3240  TNode<Smi> offset) {
3241  DCHECK(map_root_index == RootIndex::kSlicedOneByteStringMap ||
3242  map_root_index == RootIndex::kSlicedStringMap);
3243  Node* result = Allocate(SlicedString::kSize);
3244  DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
3245  StoreMapNoWriteBarrier(result, map_root_index);
3246  StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
3247  Int32Constant(String::kEmptyHashField),
3248  MachineRepresentation::kWord32);
3249  StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
3250  MachineRepresentation::kWord32);
3251  StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
3252  MachineRepresentation::kTagged);
3253  StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
3254  MachineRepresentation::kTagged);
3255  return CAST(result);
3256 }
3257 
3258 TNode<String> CodeStubAssembler::AllocateSlicedOneByteString(
3259  TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
3260  return AllocateSlicedString(RootIndex::kSlicedOneByteStringMap, length,
3261  parent, offset);
3262 }
3263 
3264 TNode<String> CodeStubAssembler::AllocateSlicedTwoByteString(
3265  TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
3266  return AllocateSlicedString(RootIndex::kSlicedStringMap, length, parent,
3267  offset);
3268 }
3269 
3270 TNode<String> CodeStubAssembler::AllocateConsString(RootIndex map_root_index,
3271  TNode<Uint32T> length,
3272  TNode<String> first,
3273  TNode<String> second,
3274  AllocationFlags flags) {
3275  DCHECK(map_root_index == RootIndex::kConsOneByteStringMap ||
3276  map_root_index == RootIndex::kConsStringMap);
3277  Node* result = Allocate(ConsString::kSize, flags);
3278  DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
3279  StoreMapNoWriteBarrier(result, map_root_index);
3280  StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
3281  MachineRepresentation::kWord32);
3282  StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldOffset,
3283  Int32Constant(String::kEmptyHashField),
3284  MachineRepresentation::kWord32);
3285  bool const new_space = !(flags & kPretenured);
3286  if (new_space) {
3287  StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
3288  MachineRepresentation::kTagged);
3289  StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
3290  MachineRepresentation::kTagged);
3291  } else {
3292  StoreObjectField(result, ConsString::kFirstOffset, first);
3293  StoreObjectField(result, ConsString::kSecondOffset, second);
3294  }
3295  return CAST(result);
3296 }
3297 
3298 TNode<String> CodeStubAssembler::AllocateOneByteConsString(
3299  TNode<Uint32T> length, TNode<String> first, TNode<String> second,
3300  AllocationFlags flags) {
3301  return AllocateConsString(RootIndex::kConsOneByteStringMap, length, first,
3302  second, flags);
3303 }
3304 
3305 TNode<String> CodeStubAssembler::AllocateTwoByteConsString(
3306  TNode<Uint32T> length, TNode<String> first, TNode<String> second,
3307  AllocationFlags flags) {
3308  return AllocateConsString(RootIndex::kConsStringMap, length, first, second,
3309  flags);
3310 }
3311 
3312 TNode<String> CodeStubAssembler::NewConsString(TNode<Uint32T> length,
3313  TNode<String> left,
3314  TNode<String> right,
3315  AllocationFlags flags) {
3316  // The resulting string can be a cons string.
3317  Comment("Allocating ConsString");
3318  Node* left_instance_type = LoadInstanceType(left);
3319  Node* right_instance_type = LoadInstanceType(right);
3320 
3321  // Compute intersection and difference of instance types.
3322  Node* anded_instance_types =
3323  Word32And(left_instance_type, right_instance_type);
3324  Node* xored_instance_types =
3325  Word32Xor(left_instance_type, right_instance_type);
3326 
3327  // We create a one-byte cons string if
3328  // 1. both strings are one-byte, or
3329  // 2. at least one of the strings is two-byte, but happens to contain only
3330  // one-byte characters.
3331  // To do this, we check
3332  // 1. if both strings are one-byte, or if the one-byte data hint is set in
3333  // both strings, or
3334  // 2. if one of the strings has the one-byte data hint set and the other
3335  // string is one-byte.
3336  STATIC_ASSERT(kOneByteStringTag != 0);
3337  STATIC_ASSERT(kOneByteDataHintTag != 0);
3338  Label one_byte_map(this);
3339  Label two_byte_map(this);
3340  TVARIABLE(String, result);
3341  Label done(this, &result);
3342  GotoIf(IsSetWord32(anded_instance_types,
3343  kStringEncodingMask | kOneByteDataHintTag),
3344  &one_byte_map);
3345  Branch(Word32NotEqual(Word32And(xored_instance_types,
3346  Int32Constant(kStringEncodingMask |
3347  kOneByteDataHintMask)),
3348  Int32Constant(kOneByteStringTag | kOneByteDataHintTag)),
3349  &two_byte_map, &one_byte_map);
3350 
3351  BIND(&one_byte_map);
3352  Comment("One-byte ConsString");
3353  result = AllocateOneByteConsString(length, left, right, flags);
3354  Goto(&done);
3355 
3356  BIND(&two_byte_map);
3357  Comment("Two-byte ConsString");
3358  result = AllocateTwoByteConsString(length, left, right, flags);
3359  Goto(&done);
3360 
3361  BIND(&done);
3362 
3363  return result.value();
3364 }
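// Usage sketch (|left|, |right| and the precomputed |combined_length| are
// assumed to be in scope):
//   TNode<String> cons = NewConsString(combined_length, left, right, kNone);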
3365 
3366 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3367  int at_least_space_for) {
3368  return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
3369 }
3370 
3371 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3372  TNode<IntPtrT> at_least_space_for) {
3373  CSA_ASSERT(this, UintPtrLessThanOrEqual(
3374  at_least_space_for,
3375  IntPtrConstant(NameDictionary::kMaxCapacity)));
3376  TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
3377  return AllocateNameDictionaryWithCapacity(capacity);
3378 }
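// Usage sketch: a dictionary with room for at least four properties; the
// backing capacity is rounded up by HashTableComputeCapacity.
//   TNode<NameDictionary> dict = AllocateNameDictionary(4);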
3379 
3380 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
3381  TNode<IntPtrT> capacity) {
3382  CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
3383  CSA_ASSERT(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
3384  TNode<IntPtrT> length = EntryToIndex<NameDictionary>(capacity);
3385  TNode<IntPtrT> store_size = IntPtrAdd(
3386  TimesPointerSize(length), IntPtrConstant(NameDictionary::kHeaderSize));
3387 
3388  TNode<NameDictionary> result =
3389  UncheckedCast<NameDictionary>(AllocateInNewSpace(store_size));
3390  Comment("Initialize NameDictionary");
3391  // Initialize FixedArray fields.
3392  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kNameDictionaryMap));
3393  StoreMapNoWriteBarrier(result, RootIndex::kNameDictionaryMap);
3394  StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
3395  SmiFromIntPtr(length));
3396  // Initialize HashTable fields.
3397  TNode<Smi> zero = SmiConstant(0);
3398  StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
3399  SKIP_WRITE_BARRIER);
3400  StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
3401  zero, SKIP_WRITE_BARRIER);
3402  StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
3403  SmiTag(capacity), SKIP_WRITE_BARRIER);
3404  // Initialize Dictionary fields.
3405  TNode<HeapObject> filler = UndefinedConstant();
3406  StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
3407  SmiConstant(PropertyDetails::kInitialIndex),
3408  SKIP_WRITE_BARRIER);
3409  StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
3410  SmiConstant(PropertyArray::kNoHashSentinel),
3411  SKIP_WRITE_BARRIER);
3412 
3413  // Initialize NameDictionary elements.
3414  TNode<WordT> result_word = BitcastTaggedToWord(result);
3415  TNode<WordT> start_address = IntPtrAdd(
3416  result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
3417  NameDictionary::kElementsStartIndex) -
3418  kHeapObjectTag));
3419  TNode<WordT> end_address = IntPtrAdd(
3420  result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
3421  StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3422  return result;
3423 }
3424 
3425 TNode<NameDictionary> CodeStubAssembler::CopyNameDictionary(
3426  TNode<NameDictionary> dictionary, Label* large_object_fallback) {
3427  Comment("Copy boilerplate property dict");
3428  TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NameDictionary>(dictionary));
3429  CSA_ASSERT(this, IntPtrGreaterThanOrEqual(capacity, IntPtrConstant(0)));
3430  GotoIf(UintPtrGreaterThan(
3431  capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
3432  large_object_fallback);
3433  TNode<NameDictionary> properties =
3434  AllocateNameDictionaryWithCapacity(capacity);
3435  TNode<IntPtrT> length = SmiUntag(LoadFixedArrayBaseLength(dictionary));
3436  CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
3437  SKIP_WRITE_BARRIER, INTPTR_PARAMETERS);
3438  return properties;
3439 }
3440 
3441 template <typename CollectionType>
3442 Node* CodeStubAssembler::AllocateOrderedHashTable() {
3443  static const int kCapacity = CollectionType::kMinCapacity;
3444  static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
3445  static const int kDataTableLength = kCapacity * CollectionType::kEntrySize;
3446  static const int kFixedArrayLength =
3447  CollectionType::kHashTableStartIndex + kBucketCount + kDataTableLength;
3448  static const int kDataTableStartIndex =
3449  CollectionType::kHashTableStartIndex + kBucketCount;
3450 
3451  STATIC_ASSERT(base::bits::IsPowerOfTwo(kCapacity));
3452  STATIC_ASSERT(kCapacity <= CollectionType::kMaxCapacity);
3453 
3454  // Allocate the table and add the proper map.
3455  const ElementsKind elements_kind = HOLEY_ELEMENTS;
3456  TNode<IntPtrT> length_intptr = IntPtrConstant(kFixedArrayLength);
3457  TNode<Map> fixed_array_map =
3458  CAST(LoadRoot(CollectionType::GetMapRootIndex()));
3459  TNode<FixedArray> table =
3460  CAST(AllocateFixedArray(elements_kind, length_intptr,
3461  kAllowLargeObjectAllocation, fixed_array_map));
3462 
3463  // Initialize the OrderedHashTable fields.
3464  const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
3465  StoreFixedArrayElement(table, CollectionType::kNumberOfElementsIndex,
3466  SmiConstant(0), barrier_mode);
3467  StoreFixedArrayElement(table, CollectionType::kNumberOfDeletedElementsIndex,
3468  SmiConstant(0), barrier_mode);
3469  StoreFixedArrayElement(table, CollectionType::kNumberOfBucketsIndex,
3470  SmiConstant(kBucketCount), barrier_mode);
3471 
3472  // Fill the buckets with kNotFound.
3473  TNode<Smi> not_found = SmiConstant(CollectionType::kNotFound);
3474  STATIC_ASSERT(CollectionType::kHashTableStartIndex ==
3475  CollectionType::kNumberOfBucketsIndex + 1);
3476  STATIC_ASSERT((CollectionType::kHashTableStartIndex + kBucketCount) ==
3477  kDataTableStartIndex);
3478  for (int i = 0; i < kBucketCount; i++) {
3479  StoreFixedArrayElement(table, CollectionType::kHashTableStartIndex + i,
3480  not_found, barrier_mode);
3481  }
3482 
3483  // Fill the data table with undefined.
3484  STATIC_ASSERT(kDataTableStartIndex + kDataTableLength == kFixedArrayLength);
3485  for (int i = 0; i < kDataTableLength; i++) {
3486  StoreFixedArrayElement(table, kDataTableStartIndex + i, UndefinedConstant(),
3487  barrier_mode);
3488  }
3489 
3490  return table;
3491 }
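// The resulting FixedArray layout, in index order: the three header fields
// (number of elements, number of deleted elements, number of buckets), then
// kBucketCount bucket heads initialized to kNotFound, and finally
// kCapacity * kEntrySize data slots initialized to undefined.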
3492 
3493 template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashMap>();
3494 template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashSet>();
3495 
3496 template <typename CollectionType>
3497 TNode<CollectionType> CodeStubAssembler::AllocateSmallOrderedHashTable(
3498  TNode<IntPtrT> capacity) {
3499  CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
3500  CSA_ASSERT(this, IntPtrLessThan(
3501  capacity, IntPtrConstant(CollectionType::kMaxCapacity)));
3502 
3503  TNode<IntPtrT> data_table_start_offset =
3504  IntPtrConstant(CollectionType::kDataTableStartOffset);
3505 
3506  TNode<IntPtrT> data_table_size = IntPtrMul(
3507  capacity, IntPtrConstant(CollectionType::kEntrySize * kPointerSize));
3508 
3509  TNode<Int32T> hash_table_size =
3510  Int32Div(TruncateIntPtrToInt32(capacity),
3511  Int32Constant(CollectionType::kLoadFactor));
3512 
3513  TNode<IntPtrT> hash_table_start_offset =
3514  IntPtrAdd(data_table_start_offset, data_table_size);
3515 
3516  TNode<IntPtrT> hash_table_and_chain_table_size =
3517  IntPtrAdd(ChangeInt32ToIntPtr(hash_table_size), capacity);
3518 
3519  TNode<IntPtrT> total_size =
3520  IntPtrAdd(hash_table_start_offset, hash_table_and_chain_table_size);
3521 
3522  TNode<IntPtrT> total_size_word_aligned =
3523  IntPtrAdd(total_size, IntPtrConstant(kPointerSize - 1));
3524  total_size_word_aligned = ChangeInt32ToIntPtr(
3525  Int32Div(TruncateIntPtrToInt32(total_size_word_aligned),
3526  Int32Constant(kPointerSize)));
3527  total_size_word_aligned =
3528  UncheckedCast<IntPtrT>(TimesPointerSize(total_size_word_aligned));
3529 
3530  // Allocate the table and add the proper map.
3531  TNode<Map> small_ordered_hash_map =
3532  CAST(LoadRoot(CollectionType::GetMapRootIndex()));
3533  TNode<Object> table_obj = AllocateInNewSpace(total_size_word_aligned);
3534  StoreMapNoWriteBarrier(table_obj, small_ordered_hash_map);
3535  TNode<CollectionType> table = UncheckedCast<CollectionType>(table_obj);
3536 
3537  // Initialize the SmallOrderedHashTable fields.
3538  StoreObjectByteNoWriteBarrier(
3539  table, CollectionType::kNumberOfBucketsOffset,
3540  Word32And(Int32Constant(0xFF), hash_table_size));
3541  StoreObjectByteNoWriteBarrier(table, CollectionType::kNumberOfElementsOffset,
3542  Int32Constant(0));
3543  StoreObjectByteNoWriteBarrier(
3544  table, CollectionType::kNumberOfDeletedElementsOffset, Int32Constant(0));
3545 
3546  TNode<IntPtrT> table_address =
3547  IntPtrSub(BitcastTaggedToWord(table), IntPtrConstant(kHeapObjectTag));
3548  TNode<IntPtrT> hash_table_start_address =
3549  IntPtrAdd(table_address, hash_table_start_offset);
3550 
3551  // Initialize the HashTable part.
3552  Node* memset = ExternalConstant(ExternalReference::libc_memset_function());
3553  CallCFunction3(MachineType::AnyTagged(), MachineType::Pointer(),
3554  MachineType::IntPtr(), MachineType::UintPtr(), memset,
3555  hash_table_start_address, IntPtrConstant(0xFF),
3556  hash_table_and_chain_table_size);
3557 
3558  // Initialize the DataTable part.
3559  TNode<HeapObject> filler = TheHoleConstant();
3560  TNode<WordT> data_table_start_address =
3561  IntPtrAdd(table_address, data_table_start_offset);
3562  TNode<WordT> data_table_end_address =
3563  IntPtrAdd(data_table_start_address, data_table_size);
3564  StoreFieldsNoWriteBarrier(data_table_start_address, data_table_end_address,
3565  filler);
3566 
3567  return table;
3568 }
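// Unlike the FixedArray-based tables above, the bucket and chain tables here
// are raw bytes: the memset fills both with 0xFF, the byte-wide not-found
// sentinel, while the data table is filled with the-hole like a regular
// tagged array.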
3569 
3570 template TNode<SmallOrderedHashMap>
3571 CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
3572  TNode<IntPtrT> capacity);
3573 template TNode<SmallOrderedHashSet>
3574 CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
3575  TNode<IntPtrT> capacity);
3576 
3577 template <typename CollectionType>
3578 void CodeStubAssembler::FindOrderedHashTableEntry(
3579  Node* table, Node* hash,
3580  const std::function<void(Node*, Label*, Label*)>& key_compare,
3581  Variable* entry_start_position, Label* entry_found, Label* not_found) {
3582  // Get the index of the bucket.
3583  Node* const number_of_buckets = SmiUntag(CAST(LoadFixedArrayElement(
3584  CAST(table), CollectionType::kNumberOfBucketsIndex)));
3585  Node* const bucket =
3586  WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1)));
3587  Node* const first_entry = SmiUntag(CAST(LoadFixedArrayElement(
3588  CAST(table), bucket,
3589  CollectionType::kHashTableStartIndex * kPointerSize)));
3590 
3591  // Walk the bucket chain.
3592  Node* entry_start;
3593  Label if_key_found(this);
3594  {
3595  VARIABLE(var_entry, MachineType::PointerRepresentation(), first_entry);
3596  Label loop(this, {&var_entry, entry_start_position}),
3597  continue_next_entry(this);
3598  Goto(&loop);
3599  BIND(&loop);
3600 
3601  // If the entry index is the not-found sentinel, we are done.
3602  GotoIf(
3603  WordEqual(var_entry.value(), IntPtrConstant(CollectionType::kNotFound)),
3604  not_found);
3605 
3606  // Make sure the entry index is within range.
3607  CSA_ASSERT(
3608  this, UintPtrLessThan(
3609  var_entry.value(),
3610  SmiUntag(SmiAdd(
3611  CAST(LoadFixedArrayElement(
3612  CAST(table), CollectionType::kNumberOfElementsIndex)),
3613  CAST(LoadFixedArrayElement(
3614  CAST(table),
3615  CollectionType::kNumberOfDeletedElementsIndex))))));
3616 
3617  // Compute the index of the entry relative to kHashTableStartIndex.
3618  entry_start =
3619  IntPtrAdd(IntPtrMul(var_entry.value(),
3620  IntPtrConstant(CollectionType::kEntrySize)),
3621  number_of_buckets);
3622 
3623  // Load the key from the entry.
3624  Node* const candidate_key = LoadFixedArrayElement(
3625  CAST(table), entry_start,
3626  CollectionType::kHashTableStartIndex * kPointerSize);
3627 
3628  key_compare(candidate_key, &if_key_found, &continue_next_entry);
3629 
3630  BIND(&continue_next_entry);
3631  // Load the index of the next entry in the bucket chain.
3632  var_entry.Bind(SmiUntag(CAST(LoadFixedArrayElement(
3633  CAST(table), entry_start,
3634  (CollectionType::kHashTableStartIndex + CollectionType::kChainOffset) *
3635  kPointerSize))));
3636 
3637  Goto(&loop);
3638  }
3639 
3640  BIND(&if_key_found);
3641  entry_start_position->Bind(entry_start);
3642  Goto(entry_found);
3643 }
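// Usage sketch (|table|, |hash| and |key| are assumed to be in scope): the
// comparator inspects each candidate key and picks the branch.
//   VARIABLE(entry_start, MachineType::PointerRepresentation());
//   Label found(this), not_found(this);
//   FindOrderedHashTableEntry<OrderedHashMap>(
//       table, hash,
//       [&](Node* candidate, Label* if_same, Label* if_next) {
//         Branch(WordEqual(candidate, key), if_same, if_next);
//       },
//       &entry_start, &found, &not_found);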
3644 
3645 template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashMap>(
3646  Node* table, Node* hash,
3647  const std::function<void(Node*, Label*, Label*)>& key_compare,
3648  Variable* entry_start_position, Label* entry_found, Label* not_found);
3649 template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashSet>(
3650  Node* table, Node* hash,
3651  const std::function<void(Node*, Label*, Label*)>& key_compare,
3652  Variable* entry_start_position, Label* entry_found, Label* not_found);
3653 
3654 Node* CodeStubAssembler::AllocateStruct(Node* map, AllocationFlags flags) {
3655  Comment("AllocateStruct");
3656  CSA_ASSERT(this, IsMap(map));
3657  TNode<IntPtrT> size = TimesPointerSize(LoadMapInstanceSizeInWords(map));
3658  TNode<Object> object = Allocate(size, flags);
3659  StoreMapNoWriteBarrier(object, map);
3660  InitializeStructBody(object, map, size, Struct::kHeaderSize);
3661  return object;
3662 }
3663 
3664 void CodeStubAssembler::InitializeStructBody(Node* object, Node* map,
3665  Node* size, int start_offset) {
3666  CSA_SLOW_ASSERT(this, IsMap(map));
3667  Comment("InitializeStructBody");
3668  Node* filler = UndefinedConstant();
3669  // Calculate the untagged field addresses.
3670  object = BitcastTaggedToWord(object);
3671  Node* start_address =
3672  IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
3673  Node* end_address =
3674  IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
3675  StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3676 }
3677 
3678 Node* CodeStubAssembler::AllocateJSObjectFromMap(
3679  Node* map, Node* properties, Node* elements, AllocationFlags flags,
3680  SlackTrackingMode slack_tracking_mode) {
3681  CSA_ASSERT(this, IsMap(map));
3682  CSA_ASSERT(this, Word32BinaryNot(IsJSFunctionMap(map)));
3683  CSA_ASSERT(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),
3684  JS_GLOBAL_OBJECT_TYPE)));
3685  TNode<IntPtrT> instance_size =
3686  TimesPointerSize(LoadMapInstanceSizeInWords(map));
3687  TNode<Object> object = AllocateInNewSpace(instance_size, flags);
3688  StoreMapNoWriteBarrier(object, map);
3689  InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
3690  slack_tracking_mode);
3691  return object;
3692 }
3693 
3694 void CodeStubAssembler::InitializeJSObjectFromMap(
3695  Node* object, Node* map, Node* instance_size, Node* properties,
3696  Node* elements, SlackTrackingMode slack_tracking_mode) {
3697  CSA_SLOW_ASSERT(this, IsMap(map));
3698  // This helper assumes that the object is in new-space, as guarded by the
3699  // check in AllocateJSObjectFromMap.
3700  if (properties == nullptr) {
3701  CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
3702  StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
3703  RootIndex::kEmptyFixedArray);
3704  } else {
3705  CSA_ASSERT(this, Word32Or(Word32Or(IsPropertyArray(properties),
3706  IsNameDictionary(properties)),
3707  IsEmptyFixedArray(properties)));
3708  StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
3709  properties);
3710  }
3711  if (elements == nullptr) {
3712  StoreObjectFieldRoot(object, JSObject::kElementsOffset,
3713  RootIndex::kEmptyFixedArray);
3714  } else {
3715  CSA_ASSERT(this, IsFixedArray(elements));
3716  StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
3717  }
3718  if (slack_tracking_mode == kNoSlackTracking) {
3719  InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3720  } else {
3721  DCHECK_EQ(slack_tracking_mode, kWithSlackTracking);
3722  InitializeJSObjectBodyWithSlackTracking(object, map, instance_size);
3723  }
3724 }
3725 
3726 void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
3727  Node* object, Node* map, Node* instance_size, int start_offset) {
3728  STATIC_ASSERT(Map::kNoSlackTracking == 0);
3729  CSA_ASSERT(
3730  this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
3731  InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
3732  RootIndex::kUndefinedValue);
3733 }
3734 
3735 void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
3736  Node* object, Node* map, Node* instance_size) {
3737  CSA_SLOW_ASSERT(this, IsMap(map));
3738  Comment("InitializeJSObjectBodyNoSlackTracking");
3739 
3740  // Perform in-object slack tracking if requested.
3741  int start_offset = JSObject::kHeaderSize;
3742  Node* bit_field3 = LoadMapBitField3(map);
3743  Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
3744  STATIC_ASSERT(Map::kNoSlackTracking == 0);
3745  GotoIf(IsSetWord32<Map::ConstructionCounterBits>(bit_field3),
3746  &slack_tracking);
3747  Comment("No slack tracking");
3748  InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3749  Goto(&end);
3750 
3751  BIND(&slack_tracking);
3752  {
3753  Comment("Decrease construction counter");
3754  // Slack tracking is only done on initial maps.
3755  CSA_ASSERT(this, IsUndefined(LoadMapBackPointer(map)));
3756  STATIC_ASSERT(Map::ConstructionCounterBits::kNext == 32);
3757  Node* new_bit_field3 = Int32Sub(
3758  bit_field3, Int32Constant(1 << Map::ConstructionCounterBits::kShift));
3759  StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3,
3760  MachineRepresentation::kWord32);
3761  STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
3762 
3763  // The object still has in-object slack, therefore the |used_or_unused|
3764  // field contains the "used" value.
3765  Node* used_size = TimesPointerSize(ChangeUint32ToWord(
3766  LoadObjectField(map, Map::kUsedOrUnusedInstanceSizeInWordsOffset,
3767  MachineType::Uint8())));
3768 
3769  Comment("iInitialize filler fields");
3770  InitializeFieldsWithRoot(object, used_size, instance_size,
3771  RootIndex::kOnePointerFillerMap);
3772 
3773  Comment("Initialize undefined fields");
3774  InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
3775  RootIndex::kUndefinedValue);
3776 
3777  STATIC_ASSERT(Map::kNoSlackTracking == 0);
3778  GotoIf(IsClearWord32<Map::ConstructionCounterBits>(new_bit_field3),
3779  &complete);
3780  Goto(&end);
3781  }
3782 
3783  // Finalize the instance size.
3784  BIND(&complete);
3785  {
3786  // CompleteInobjectSlackTrackingForMap doesn't allocate and thus doesn't
3787  // need a context.
3788  CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
3789  NoContextConstant(), map);
3790  Goto(&end);
3791  }
3792 
3793  BIND(&end);
3794 }
3795 
3796 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
3797  Node* end_address,
3798  Node* value) {
3799  Comment("StoreFieldsNoWriteBarrier");
3800  CSA_ASSERT(this, WordIsWordAligned(start_address));
3801  CSA_ASSERT(this, WordIsWordAligned(end_address));
3802  BuildFastLoop(start_address, end_address,
3803  [this, value](Node* current) {
3804  StoreNoWriteBarrier(MachineRepresentation::kTagged, current,
3805  value);
3806  },
3807  kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
3808 }
3809 
3810 TNode<BoolT> CodeStubAssembler::IsValidFastJSArrayCapacity(
3811  Node* capacity, ParameterMode capacity_mode) {
3812  return UncheckedCast<BoolT>(
3813  UintPtrLessThanOrEqual(ParameterToIntPtr(capacity, capacity_mode),
3814  IntPtrConstant(JSArray::kMaxFastArrayLength)));
3815 }
3816 
3817 TNode<JSArray> CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
3818  TNode<Map> array_map, TNode<Smi> length, Node* allocation_site) {
3819  Comment("begin allocation of JSArray without elements");
3820  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3821 
3822  int base_size = JSArray::kSize;
3823  if (allocation_site != nullptr) {
3824  base_size += AllocationMemento::kSize;
3825  }
3826 
3827  TNode<IntPtrT> size = IntPtrConstant(base_size);
3828  return AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3829 }
3830 
3831 std::pair<TNode<JSArray>, TNode<FixedArrayBase>>
3832 CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
3833  ElementsKind kind, TNode<Map> array_map, TNode<Smi> length,
3834  Node* allocation_site, Node* capacity, ParameterMode capacity_mode,
3835  AllocationFlags allocation_flags) {
3836  Comment("begin allocation of JSArray with elements");
3837  CHECK_EQ(allocation_flags & ~kAllowLargeObjectAllocation, 0);
3838  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3839 
3840  int base_size = JSArray::kSize;
3841  if (allocation_site != nullptr) base_size += AllocationMemento::kSize;
3842 
3843  const int elements_offset = base_size;
3844 
3845  // Compute space for elements
3846  base_size += FixedArray::kHeaderSize;
3847  TNode<IntPtrT> size =
3848  ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);
3849 
3850  TVARIABLE(JSArray, array);
3851  TVARIABLE(FixedArrayBase, elements);
3852 
3853  Label out(this);
3854 
3855  // For very large arrays in which the requested allocation exceeds the
3856  // maximal size of a regular heap object, we cannot use the allocation
3857  // folding trick. Instead, we first allocate the elements in large object
3858  // space, and then allocate the JSArray (and possibly the allocation memento)
3859  // in new space.
3860  if (allocation_flags & kAllowLargeObjectAllocation) {
3861  Label next(this);
3862  GotoIf(IsRegularHeapObjectSize(size), &next);
3863 
3864  CSA_CHECK(this, IsValidFastJSArrayCapacity(capacity, capacity_mode));
3865 
3866  // Allocate and initialize the elements first. Full initialization is needed
3867  // because the upcoming JSArray allocation could trigger GC.
3868  elements =
3869  AllocateFixedArray(kind, capacity, capacity_mode, allocation_flags);
3870 
3871  if (IsDoubleElementsKind(kind)) {
3872  FillFixedDoubleArrayWithZero(CAST(elements.value()),
3873  ParameterToIntPtr(capacity, capacity_mode));
3874  } else {
3875  FillFixedArrayWithSmiZero(CAST(elements.value()),
3876  ParameterToIntPtr(capacity, capacity_mode));
3877  }
3878 
3879  // Next, allocate the JSArray and possibly the allocation memento. Note
3880  // that allocation_flags are *not* passed on here and the resulting JSArray
3881  // will always be in new space.
3882  array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
3883  allocation_site);
3884  StoreObjectFieldNoWriteBarrier(array.value(), JSObject::kElementsOffset,
3885  elements.value());
3886 
3887  Goto(&out);
3888 
3889  BIND(&next);
3890  }
3891 
3892  // Fold all objects into a single new space allocation.
3893  array =
3894  AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3895  elements = UncheckedCast<FixedArrayBase>(
3896  InnerAllocate(array.value(), elements_offset));
3897 
3898  StoreObjectFieldNoWriteBarrier(array.value(), JSObject::kElementsOffset,
3899  elements.value());
3900 
3901  // Set up the elements object.
3902  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
3903  RootIndex elements_map_index = IsDoubleElementsKind(kind)
3904  ? RootIndex::kFixedDoubleArrayMap
3905  : RootIndex::kFixedArrayMap;
3906  DCHECK(RootsTable::IsImmortalImmovable(elements_map_index));
3907  StoreMapNoWriteBarrier(elements.value(), elements_map_index);
3908 
3909  TNode<Smi> capacity_smi = ParameterToTagged(capacity, capacity_mode);
3910  CSA_ASSERT(this, SmiGreaterThan(capacity_smi, SmiConstant(0)));
3911  StoreObjectFieldNoWriteBarrier(elements.value(), FixedArray::kLengthOffset,
3912  capacity_smi);
3913  Goto(&out);
3914 
3915  BIND(&out);
3916  return {array.value(), elements.value()};
3917 }
3918 
3919 TNode<JSArray> CodeStubAssembler::AllocateUninitializedJSArray(
3920  TNode<Map> array_map, TNode<Smi> length, Node* allocation_site,
3921  TNode<IntPtrT> size_in_bytes) {
3922  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3923 
3924  // Allocate space for the JSArray and the elements FixedArray in one go.
3925  TNode<Object> array = AllocateInNewSpace(size_in_bytes);
3926 
3927  StoreMapNoWriteBarrier(array, array_map);
3928  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
3929  StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
3930  RootIndex::kEmptyFixedArray);
3931 
3932  if (allocation_site != nullptr) {
3933  InitializeAllocationMemento(array, IntPtrConstant(JSArray::kSize),
3934  allocation_site);
3935  }
3936 
3937  return CAST(array);
3938 }
3939 
3940 TNode<JSArray> CodeStubAssembler::AllocateJSArray(
3941  ElementsKind kind, TNode<Map> array_map, Node* capacity, TNode<Smi> length,
3942  Node* allocation_site, ParameterMode capacity_mode,
3943  AllocationFlags allocation_flags) {
3944  CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3945  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));
3946 
3947  TNode<JSArray> array;
3948  TNode<FixedArrayBase> elements;
3949  int capacity_as_constant;
3950 
3951  if (IsIntPtrOrSmiConstantZero(capacity, capacity_mode)) {
3952  // Array is empty. Use the shared empty fixed array instead of allocating a
3953  // new one.
3954  array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
3955  allocation_site);
3956  StoreObjectFieldRoot(array, JSArray::kElementsOffset,
3957  RootIndex::kEmptyFixedArray);
3958  } else if (TryGetIntPtrOrSmiConstantValue(capacity, &capacity_as_constant,
3959  capacity_mode)) {
3960  CHECK_GT(capacity_as_constant, 0);
3961  // Allocate both array and elements object, and initialize the JSArray.
3962  std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
3963  kind, array_map, length, allocation_site, capacity, capacity_mode,
3964  allocation_flags);
3965  // Fill in the elements with holes.
3966  FillFixedArrayWithValue(kind, elements,
3967  IntPtrOrSmiConstant(0, capacity_mode), capacity,
3968  RootIndex::kTheHoleValue, capacity_mode);
3969  } else {
3970  Label out(this), empty(this), nonempty(this);
3971  TVARIABLE(JSArray, var_array);
3972 
3973  Branch(SmiEqual(ParameterToTagged(capacity, capacity_mode), SmiConstant(0)),
3974  &empty, &nonempty);
3975 
3976  BIND(&empty);
3977  {
3978  // Array is empty. Use the shared empty fixed array instead of allocating
3979  // a new one.
3980  var_array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
3981  allocation_site);
3982  StoreObjectFieldRoot(var_array.value(), JSArray::kElementsOffset,
3983  RootIndex::kEmptyFixedArray);
3984  Goto(&out);
3985  }
3986 
3987  BIND(&nonempty);
3988  {
3989  // Allocate both array and elements object, and initialize the JSArray.
3990  TNode<JSArray> array;
3991  std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
3992  kind, array_map, length, allocation_site, capacity, capacity_mode,
3993  allocation_flags);
3994  var_array = array;
3995  // Fill in the elements with holes.
3996  FillFixedArrayWithValue(kind, elements,
3997  IntPtrOrSmiConstant(0, capacity_mode), capacity,
3998  RootIndex::kTheHoleValue, capacity_mode);
3999  Goto(&out);
4000  }
4001 
4002  BIND(&out);
4003  array = var_array.value();
4004  }
4005 
4006  return array;
4007 }
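// Usage sketch (|array_map| is assumed to be the context's HOLEY_ELEMENTS
// array map): a holey array with capacity 16 and initial length 0.
//   TNode<JSArray> a =
//       AllocateJSArray(HOLEY_ELEMENTS, array_map, IntPtrConstant(16),
//                       SmiConstant(0), nullptr, INTPTR_PARAMETERS, kNone);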
4008 
4009 Node* CodeStubAssembler::ExtractFastJSArray(Node* context, Node* array,
4010  Node* begin, Node* count,
4011  ParameterMode mode, Node* capacity,
4012  Node* allocation_site) {
4013  Node* original_array_map = LoadMap(array);
4014  Node* elements_kind = LoadMapElementsKind(original_array_map);
4015 
4016  // Use the canonical map for the Array's ElementsKind.
4017  Node* native_context = LoadNativeContext(context);
4018  TNode<Map> array_map = LoadJSArrayElementsMap(elements_kind, native_context);
4019 
4020  Node* new_elements = ExtractFixedArray(
4021  LoadElements(array), begin, count, capacity,
4022  ExtractFixedArrayFlag::kAllFixedArrays, mode, nullptr, elements_kind);
4023 
4024  TNode<Object> result = AllocateUninitializedJSArrayWithoutElements(
4025  array_map, ParameterToTagged(count, mode), allocation_site);
4026  StoreObjectField(result, JSObject::kElementsOffset, new_elements);
4027  return result;
4028 }
4029 
4030 Node* CodeStubAssembler::CloneFastJSArray(Node* context, Node* array,
4031  ParameterMode mode,
4032  Node* allocation_site,
4033  HoleConversionMode convert_holes) {
4034  // TODO(dhai): we should be able to assert IsFastJSArray(array) here, but this
4035  // function is also used to copy boilerplates even when the no-elements
4036  // protector is invalid. This function should be renamed to reflect its uses.
4037  CSA_ASSERT(this, IsJSArray(array));
4038 
4039  Node* length = LoadJSArrayLength(array);
4040  Node* new_elements = nullptr;
4041  VARIABLE(var_new_elements, MachineRepresentation::kTagged);
4042  TVARIABLE(Int32T, var_elements_kind, LoadMapElementsKind(LoadMap(array)));
4043 
4044  Label allocate_jsarray(this), holey_extract(this);
4045 
4046  bool need_conversion =
4047  convert_holes == HoleConversionMode::kConvertToUndefined;
4048  if (need_conversion) {
4049  // We need to take care of holes if the array is of a holey elements kind.
4050  GotoIf(IsHoleyFastElementsKind(var_elements_kind.value()), &holey_extract);
4051  }
4052 
4053  // Simple extraction that preserves holes.
4054  new_elements =
4055  ExtractFixedArray(LoadElements(array), IntPtrOrSmiConstant(0, mode),
4056  TaggedToParameter(length, mode), nullptr,
4057  ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW, mode,
4058  nullptr, var_elements_kind.value());
4059  var_new_elements.Bind(new_elements);
4060  Goto(&allocate_jsarray);
4061 
4062  if (need_conversion) {
4063  BIND(&holey_extract);
4064  // Convert holes to undefined.
4065  TVARIABLE(BoolT, var_holes_converted, Int32FalseConstant());
4066  // Copy |array|'s elements store. The copy will be compatible with the
4067  // original elements kind unless there are holes in the source. Any holes
4068  // get converted to undefined, hence in that case the copy is compatible
4069  // only with PACKED_ELEMENTS and HOLEY_ELEMENTS, and we will choose
4070  // PACKED_ELEMENTS. Also, if we want to replace holes, we must not use
4071  // ExtractFixedArrayFlag::kDontCopyCOW.
4072  new_elements = ExtractFixedArray(
4073  LoadElements(array), IntPtrOrSmiConstant(0, mode),
4074  TaggedToParameter(length, mode), nullptr,
4075  ExtractFixedArrayFlag::kAllFixedArrays, mode, &var_holes_converted);
4076  var_new_elements.Bind(new_elements);
4077  // If the array type didn't change, use the original elements kind.
4078  GotoIfNot(var_holes_converted.value(), &allocate_jsarray);
4079  // Otherwise use PACKED_ELEMENTS for the target's elements kind.
4080  var_elements_kind = Int32Constant(PACKED_ELEMENTS);
4081  Goto(&allocate_jsarray);
4082  }
4083 
4084  BIND(&allocate_jsarray);
4085  // Use the canonical map for the chosen elements kind.
4086  Node* native_context = LoadNativeContext(context);
4087  TNode<Map> array_map =
4088  LoadJSArrayElementsMap(var_elements_kind.value(), native_context);
4089 
4090  TNode<Object> result = AllocateUninitializedJSArrayWithoutElements(
4091  array_map, CAST(length), allocation_site);
4092  StoreObjectField(result, JSObject::kElementsOffset, var_new_elements.value());
4093  return result;
4094 }
4095 
4096 TNode<FixedArrayBase> CodeStubAssembler::AllocateFixedArray(
4097  ElementsKind kind, Node* capacity, ParameterMode mode,
4098  AllocationFlags flags, SloppyTNode<Map> fixed_array_map) {
4099  Comment("AllocateFixedArray");
4100  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
4101  CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity,
4102  IntPtrOrSmiConstant(0, mode), mode));
4103  TNode<IntPtrT> total_size = GetFixedArrayAllocationSize(capacity, kind, mode);
4104 
4105  if (IsDoubleElementsKind(kind)) flags |= kDoubleAlignment;
4106  // Allocate the FixedArray with the computed total size.
4107  Node* array = Allocate(total_size, flags);
4108  if (fixed_array_map != nullptr) {
4109  // Conservatively, only skip the write barrier if there are no allocation
4110  // flags; this ensures that the object hasn't ended up in LOS. Note that the
4111  // fixed array map is currently always immortal and technically wouldn't
4112  // need the write barrier even in LOS, but it's better to not take chances
4113  // in case this invariant changes later, since it's difficult to enforce
4114  // locally here.
4115  if (flags == CodeStubAssembler::kNone) {
4116  StoreMapNoWriteBarrier(array, fixed_array_map);
4117  } else {
4118  StoreMap(array, fixed_array_map);
4119  }
4120  } else {
4121  RootIndex map_index = IsDoubleElementsKind(kind)
4122  ? RootIndex::kFixedDoubleArrayMap
4123  : RootIndex::kFixedArrayMap;
4124  DCHECK(RootsTable::IsImmortalImmovable(map_index));
4125  StoreMapNoWriteBarrier(array, map_index);
4126  }
4127  StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
4128  ParameterToTagged(capacity, mode));
4129  return UncheckedCast<FixedArray>(array);
4130 }
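// Usage sketch: a three-element FixedArray with the default map; the element
// slots are left uninitialized, so the caller must fill them before the next
// point at which the GC can observe the array.
//   TNode<FixedArrayBase> fa = AllocateFixedArray(
//       PACKED_ELEMENTS, IntPtrConstant(3), INTPTR_PARAMETERS, kNone);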
4131 
4132 TNode<FixedArray> CodeStubAssembler::ExtractToFixedArray(
4133  Node* source, Node* first, Node* count, Node* capacity, Node* source_map,
4134  ElementsKind from_kind, AllocationFlags allocation_flags,
4135  ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
4136  HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted,
4137  Node* source_elements_kind) {
4138  DCHECK_NE(first, nullptr);
4139  DCHECK_NE(count, nullptr);
4140  DCHECK_NE(capacity, nullptr);
4141  DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays);
4142  CSA_ASSERT(this,
4143  WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity));
4144  CSA_ASSERT(this, WordEqual(source_map, LoadMap(source)));
4145 
4146  VARIABLE(var_result, MachineRepresentation::kTagged);
4147  VARIABLE(var_target_map, MachineRepresentation::kTagged, source_map);
4148 
4149  Label done(this, {&var_result}), is_cow(this),
4150  new_space_check(this, {&var_target_map});
4151 
4152  // If source_map is FixedDoubleArrayMap, or FixedCOWArrayMap when we can't
4153  // simply reuse the COW array, use FixedArrayMap as the target map.
4154  // Otherwise, use source_map as the target map.
4155  if (IsDoubleElementsKind(from_kind)) {
4156  CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
4157  var_target_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
4158  Goto(&new_space_check);
4159  } else {
4160  CSA_ASSERT(this, Word32BinaryNot(IsFixedDoubleArrayMap(source_map)));
4161  Branch(WordEqual(var_target_map.value(),
4162  LoadRoot(RootIndex::kFixedCOWArrayMap)),
4163  &is_cow, &new_space_check);
4164 
4165  BIND(&is_cow);
4166  {
4167  // |source| is a COW array, so we don't actually need to allocate a new
4168  // array unless:
4169  // 1) |extract_flags| forces us to, or
4170  // 2) we're asked to extract only part of the |source| (|first| != 0).
4171  if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
4172  Branch(WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), first),
4173  &new_space_check, [&] {
4174  var_result.Bind(source);
4175  Goto(&done);
4176  });
4177  } else {
4178  var_target_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
4179  Goto(&new_space_check);
4180  }
4181  }
4182  }
4183 
4184  BIND(&new_space_check);
4185  {
4186  bool handle_old_space = true;
4187  if (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly) {
4188  handle_old_space = false;
4189  CSA_ASSERT(this, Word32BinaryNot(FixedArraySizeDoesntFitInNewSpace(
4190  count, FixedArray::kHeaderSize, parameter_mode)));
4191  } else {
4192  int constant_count;
4193  handle_old_space =
4194  !TryGetIntPtrOrSmiConstantValue(count, &constant_count,
4195  parameter_mode) ||
4196  (constant_count >
4197  FixedArray::GetMaxLengthForNewSpaceAllocation(PACKED_ELEMENTS));
4198  }
4199 
4200  Label old_space(this, Label::kDeferred);
4201  if (handle_old_space) {
4202  GotoIfFixedArraySizeDoesntFitInNewSpace(
4203  capacity, &old_space, FixedArray::kHeaderSize, parameter_mode);
4204  }
4205 
4206  Comment("Copy FixedArray new space");
4207  // We use PACKED_ELEMENTS to tell AllocateFixedArray and
4208  // CopyFixedArrayElements that we want a FixedArray.
4209  const ElementsKind to_kind = PACKED_ELEMENTS;
4210  TNode<FixedArrayBase> to_elements =
4211  AllocateFixedArray(to_kind, capacity, parameter_mode,
4212  AllocationFlag::kNone, var_target_map.value());
4213  var_result.Bind(to_elements);
4214 
4215  if (convert_holes == HoleConversionMode::kDontConvert &&
4216  !IsDoubleElementsKind(from_kind)) {
4217  // We can use CopyElements (memcpy) because we don't need to replace or
4218  // convert any values. Since {to_elements} is in new-space, CopyElements
4219  // will efficiently use memcpy.
4220  FillFixedArrayWithValue(to_kind, to_elements, count, capacity,
4221  RootIndex::kTheHoleValue, parameter_mode);
4222  CopyElements(to_kind, to_elements, IntPtrConstant(0), CAST(source),
4223  ParameterToIntPtr(first, parameter_mode),
4224  ParameterToIntPtr(count, parameter_mode),
4225  SKIP_WRITE_BARRIER);
4226  } else {
4227  CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
4228  count, capacity, SKIP_WRITE_BARRIER,
4229  parameter_mode, convert_holes,
4230  var_holes_converted);
4231  }
4232  Goto(&done);
4233 
4234  if (handle_old_space) {
4235  BIND(&old_space);
4236  {
4237  Comment("Copy FixedArray old space");
4238  Label copy_one_by_one(this);
4239 
4240  // Try to use memcpy if we don't need to convert holes to undefined.
4241  if (convert_holes == HoleConversionMode::kDontConvert &&
4242  source_elements_kind != nullptr) {
4243  // Only try memcpy if we're not copying object pointers.
4244  GotoIfNot(IsFastSmiElementsKind(source_elements_kind),
4245  &copy_one_by_one);
4246 
4247  const ElementsKind to_smi_kind = PACKED_SMI_ELEMENTS;
4248  to_elements =
4249  AllocateFixedArray(to_smi_kind, capacity, parameter_mode,
4250  allocation_flags, var_target_map.value());
4251  var_result.Bind(to_elements);
4252 
4253  FillFixedArrayWithValue(to_smi_kind, to_elements, count, capacity,
4254  RootIndex::kTheHoleValue, parameter_mode);
4255  // CopyElements will try to use memcpy if it doesn't conflict with the
4256  // GC. Otherwise it will copy element by element, but skip write
4257  // barriers (since we're copying Smis to Smis).
4258  CopyElements(to_smi_kind, to_elements, IntPtrConstant(0),
4259  CAST(source), ParameterToIntPtr(first, parameter_mode),
4260  ParameterToIntPtr(count, parameter_mode),
4261  SKIP_WRITE_BARRIER);
4262  Goto(&done);
4263  } else {
4264  Goto(&copy_one_by_one);
4265  }
4266 
4267  BIND(&copy_one_by_one);
4268  {
4269  to_elements =
4270  AllocateFixedArray(to_kind, capacity, parameter_mode,
4271  allocation_flags, var_target_map.value());
4272  var_result.Bind(to_elements);
4273  CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
4274  count, capacity, UPDATE_WRITE_BARRIER,
4275  parameter_mode, convert_holes,
4276  var_holes_converted);
4277  Goto(&done);
4278  }
4279  }
4280  }
4281  }
4282 
4283  BIND(&done);
4284  return UncheckedCast<FixedArray>(var_result.value());
4285 }
4286 
4287 TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedDoubleArrayFillingHoles(
4288  Node* from_array, Node* first, Node* count, Node* capacity,
4289  Node* fixed_array_map, TVariable<BoolT>* var_holes_converted,
4290  AllocationFlags allocation_flags, ExtractFixedArrayFlags extract_flags,
4291  ParameterMode mode) {
4292  DCHECK_NE(first, nullptr);
4293  DCHECK_NE(count, nullptr);
4294  DCHECK_NE(capacity, nullptr);
4295  DCHECK_NE(var_holes_converted, nullptr);
4296  CSA_ASSERT(this, IsFixedDoubleArrayMap(fixed_array_map));
4297 
4298  VARIABLE(var_result, MachineRepresentation::kTagged);
4299  const ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
4300  Node* to_elements = AllocateFixedArray(kind, capacity, mode, allocation_flags,
4301  fixed_array_map);
4302  var_result.Bind(to_elements);
4303  // We first try to copy the FixedDoubleArray to a new FixedDoubleArray.
4304  // |var_holes_converted| is initially set to False.
4305  *var_holes_converted = Int32FalseConstant();
4306 
4307  // The construction of the loop and the offsets for double elements are
4308  // adapted from CopyFixedArrayElements.
4309  CSA_SLOW_ASSERT(this, MatchesParameterMode(count, mode));
4310  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
4311  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, kind));
4312  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
4313 
4314  Comment("[ ExtractFixedDoubleArrayFillingHoles");
4315 
4316  // This copy can trigger GC, so we pre-initialize the array with holes.
4317  FillFixedArrayWithValue(kind, to_elements, IntPtrOrSmiConstant(0, mode),
4318  capacity, RootIndex::kTheHoleValue, mode);
4319 
4320  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
4321  Node* first_from_element_offset =
4322  ElementOffsetFromIndex(first, kind, mode, 0);
4323  Node* limit_offset = IntPtrAdd(first_from_element_offset,
4324  IntPtrConstant(first_element_offset));
4325  VARIABLE(var_from_offset, MachineType::PointerRepresentation(),
4326  ElementOffsetFromIndex(IntPtrOrSmiAdd(first, count, mode), kind,
4327  mode, first_element_offset));
4328 
4329  Label decrement(this, {&var_from_offset}), done(this);
4330  Node* to_array_adjusted =
4331  IntPtrSub(BitcastTaggedToWord(to_elements), first_from_element_offset);
4332 
4333  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
4334 
4335  BIND(&decrement);
4336  {
4337  Node* from_offset =
4338  IntPtrSub(var_from_offset.value(), IntPtrConstant(kDoubleSize));
4339  var_from_offset.Bind(from_offset);
4340 
4341  Node* to_offset = from_offset;
4342 
4343  Label if_hole(this);
4344 
4345  Node* value = LoadElementAndPrepareForStore(
4346  from_array, var_from_offset.value(), kind, kind, &if_hole);
4347 
4348  StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
4349  to_offset, value);
4350 
4351  Node* compare = WordNotEqual(from_offset, limit_offset);
4352  Branch(compare, &decrement, &done);
4353 
4354  BIND(&if_hole);
4355  // We are unlucky: there are holes! We need to restart the copy; this
4356  // time we copy the FixedDoubleArray to a new FixedArray, replacing holes
4357  // with undefined. We signal this to the caller through
4358  // |var_holes_converted|.
4359  *var_holes_converted = Int32TrueConstant();
4360  to_elements =
4361  ExtractToFixedArray(from_array, first, count, capacity, fixed_array_map,
4362  kind, allocation_flags, extract_flags, mode,
4363  HoleConversionMode::kConvertToUndefined);
4364  var_result.Bind(to_elements);
4365  Goto(&done);
4366  }
4367 
4368  BIND(&done);
4369  Comment("] ExtractFixedDoubleArrayFillingHoles");
4370  return UncheckedCast<FixedArrayBase>(var_result.value());
4371 }
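
// A scalar sketch of the copy loop above, in plain C++ rather than CSA
// nodes (illustrative only; the names and the flat uint64_t view of the
// backing store are simplifying assumptions). Like the generated code, it
// walks backwards and bails out to the caller on the first hole:
//
//   bool CopyDoublesDetectingHoles(const uint64_t* from, uint64_t* to,
//                                  size_t first, size_t count) {
//     for (size_t i = first + count; i-- > first;) {
//       if (from[i] == kHoleNanInt64) return false;  // Hole: restart needed.
//       to[i] = from[i];
//     }
//     return true;
//   }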
4372 
4373 TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
4374  Node* source, Node* first, Node* count, Node* capacity,
4375  ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
4376  TVariable<BoolT>* var_holes_converted, Node* source_runtime_kind) {
4377  DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays ||
4378  extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays);
4379  // If we want to replace holes, ExtractFixedArrayFlag::kDontCopyCOW should not
4380  // be used, because that disables the iteration which detects holes.
4381  DCHECK_IMPLIES(var_holes_converted != nullptr,
4382  !(extract_flags & ExtractFixedArrayFlag::kDontCopyCOW));
4383  HoleConversionMode convert_holes =
4384  var_holes_converted != nullptr ? HoleConversionMode::kConvertToUndefined
4385  : HoleConversionMode::kDontConvert;
4386  VARIABLE(var_result, MachineRepresentation::kTagged);
4387  const AllocationFlags allocation_flags =
4388  (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly)
4389  ? CodeStubAssembler::kNone
4390  : CodeStubAssembler::kAllowLargeObjectAllocation;
4391  if (first == nullptr) {
4392  first = IntPtrOrSmiConstant(0, parameter_mode);
4393  }
4394  if (count == nullptr) {
4395  count = IntPtrOrSmiSub(
4396  TaggedToParameter(LoadFixedArrayBaseLength(source), parameter_mode),
4397  first, parameter_mode);
4398 
4399  CSA_ASSERT(
4400  this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant(0, parameter_mode),
4401  count, parameter_mode));
4402  }
4403  if (capacity == nullptr) {
4404  capacity = count;
4405  } else {
4406  CSA_ASSERT(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
4407  IntPtrOrSmiAdd(first, count, parameter_mode), capacity,
4408  parameter_mode)));
4409  }
4410 
4411  Label if_fixed_double_array(this), empty(this), done(this, {&var_result});
4412  Node* source_map = LoadMap(source);
4413  GotoIf(WordEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity), &empty);
4414 
4415  if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
4416  if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
4417  GotoIf(IsFixedDoubleArrayMap(source_map), &if_fixed_double_array);
4418  } else {
4419  CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
4420  }
4421  }
4422 
4423  if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
4424  // Here we can only get |source| as FixedArray, never FixedDoubleArray.
4425  // PACKED_ELEMENTS is used to signify that the source is a FixedArray.
4426  Node* to_elements = ExtractToFixedArray(
4427  source, first, count, capacity, source_map, PACKED_ELEMENTS,
4428  allocation_flags, extract_flags, parameter_mode, convert_holes,
4429  var_holes_converted, source_runtime_kind);
4430  var_result.Bind(to_elements);
4431  Goto(&done);
4432  }
4433 
4434  if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
4435  BIND(&if_fixed_double_array);
4436  Comment("Copy FixedDoubleArray");
4437 
4438  if (convert_holes == HoleConversionMode::kConvertToUndefined) {
4439  Node* to_elements = ExtractFixedDoubleArrayFillingHoles(
4440  source, first, count, capacity, source_map, var_holes_converted,
4441  allocation_flags, extract_flags, parameter_mode);
4442  var_result.Bind(to_elements);
4443  } else {
4444  // We use PACKED_DOUBLE_ELEMENTS to signify that both the source and
4445  // the target are FixedDoubleArray. Whether it is PACKED or HOLEY does
4446  // not matter.
4447  ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
4448  Node* to_elements = AllocateFixedArray(kind, capacity, parameter_mode,
4449  allocation_flags, source_map);
4450  var_result.Bind(to_elements);
4451  CopyFixedArrayElements(kind, source, kind, to_elements, first, count,
4452  capacity, SKIP_WRITE_BARRIER, parameter_mode);
4453  }
4454 
4455  Goto(&done);
4456  }
4457 
4458  BIND(&empty);
4459  {
4460  Comment("Copy empty array");
4461 
4462  var_result.Bind(EmptyFixedArrayConstant());
4463  Goto(&done);
4464  }
4465 
4466  BIND(&done);
4467  return UncheckedCast<FixedArray>(var_result.value());
4468 }
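
// A minimal usage sketch (assumed context: inside another CSA builtin; all
// names are illustrative, and the combined flag kAllFixedArrays is assumed
// to be kFixedArrays | kFixedDoubleArrays). This extracts elements
// [1, length) of |source| into a fresh array, letting ExtractFixedArray
// choose the tagged or double path from the source map at runtime:
//
//   Node* first = IntPtrConstant(1);
//   TNode<FixedArrayBase> copy = ExtractFixedArray(
//       source, first, /* count */ nullptr, /* capacity */ nullptr,
//       ExtractFixedArrayFlag::kAllFixedArrays, INTPTR_PARAMETERS);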
4469 
4470 void CodeStubAssembler::InitializePropertyArrayLength(Node* property_array,
4471  Node* length,
4472  ParameterMode mode) {
4473  CSA_SLOW_ASSERT(this, IsPropertyArray(property_array));
4474  CSA_ASSERT(
4475  this, IntPtrOrSmiGreaterThan(length, IntPtrOrSmiConstant(0, mode), mode));
4476  CSA_ASSERT(
4477  this,
4478  IntPtrOrSmiLessThanOrEqual(
4479  length, IntPtrOrSmiConstant(PropertyArray::LengthField::kMax, mode),
4480  mode));
4481  StoreObjectFieldNoWriteBarrier(
4482  property_array, PropertyArray::kLengthAndHashOffset,
4483  ParameterToTagged(length, mode), MachineRepresentation::kTaggedSigned);
4484 }
4485 
4486 Node* CodeStubAssembler::AllocatePropertyArray(Node* capacity_node,
4487  ParameterMode mode,
4488  AllocationFlags flags) {
4489  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
4490  CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
4491  IntPtrOrSmiConstant(0, mode), mode));
4492  TNode<IntPtrT> total_size =
4493  GetPropertyArrayAllocationSize(capacity_node, mode);
4494 
4495  TNode<Object> array = Allocate(total_size, flags);
4496  RootIndex map_index = RootIndex::kPropertyArrayMap;
4497  DCHECK(RootsTable::IsImmortalImmovable(map_index));
4498  StoreMapNoWriteBarrier(array, map_index);
4499  InitializePropertyArrayLength(array, capacity_node, mode);
4500  return array;
4501 }
4502 
4503 void CodeStubAssembler::FillPropertyArrayWithUndefined(Node* array,
4504  Node* from_node,
4505  Node* to_node,
4506  ParameterMode mode) {
4507  CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
4508  CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
4509  CSA_SLOW_ASSERT(this, IsPropertyArray(array));
4510  ElementsKind kind = PACKED_ELEMENTS;
4511  Node* value = UndefinedConstant();
4512  BuildFastFixedArrayForEach(array, kind, from_node, to_node,
4513  [this, value](Node* array, Node* offset) {
4514  StoreNoWriteBarrier(
4515  MachineRepresentation::kTagged, array,
4516  offset, value);
4517  },
4518  mode);
4519 }
4520 
4521 void CodeStubAssembler::FillFixedArrayWithValue(ElementsKind kind, Node* array,
4522  Node* from_node, Node* to_node,
4523  RootIndex value_root_index,
4524  ParameterMode mode) {
4525  CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
4526  CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
4527  CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
4528  DCHECK(value_root_index == RootIndex::kTheHoleValue ||
4529  value_root_index == RootIndex::kUndefinedValue);
4530 
4531  // Determine the value with which to initialize the {array}, based
4532  // on the {value_root_index} and the elements {kind}.
4533  Node* value = LoadRoot(value_root_index);
4534  if (IsDoubleElementsKind(kind)) {
4535  value = LoadHeapNumberValue(value);
4536  }
4537 
4538  BuildFastFixedArrayForEach(
4539  array, kind, from_node, to_node,
4540  [this, value, kind](Node* array, Node* offset) {
4541  if (IsDoubleElementsKind(kind)) {
4542  StoreNoWriteBarrier(MachineRepresentation::kFloat64, array, offset,
4543  value);
4544  } else {
4545  StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
4546  value);
4547  }
4548  },
4549  mode);
4550 }
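
// A note on the double path above, assuming the Oddball layout keeps its
// raw to-number field at the same offset as HeapNumber's value field:
// LoadHeapNumberValue applied to the hole or undefined Oddball then yields
// the corresponding raw NaN bits directly, which is what gets stored into
// the FixedDoubleArray.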
4551 
4552 void CodeStubAssembler::StoreFixedDoubleArrayHole(
4553  TNode<FixedDoubleArray> array, Node* index, ParameterMode parameter_mode) {
4554  CSA_SLOW_ASSERT(this, MatchesParameterMode(index, parameter_mode));
4555  Node* offset =
4556  ElementOffsetFromIndex(index, PACKED_DOUBLE_ELEMENTS, parameter_mode,
4557  FixedArray::kHeaderSize - kHeapObjectTag);
4558  CSA_ASSERT(this, IsOffsetInBounds(
4559  offset, LoadAndUntagFixedArrayBaseLength(array),
4560  FixedDoubleArray::kHeaderSize, PACKED_DOUBLE_ELEMENTS));
4561  Node* double_hole =
4562  Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
4563  : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
4564  // TODO(danno): When we have a Float32/Float64 wrapper class that
4565  // preserves double bits during manipulation, remove this code/change
4566  // this to an indexed Float64 store.
4567  if (Is64()) {
4568  StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset,
4569  double_hole);
4570  } else {
4571  StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
4572  double_hole);
4573  StoreNoWriteBarrier(MachineRepresentation::kWord32, array,
4574  IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
4575  double_hole);
4576  }
4577 }
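
// The hole is a dedicated signaling-NaN bit pattern (kHoleNanInt64) that
// V8 keeps out of ordinary number values via NaN canonicalization, so raw
// word stores and compares are safe. A plain C++ sketch of the idea
// (illustrative; not the V8 API):
//
//   void StoreHoleBits(uint64_t* slot) { *slot = kHoleNanInt64; }
//   bool IsHoleBits(uint64_t bits) { return bits == kHoleNanInt64; }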
4578 
4579 void CodeStubAssembler::FillFixedArrayWithSmiZero(TNode<FixedArray> array,
4580  TNode<IntPtrT> length) {
4581  CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));
4582 
4583  TNode<IntPtrT> byte_length = TimesPointerSize(length);
4584  CSA_ASSERT(this, UintPtrLessThan(length, byte_length));
4585 
4586  static const int32_t fa_base_data_offset =
4587  FixedArray::kHeaderSize - kHeapObjectTag;
4588  TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
4589  IntPtrConstant(fa_base_data_offset));
4590 
4591  // Call out to memset to perform initialization.
4592  TNode<ExternalReference> memset =
4593  ExternalConstant(ExternalReference::libc_memset_function());
4594  STATIC_ASSERT(kSizetSize == kIntptrSize);
4595  CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
4596  MachineType::IntPtr(), MachineType::UintPtr(), memset,
4597  backing_store, IntPtrConstant(0), byte_length);
4598 }
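
// Using memset with zero is valid here because Smi zero is the all-zero
// word: a Smi stores its integer payload shifted left past the tag bits,
// with a zero tag. A scalar sketch of the tagging (illustrative):
//
//   intptr_t SmiFromInt(intptr_t value) {
//     // Shift is 1 on 32-bit targets, 32 on 64-bit targets with 32-bit Smis.
//     return value << (kSmiTagSize + kSmiShiftSize);  // 0 stays all-zero.
//   }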
4599 
4600 void CodeStubAssembler::FillFixedDoubleArrayWithZero(
4601  TNode<FixedDoubleArray> array, TNode<IntPtrT> length) {
4602  CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));
4603 
4604  TNode<IntPtrT> byte_length = TimesDoubleSize(length);
4605  CSA_ASSERT(this, UintPtrLessThan(length, byte_length));
4606 
4607  static const int32_t fa_base_data_offset =
4608  FixedDoubleArray::kHeaderSize - kHeapObjectTag;
4609  TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
4610  IntPtrConstant(fa_base_data_offset));
4611 
4612  // Call out to memset to perform initialization.
4613  TNode<ExternalReference> memset =
4614  ExternalConstant(ExternalReference::libc_memset_function());
4615  STATIC_ASSERT(kSizetSize == kIntptrSize);
4616  CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
4617  MachineType::IntPtr(), MachineType::UintPtr(), memset,
4618  backing_store, IntPtrConstant(0), byte_length);
4619 }
4620 
4621 void CodeStubAssembler::JumpIfPointersFromHereAreInteresting(
4622  TNode<Object> object, Label* interesting) {
4623  Label finished(this);
4624  TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
4625  TNode<IntPtrT> object_page = PageFromAddress(object_word);
4626  TNode<IntPtrT> page_flags = UncheckedCast<IntPtrT>(Load(
4627  MachineType::IntPtr(), object_page, IntPtrConstant(Page::kFlagsOffset)));
4628  Branch(
4629  WordEqual(WordAnd(page_flags,
4630  IntPtrConstant(
4631  MemoryChunk::kPointersFromHereAreInterestingMask)),
4632  IntPtrConstant(0)),
4633  &finished, interesting);
4634  BIND(&finished);
4635 }
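
// PageFromAddress masks off the low bits of the object address to reach
// the page header, whose flag word tells the GC whether outgoing pointers
// from this page need to be recorded. Address arithmetic sketch
// (illustrative; assumes pages are aligned to Page::kPageSize):
//
//   uintptr_t PageFromAddressScalar(uintptr_t addr) {
//     return addr & ~(static_cast<uintptr_t>(Page::kPageSize) - 1);
//   }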
4636 
4637 void CodeStubAssembler::MoveElements(ElementsKind kind,
4638  TNode<FixedArrayBase> elements,
4639  TNode<IntPtrT> dst_index,
4640  TNode<IntPtrT> src_index,
4641  TNode<IntPtrT> length) {
4642  Label finished(this);
4643  Label needs_barrier(this);
4644  const bool needs_barrier_check = !IsDoubleElementsKind(kind);
4645 
4646  DCHECK(IsFastElementsKind(kind));
4647  CSA_ASSERT(this, IsFixedArrayWithKind(elements, kind));
4648  CSA_ASSERT(this,
4649  IntPtrLessThanOrEqual(IntPtrAdd(dst_index, length),
4650  LoadAndUntagFixedArrayBaseLength(elements)));
4651  CSA_ASSERT(this,
4652  IntPtrLessThanOrEqual(IntPtrAdd(src_index, length),
4653  LoadAndUntagFixedArrayBaseLength(elements)));
4654 
4655  // The write barrier can be ignored if {elements} is in new space, or
4656  // if it is a FixedDoubleArray.
4657  if (needs_barrier_check) {
4658  JumpIfPointersFromHereAreInteresting(elements, &needs_barrier);
4659  }
4660 
4661  const TNode<IntPtrT> source_byte_length =
4662  IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
4663  static const int32_t fa_base_data_offset =
4664  FixedArrayBase::kHeaderSize - kHeapObjectTag;
4665  TNode<IntPtrT> elements_intptr = BitcastTaggedToWord(elements);
4666  TNode<IntPtrT> target_data_ptr =
4667  IntPtrAdd(elements_intptr,
4668  ElementOffsetFromIndex(dst_index, kind, INTPTR_PARAMETERS,
4669  fa_base_data_offset));
4670  TNode<IntPtrT> source_data_ptr =
4671  IntPtrAdd(elements_intptr,
4672  ElementOffsetFromIndex(src_index, kind, INTPTR_PARAMETERS,
4673  fa_base_data_offset));
4674  TNode<ExternalReference> memmove =
4675  ExternalConstant(ExternalReference::libc_memmove_function());
4676  CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
4677  MachineType::Pointer(), MachineType::UintPtr(), memmove,
4678  target_data_ptr, source_data_ptr, source_byte_length);
4679 
4680  if (needs_barrier_check) {
4681  Goto(&finished);
4682 
4683  BIND(&needs_barrier);
4684  {
4685  const TNode<IntPtrT> begin = src_index;
4686  const TNode<IntPtrT> end = IntPtrAdd(begin, length);
4687 
4688  // If dst_index is less than src_index, then walk forward.
4689  const TNode<IntPtrT> delta =
4690  IntPtrMul(IntPtrSub(dst_index, begin),
4691  IntPtrConstant(ElementsKindToByteSize(kind)));
4692  auto loop_body = [&](Node* array, Node* offset) {
4693  Node* const element = Load(MachineType::AnyTagged(), array, offset);
4694  Node* const delta_offset = IntPtrAdd(offset, delta);
4695  Store(array, delta_offset, element);
4696  };
4697 
4698  Label iterate_forward(this);
4699  Label iterate_backward(this);
4700  Branch(IntPtrLessThan(delta, IntPtrConstant(0)), &iterate_forward,
4701  &iterate_backward);
4702  BIND(&iterate_forward);
4703  {
4704  // Make a loop for the stores.
4705  BuildFastFixedArrayForEach(elements, kind, begin, end, loop_body,
4706  INTPTR_PARAMETERS,
4707  ForEachDirection::kForward);
4708  Goto(&finished);
4709  }
4710 
4711  BIND(&iterate_backward);
4712  {
4713  BuildFastFixedArrayForEach(elements, kind, begin, end, loop_body,
4714  INTPTR_PARAMETERS,
4715  ForEachDirection::kReverse);
4716  Goto(&finished);
4717  }
4718  }
4719  BIND(&finished);
4720  }
4721 }
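
// Usage sketch (illustrative; assumed context: a CSA builtin such as an
// Array.prototype.shift fast path that slides all elements down by one
// slot within the same backing store):
//
//   TNode<IntPtrT> new_length = IntPtrSub(length, IntPtrConstant(1));
//   MoveElements(PACKED_ELEMENTS, elements, /* dst_index */ IntPtrConstant(0),
//                /* src_index */ IntPtrConstant(1), new_length);
//
// Overlapping ranges are fine: the fast path uses memmove, and the barrier
// path picks an iteration direction from the sign of dst_index - src_index.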
4722 
4723 void CodeStubAssembler::CopyElements(ElementsKind kind,
4724  TNode<FixedArrayBase> dst_elements,
4725  TNode<IntPtrT> dst_index,
4726  TNode<FixedArrayBase> src_elements,
4727  TNode<IntPtrT> src_index,
4728  TNode<IntPtrT> length,
4729  WriteBarrierMode write_barrier) {
4730  Label finished(this);
4731  Label needs_barrier(this);
4732  const bool needs_barrier_check = !IsDoubleElementsKind(kind);
4733 
4734  DCHECK(IsFastElementsKind(kind));
4735  CSA_ASSERT(this, IsFixedArrayWithKind(dst_elements, kind));
4736  CSA_ASSERT(this, IsFixedArrayWithKind(src_elements, kind));
4737  CSA_ASSERT(this, IntPtrLessThanOrEqual(
4738  IntPtrAdd(dst_index, length),
4739  LoadAndUntagFixedArrayBaseLength(dst_elements)));
4740  CSA_ASSERT(this, IntPtrLessThanOrEqual(
4741  IntPtrAdd(src_index, length),
4742  LoadAndUntagFixedArrayBaseLength(src_elements)));
4743  CSA_ASSERT(this, Word32Or(WordNotEqual(dst_elements, src_elements),
4744  WordEqual(length, IntPtrConstant(0))));
4745 
4746  // The write barrier can be ignored if {dst_elements} is in new space,
4747  // or if {dst_elements} is a FixedDoubleArray.
4748  if (needs_barrier_check) {
4749  JumpIfPointersFromHereAreInteresting(dst_elements, &needs_barrier);
4750  }
4751 
4752  TNode<IntPtrT> source_byte_length =
4753  IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
4754  static const int32_t fa_base_data_offset =
4755  FixedArrayBase::kHeaderSize - kHeapObjectTag;
4756  TNode<IntPtrT> src_offset_start = ElementOffsetFromIndex(
4757  src_index, kind, INTPTR_PARAMETERS, fa_base_data_offset);
4758  TNode<IntPtrT> dst_offset_start = ElementOffsetFromIndex(
4759  dst_index, kind, INTPTR_PARAMETERS, fa_base_data_offset);
4760  TNode<IntPtrT> src_elements_intptr = BitcastTaggedToWord(src_elements);
4761  TNode<IntPtrT> source_data_ptr =
4762  IntPtrAdd(src_elements_intptr, src_offset_start);
4763  TNode<IntPtrT> dst_elements_intptr = BitcastTaggedToWord(dst_elements);
4764  TNode<IntPtrT> dst_data_ptr =
4765  IntPtrAdd(dst_elements_intptr, dst_offset_start);
4766  TNode<ExternalReference> memcpy =
4767  ExternalConstant(ExternalReference::libc_memcpy_function());
4768  CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
4769  MachineType::Pointer(), MachineType::UintPtr(), memcpy,
4770  dst_data_ptr, source_data_ptr, source_byte_length);
4771 
4772  if (needs_barrier_check) {
4773  Goto(&finished);
4774 
4775  BIND(&needs_barrier);
4776  {
4777  const TNode<IntPtrT> begin = src_index;
4778  const TNode<IntPtrT> end = IntPtrAdd(begin, length);
4779  const TNode<IntPtrT> delta =
4780  IntPtrMul(IntPtrSub(dst_index, src_index),
4781  IntPtrConstant(ElementsKindToByteSize(kind)));
4782  BuildFastFixedArrayForEach(
4783  src_elements, kind, begin, end,
4784  [&](Node* array, Node* offset) {
4785  Node* const element = Load(MachineType::AnyTagged(), array, offset);
4786  Node* const delta_offset = IntPtrAdd(offset, delta);
4787  if (write_barrier == SKIP_WRITE_BARRIER) {
4788  StoreNoWriteBarrier(MachineRepresentation::kTagged, dst_elements,
4789  delta_offset, element);
4790  } else {
4791  Store(dst_elements, delta_offset, element);
4792  }
4793  },
4794  INTPTR_PARAMETERS, ForEachDirection::kForward);
4795  Goto(&finished);
4796  }
4797  BIND(&finished);
4798  }
4799 }
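
// Usage sketch (illustrative): copying |length| Smis from |src| into a
// freshly allocated |dst|, both PACKED_SMI_ELEMENTS. Smis are not heap
// pointers, so the write barrier can be skipped even on the slow path:
//
//   CopyElements(PACKED_SMI_ELEMENTS, dst, IntPtrConstant(0), src,
//                IntPtrConstant(0), length, SKIP_WRITE_BARRIER);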
4800 
4801 void CodeStubAssembler::CopyFixedArrayElements(
4802  ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
4803  Node* to_array, Node* first_element, Node* element_count, Node* capacity,
4804  WriteBarrierMode barrier_mode, ParameterMode mode,
4805  HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted) {
4806  DCHECK_IMPLIES(var_holes_converted != nullptr,
4807  convert_holes == HoleConversionMode::kConvertToUndefined);
4808  CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
4809  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
4810  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
4811  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
4812  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
4813  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
4814  Comment("[ CopyFixedArrayElements");
4815 
4816  // Typed array elements are not supported.
4817  DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
4818  DCHECK(!IsFixedTypedArrayElementsKind(to_kind));
4819 
4820  Label done(this);
4821  bool from_double_elements = IsDoubleElementsKind(from_kind);
4822  bool to_double_elements = IsDoubleElementsKind(to_kind);
4823  bool doubles_to_objects_conversion =
4824  IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
4825  bool needs_write_barrier =
4826  doubles_to_objects_conversion ||
4827  (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
4828  bool element_offset_matches =
4829  !needs_write_barrier && (Is64() || IsDoubleElementsKind(from_kind) ==
4830  IsDoubleElementsKind(to_kind));
4831  Node* double_hole =
4832  Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
4833  : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
4834 
4835  // If copying might trigger a GC, we pre-initialize the FixedArray such that
4836  // it's always in a consistent state.
4837  if (convert_holes == HoleConversionMode::kConvertToUndefined) {
4838  DCHECK(IsObjectElementsKind(to_kind));
4839  // Use undefined for the part that we copy and holes for the rest.
4840  // Later, if we run into a hole in the source, we can simply skip writing
4841  // to the target and still be guaranteed to end up with undefined.
4842  FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
4843  element_count, RootIndex::kUndefinedValue, mode);
4844  FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
4845  RootIndex::kTheHoleValue, mode);
4846  } else if (doubles_to_objects_conversion) {
4847  // Pre-initialize the target with holes so that, if we later run into a
4848  // hole in the source, we can simply skip writing to the target.
4849  FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
4850  capacity, RootIndex::kTheHoleValue, mode);
4851  } else if (element_count != capacity) {
4852  FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
4853  RootIndex::kTheHoleValue, mode);
4854  }
4855 
4856  Node* first_from_element_offset =
4857  ElementOffsetFromIndex(first_element, from_kind, mode, 0);
4858  Node* limit_offset = IntPtrAdd(first_from_element_offset,
4859  IntPtrConstant(first_element_offset));
4860  VARIABLE(
4861  var_from_offset, MachineType::PointerRepresentation(),
4862  ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count, mode),
4863  from_kind, mode, first_element_offset));
4864  // This second variable is used only when the element sizes of source and
4865  // destination arrays do not match.
4866  VARIABLE(var_to_offset, MachineType::PointerRepresentation());
4867  if (element_offset_matches) {
4868  var_to_offset.Bind(var_from_offset.value());
4869  } else {
4870  var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
4871  first_element_offset));
4872  }
4873 
4874  Variable* vars[] = {&var_from_offset, &var_to_offset, var_holes_converted};
4875  int num_vars =
4876  var_holes_converted != nullptr ? arraysize(vars) : arraysize(vars) - 1;
4877  Label decrement(this, num_vars, vars);
4878 
4879  Node* to_array_adjusted =
4880  element_offset_matches
4881  ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
4882  : to_array;
4883 
4884  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
4885 
4886  BIND(&decrement);
4887  {
4888  Node* from_offset = IntPtrSub(
4889  var_from_offset.value(),
4890  IntPtrConstant(from_double_elements ? kDoubleSize : kPointerSize));
4891  var_from_offset.Bind(from_offset);
4892 
4893  Node* to_offset;
4894  if (element_offset_matches) {
4895  to_offset = from_offset;
4896  } else {
4897  to_offset = IntPtrSub(
4898  var_to_offset.value(),
4899  IntPtrConstant(to_double_elements ? kDoubleSize : kPointerSize));
4900  var_to_offset.Bind(to_offset);
4901  }
4902 
4903  Label next_iter(this), store_double_hole(this), signal_hole(this);
4904  Label* if_hole;
4905  if (convert_holes == HoleConversionMode::kConvertToUndefined) {
4906  // The target elements array is already preinitialized with undefined
4907  // so we only need to signal that a hole was found and continue the loop.
4908  if_hole = &signal_hole;
4909  } else if (doubles_to_objects_conversion) {
4910  // The target elements array is already preinitialized with holes, so we
4911  // can just proceed with the next iteration.
4912  if_hole = &next_iter;
4913  } else if (IsDoubleElementsKind(to_kind)) {
4914  if_hole = &store_double_hole;
4915  } else {
4916  // In all the other cases don't check for holes and copy the data as is.
4917  if_hole = nullptr;
4918  }
4919 
4920  Node* value = LoadElementAndPrepareForStore(
4921  from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
4922 
4923  if (needs_write_barrier) {
4924  CHECK_EQ(to_array, to_array_adjusted);
4925  Store(to_array_adjusted, to_offset, value);
4926  } else if (to_double_elements) {
4927  StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
4928  to_offset, value);
4929  } else {
4930  StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array_adjusted,
4931  to_offset, value);
4932  }
4933  Goto(&next_iter);
4934 
4935  if (if_hole == &store_double_hole) {
4936  BIND(&store_double_hole);
4937  // Don't use doubles to store the hole double, since manipulating the
4938  // signaling NaN used for the hole in C++, e.g. with bit_cast, will
4939  // change its value on ia32 (the x87 stack is used to return values
4940  // and stores to the stack silently clear the signaling bit).
4941  //
4942  // TODO(danno): When we have a Float32/Float64 wrapper class that
4943  // preserves double bits during manipulation, remove this code/change
4944  // this to an indexed Float64 store.
4945  if (Is64()) {
4946  StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
4947  to_offset, double_hole);
4948  } else {
4949  StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
4950  to_offset, double_hole);
4951  StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
4952  IntPtrAdd(to_offset, IntPtrConstant(kPointerSize)),
4953  double_hole);
4954  }
4955  Goto(&next_iter);
4956  } else if (if_hole == &signal_hole) {
4957  // This case happens only when IsObjectElementsKind(to_kind).
4958  BIND(&signal_hole);
4959  if (var_holes_converted != nullptr) {
4960  *var_holes_converted = Int32TrueConstant();
4961  }
4962  Goto(&next_iter);
4963  }
4964 
4965  BIND(&next_iter);
4966  Node* compare = WordNotEqual(from_offset, limit_offset);
4967  Branch(compare, &decrement, &done);
4968  }
4969 
4970  BIND(&done);
4971  Comment("] CopyFixedArrayElements");
4972 }
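
// A scalar sketch of the doubles-to-objects path above (illustrative;
// BoxDouble stands in for the HeapNumber allocation and is hypothetical).
// Boxing allocates, which is why this combination forces a write barrier
// and why the target is pre-filled with holes before the loop:
//
//   for (size_t i = count; i-- > 0;) {
//     uint64_t bits = from[first + i];
//     if (bits == kHoleNanInt64) continue;  // Target already holds a hole.
//     to[i] = BoxDouble(bits);
//   }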
4973 
4974 TNode<FixedArray> CodeStubAssembler::HeapObjectToFixedArray(
4975  TNode<HeapObject> base, Label* cast_fail) {
4976  Label fixed_array(this);
4977  TNode<Map> map = LoadMap(base);
4978  GotoIf(WordEqual(map, LoadRoot(RootIndex::kFixedArrayMap)), &fixed_array);
4979  GotoIf(WordNotEqual(map, LoadRoot(RootIndex::kFixedCOWArrayMap)), cast_fail);
4980  Goto(&fixed_array);
4981  BIND(&fixed_array);
4982  return UncheckedCast<FixedArray>(base);
4983 }
4984 
4985 void CodeStubAssembler::CopyPropertyArrayValues(Node* from_array,
4986  Node* to_array,
4987  Node* property_count,
4988  WriteBarrierMode barrier_mode,
4989  ParameterMode mode,
4990  DestroySource destroy_source) {
4991  CSA_SLOW_ASSERT(this, MatchesParameterMode(property_count, mode));
4992  CSA_SLOW_ASSERT(this, Word32Or(IsPropertyArray(from_array),
4993  IsEmptyFixedArray(from_array)));
4994  CSA_SLOW_ASSERT(this, IsPropertyArray(to_array));
4995  Comment("[ CopyPropertyArrayValues");
4996 
4997  bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;
4998 
4999  if (destroy_source == DestroySource::kNo) {
5000  // The PropertyArray may contain MutableHeapNumbers, which must be cloned
5001  // on the heap; storing the clones requires a write barrier.
5002  needs_write_barrier = true;
5003  }
5004 
5005  Node* start = IntPtrOrSmiConstant(0, mode);
5006  ElementsKind kind = PACKED_ELEMENTS;
5007  BuildFastFixedArrayForEach(
5008  from_array, kind, start, property_count,
5009  [this, to_array, needs_write_barrier, destroy_source](Node* array,
5010  Node* offset) {
5011  Node* value = Load(MachineType::AnyTagged(), array, offset);
5012 
5013  if (destroy_source == DestroySource::kNo) {
5014  value = CloneIfMutablePrimitive(CAST(value));
5015  }
5016 
5017  if (needs_write_barrier) {
5018  Store(to_array, offset, value);
5019  } else {
5020  StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, offset,
5021  value);
5022  }
5023  },
5024  mode);
5025 
5026 #ifdef DEBUG
5027  // Zap {from_array} if the copying above has made it invalid.
5028  if (destroy_source == DestroySource::kYes) {
5029  Label did_zap(this);
5030  GotoIf(IsEmptyFixedArray(from_array), &did_zap);
5031  FillPropertyArrayWithUndefined(from_array, start, property_count, mode);
5032 
5033  Goto(&did_zap);
5034  BIND(&did_zap);
5035  }
5036 #endif
5037  Comment("] CopyPropertyArrayValues");
5038 }
5039 
5040 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
5041  TNode<IntPtrT> from_index,
5042  TNode<IntPtrT> to_index,
5043  TNode<IntPtrT> character_count,
5044  String::Encoding from_encoding,
5045  String::Encoding to_encoding) {
5046  // Cannot assert IsString(from_string) and IsString(to_string) here because
5047  // CSA::SubString can pass in faked sequential strings when handling external
5048  // subject strings.
5049  bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
5050  bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
5051  DCHECK_IMPLIES(to_one_byte, from_one_byte);
5052  Comment("CopyStringCharacters %s -> %s",
5053  from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING",
5054  to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");
5055 
5056  ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
5057  ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
5058  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
5059  int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
5060  Node* from_offset = ElementOffsetFromIndex(from_index, from_kind,
5061  INTPTR_PARAMETERS, header_size);
5062  Node* to_offset =
5063  ElementOffsetFromIndex(to_index, to_kind, INTPTR_PARAMETERS, header_size);
5064  Node* byte_count =
5065  ElementOffsetFromIndex(character_count, from_kind, INTPTR_PARAMETERS);
5066  Node* limit_offset = IntPtrAdd(from_offset, byte_count);
5067 
5068  // Prepare the fast loop.
5069  MachineType type =
5070  from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
5071  MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
5072  : MachineRepresentation::kWord16;
5073  int from_increment = 1 << ElementsKindToShiftSize(from_kind);
5074  int to_increment = 1 << ElementsKindToShiftSize(to_kind);
5075 
5076  VARIABLE(current_to_offset, MachineType::PointerRepresentation(), to_offset);
5077  VariableList vars({&current_to_offset}, zone());
5078  int to_index_constant = 0, from_index_constant = 0;
5079  bool index_same = (from_encoding == to_encoding) &&
5080  (from_index == to_index ||
5081  (ToInt32Constant(from_index, from_index_constant) &&
5082  ToInt32Constant(to_index, to_index_constant) &&
5083  from_index_constant == to_index_constant));
5084  BuildFastLoop(vars, from_offset, limit_offset,
5085  [this, from_string, to_string, &current_to_offset, to_increment,
5086  type, rep, index_same](Node* offset) {
5087  Node* value = Load(type, from_string, offset);
5088  StoreNoWriteBarrier(
5089  rep, to_string,
5090  index_same ? offset : current_to_offset.value(), value);
5091  if (!index_same) {
5092  Increment(&current_to_offset, to_increment);
5093  }
5094  },
5095  from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
5096 }
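
// A scalar sketch of the widening copy (one-byte source to two-byte
// target; illustrative). Each Latin-1 byte zero-extends to a UTF-16 code
// unit, which is why a two-byte target may take a one-byte source but not
// the reverse (see DCHECK_IMPLIES(to_one_byte, from_one_byte) above):
//
//   void CopyOneByteToTwoByte(const uint8_t* from, uint16_t* to, size_t n) {
//     for (size_t i = 0; i < n; i++) to[i] = from[i];
//   }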
5097 
5098 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
5099  Node* offset,
5100  ElementsKind from_kind,
5101  ElementsKind to_kind,
5102  Label* if_hole) {
5103  CSA_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
5104  if (IsDoubleElementsKind(from_kind)) {
5105  Node* value =
5106  LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
5107  if (!IsDoubleElementsKind(to_kind)) {
5108  value = AllocateHeapNumberWithValue(value);
5109  }
5110  return value;
5111 
5112  } else {
5113  Node* value = Load(MachineType::AnyTagged(), array, offset);
5114  if (if_hole) {
5115  GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
5116  }
5117  if (IsDoubleElementsKind(to_kind)) {
5118  if (IsSmiElementsKind(from_kind)) {
5119  value = SmiToFloat64(value);
5120  } else {
5121  value = LoadHeapNumberValue(value);
5122  }
5123  }
5124  return value;
5125  }
5126 }
5127 
5128 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
5129  ParameterMode mode) {
5130  CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
5131  Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
5132  Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
5133  Node* padding =
5134  IntPtrOrSmiConstant(JSObject::kMinAddedElementsCapacity, mode);
5135  return IntPtrOrSmiAdd(new_capacity, padding, mode);
5136 }
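
// The growth formula is new = old + old / 2 + kMinAddedElementsCapacity,
// i.e. 1.5x growth plus a fixed pad so tiny arrays don't regrow on every
// store. Worked example, assuming the pad is 16: old = 8 gives
// 8 + 4 + 16 = 28, and old = 100 gives 100 + 50 + 16 = 166. Scalar sketch:
//
//   int NewCapacity(int old_capacity) {
//     return old_capacity + (old_capacity >> 1) +
//            JSObject::kMinAddedElementsCapacity;
//   }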
5137 
5138 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
5139  ElementsKind kind, Node* key,
5140  Label* bailout) {
5141  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
5142  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
5143  CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
5144  Node* capacity = LoadFixedArrayBaseLength(elements);
5145 
5146  ParameterMode mode = OptimalParameterMode();
5147  capacity = TaggedToParameter(capacity, mode);
5148  key = TaggedToParameter(key, mode);
5149 
5150  return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
5151  bailout);
5152 }
5153 
5154 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
5155  ElementsKind kind, Node* key,
5156  Node* capacity,
5157  ParameterMode mode,
5158  Label* bailout) {
5159  Comment("TryGrowElementsCapacity");
5160  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
5161  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
5162  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
5163  CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));
5164 
5165  // If the gap growth is too big, fall back to the runtime.
5166  Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
5167  Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
5168  GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);
5169 
5170  // Calculate the capacity of the new backing store.
5171  Node* new_capacity = CalculateNewElementsCapacity(
5172  IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
5173  return GrowElementsCapacity(object, elements, kind, kind, capacity,
5174  new_capacity, mode, bailout);
5175 }
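
// Worked example (assuming JSObject::kMaxGap is 1024): with capacity 16, a
// store to key 2000 bails out to the runtime (2000 >= 16 + 1024), while a
// store to key 100 grows the backing store to
// CalculateNewElementsCapacity(101) = 101 + 50 + 16 = 167 elements.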
5176 
5177 Node* CodeStubAssembler::GrowElementsCapacity(
5178  Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
5179  Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
5180  Comment("[ GrowElementsCapacity");
5181  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
5182  CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
5183  CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
5184  CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));
5185 
5186  // If the size of the allocation for the new capacity doesn't fit in a
5187  // page that we can bump-pointer allocate from, fall back to the runtime.
5188  int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
5189  GotoIf(UintPtrOrSmiGreaterThanOrEqual(
5190  new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
5191  bailout);
5192 
5193  // Allocate the new backing store.
5194  Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);
5195 
5196  // Copy the elements from the old elements store to the new.
5197  // The size check above guarantees that |new_elements| is allocated in
5198  // new space, so we can skip the write barrier.
5199  CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
5200  new_capacity, SKIP_WRITE_BARRIER, mode);
5201 
5202  StoreObjectField(object, JSObject::kElementsOffset, new_elements);
5203  Comment("] GrowElementsCapacity");
5204  return new_elements;
5205 }
5206 
5207 void CodeStubAssembler::InitializeAllocationMemento(Node* base,
5208  Node* base_allocation_size,
5209  Node* allocation_site) {
5210  Comment("[Initialize AllocationMemento");
5211  TNode<Object> memento =
5212  InnerAllocate(CAST(base), UncheckedCast<IntPtrT>(base_allocation_size));
5213  StoreMapNoWriteBarrier(memento, RootIndex::kAllocationMementoMap);
5214  StoreObjectFieldNoWriteBarrier(
5215  memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
5216  if (FLAG_allocation_site_pretenuring) {
5217  TNode<Int32T> count = UncheckedCast<Int32T>(LoadObjectField(
5218  allocation_site, AllocationSite::kPretenureCreateCountOffset,
5219  MachineType::Int32()));
5220 
5221  TNode<Int32T> incremented_count = Int32Add(count, Int32Constant(1));
5222  StoreObjectFieldNoWriteBarrier(
5223  allocation_site, AllocationSite::kPretenureCreateCountOffset,
5224  incremented_count, MachineRepresentation::kWord32);
5225  }
5226  Comment("]");
5227 }
5228 
5229 Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
5230  Label* if_valueisnotnumber) {
5231  Label out(this);
5232  VARIABLE(var_result, MachineRepresentation::kFloat64);
5233 
5234  // Check if the {value} is a Smi or a HeapObject.
5235  Label if_valueissmi(this), if_valueisnotsmi(this);
5236  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
5237 
5238  BIND(&if_valueissmi);
5239  {
5240  // Convert the Smi {value}.
5241  var_result.Bind(SmiToFloat64(value));
5242  Goto(&out);
5243  }
5244 
5245  BIND(&if_valueisnotsmi);
5246  {
5247  // Check if {value} is a HeapNumber.
5248  Label if_valueisheapnumber(this);
5249  Branch(IsHeapNumber(value), &if_valueisheapnumber, if_valueisnotnumber);
5250 
5251  BIND(&if_valueisheapnumber);
5252  {
5253  // Load the floating point value.
5254  var_result.Bind(LoadHeapNumberValue(value));
5255  Goto(&out);
5256  }
5257  }
5258  BIND(&out);
5259  return var_result.value();
5260 }
5261 
5262 Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
5263  // We might need to loop once due to ToNumber conversion.
5264  VARIABLE(var_value, MachineRepresentation::kTagged);
5265  VARIABLE(var_result, MachineRepresentation::kFloat64);
5266  Label loop(this, &var_value), done_loop(this, &var_result);
5267  var_value.Bind(value);
5268  Goto(&loop);
5269  BIND(&loop);
5270  {
5271  Label if_valueisnotnumber(this, Label::kDeferred);
5272 
5273  // Load the current {value}.
5274  value = var_value.value();
5275 
5276  // If {value} is a number, convert it to Float64; otherwise, convert it
5277  // to a Number first and loop.
5278  Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
5279  var_result.Bind(result);
5280  Goto(&done_loop);
5281 
5282  BIND(&if_valueisnotnumber);
5283  {
5284  // Convert the {value} to a Number first.
5285  var_value.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, value));
5286  Goto(&loop);
5287  }
5288  }
5289  BIND(&done_loop);
5290  return var_result.value();
5291 }
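
// For example, given the string "3.5" this loops once: NonNumberToNumber
// produces the HeapNumber 3.5, and the second pass reads its float64 value
// directly.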
5292 
5293 Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
5294  VARIABLE(var_result, MachineRepresentation::kWord32);
5295  Label done(this);
5296  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumber>(context, value,
5297  &done, &var_result);
5298  BIND(&done);
5299  return var_result.value();
5300 }
5301 
5302 // Truncate {value} to word32 and jump to {if_number} if it is a Number,
5303 // or find that it is a BigInt and jump to {if_bigint}.
5304 void CodeStubAssembler::TaggedToWord32OrBigInt(Node* context, Node* value,
5305  Label* if_number,
5306  Variable* var_word32,
5307  Label* if_bigint,
5308  Variable* var_bigint) {
5309  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
5310  context, value, if_number, var_word32, if_bigint, var_bigint);
5311 }
5312 
5313 // Truncate {value} to word32 and jump to {if_number} if it is a Number,
5314 // or find that it is a BigInt and jump to {if_bigint}. In either case,
5315 // store the type feedback in {var_feedback}.
5316 void CodeStubAssembler::TaggedToWord32OrBigIntWithFeedback(
5317  Node* context, Node* value, Label* if_number, Variable* var_word32,
5318  Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
5319  TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
5320  context, value, if_number, var_word32, if_bigint, var_bigint,
5321  var_feedback);
5322 }
5323 
5324 template <Object::Conversion conversion>
5325 void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
5326  Node* context, Node* value, Label* if_number, Variable* var_word32,
5327  Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
5328  DCHECK(var_word32->rep() == MachineRepresentation::kWord32);
5329  DCHECK(var_bigint == nullptr ||
5330  var_bigint->rep() == MachineRepresentation::kTagged);
5331  DCHECK(var_feedback == nullptr ||
5332  var_feedback->rep() == MachineRepresentation::kTaggedSigned);
5333 
5334  // We might need to loop after conversion.
5335  VARIABLE(var_value, MachineRepresentation::kTagged, value);
5336  OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNone);
5337  Variable* loop_vars[] = {&var_value, var_feedback};
5338  int num_vars =
5339  var_feedback != nullptr ? arraysize(loop_vars) : arraysize(loop_vars) - 1;
5340  Label loop(this, num_vars, loop_vars);
5341  Goto(&loop);
5342  BIND(&loop);
5343  {
5344  value = var_value.value();
5345  Label not_smi(this), is_heap_number(this), is_oddball(this),
5346  is_bigint(this);
5347  GotoIf(TaggedIsNotSmi(value), &not_smi);
5348 
5349  // {value} is a Smi.
5350  var_word32->Bind(SmiToInt32(value));
5351  CombineFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
5352  Goto(if_number);
5353 
5354  BIND(&not_smi);
5355  Node* map = LoadMap(value);
5356  GotoIf(IsHeapNumberMap(map), &is_heap_number);
5357  Node* instance_type = LoadMapInstanceType(map);
5358  if (conversion == Object::Conversion::kToNumeric) {
5359  GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
5360  }
5361 
5362  // Not HeapNumber (or BigInt if conversion == kToNumeric).
5363  {
5364  if (var_feedback != nullptr) {
5365  // We do not require an Or with earlier feedback here because once we
5366  // convert the value to a Numeric, we cannot reach this path. We can
5367  // only reach this path on the first pass when the feedback is kNone.
5368  CSA_ASSERT(this, SmiEqual(CAST(var_feedback->value()),
5369  SmiConstant(BinaryOperationFeedback::kNone)));
5370  }
5371  GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
5372  // Not an oddball either -> convert.
5373  auto builtin = conversion == Object::Conversion::kToNumeric
5374  ? Builtins::kNonNumberToNumeric
5375  : Builtins::kNonNumberToNumber;
5376  var_value.Bind(CallBuiltin(builtin, context, value));
5377  OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
5378  Goto(&loop);
5379 
5380  BIND(&is_oddball);
5381  var_value.Bind(LoadObjectField(value, Oddball::kToNumberOffset));
5382  OverwriteFeedback(var_feedback,
5383  BinaryOperationFeedback::kNumberOrOddball);
5384  Goto(&loop);
5385  }
5386 
5387  BIND(&is_heap_number);
5388  var_word32->Bind(TruncateHeapNumberValueToWord32(value));
5389  CombineFeedback(var_feedback, BinaryOperationFeedback::kNumber);
5390  Goto(if_number);
5391 
5392  if (conversion == Object::Conversion::kToNumeric) {
5393  BIND(&is_bigint);
5394  var_bigint->Bind(value);
5395  CombineFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
5396  Goto(if_bigint);
5397  }
5398  }
5399 }
5400 
5401 Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
5402  Node* value = LoadHeapNumberValue(object);
5403  return TruncateFloat64ToWord32(value);
5404 }
5405 
5406 void CodeStubAssembler::TryHeapNumberToSmi(TNode<HeapNumber> number,
5407  TVariable<Smi>& var_result_smi,
5408  Label* if_smi) {
5409  TNode<Float64T> value = LoadHeapNumberValue(number);
5410  TryFloat64ToSmi(value, var_result_smi, if_smi);
5411 }
5412 
5413 void CodeStubAssembler::TryFloat64ToSmi(TNode<Float64T> value,
5414  TVariable<Smi>& var_result_smi,
5415  Label* if_smi) {
5416  TNode<Int32T> value32 = RoundFloat64ToInt32(value);
5417  TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
5418 
5419  Label if_int32(this), if_heap_number(this, Label::kDeferred);
5420 
5421  GotoIfNot(Float64Equal(value, value64), &if_heap_number);
5422  GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32);
5423  Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
5424  Int32Constant(0)),
5425  &if_heap_number, &if_int32);
5426 
5427  TVARIABLE(Number, var_result);
5428  BIND(&if_int32);
5429  {
5430  if (SmiValuesAre32Bits()) {
5431  var_result_smi = SmiTag(ChangeInt32ToIntPtr(value32));
5432  } else {
5433  DCHECK(SmiValuesAre31Bits());
5434  TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
5435  TNode<BoolT> overflow = Projection<1>(pair);
5436  GotoIf(overflow, &if_heap_number);
5437  var_result_smi =
5438  BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
5439  }
5440  Goto(if_smi);
5441  }
5442  BIND(&if_heap_number);
5443 }
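
// Scalar sketch of the check above (illustrative). A double fits in a Smi
// iff it round-trips through int32 and is not -0.0; since -0.0 round-trips
// to +0.0, the extra test on the high word's sign bit is what catches it.
// On 31-bit Smi targets the generated code additionally rejects values
// whose doubled int32 overflows:
//
//   bool Float64FitsInt32(double value, int32_t* out) {
//     int32_t v32 = static_cast<int32_t>(value);  // Sketch: assumes in range.
//     if (static_cast<double>(v32) != value) return false;  // Non-integral.
//     if (v32 == 0 && std::signbit(value)) return false;    // -0.0.
//     *out = v32;
//     return true;
//   }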
5444 
5445 TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(
5446  SloppyTNode<Float64T> value) {
5447  Label if_smi(this), done(this);
5448  TVARIABLE(Smi, var_smi_result);
5449  TVARIABLE(Number, var_result);
5450  TryFloat64ToSmi(value, var_smi_result, &if_smi);
5451 
5452  var_result = AllocateHeapNumberWithValue(value);
5453  Goto(&done);
5454 
5455  BIND(&if_smi);
5456  {
5457  var_result = var_smi_result.value();
5458  Goto(&done);
5459  }
5460  BIND(&done);
5461  return var_result.value();
5462 }
5463 
5464 TNode<Number> CodeStubAssembler::ChangeInt32ToTagged(
5465  SloppyTNode<Int32T> value) {
5466  if (SmiValuesAre32Bits()) {
5467  return SmiTag(ChangeInt32ToIntPtr(value));
5468  }
5469  DCHECK(SmiValuesAre31Bits());
5470  TVARIABLE(Number, var_result);
5471  TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
5472  TNode<BoolT> overflow = Projection<1>(pair);
5473  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
5474  if_join(this);
5475  Branch(overflow, &if_overflow, &if_notoverflow);
5476  BIND(&if_overflow);
5477  {
5478  TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
5479  TNode<HeapNumber> result = AllocateHeapNumberWithValue(value64);
5480  var_result = result;
5481  Goto(&if_join);
5482  }
5483  BIND(&if_notoverflow);
5484  {
5485  TNode<IntPtrT> almost_tagged_value =
5486  ChangeInt32ToIntPtr(Projection<0>(pair));
5487  TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
5488  var_result = result;
5489  Goto(&if_join);
5490  }
5491  BIND(&if_join);
5492  return var_result.value();
5493 }
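
// For 31-bit Smis, value + value performs the one-bit left shift that tags
// the Smi and detects overflow in the same instruction. Scalar sketch
// (illustrative; SignedAddOverflow32 returns true on overflow):
//
//   bool TryTagSmi31(int32_t value, int32_t* tagged) {
//     return !base::bits::SignedAddOverflow32(value, value, tagged);
//   }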
5494 
5495 TNode<Number> CodeStubAssembler::ChangeUint32ToTagged(
5496  SloppyTNode<Uint32T> value) {
5497  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
5498  if_join(this);
5499  TVARIABLE(Number, var_result);
5500  // If {value} > Smi::kMaxValue, we need to store it in a HeapNumber.
5501  Branch(Uint32LessThan(Uint32Constant(Smi::kMaxValue), value), &if_overflow,
5502  &if_not_overflow);
5503 
5504  BIND(&if_not_overflow);
5505  {
5506  // The {value} is definitely in valid Smi range.
5507  var_result = SmiTag(Signed(ChangeUint32ToWord(value)));
5508  }
5509  Goto(&if_join);
5510 
5511  BIND(&if_overflow);
5512  {
5513  TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
5514  var_result = AllocateHeapNumberWithValue(float64_value);
5515  }
5516  Goto(&if_join);
5517 
5518  BIND(&if_join);
5519  return var_result.value();
5520 }
5521 
5522 TNode<Number> CodeStubAssembler::ChangeUintPtrToTagged(TNode<UintPtrT> value) {
5523  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
5524  if_join(this);
5525  TVARIABLE(Number, var_result);
5526  // If {value} > Smi::kMaxValue, we need to store it in a HeapNumber.
5527  Branch(UintPtrLessThan(UintPtrConstant(Smi::kMaxValue), value), &if_overflow,
5528  &if_not_overflow);
5529 
5530  BIND(&if_not_overflow);
5531  {
5532  // The {value} is definitely in valid Smi range.
5533  var_result = SmiTag(Signed(value));
5534  }
5535  Goto(&if_join);
5536 
5537  BIND(&if_overflow);
5538  {
5539  TNode<Float64T> float64_value = ChangeUintPtrToFloat64(value);
5540  var_result = AllocateHeapNumberWithValue(float64_value);
5541  }
5542  Goto(&if_join);
5543 
5544  BIND(&if_join);
5545  return var_result.value();
5546 }
5547 
5548 TNode<String> CodeStubAssembler::ToThisString(Node* context, Node* value,
5549  char const* method_name) {
5550  VARIABLE(var_value, MachineRepresentation::kTagged, value);
5551 
5552  // Check if the {value} is a Smi or a HeapObject.
5553  Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
5554  if_valueisstring(this);
5555  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
5556  BIND(&if_valueisnotsmi);
5557  {
5558  // Load the instance type of the {value}.
5559  Node* value_instance_type = LoadInstanceType(value);
5560 
5561  // Check if the {value} is already a String.
5562  Label if_valueisnotstring(this, Label::kDeferred);
5563  Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
5564  &if_valueisnotstring);
5565  BIND(&if_valueisnotstring);
5566  {
5567  // Check if the {value} is null or undefined.
5568  Label if_valueisnullorundefined(this, Label::kDeferred);
5569  GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
5570  // Convert the {value} to a String.
5571  var_value.Bind(CallBuiltin(Builtins::kToString, context, value));
5572  Goto(&if_valueisstring);
5573 
5574  BIND(&if_valueisnullorundefined);
5575  {
5576  // The {value} is either null or undefined.
5577  ThrowTypeError(context, MessageTemplate::kCalledOnNullOrUndefined,
5578  method_name);
5579  }
5580  }
5581  }
5582  BIND(&if_valueissmi);
5583  {
5584  // The {value} is a Smi, convert it to a String.
5585  var_value.Bind(CallBuiltin(Builtins::kNumberToString, context, value));
5586  Goto(&if_valueisstring);
5587  }
5588  BIND(&if_valueisstring);
5589  return CAST(var_value.value());
5590 }
5591 
5592 TNode<Uint32T> CodeStubAssembler::ChangeNumberToUint32(TNode<Number> value) {
5593  TVARIABLE(Uint32T, var_result);
5594  Label if_smi(this), if_heapnumber(this, Label::kDeferred), done(this);
5595  Branch(TaggedIsSmi(value), &if_smi, &if_heapnumber);
5596  BIND(&if_smi);
5597  {
5598  var_result = Unsigned(SmiToInt32(CAST(value)));
5599  Goto(&done);
5600  }
5601  BIND(&if_heapnumber);
5602  {
5603  var_result = ChangeFloat64ToUint32(LoadHeapNumberValue(CAST(value)));
5604  Goto(&done);
5605  }
5606  BIND(&done);
5607  return var_result.value();
5608 }
5609 
5610 TNode<Float64T> CodeStubAssembler::ChangeNumberToFloat64(
5611  SloppyTNode<Number> value) {
5612  // TODO(tebbi): Remove assert once argument is TNode instead of SloppyTNode.
5613  CSA_SLOW_ASSERT(this, IsNumber(value));
5614  TVARIABLE(Float64T, result);
5615  Label smi(this);
5616  Label done(this, &result);
5617  GotoIf(TaggedIsSmi(value), &smi);
5618  result = LoadHeapNumberValue(CAST(value));
5619  Goto(&done);
5620 
5621  BIND(&smi);
5622  {
5623  result = SmiToFloat64(CAST(value));
5624  Goto(&done);
5625  }
5626 
5627  BIND(&done);
5628  return result.value();
5629 }
5630 
5631 TNode<UintPtrT> CodeStubAssembler::ChangeNonnegativeNumberToUintPtr(
5632  TNode<Number> value) {
5633  TVARIABLE(UintPtrT, result);
5634  Label done(this, &result);
5635  Branch(TaggedIsSmi(value),
5636  [&] {
5637  TNode<Smi> value_smi = CAST(value);
5638  CSA_SLOW_ASSERT(this, SmiLessThan(SmiConstant(-1), value_smi));
5639  result = UncheckedCast<UintPtrT>(SmiToIntPtr(value_smi));
5640  Goto(&done);
5641  },
5642  [&] {
5643  TNode<HeapNumber> value_hn = CAST(value);
5644  result = ChangeFloat64ToUintPtr(LoadHeapNumberValue(value_hn));
5645  Goto(&done);
5646  });
5647 
5648  BIND(&done);
5649  return result.value();
5650 }
5651 
5652 TNode<WordT> CodeStubAssembler::TimesPointerSize(SloppyTNode<WordT> value) {
5653  return WordShl(value, kPointerSizeLog2);
5654 }
5655 
5656 TNode<WordT> CodeStubAssembler::TimesDoubleSize(SloppyTNode<WordT> value) {
5657  return WordShl(value, kDoubleSizeLog2);
5658 }
5659 
5660 Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
5661  PrimitiveType primitive_type,
5662  char const* method_name) {
5663  // We might need to loop once due to JSValue unboxing.
5664  VARIABLE(var_value, MachineRepresentation::kTagged, value);
5665  Label loop(this, &var_value), done_loop(this),
5666  done_throw(this, Label::kDeferred);
5667  Goto(&loop);
5668  BIND(&loop);
5669  {
5670  // Load the current {value}.
5671  value = var_value.value();
5672 
5673  // Check if the {value} is a Smi or a HeapObject.
5674  GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
5675  ? &done_loop
5676  : &done_throw);
5677 
5678  // Load the map of the {value}.
5679  Node* value_map = LoadMap(value);
5680 
5681  // Load the instance type of the {value}.
5682  Node* value_instance_type = LoadMapInstanceType(value_map);
5683 
5684  // Check if {value} is a JSValue.
5685  Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
5686  Branch(InstanceTypeEqual(value_instance_type, JS_VALUE_TYPE),
5687  &if_valueisvalue, &if_valueisnotvalue);
5688 
5689  BIND(&if_valueisvalue);
5690  {
5691  // Load the actual value from the {value}.
5692  var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
5693  Goto(&loop);
5694  }
5695 
5696  BIND(&if_valueisnotvalue);
5697  {
5698  switch (primitive_type) {
5699  case PrimitiveType::kBoolean:
5700  GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
5701  break;
5702  case PrimitiveType::kNumber:
5703  GotoIf(WordEqual(value_map, HeapNumberMapConstant()), &done_loop);
5704  break;
5705  case PrimitiveType::kString:
5706  GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
5707  break;
5708  case PrimitiveType::kSymbol:
5709  GotoIf(WordEqual(value_map, SymbolMapConstant()), &done_loop);
5710  break;
5711  }
5712  Goto(&done_throw);
5713  }
5714  }
5715 
5716  BIND(&done_throw);
5717  {
5718  const char* primitive_name = nullptr;
5719  switch (primitive_type) {
5720  case PrimitiveType::kBoolean:
5721  primitive_name = "Boolean";
5722  break;
5723  case PrimitiveType::kNumber:
5724  primitive_name = "Number";
5725  break;
5726  case PrimitiveType::kString:
5727  primitive_name = "String";
5728  break;
5729  case PrimitiveType::kSymbol:
5730  primitive_name = "Symbol";
5731  break;
5732  }
5733  CHECK_NOT_NULL(primitive_name);
5734 
5735  // The {value} is not a compatible receiver for this method.
5736  ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
5737  primitive_name);
5738  }
5739 
5740  BIND(&done_loop);
5741  return var_value.value();
5742 }
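
// For example, a call like Number.prototype.toFixed on a boxed number
// created by new Number(1.5) takes two passes through the loop above: the
// first unwraps the JSValue to the HeapNumber 1.5, and the second matches
// the HeapNumber map and exits through done_loop.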
5743 
5744 Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
5745  InstanceType instance_type,
5746  char const* method_name) {
5747  Label out(this), throw_exception(this, Label::kDeferred);
5748  VARIABLE(var_value_map, MachineRepresentation::kTagged);
5749 
5750  GotoIf(TaggedIsSmi(value), &throw_exception);
5751 
5752  // Load the instance type of the {value}.
5753  var_value_map.Bind(LoadMap(value));
5754  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());
5755 
5756  Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
5757  &throw_exception);
5758 
5759  // The {value} is not a compatible receiver for this method.
5760  BIND(&throw_exception);
5761  ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver,
5762  StringConstant(method_name), value);
5763 
5764  BIND(&out);
5765  return var_value_map.value();
5766 }
5767 
5768 Node* CodeStubAssembler::ThrowIfNotJSReceiver(Node* context, Node* value,
5769  MessageTemplate msg_template,
5770  const char* method_name) {
5771  Label out(this), throw_exception(this, Label::kDeferred);
5772  VARIABLE(var_value_map, MachineRepresentation::kTagged);
5773 
5774  GotoIf(TaggedIsSmi(value), &throw_exception);
5775 
5776  // Load the instance type of the {value}.
5777  var_value_map.Bind(LoadMap(value));
5778  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());
5779 
5780  Branch(IsJSReceiverInstanceType(value_instance_type), &out, &throw_exception);
5781 
5782  // The {value} is not a compatible receiver for this method.
5783  BIND(&throw_exception);
5784  ThrowTypeError(context, msg_template, method_name);
5785 
5786  BIND(&out);
5787  return var_value_map.value();
5788 }
5789 
5790 void CodeStubAssembler::ThrowRangeError(Node* context, MessageTemplate message,
5791  Node* arg0, Node* arg1, Node* arg2) {
5792  Node* template_index = SmiConstant(static_cast<int>(message));
5793  if (arg0 == nullptr) {
5794  CallRuntime(Runtime::kThrowRangeError, context, template_index);
5795  } else if (arg1 == nullptr) {
5796  CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0);
5797  } else if (arg2 == nullptr) {
5798  CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1);
5799  } else {
5800  CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1,
5801  arg2);
5802  }
5803  Unreachable();
5804 }
5805 
5806 void CodeStubAssembler::ThrowTypeError(Node* context, MessageTemplate message,
5807  char const* arg0, char const* arg1) {
5808  Node* arg0_node = nullptr;
5809  if (arg0) arg0_node = StringConstant(arg0);
5810  Node* arg1_node = nullptr;
5811  if (arg1) arg1_node = StringConstant(arg1);
5812  ThrowTypeError(context, message, arg0_node, arg1_node);
5813 }
5814 
5815 void CodeStubAssembler::ThrowTypeError(Node* context, MessageTemplate message,
5816  Node* arg0, Node* arg1, Node* arg2) {
5817  Node* template_index = SmiConstant(static_cast<int>(message));
5818  if (arg0 == nullptr) {
5819  CallRuntime(Runtime::kThrowTypeError, context, template_index);
5820  } else if (arg1 == nullptr) {
5821  CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0);
5822  } else if (arg2 == nullptr) {
5823  CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1);
5824  } else {
5825  CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1,
5826  arg2);
5827  }
5828  Unreachable();
5829 }
5830 
5831 TNode<BoolT> CodeStubAssembler::InstanceTypeEqual(
5832  SloppyTNode<Int32T> instance_type, int type) {
5833  return Word32Equal(instance_type, Int32Constant(type));
5834 }
5835 
5836 TNode<BoolT> CodeStubAssembler::IsDictionaryMap(SloppyTNode<Map> map) {
5837  CSA_SLOW_ASSERT(this, IsMap(map));
5838  Node* bit_field3 = LoadMapBitField3(map);
5839  return IsSetWord32<Map::IsDictionaryMapBit>(bit_field3);
5840 }
5841 
5842 TNode<BoolT> CodeStubAssembler::IsExtensibleMap(SloppyTNode<Map> map) {
5843  CSA_ASSERT(this, IsMap(map));
5844  return IsSetWord32<Map::IsExtensibleBit>(LoadMapBitField2(map));
5845 }
5846 
5847 TNode<BoolT> CodeStubAssembler::IsExtensibleNonPrototypeMap(TNode<Map> map) {
5848  int kMask = Map::IsExtensibleBit::kMask | Map::IsPrototypeMapBit::kMask;
5849  int kExpected = Map::IsExtensibleBit::kMask;
5850  return Word32Equal(Word32And(LoadMapBitField2(map), Int32Constant(kMask)),
5851  Int32Constant(kExpected));
5852 }
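
// Worked example (illustrative): kMask selects both IsExtensibleBit and
// IsPrototypeMapBit while kExpected has only the extensible bit set, so the
// single masked compare above is equivalent to
//   IsSetWord32<Map::IsExtensibleBit>(bf2) &&
//   !IsSetWord32<Map::IsPrototypeMapBit>(bf2)
// evaluated with one Word32And and one Word32Equal.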
5853 
5854 TNode<BoolT> CodeStubAssembler::IsCallableMap(SloppyTNode<Map> map) {
5855  CSA_ASSERT(this, IsMap(map));
5856  return IsSetWord32<Map::IsCallableBit>(LoadMapBitField(map));
5857 }
5858 
5859 TNode<BoolT> CodeStubAssembler::IsDeprecatedMap(SloppyTNode<Map> map) {
5860  CSA_ASSERT(this, IsMap(map));
5861  return IsSetWord32<Map::IsDeprecatedBit>(LoadMapBitField3(map));
5862 }
5863 
5864 TNode<BoolT> CodeStubAssembler::IsUndetectableMap(SloppyTNode<Map> map) {
5865  CSA_ASSERT(this, IsMap(map));
5866  return IsSetWord32<Map::IsUndetectableBit>(LoadMapBitField(map));
5867 }
5868 
5869 TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
5870  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5871  Node* cell = LoadRoot(RootIndex::kNoElementsProtector);
5872  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5873  return WordEqual(cell_value, invalid);
5874 }
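
// Illustrative note (assuming the usual protector protocol): each protector
// cell starts at Isolate::kProtectorValid and is flipped to
// kProtectorInvalid when its invariant breaks. For the no-elements
// protector that happens, e.g., when JavaScript installs an indexed
// property on a relevant prototype:
//   Array.prototype[0] = "surprise";
// after which this predicate answers true and fast paths must bail out.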
5875 
5876 TNode<BoolT> CodeStubAssembler::IsArrayIteratorProtectorCellInvalid() {
5877  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5878  Node* cell = LoadRoot(RootIndex::kArrayIteratorProtector);
5879  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5880  return WordEqual(cell_value, invalid);
5881 }
5882 
5883 TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
5884  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5885  Node* cell = LoadRoot(RootIndex::kPromiseResolveProtector);
5886  Node* cell_value = LoadObjectField(cell, Cell::kValueOffset);
5887  return WordEqual(cell_value, invalid);
5888 }
5889 
5890 TNode<BoolT> CodeStubAssembler::IsPromiseThenProtectorCellInvalid() {
5891  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5892  Node* cell = LoadRoot(RootIndex::kPromiseThenProtector);
5893  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5894  return WordEqual(cell_value, invalid);
5895 }
5896 
5897 TNode<BoolT> CodeStubAssembler::IsArraySpeciesProtectorCellInvalid() {
5898  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5899  Node* cell = LoadRoot(RootIndex::kArraySpeciesProtector);
5900  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5901  return WordEqual(cell_value, invalid);
5902 }
5903 
5904 TNode<BoolT> CodeStubAssembler::IsTypedArraySpeciesProtectorCellInvalid() {
5905  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5906  Node* cell = LoadRoot(RootIndex::kTypedArraySpeciesProtector);
5907  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5908  return WordEqual(cell_value, invalid);
5909 }
5910 
5911 TNode<BoolT> CodeStubAssembler::IsRegExpSpeciesProtectorCellInvalid() {
5912  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5913  Node* cell = LoadRoot(RootIndex::kRegExpSpeciesProtector);
5914  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5915  return WordEqual(cell_value, invalid);
5916 }
5917 
5918 TNode<BoolT> CodeStubAssembler::IsPromiseSpeciesProtectorCellInvalid() {
5919  Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5920  Node* cell = LoadRoot(RootIndex::kPromiseSpeciesProtector);
5921  Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5922  return WordEqual(cell_value, invalid);
5923 }
5924 
5925 TNode<BoolT> CodeStubAssembler::IsPrototypeInitialArrayPrototype(
5926  SloppyTNode<Context> context, SloppyTNode<Map> map) {
5927  Node* const native_context = LoadNativeContext(context);
5928  Node* const initial_array_prototype = LoadContextElement(
5929  native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
5930  Node* proto = LoadMapPrototype(map);
5931  return WordEqual(proto, initial_array_prototype);
5932 }
5933 
5934 TNode<BoolT> CodeStubAssembler::IsPrototypeTypedArrayPrototype(
5935  SloppyTNode<Context> context, SloppyTNode<Map> map) {
5936  TNode<Context> const native_context = LoadNativeContext(context);
5937  TNode<Object> const typed_array_prototype =
5938  LoadContextElement(native_context, Context::TYPED_ARRAY_PROTOTYPE_INDEX);
5939  TNode<HeapObject> proto = LoadMapPrototype(map);
5940  TNode<HeapObject> proto_of_proto = Select<HeapObject>(
5941  IsJSObject(proto), [=] { return LoadMapPrototype(LoadMap(proto)); },
5942  [=] { return NullConstant(); });
5943  return WordEqual(proto_of_proto, typed_array_prototype);
5944 }
5945 
5946 TNode<BoolT> CodeStubAssembler::IsFastAliasedArgumentsMap(
5947  TNode<Context> context, TNode<Map> map) {
5948  TNode<Context> const native_context = LoadNativeContext(context);
5949  TNode<Object> const arguments_map = LoadContextElement(
5950  native_context, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
5951  return WordEqual(arguments_map, map);
5952 }
5953 
5954 TNode<BoolT> CodeStubAssembler::IsSlowAliasedArgumentsMap(
5955  TNode<Context> context, TNode<Map> map) {
5956  TNode<Context> const native_context = LoadNativeContext(context);
5957  TNode<Object> const arguments_map = LoadContextElement(
5958  native_context, Context::SLOW_ALIASED_ARGUMENTS_MAP_INDEX);
5959  return WordEqual(arguments_map, map);
5960 }
5961 
5962 TNode<BoolT> CodeStubAssembler::IsSloppyArgumentsMap(TNode<Context> context,
5963  TNode<Map> map) {
5964  TNode<Context> const native_context = LoadNativeContext(context);
5965  TNode<Object> const arguments_map =
5966  LoadContextElement(native_context, Context::SLOPPY_ARGUMENTS_MAP_INDEX);
5967  return WordEqual(arguments_map, map);
5968 }
5969 
5970 TNode<BoolT> CodeStubAssembler::IsStrictArgumentsMap(TNode<Context> context,
5971  TNode<Map> map) {
5972  TNode<Context> const native_context = LoadNativeContext(context);
5973  TNode<Object> const arguments_map =
5974  LoadContextElement(native_context, Context::STRICT_ARGUMENTS_MAP_INDEX);
5975  return WordEqual(arguments_map, map);
5976 }
5977 
5978 TNode<BoolT> CodeStubAssembler::TaggedIsCallable(TNode<Object> object) {
5979  return Select<BoolT>(
5980  TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
5981  [=] {
5982  return IsCallableMap(LoadMap(UncheckedCast<HeapObject>(object)));
5983  });
5984 }
5985 
5986 TNode<BoolT> CodeStubAssembler::IsCallable(SloppyTNode<HeapObject> object) {
5987  return IsCallableMap(LoadMap(object));
5988 }
5989 
5990 TNode<BoolT> CodeStubAssembler::IsCell(SloppyTNode<HeapObject> object) {
5991  return WordEqual(LoadMap(object), LoadRoot(RootIndex::kCellMap));
5992 }
5993 
5994 TNode<BoolT> CodeStubAssembler::IsCode(SloppyTNode<HeapObject> object) {
5995  return HasInstanceType(object, CODE_TYPE);
5996 }
5997 
5998 TNode<BoolT> CodeStubAssembler::IsConstructorMap(SloppyTNode<Map> map) {
5999  CSA_ASSERT(this, IsMap(map));
6000  return IsSetWord32<Map::IsConstructorBit>(LoadMapBitField(map));
6001 }
6002 
6003 TNode<BoolT> CodeStubAssembler::IsConstructor(SloppyTNode<HeapObject> object) {
6004  return IsConstructorMap(LoadMap(object));
6005 }
6006 
6007 TNode<BoolT> CodeStubAssembler::IsFunctionWithPrototypeSlotMap(
6008  SloppyTNode<Map> map) {
6009  CSA_ASSERT(this, IsMap(map));
6010  return IsSetWord32<Map::HasPrototypeSlotBit>(LoadMapBitField(map));
6011 }
6012 
6013 TNode<BoolT> CodeStubAssembler::IsSpecialReceiverInstanceType(
6014  TNode<Int32T> instance_type) {
6015  STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
6016  return Int32LessThanOrEqual(instance_type,
6017  Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
6018 }
6019 
6020 TNode<BoolT> CodeStubAssembler::IsCustomElementsReceiverInstanceType(
6021  TNode<Int32T> instance_type) {
6022  return Int32LessThanOrEqual(instance_type,
6023  Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER));
6024 }
6025 
6026 TNode<BoolT> CodeStubAssembler::IsStringInstanceType(
6027  SloppyTNode<Int32T> instance_type) {
6028  STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
6029  return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
6030 }
6031 
6032 TNode<BoolT> CodeStubAssembler::IsOneByteStringInstanceType(
6033  SloppyTNode<Int32T> instance_type) {
6034  CSA_ASSERT(this, IsStringInstanceType(instance_type));
6035  return Word32Equal(
6036  Word32And(instance_type, Int32Constant(kStringEncodingMask)),
6037  Int32Constant(kOneByteStringTag));
6038 }
6039 
6040 TNode<BoolT> CodeStubAssembler::HasOnlyOneByteChars(
6041  TNode<Int32T> instance_type) {
6042  CSA_ASSERT(this, IsStringInstanceType(instance_type));
6043  return IsSetWord32(instance_type, kStringEncodingMask | kOneByteDataHintMask);
6044 }
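
// Illustrative note: IsSetWord32 with an OR-ed mask succeeds when any of the
// selected bits is set, so this accepts two cases (assuming, as in this
// version, that the one-byte tags are the set-bit states): the string is
// one-byte encoded, or it is a two-byte string whose contents are flagged
// by the one-byte data hint as fitting in one byte.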
6045 
6046 TNode<BoolT> CodeStubAssembler::IsSequentialStringInstanceType(
6047  SloppyTNode<Int32T> instance_type) {
6048  CSA_ASSERT(this, IsStringInstanceType(instance_type));
6049  return Word32Equal(
6050  Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6051  Int32Constant(kSeqStringTag));
6052 }
6053 
6054 TNode<BoolT> CodeStubAssembler::IsConsStringInstanceType(
6055  SloppyTNode<Int32T> instance_type) {
6056  CSA_ASSERT(this, IsStringInstanceType(instance_type));
6057  return Word32Equal(
6058  Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6059  Int32Constant(kConsStringTag));
6060 }
6061 
6062 TNode<BoolT> CodeStubAssembler::IsIndirectStringInstanceType(
6063  SloppyTNode<Int32T> instance_type) {
6064  CSA_ASSERT(this, IsStringInstanceType(instance_type));
6065  STATIC_ASSERT(kIsIndirectStringMask == 0x1);
6066  STATIC_ASSERT(kIsIndirectStringTag == 0x1);
6067  return UncheckedCast<BoolT>(
6068  Word32And(instance_type, Int32Constant(kIsIndirectStringMask)));
6069 }
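
// Illustrative note: in this version's representation tags the indirect
// kinds (cons 0x1, sliced 0x3, thin 0x5) all have bit 0 set, while
// sequential (0x0) and external (0x2) strings do not, so the STATIC_ASSERTed
// mask of 0x1 classifies a string with a single Word32And and no compare.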
6070 
6071 TNode<BoolT> CodeStubAssembler::IsExternalStringInstanceType(
6072  SloppyTNode<Int32T> instance_type) {
6073  CSA_ASSERT(this, IsStringInstanceType(instance_type));
6074  return Word32Equal(
6075  Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6076  Int32Constant(kExternalStringTag));
6077 }
6078 
6079 TNode<BoolT> CodeStubAssembler::IsUncachedExternalStringInstanceType(
6080  SloppyTNode<Int32T> instance_type) {
6081  CSA_ASSERT(this, IsStringInstanceType(instance_type));
6082  STATIC_ASSERT(kUncachedExternalStringTag != 0);
6083  return IsSetWord32(instance_type, kUncachedExternalStringMask);
6084 }
6085 
6086 TNode<BoolT> CodeStubAssembler::IsJSReceiverInstanceType(
6087  SloppyTNode<Int32T> instance_type) {
6088  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
6089  return Int32GreaterThanOrEqual(instance_type,
6090  Int32Constant(FIRST_JS_RECEIVER_TYPE));
6091 }
6092 
6093 TNode<BoolT> CodeStubAssembler::IsJSReceiverMap(SloppyTNode<Map> map) {
6094  return IsJSReceiverInstanceType(LoadMapInstanceType(map));
6095 }
6096 
6097 TNode<BoolT> CodeStubAssembler::IsJSReceiver(SloppyTNode<HeapObject> object) {
6098  return IsJSReceiverMap(LoadMap(object));
6099 }
6100 
6101 TNode<BoolT> CodeStubAssembler::IsNullOrJSReceiver(
6102  SloppyTNode<HeapObject> object) {
6103  return UncheckedCast<BoolT>(Word32Or(IsJSReceiver(object), IsNull(object)));
6104 }
6105 
6106 TNode<BoolT> CodeStubAssembler::IsNullOrUndefined(SloppyTNode<Object> value) {
6107  return UncheckedCast<BoolT>(Word32Or(IsUndefined(value), IsNull(value)));
6108 }
6109 
6110 TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyInstanceType(
6111  SloppyTNode<Int32T> instance_type) {
6112  return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
6113 }
6114 
6115 TNode<BoolT> CodeStubAssembler::IsJSObjectInstanceType(
6116  SloppyTNode<Int32T> instance_type) {
6117  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
6118  return Int32GreaterThanOrEqual(instance_type,
6119  Int32Constant(FIRST_JS_OBJECT_TYPE));
6120 }
6121 
6122 TNode<BoolT> CodeStubAssembler::IsJSObjectMap(SloppyTNode<Map> map) {
6123  CSA_ASSERT(this, IsMap(map));
6124  return IsJSObjectInstanceType(LoadMapInstanceType(map));
6125 }
6126 
6127 TNode<BoolT> CodeStubAssembler::IsJSObject(SloppyTNode<HeapObject> object) {
6128  return IsJSObjectMap(LoadMap(object));
6129 }
6130 
6131 TNode<BoolT> CodeStubAssembler::IsJSPromiseMap(SloppyTNode<Map> map) {
6132  CSA_ASSERT(this, IsMap(map));
6133  return InstanceTypeEqual(LoadMapInstanceType(map), JS_PROMISE_TYPE);
6134 }
6135 
6136 TNode<BoolT> CodeStubAssembler::IsJSPromise(SloppyTNode<HeapObject> object) {
6137  return IsJSPromiseMap(LoadMap(object));
6138 }
6139 
6140 TNode<BoolT> CodeStubAssembler::IsJSProxy(SloppyTNode<HeapObject> object) {
6141  return HasInstanceType(object, JS_PROXY_TYPE);
6142 }
6143 
6144 TNode<BoolT> CodeStubAssembler::IsJSGlobalProxy(
6145  SloppyTNode<HeapObject> object) {
6146  return HasInstanceType(object, JS_GLOBAL_PROXY_TYPE);
6147 }
6148 
6149 TNode<BoolT> CodeStubAssembler::IsMap(SloppyTNode<HeapObject> map) {
6150  return IsMetaMap(LoadMap(map));
6151 }
6152 
6153 TNode<BoolT> CodeStubAssembler::IsJSValueInstanceType(
6154  SloppyTNode<Int32T> instance_type) {
6155  return InstanceTypeEqual(instance_type, JS_VALUE_TYPE);
6156 }
6157 
6158 TNode<BoolT> CodeStubAssembler::IsJSValue(SloppyTNode<HeapObject> object) {
6159  return IsJSValueMap(LoadMap(object));
6160 }
6161 
6162 TNode<BoolT> CodeStubAssembler::IsJSValueMap(SloppyTNode<Map> map) {
6163  return IsJSValueInstanceType(LoadMapInstanceType(map));
6164 }
6165 
6166 TNode<BoolT> CodeStubAssembler::IsJSArrayInstanceType(
6167  SloppyTNode<Int32T> instance_type) {
6168  return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
6169 }
6170 
6171 TNode<BoolT> CodeStubAssembler::IsJSArray(SloppyTNode<HeapObject> object) {
6172  return IsJSArrayMap(LoadMap(object));
6173 }
6174 
6175 TNode<BoolT> CodeStubAssembler::IsJSArrayMap(SloppyTNode<Map> map) {
6176  return IsJSArrayInstanceType(LoadMapInstanceType(map));
6177 }
6178 
6179 TNode<BoolT> CodeStubAssembler::IsJSArrayIterator(
6180  SloppyTNode<HeapObject> object) {
6181  return HasInstanceType(object, JS_ARRAY_ITERATOR_TYPE);
6182 }
6183 
6184 TNode<BoolT> CodeStubAssembler::IsJSAsyncGeneratorObject(
6185  SloppyTNode<HeapObject> object) {
6186  return HasInstanceType(object, JS_ASYNC_GENERATOR_OBJECT_TYPE);
6187 }
6188 
6189 TNode<BoolT> CodeStubAssembler::IsContext(SloppyTNode<HeapObject> object) {
6190  Node* instance_type = LoadInstanceType(object);
6191  return UncheckedCast<BoolT>(Word32And(
6192  Int32GreaterThanOrEqual(instance_type, Int32Constant(FIRST_CONTEXT_TYPE)),
6193  Int32LessThanOrEqual(instance_type, Int32Constant(LAST_CONTEXT_TYPE))));
6194 }
6195 
6196 TNode<BoolT> CodeStubAssembler::IsFixedArray(SloppyTNode<HeapObject> object) {
6197  return HasInstanceType(object, FIXED_ARRAY_TYPE);
6198 }
6199 
6200 TNode<BoolT> CodeStubAssembler::IsFixedArraySubclass(
6201  SloppyTNode<HeapObject> object) {
6202  Node* instance_type = LoadInstanceType(object);
6203  return UncheckedCast<BoolT>(
6204  Word32And(Int32GreaterThanOrEqual(instance_type,
6205  Int32Constant(FIRST_FIXED_ARRAY_TYPE)),
6206  Int32LessThanOrEqual(instance_type,
6207  Int32Constant(LAST_FIXED_ARRAY_TYPE))));
6208 }
6209 
6210 TNode<BoolT> CodeStubAssembler::IsNotWeakFixedArraySubclass(
6211  SloppyTNode<HeapObject> object) {
6212  Node* instance_type = LoadInstanceType(object);
6213  return UncheckedCast<BoolT>(Word32Or(
6214  Int32LessThan(instance_type, Int32Constant(FIRST_WEAK_FIXED_ARRAY_TYPE)),
6215  Int32GreaterThan(instance_type,
6216  Int32Constant(LAST_WEAK_FIXED_ARRAY_TYPE))));
6217 }
6218 
6219 TNode<BoolT> CodeStubAssembler::IsPromiseCapability(
6220  SloppyTNode<HeapObject> object) {
6221  return HasInstanceType(object, PROMISE_CAPABILITY_TYPE);
6222 }
6223 
6224 TNode<BoolT> CodeStubAssembler::IsPropertyArray(
6225  SloppyTNode<HeapObject> object) {
6226  return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
6227 }
6228 
6229 // This complicated check is due to elements oddities. If a smi array is empty
6230 // after Array.p.shift, it is replaced by the empty array constant. If it is
6231 // later filled with a double element, we try to grow it but pass in a double
6232 // elements kind. Usually this would cause a size mismatch (since the source
6233 // fixed array has HOLEY_ELEMENTS and destination has
6234 // HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
6235 // source array is empty.
6236 // TODO(jgruber): It might be worth creating an empty_double_array constant to
6237 // simplify this case.
6238 TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKindOrEmpty(
6239  SloppyTNode<HeapObject> object, ElementsKind kind) {
6240  Label out(this);
6241  TVARIABLE(BoolT, var_result, Int32TrueConstant());
6242 
6243  GotoIf(IsFixedArrayWithKind(object, kind), &out);
6244 
6245  TNode<Smi> const length = LoadFixedArrayBaseLength(CAST(object));
6246  GotoIf(SmiEqual(length, SmiConstant(0)), &out);
6247 
6248  var_result = Int32FalseConstant();
6249  Goto(&out);
6250 
6251  BIND(&out);
6252  return var_result.value();
6253 }
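
// Sketch of the scenario described above (hypothetical JS):
//   const a = [1];   // PACKED_SMI_ELEMENTS, FixedArray backing store
//   a.shift();       // backing store becomes the shared empty FixedArray
//   a.push(0.5);     // grow request arrives with a double elements kind
// The empty FixedArray is then the copy source for a HOLEY_DOUBLE_ELEMENTS
// destination, which is why a length of zero is accepted above.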
6254 
6255 TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKind(
6256  SloppyTNode<HeapObject> object, ElementsKind kind) {
6257  if (IsDoubleElementsKind(kind)) {
6258  return IsFixedDoubleArray(object);
6259  } else {
6260  DCHECK(IsSmiOrObjectElementsKind(kind));
6261  return IsFixedArraySubclass(object);
6262  }
6263 }
6264 
6265 TNode<BoolT> CodeStubAssembler::IsBoolean(SloppyTNode<HeapObject> object) {
6266  return IsBooleanMap(LoadMap(object));
6267 }
6268 
6269 TNode<BoolT> CodeStubAssembler::IsPropertyCell(SloppyTNode<HeapObject> object) {
6270  return IsPropertyCellMap(LoadMap(object));
6271 }
6272 
6273 TNode<BoolT> CodeStubAssembler::IsAccessorInfo(SloppyTNode<HeapObject> object) {
6274  return IsAccessorInfoMap(LoadMap(object));
6275 }
6276 
6277 TNode<BoolT> CodeStubAssembler::IsAccessorPair(SloppyTNode<HeapObject> object) {
6278  return IsAccessorPairMap(LoadMap(object));
6279 }
6280 
6281 TNode<BoolT> CodeStubAssembler::IsAllocationSite(
6282  SloppyTNode<HeapObject> object) {
6283  return IsAllocationSiteInstanceType(LoadInstanceType(object));
6284 }
6285 
6286 TNode<BoolT> CodeStubAssembler::IsAnyHeapNumber(
6287  SloppyTNode<HeapObject> object) {
6288  return UncheckedCast<BoolT>(
6289  Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object)));
6290 }
6291 
6292 TNode<BoolT> CodeStubAssembler::IsHeapNumber(SloppyTNode<HeapObject> object) {
6293  return IsHeapNumberMap(LoadMap(object));
6294 }
6295 
6296 TNode<BoolT> CodeStubAssembler::IsHeapNumberInstanceType(
6297  SloppyTNode<Int32T> instance_type) {
6298  return InstanceTypeEqual(instance_type, HEAP_NUMBER_TYPE);
6299 }
6300 
6301 TNode<BoolT> CodeStubAssembler::IsOddball(SloppyTNode<HeapObject> object) {
6302  return IsOddballInstanceType(LoadInstanceType(object));
6303 }
6304 
6305 TNode<BoolT> CodeStubAssembler::IsOddballInstanceType(
6306  SloppyTNode<Int32T> instance_type) {
6307  return InstanceTypeEqual(instance_type, ODDBALL_TYPE);
6308 }
6309 
6310 TNode<BoolT> CodeStubAssembler::IsMutableHeapNumber(
6311  SloppyTNode<HeapObject> object) {
6312  return IsMutableHeapNumberMap(LoadMap(object));
6313 }
6314 
6315 TNode<BoolT> CodeStubAssembler::IsFeedbackCell(SloppyTNode<HeapObject> object) {
6316  return HasInstanceType(object, FEEDBACK_CELL_TYPE);
6317 }
6318 
6319 TNode<BoolT> CodeStubAssembler::IsFeedbackVector(
6320  SloppyTNode<HeapObject> object) {
6321  return IsFeedbackVectorMap(LoadMap(object));
6322 }
6323 
6324 TNode<BoolT> CodeStubAssembler::IsName(SloppyTNode<HeapObject> object) {
6325  return IsNameInstanceType(LoadInstanceType(object));
6326 }
6327 
6328 TNode<BoolT> CodeStubAssembler::IsNameInstanceType(
6329  SloppyTNode<Int32T> instance_type) {
6330  return Int32LessThanOrEqual(instance_type, Int32Constant(LAST_NAME_TYPE));
6331 }
6332 
6333 TNode<BoolT> CodeStubAssembler::IsString(SloppyTNode<HeapObject> object) {
6334  return IsStringInstanceType(LoadInstanceType(object));
6335 }
6336 
6337 TNode<BoolT> CodeStubAssembler::IsSymbolInstanceType(
6338  SloppyTNode<Int32T> instance_type) {
6339  return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
6340 }
6341 
6342 TNode<BoolT> CodeStubAssembler::IsSymbol(SloppyTNode<HeapObject> object) {
6343  return IsSymbolMap(LoadMap(object));
6344 }
6345 
6346 TNode<BoolT> CodeStubAssembler::IsBigIntInstanceType(
6347  SloppyTNode<Int32T> instance_type) {
6348  return InstanceTypeEqual(instance_type, BIGINT_TYPE);
6349 }
6350 
6351 TNode<BoolT> CodeStubAssembler::IsBigInt(SloppyTNode<HeapObject> object) {
6352  return IsBigIntInstanceType(LoadInstanceType(object));
6353 }
6354 
6355 TNode<BoolT> CodeStubAssembler::IsPrimitiveInstanceType(
6356  SloppyTNode<Int32T> instance_type) {
6357  return Int32LessThanOrEqual(instance_type,
6358  Int32Constant(LAST_PRIMITIVE_TYPE));
6359 }
6360 
6361 TNode<BoolT> CodeStubAssembler::IsPrivateSymbol(
6362  SloppyTNode<HeapObject> object) {
6363  return Select<BoolT>(IsSymbol(object),
6364  [=] {
6365  TNode<Symbol> symbol = CAST(object);
6366  TNode<Uint32T> flags = LoadObjectField<Uint32T>(
6367  symbol, Symbol::kFlagsOffset);
6368  return IsSetWord32<Symbol::IsPrivateBit>(flags);
6369  },
6370  [=] { return Int32FalseConstant(); });
6371 }
6372 
6373 TNode<BoolT> CodeStubAssembler::IsNativeContext(
6374  SloppyTNode<HeapObject> object) {
6375  return WordEqual(LoadMap(object), LoadRoot(RootIndex::kNativeContextMap));
6376 }
6377 
6378 TNode<BoolT> CodeStubAssembler::IsFixedDoubleArray(
6379  SloppyTNode<HeapObject> object) {
6380  return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
6381 }
6382 
6383 TNode<BoolT> CodeStubAssembler::IsHashTable(SloppyTNode<HeapObject> object) {
6384  Node* instance_type = LoadInstanceType(object);
6385  return UncheckedCast<BoolT>(
6386  Word32And(Int32GreaterThanOrEqual(instance_type,
6387  Int32Constant(FIRST_HASH_TABLE_TYPE)),
6388  Int32LessThanOrEqual(instance_type,
6389  Int32Constant(LAST_HASH_TABLE_TYPE))));
6390 }
6391 
6392 TNode<BoolT> CodeStubAssembler::IsEphemeronHashTable(
6393  SloppyTNode<HeapObject> object) {
6394  return HasInstanceType(object, EPHEMERON_HASH_TABLE_TYPE);
6395 }
6396 
6397 TNode<BoolT> CodeStubAssembler::IsNameDictionary(
6398  SloppyTNode<HeapObject> object) {
6399  return HasInstanceType(object, NAME_DICTIONARY_TYPE);
6400 }
6401 
6402 TNode<BoolT> CodeStubAssembler::IsGlobalDictionary(
6403  SloppyTNode<HeapObject> object) {
6404  return HasInstanceType(object, GLOBAL_DICTIONARY_TYPE);
6405 }
6406 
6407 TNode<BoolT> CodeStubAssembler::IsNumberDictionary(
6408  SloppyTNode<HeapObject> object) {
6409  return HasInstanceType(object, NUMBER_DICTIONARY_TYPE);
6410 }
6411 
6412 TNode<BoolT> CodeStubAssembler::IsJSGeneratorObject(
6413  SloppyTNode<HeapObject> object) {
6414  return HasInstanceType(object, JS_GENERATOR_OBJECT_TYPE);
6415 }
6416 
6417 TNode<BoolT> CodeStubAssembler::IsJSFunctionInstanceType(
6418  SloppyTNode<Int32T> instance_type) {
6419  return InstanceTypeEqual(instance_type, JS_FUNCTION_TYPE);
6420 }
6421 
6422 TNode<BoolT> CodeStubAssembler::IsAllocationSiteInstanceType(
6423  SloppyTNode<Int32T> instance_type) {
6424  return InstanceTypeEqual(instance_type, ALLOCATION_SITE_TYPE);
6425 }
6426 
6427 TNode<BoolT> CodeStubAssembler::IsJSFunction(SloppyTNode<HeapObject> object) {
6428  return IsJSFunctionMap(LoadMap(object));
6429 }
6430 
6431 TNode<BoolT> CodeStubAssembler::IsJSFunctionMap(SloppyTNode<Map> map) {
6432  return IsJSFunctionInstanceType(LoadMapInstanceType(map));
6433 }
6434 
6435 TNode<BoolT> CodeStubAssembler::IsJSTypedArray(SloppyTNode<HeapObject> object) {
6436  return HasInstanceType(object, JS_TYPED_ARRAY_TYPE);
6437 }
6438 
6439 TNode<BoolT> CodeStubAssembler::IsJSArrayBuffer(
6440  SloppyTNode<HeapObject> object) {
6441  return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
6442 }
6443 
6444 TNode<BoolT> CodeStubAssembler::IsJSDataView(TNode<HeapObject> object) {
6445  return HasInstanceType(object, JS_DATA_VIEW_TYPE);
6446 }
6447 
6448 TNode<BoolT> CodeStubAssembler::IsFixedTypedArray(
6449  SloppyTNode<HeapObject> object) {
6450  TNode<Int32T> instance_type = LoadInstanceType(object);
6451  return UncheckedCast<BoolT>(Word32And(
6452  Int32GreaterThanOrEqual(instance_type,
6453  Int32Constant(FIRST_FIXED_TYPED_ARRAY_TYPE)),
6454  Int32LessThanOrEqual(instance_type,
6455  Int32Constant(LAST_FIXED_TYPED_ARRAY_TYPE))));
6456 }
6457 
6458 TNode<BoolT> CodeStubAssembler::IsJSRegExp(SloppyTNode<HeapObject> object) {
6459  return HasInstanceType(object, JS_REGEXP_TYPE);
6460 }
6461 
6462 TNode<BoolT> CodeStubAssembler::IsNumber(SloppyTNode<Object> object) {
6463  return Select<BoolT>(TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
6464  [=] { return IsHeapNumber(CAST(object)); });
6465 }
6466 
6467 TNode<BoolT> CodeStubAssembler::IsNumeric(SloppyTNode<Object> object) {
6468  return Select<BoolT>(
6469  TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
6470  [=] {
6471  return UncheckedCast<BoolT>(
6472  Word32Or(IsHeapNumber(CAST(object)), IsBigInt(CAST(object))));
6473  });
6474 }
6475 
6476 TNode<BoolT> CodeStubAssembler::IsNumberNormalized(SloppyTNode<Number> number) {
6477  TVARIABLE(BoolT, var_result, Int32TrueConstant());
6478  Label out(this);
6479 
6480  GotoIf(TaggedIsSmi(number), &out);
6481 
6482  TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
6483  TNode<Float64T> smi_min =
6484  Float64Constant(static_cast<double>(Smi::kMinValue));
6485  TNode<Float64T> smi_max =
6486  Float64Constant(static_cast<double>(Smi::kMaxValue));
6487 
6488  GotoIf(Float64LessThan(value, smi_min), &out);
6489  GotoIf(Float64GreaterThan(value, smi_max), &out);
6490  GotoIfNot(Float64Equal(value, value), &out); // NaN.
6491 
6492  var_result = Int32FalseConstant();
6493  Goto(&out);
6494 
6495  BIND(&out);
6496  return var_result.value();
6497 }
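
// Illustrative note: this is a conservative check. It answers true only for
// representations that are certainly canonical (Smis, HeapNumbers outside
// the Smi range, and NaN, which fails the self-equality test), and it
// answers false for an in-range HeapNumber such as 3.0, which should have
// been a Smi, but also for 3.5, which can only live in a HeapNumber.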
6498 
6499 TNode<BoolT> CodeStubAssembler::IsNumberPositive(SloppyTNode<Number> number) {
6500  return Select<BoolT>(TaggedIsSmi(number),
6501  [=] { return TaggedIsPositiveSmi(number); },
6502  [=] { return IsHeapNumberPositive(CAST(number)); });
6503 }
6504 
6505 // TODO(cbruni): Use TNode<HeapNumber> instead of a custom name.
6506 TNode<BoolT> CodeStubAssembler::IsHeapNumberPositive(TNode<HeapNumber> number) {
6507  TNode<Float64T> value = LoadHeapNumberValue(number);
6508  TNode<Float64T> float_zero = Float64Constant(0.);
6509  return Float64GreaterThanOrEqual(value, float_zero);
6510 }
6511 
6512 TNode<BoolT> CodeStubAssembler::IsNumberNonNegativeSafeInteger(
6513  TNode<Number> number) {
6514  return Select<BoolT>(
6515  // TODO(cbruni): Introduce TaggedIsNonNegativeSmi to avoid confusion.
6516  TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
6517  [=] {
6518  TNode<HeapNumber> heap_number = CAST(number);
6519  return Select<BoolT>(IsInteger(heap_number),
6520  [=] { return IsHeapNumberPositive(heap_number); },
6521  [=] { return Int32FalseConstant(); });
6522  });
6523 }
6524 
6525 TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<Object> number) {
6526  return Select<BoolT>(
6527  TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6528  [=] {
6529  return Select<BoolT>(
6530  IsHeapNumber(CAST(number)),
6531  [=] { return IsSafeInteger(UncheckedCast<HeapNumber>(number)); },
6532  [=] { return Int32FalseConstant(); });
6533  });
6534 }
6535 
6536 TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<HeapNumber> number) {
6537  // Load the actual value of {number}.
6538  TNode<Float64T> number_value = LoadHeapNumberValue(number);
6539  // Truncate the value of {number} to an integer (or an infinity).
6540  TNode<Float64T> integer = Float64Trunc(number_value);
6541 
6542  return Select<BoolT>(
6543  // Check if {number}'s value matches the integer (ruling out the
6544  // infinities).
6545  Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0)),
6546  [=] {
6547  // Check if the {integer} value is in safe integer range.
6548  return Float64LessThanOrEqual(Float64Abs(integer),
6549  Float64Constant(kMaxSafeInteger));
6550  },
6551  [=] { return Int32FalseConstant(); });
6552 }
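
// Worked example (illustrative): kMaxSafeInteger is 2**53 - 1. For a
// HeapNumber holding 2**53 the truncation test passes (the value is
// integral) but Float64Abs(integer) exceeds kMaxSafeInteger, so the result
// is false; for 2.5 the subtraction leaves 0.5 and the first branch already
// selects false.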
6553 
6554 TNode<BoolT> CodeStubAssembler::IsInteger(TNode<Object> number) {
6555  return Select<BoolT>(
6556  TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6557  [=] {
6558  return Select<BoolT>(
6559  IsHeapNumber(CAST(number)),
6560  [=] { return IsInteger(UncheckedCast<HeapNumber>(number)); },
6561  [=] { return Int32FalseConstant(); });
6562  });
6563 }
6564 
6565 TNode<BoolT> CodeStubAssembler::IsInteger(TNode<HeapNumber> number) {
6566  TNode<Float64T> number_value = LoadHeapNumberValue(number);
6567  // Truncate the value of {number} to an integer (or an infinity).
6568  TNode<Float64T> integer = Float64Trunc(number_value);
6569  // Check if {number}'s value matches the integer (ruling out the infinities).
6570  return Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0));
6571 }
6572 
6573 TNode<BoolT> CodeStubAssembler::IsHeapNumberUint32(TNode<HeapNumber> number) {
6574  // Check that the HeapNumber is a valid uint32.
6575  return Select<BoolT>(
6576  IsHeapNumberPositive(number),
6577  [=] {
6578  TNode<Float64T> value = LoadHeapNumberValue(number);
6579  TNode<Uint32T> int_value = Unsigned(TruncateFloat64ToWord32(value));
6580  return Float64Equal(value, ChangeUint32ToFloat64(int_value));
6581  },
6582  [=] { return Int32FalseConstant(); });
6583 }
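
// Illustrative examples: 4294967295.0 survives the
// TruncateFloat64ToWord32 / ChangeUint32ToFloat64 round trip and passes;
// 4294967296.0 truncates to 0 and fails the Float64Equal; 1.5 truncates to
// 1 and fails likewise; negative values are rejected up front by
// IsHeapNumberPositive.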
6584 
6585 TNode<BoolT> CodeStubAssembler::IsNumberArrayIndex(TNode<Number> number) {
6586  return Select<BoolT>(TaggedIsSmi(number),
6587  [=] { return TaggedIsPositiveSmi(number); },
6588  [=] { return IsHeapNumberUint32(CAST(number)); });
6589 }
6590 
6591 Node* CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(Node* element_count,
6592  int base_size,
6593  ParameterMode mode) {
6594  int max_newspace_elements =
6595  (kMaxRegularHeapObjectSize - base_size) / kPointerSize;
6596  return IntPtrOrSmiGreaterThan(
6597  element_count, IntPtrOrSmiConstant(max_newspace_elements, mode), mode);
6598 }
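
// Illustrative arithmetic (hypothetical constants): the element threshold
// is computed once at stub-compile time, so with, say,
// kMaxRegularHeapObjectSize = 507136, base_size = 16 and kPointerSize = 8,
// the emitted code is a single comparison against
// (507136 - 16) / 8 = 63390 elements.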
6599 
6600 TNode<Int32T> CodeStubAssembler::StringCharCodeAt(SloppyTNode<String> string,
6601  SloppyTNode<IntPtrT> index) {
6602  CSA_ASSERT(this, IsString(string));
6603 
6604  CSA_ASSERT(this, IntPtrGreaterThanOrEqual(index, IntPtrConstant(0)));
6605  CSA_ASSERT(this, IntPtrLessThan(index, LoadStringLengthAsWord(string)));
6606 
6607  TVARIABLE(Int32T, var_result);
6608 
6609  Label return_result(this), if_runtime(this, Label::kDeferred),
6610  if_stringistwobyte(this), if_stringisonebyte(this);
6611 
6612  ToDirectStringAssembler to_direct(state(), string);
6613  to_direct.TryToDirect(&if_runtime);
6614  Node* const offset = IntPtrAdd(index, to_direct.offset());
6615  Node* const instance_type = to_direct.instance_type();
6616 
6617  Node* const string_data = to_direct.PointerToData(&if_runtime);
6618 
6619  // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
6620  Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
6621  &if_stringistwobyte);
6622 
6623  BIND(&if_stringisonebyte);
6624  {
6625  var_result =
6626  UncheckedCast<Int32T>(Load(MachineType::Uint8(), string_data, offset));
6627  Goto(&return_result);
6628  }
6629 
6630  BIND(&if_stringistwobyte);
6631  {
6632  var_result =
6633  UncheckedCast<Int32T>(Load(MachineType::Uint16(), string_data,
6634  WordShl(offset, IntPtrConstant(1))));
6635  Goto(&return_result);
6636  }
6637 
6638  BIND(&if_runtime);
6639  {
6640  Node* result = CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(),
6641  string, SmiTag(index));
6642  var_result = SmiToInt32(result);
6643  Goto(&return_result);
6644  }
6645 
6646  BIND(&return_result);
6647  return var_result.value();
6648 }
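
// Usage sketch (illustrative): together with StringFromSingleCharCode below
// this forms the common String.prototype.charAt fast path:
//   TNode<Int32T> code = StringCharCodeAt(receiver, index);
//   TNode<String> ch = StringFromSingleCharCode(code);
// Note the two-byte load above shifts the offset left by one because
// offsets count characters while UC16 slots are two bytes wide.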
6649 
6650 TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
6651  VARIABLE(var_result, MachineRepresentation::kTagged);
6652 
6653  // Check if the {code} is a one-byte char code.
6654  Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
6655  if_done(this);
6656  Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
6657  &if_codeisonebyte, &if_codeistwobyte);
6658  BIND(&if_codeisonebyte);
6659  {
6660  // Load the isolate wide single character string cache.
6661  TNode<FixedArray> cache =
6662  CAST(LoadRoot(RootIndex::kSingleCharacterStringCache));
6663  TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));
6664 
6665  // Check if we have an entry for the {code} in the single character string
6666  // cache already.
6667  Label if_entryisundefined(this, Label::kDeferred),
6668  if_entryisnotundefined(this);
6669  Node* entry = LoadFixedArrayElement(cache, code_index);
6670  Branch(IsUndefined(entry), &if_entryisundefined, &if_entryisnotundefined);
6671 
6672  BIND(&if_entryisundefined);
6673  {
6674  // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
6675  TNode<String> result = AllocateSeqOneByteString(1);
6676  StoreNoWriteBarrier(
6677  MachineRepresentation::kWord8, result,
6678  IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
6679  StoreFixedArrayElement(cache, code_index, result);
6680  var_result.Bind(result);
6681  Goto(&if_done);
6682  }
6683 
6684  BIND(&if_entryisnotundefined);
6685  {
6686  // Return the entry from the {cache}.
6687  var_result.Bind(entry);
6688  Goto(&if_done);
6689  }
6690  }
6691 
6692  BIND(&if_codeistwobyte);
6693  {
6694  // Allocate a new SeqTwoByteString for {code}.
6695  Node* result = AllocateSeqTwoByteString(1);
6696  StoreNoWriteBarrier(
6697  MachineRepresentation::kWord16, result,
6698  IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
6699  var_result.Bind(result);
6700  Goto(&if_done);
6701  }
6702 
6703  BIND(&if_done);
6704  CSA_ASSERT(this, IsString(var_result.value()));
6705  return CAST(var_result.value());
6706 }
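
// Illustrative note: the single character string cache makes repeated
// one-byte requests allocation-free; the first request for a given code
// allocates a SeqOneByteString and stores it in the cache, and later
// requests return that entry. Codes above String::kMaxOneByteCharCode
// bypass the cache and always allocate a fresh SeqTwoByteString.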
6707 
6708 // A wrapper around CopyStringCharacters which determines the correct string
6709 // encoding, allocates a corresponding sequential string, and then copies the
6710 // given character range using CopyStringCharacters.
6711 // |from| must be a sequential string.
6712 // 0 <= |from_index| <= |from_index| + |character_count| < from.length.
6713 TNode<String> CodeStubAssembler::AllocAndCopyStringCharacters(
6714  Node* from, Node* from_instance_type, TNode<IntPtrT> from_index,
6715  TNode<IntPtrT> character_count) {
6716  Label end(this), one_byte_sequential(this), two_byte_sequential(this);
6717  TVARIABLE(String, var_result);
6718 
6719  Branch(IsOneByteStringInstanceType(from_instance_type), &one_byte_sequential,
6720  &two_byte_sequential);
6721 
6722  // The subject string is a sequential one-byte string.
6723  BIND(&one_byte_sequential);
6724  {
6725  TNode<String> result = AllocateSeqOneByteString(
6726  NoContextConstant(), Unsigned(TruncateIntPtrToInt32(character_count)));
6727  CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
6728  character_count, String::ONE_BYTE_ENCODING,
6729  String::ONE_BYTE_ENCODING);
6730  var_result = result;
6731  Goto(&end);
6732  }
6733 
6734  // The subject string is a sequential two-byte string.
6735  BIND(&two_byte_sequential);
6736  {
6737  TNode<String> result = AllocateSeqTwoByteString(
6738  NoContextConstant(), Unsigned(TruncateIntPtrToInt32(character_count)));
6739  CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
6740  character_count, String::TWO_BYTE_ENCODING,
6741  String::TWO_BYTE_ENCODING);
6742  var_result = result;
6743  Goto(&end);
6744  }
6745 
6746  BIND(&end);
6747  return var_result.value();
6748 }
6749 
6750 TNode<String> CodeStubAssembler::SubString(TNode<String> string,
6751  TNode<IntPtrT> from,
6752  TNode<IntPtrT> to) {
6753  TVARIABLE(String, var_result);
6754  ToDirectStringAssembler to_direct(state(), string);
6755  Label end(this), runtime(this);
6756 
6757  TNode<IntPtrT> const substr_length = IntPtrSub(to, from);
6758  TNode<IntPtrT> const string_length = LoadStringLengthAsWord(string);
6759 
6760  // Begin dispatching based on substring length.
6761 
6762  Label original_string_or_invalid_length(this);
6763  GotoIf(UintPtrGreaterThanOrEqual(substr_length, string_length),
6764  &original_string_or_invalid_length);
6765 
6766  // A real substring (substr_length < string_length).
6767 
6768  Label single_char(this);
6769  GotoIf(IntPtrEqual(substr_length, IntPtrConstant(1)), &single_char);
6770 
6771  // TODO(jgruber): Add an additional case for substring of length == 0?
6772 
6773  // Deal with different string types: update the index if necessary
6774  // and extract the underlying string.
6775 
6776  TNode<String> direct_string = to_direct.TryToDirect(&runtime);
6777  TNode<IntPtrT> offset = IntPtrAdd(from, to_direct.offset());
6778  Node* const instance_type = to_direct.instance_type();
6779 
6780  // The subject string can only be external or sequential string of either
6781  // encoding at this point.
6782  Label external_string(this);
6783  {
6784  if (FLAG_string_slices) {
6785  Label next(this);
6786 
6787  // Short slice. Copy instead of slicing.
6788  GotoIf(IntPtrLessThan(substr_length,
6789  IntPtrConstant(SlicedString::kMinLength)),
6790  &next);
6791 
6792  // Allocate new sliced string.
6793 
6794  Counters* counters = isolate()->counters();
6795  IncrementCounter(counters->sub_string_native(), 1);
6796 
6797  Label one_byte_slice(this), two_byte_slice(this);
6798  Branch(IsOneByteStringInstanceType(to_direct.instance_type()),
6799  &one_byte_slice, &two_byte_slice);
6800 
6801  BIND(&one_byte_slice);
6802  {
6803  var_result = AllocateSlicedOneByteString(
6804  Unsigned(TruncateIntPtrToInt32(substr_length)), direct_string,
6805  SmiTag(offset));
6806  Goto(&end);
6807  }
6808 
6809  BIND(&two_byte_slice);
6810  {
6811  var_result = AllocateSlicedTwoByteString(
6812  Unsigned(TruncateIntPtrToInt32(substr_length)), direct_string,
6813  SmiTag(offset));
6814  Goto(&end);
6815  }
6816 
6817  BIND(&next);
6818  }
6819 
6820  // The subject string can only be external or sequential string of either
6821  // encoding at this point.
6822  GotoIf(to_direct.is_external(), &external_string);
6823 
6824  var_result = AllocAndCopyStringCharacters(direct_string, instance_type,
6825  offset, substr_length);
6826 
6827  Counters* counters = isolate()->counters();
6828  IncrementCounter(counters->sub_string_native(), 1);
6829 
6830  Goto(&end);
6831  }
6832 
6833  // Handle external string.
6834  BIND(&external_string);
6835  {
6836  Node* const fake_sequential_string = to_direct.PointerToString(&runtime);
6837 
6838  var_result = AllocAndCopyStringCharacters(
6839  fake_sequential_string, instance_type, offset, substr_length);
6840 
6841  Counters* counters = isolate()->counters();
6842  IncrementCounter(counters->sub_string_native(), 1);
6843 
6844  Goto(&end);
6845  }
6846 
6847  // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
6848  BIND(&single_char);
6849  {
6850  TNode<Int32T> char_code = StringCharCodeAt(string, from);
6851  var_result = StringFromSingleCharCode(char_code);
6852  Goto(&end);
6853  }
6854 
6855  BIND(&original_string_or_invalid_length);
6856  {
6857  CSA_ASSERT(this, IntPtrEqual(substr_length, string_length));
6858 
6859  // Equal length - check if {from, to} == {0, str.length}.
6860  GotoIf(UintPtrGreaterThan(from, IntPtrConstant(0)), &runtime);
6861 
6862  // Return the original string (substr_length == string_length).
6863 
6864  Counters* counters = isolate()->counters();
6865  IncrementCounter(counters->sub_string_native(), 1);
6866 
6867  var_result = string;
6868  Goto(&end);
6869  }
6870 
6871  // Fall back to a runtime call.
6872  BIND(&runtime);
6873  {
6874  var_result =
6875  CAST(CallRuntime(Runtime::kStringSubstring, NoContextConstant(), string,
6876  SmiTag(from), SmiTag(to)));
6877  Goto(&end);
6878  }
6879 
6880  BIND(&end);
6881  return var_result.value();
6882 }
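
// Illustrative summary of the dispatch above: a substring of length 1 goes
// through the single character cache; lengths below SlicedString::kMinLength
// are copied into a fresh sequential string; longer ranges over a
// flattenable subject become SlicedStrings sharing the parent's characters;
// and from == 0 with substr_length == string_length returns the receiver
// itself. Everything else falls back to Runtime::kStringSubstring.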
6883 
6884 ToDirectStringAssembler::ToDirectStringAssembler(
6885  compiler::CodeAssemblerState* state, Node* string, Flags flags)
6886  : CodeStubAssembler(state),
6887  var_string_(this, MachineRepresentation::kTagged, string),
6888  var_instance_type_(this, MachineRepresentation::kWord32),
6889  var_offset_(this, MachineType::PointerRepresentation()),
6890  var_is_external_(this, MachineRepresentation::kWord32),
6891  flags_(flags) {
6892  CSA_ASSERT(this, TaggedIsNotSmi(string));
6893  CSA_ASSERT(this, IsString(string));
6894 
6895  var_string_.Bind(string);
6896  var_offset_.Bind(IntPtrConstant(0));
6897  var_instance_type_.Bind(LoadInstanceType(string));
6898  var_is_external_.Bind(Int32Constant(0));
6899 }
6900 
6901 TNode<String> ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
6902  VariableList vars({&var_string_, &var_offset_, &var_instance_type_}, zone());
6903  Label dispatch(this, vars);
6904  Label if_iscons(this);
6905  Label if_isexternal(this);
6906  Label if_issliced(this);
6907  Label if_isthin(this);
6908  Label out(this);
6909 
6910  Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
6911  &dispatch);
6912 
6913  // Dispatch based on string representation.
6914  BIND(&dispatch);
6915  {
6916  int32_t values[] = {
6917  kSeqStringTag, kConsStringTag, kExternalStringTag,
6918  kSlicedStringTag, kThinStringTag,
6919  };
6920  Label* labels[] = {
6921  &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
6922  };
6923  STATIC_ASSERT(arraysize(values) == arraysize(labels));
6924 
6925  Node* const representation = Word32And(
6926  var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
6927  Switch(representation, if_bailout, values, labels, arraysize(values));
6928  }
6929 
6930  // Cons string. Check whether it is flat, then fetch first part.
6931  // Flat cons strings have an empty second part.
6932  BIND(&if_iscons);
6933  {
6934  Node* const string = var_string_.value();
6935  GotoIfNot(IsEmptyString(LoadObjectField(string, ConsString::kSecondOffset)),
6936  if_bailout);
6937 
6938  Node* const lhs = LoadObjectField(string, ConsString::kFirstOffset);
6939  var_string_.Bind(lhs);
6940  var_instance_type_.Bind(LoadInstanceType(lhs));
6941 
6942  Goto(&dispatch);
6943  }
6944 
6945  // Sliced string. Fetch parent and correct start index by offset.
6946  BIND(&if_issliced);
6947  {
6948  if (!FLAG_string_slices || (flags_ & kDontUnpackSlicedStrings)) {
6949  Goto(if_bailout);
6950  } else {
6951  Node* const string = var_string_.value();
6952  Node* const sliced_offset =
6953  LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
6954  var_offset_.Bind(IntPtrAdd(var_offset_.value(), sliced_offset));
6955 
6956  Node* const parent = LoadObjectField(string, SlicedString::kParentOffset);
6957  var_string_.Bind(parent);
6958  var_instance_type_.Bind(LoadInstanceType(parent));
6959 
6960  Goto(&dispatch);
6961  }
6962  }
6963 
6964  // Thin string. Fetch the actual string.
6965  BIND(&if_isthin);
6966  {
6967  Node* const string = var_string_.value();
6968  Node* const actual_string =
6969  LoadObjectField(string, ThinString::kActualOffset);
6970  Node* const actual_instance_type = LoadInstanceType(actual_string);
6971 
6972  var_string_.Bind(actual_string);
6973  var_instance_type_.Bind(actual_instance_type);
6974 
6975  Goto(&dispatch);
6976  }
6977 
6978  // External string.
6979  BIND(&if_isexternal);
6980  var_is_external_.Bind(Int32Constant(1));
6981  Goto(&out);
6982 
6983  BIND(&out);
6984  return CAST(var_string_.value());
6985 }
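
// Illustrative note: TryToDirect is a loop rather than a one-shot dispatch.
// Each indirection (a cons with an empty second part, a slice, or a thin
// string) rebinds var_string_ and jumps back to the dispatch label, so
// chains of indirections unwrap until a sequential or external string
// remains, with slice offsets accumulated into var_offset_ along the way.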
6986 
6987 TNode<RawPtrT> ToDirectStringAssembler::TryToSequential(
6988  StringPointerKind ptr_kind, Label* if_bailout) {
6989  CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);
6990 
6991  TVARIABLE(RawPtrT, var_result);
6992  Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
6993  Branch(is_external(), &if_isexternal, &if_issequential);
6994 
6995  BIND(&if_issequential);
6996  {
6997  STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
6998  SeqTwoByteString::kHeaderSize);
6999  TNode<IntPtrT> result = BitcastTaggedToWord(var_string_.value());
7000  if (ptr_kind == PTR_TO_DATA) {
7001  result = IntPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7002  kHeapObjectTag));
7003  }
7004  var_result = ReinterpretCast<RawPtrT>(result);
7005  Goto(&out);
7006  }
7007 
7008  BIND(&if_isexternal);
7009  {
7010  GotoIf(IsUncachedExternalStringInstanceType(var_instance_type_.value()),
7011  if_bailout);
7012 
7013  TNode<String> string = CAST(var_string_.value());
7014  TNode<IntPtrT> result =
7015  LoadObjectField<IntPtrT>(string, ExternalString::kResourceDataOffset);
7016  if (ptr_kind == PTR_TO_STRING) {
7017  result = IntPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7018  kHeapObjectTag));
7019  }
7020  var_result = ReinterpretCast<RawPtrT>(result);
7021  Goto(&out);
7022  }
7023 
7024  BIND(&out);
7025  return var_result.value();
7026 }
7027 
7028 void CodeStubAssembler::BranchIfCanDerefIndirectString(Node* string,
7029  Node* instance_type,
7030  Label* can_deref,
7031  Label* cannot_deref) {
7032  CSA_ASSERT(this, IsString(string));
7033  Node* representation =
7034  Word32And(instance_type, Int32Constant(kStringRepresentationMask));
7035  GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), can_deref);
7036  GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)),
7037  cannot_deref);
7038  // Cons string.
7039  Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
7040  GotoIf(IsEmptyString(rhs), can_deref);
7041  Goto(cannot_deref);
7042 }
7043 
7044 Node* CodeStubAssembler::DerefIndirectString(TNode<String> string,
7045  TNode<Int32T> instance_type,
7046  Label* cannot_deref) {
7047  Label deref(this);
7048  BranchIfCanDerefIndirectString(string, instance_type, &deref, cannot_deref);
7049  BIND(&deref);
7050  STATIC_ASSERT(static_cast<int>(ThinString::kActualOffset) ==
7051  static_cast<int>(ConsString::kFirstOffset));
7052  return LoadObjectField(string, ThinString::kActualOffset);
7053 }
7054 
7055 void CodeStubAssembler::DerefIndirectString(Variable* var_string,
7056  Node* instance_type) {
7057 #ifdef DEBUG
7058  Label can_deref(this), cannot_deref(this);
7059  BranchIfCanDerefIndirectString(var_string->value(), instance_type, &can_deref,
7060  &cannot_deref);
7061  BIND(&cannot_deref);
7062  DebugBreak(); // Should be able to dereference string.
7063  Goto(&can_deref);
7064  BIND(&can_deref);
7065 #endif // DEBUG
7066 
7067  STATIC_ASSERT(static_cast<int>(ThinString::kActualOffset) ==
7068  static_cast<int>(ConsString::kFirstOffset));
7069  var_string->Bind(
7070  LoadObjectField(var_string->value(), ThinString::kActualOffset));
7071 }
7072 
7073 void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
7074  Node* instance_type,
7075  Label* did_deref,
7076  Label* cannot_deref) {
7077  Label deref(this);
7078  BranchIfCanDerefIndirectString(var_string->value(), instance_type, &deref,
7079  cannot_deref);
7080 
7081  BIND(&deref);
7082  {
7083  DerefIndirectString(var_string, instance_type);
7084  Goto(did_deref);
7085  }
7086 }
7087 
7088 void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
7089  Node* left_instance_type,
7090  Variable* var_right,
7091  Node* right_instance_type,
7092  Label* did_something) {
7093  Label did_nothing_left(this), did_something_left(this),
7094  didnt_do_anything(this);
7095  MaybeDerefIndirectString(var_left, left_instance_type, &did_something_left,
7096  &did_nothing_left);
7097 
7098  BIND(&did_something_left);
7099  {
7100  MaybeDerefIndirectString(var_right, right_instance_type, did_something,
7101  did_something);
7102  }
7103 
7104  BIND(&did_nothing_left);
7105  {
7106  MaybeDerefIndirectString(var_right, right_instance_type, did_something,
7107  &didnt_do_anything);
7108  }
7109 
7110  BIND(&didnt_do_anything);
7111  // Fall through if neither string was an indirect string.
7112 }
7113 
7114 TNode<String> CodeStubAssembler::StringAdd(Node* context, TNode<String> left,
7115  TNode<String> right,
7116  AllocationFlags flags) {
7117  TVARIABLE(String, result);
7118  Label check_right(this), runtime(this, Label::kDeferred), cons(this),
7119  done(this, &result), done_native(this, &result);
7120  Counters* counters = isolate()->counters();
7121 
7122  TNode<Uint32T> left_length = LoadStringLengthAsWord32(left);
7123  GotoIfNot(Word32Equal(left_length, Uint32Constant(0)), &check_right);
7124  result = right;
7125  Goto(&done_native);
7126 
7127  BIND(&check_right);
7128  TNode<Uint32T> right_length = LoadStringLengthAsWord32(right);
7129  GotoIfNot(Word32Equal(right_length, Uint32Constant(0)), &cons);
7130  result = left;
7131  Goto(&done_native);
7132 
7133  BIND(&cons);
7134  {
7135  TNode<Uint32T> new_length = Uint32Add(left_length, right_length);
7136 
7137  // If new length is greater than String::kMaxLength, goto runtime to
7138  // throw. Note: we also need to invalidate the string length protector, so
7139 // we can't just throw here directly.
7140  GotoIf(Uint32GreaterThan(new_length, Uint32Constant(String::kMaxLength)),
7141  &runtime);
7142 
7143  TVARIABLE(String, var_left, left);
7144  TVARIABLE(String, var_right, right);
7145  Variable* input_vars[2] = {&var_left, &var_right};
7146  Label non_cons(this, 2, input_vars);
7147  Label slow(this, Label::kDeferred);
7148  GotoIf(Uint32LessThan(new_length, Uint32Constant(ConsString::kMinLength)),
7149  &non_cons);
7150 
7151  result =
7152  NewConsString(new_length, var_left.value(), var_right.value(), flags);
7153  Goto(&done_native);
7154 
7155  BIND(&non_cons);
7156 
7157  Comment("Full string concatenate");
7158  Node* left_instance_type = LoadInstanceType(var_left.value());
7159  Node* right_instance_type = LoadInstanceType(var_right.value());
7160  // Compute intersection and difference of instance types.
7161 
7162  Node* ored_instance_types =
7163  Word32Or(left_instance_type, right_instance_type);
7164  Node* xored_instance_types =
7165  Word32Xor(left_instance_type, right_instance_type);
7166 
7167  // Check if both strings have the same encoding and both are sequential.
7168  GotoIf(IsSetWord32(xored_instance_types, kStringEncodingMask), &runtime);
7169  GotoIf(IsSetWord32(ored_instance_types, kStringRepresentationMask), &slow);
7170 
7171  TNode<IntPtrT> word_left_length = Signed(ChangeUint32ToWord(left_length));
7172  TNode<IntPtrT> word_right_length = Signed(ChangeUint32ToWord(right_length));
7173 
7174  Label two_byte(this);
7175  GotoIf(Word32Equal(Word32And(ored_instance_types,
7176  Int32Constant(kStringEncodingMask)),
7177  Int32Constant(kTwoByteStringTag)),
7178  &two_byte);
7179  // One-byte sequential string case
7180  result = AllocateSeqOneByteString(context, new_length);
7181  CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
7182  IntPtrConstant(0), word_left_length,
7183  String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
7184  CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
7185  word_left_length, word_right_length,
7186  String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
7187  Goto(&done_native);
7188 
7189  BIND(&two_byte);
7190  {
7191  // Two-byte sequential string case
7192  result = AllocateSeqTwoByteString(context, new_length);
7193  CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
7194  IntPtrConstant(0), word_left_length,
7195  String::TWO_BYTE_ENCODING,
7196  String::TWO_BYTE_ENCODING);
7197  CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
7198  word_left_length, word_right_length,
7199  String::TWO_BYTE_ENCODING,
7200  String::TWO_BYTE_ENCODING);
7201  Goto(&done_native);
7202  }
7203 
7204  BIND(&slow);
7205  {
7206  // Try to unwrap indirect strings, restart the above attempt on success.
7207  MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
7208  right_instance_type, &non_cons);
7209  Goto(&runtime);
7210  }
7211  }
7212  BIND(&runtime);
7213  {
7214  result = CAST(CallRuntime(Runtime::kStringAdd, context, left, right));
7215  Goto(&done);
7216  }
7217 
7218  BIND(&done_native);
7219  {
7220  IncrementCounter(counters->string_add_native(), 1);
7221  Goto(&done);
7222  }
7223 
7224  BIND(&done);
7225  return result.value();
7226 }
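
// Illustrative summary: StringAdd returns the other operand outright when
// either side is empty, builds a ConsString once the combined length
// reaches ConsString::kMinLength, and otherwise flat-copies both inputs
// into a fresh sequential string when they share an encoding and are both
// sequential. Mixed encodings and lengths above String::kMaxLength defer
// to Runtime::kStringAdd.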
7227 
7228 TNode<String> CodeStubAssembler::StringFromSingleCodePoint(
7229  TNode<Int32T> codepoint, UnicodeEncoding encoding) {
7230  VARIABLE(var_result, MachineRepresentation::kTagged, EmptyStringConstant());
7231 
7232  Label if_isword16(this), if_isword32(this), return_result(this);
7233 
7234  Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
7235  &if_isword32);
7236 
7237  BIND(&if_isword16);
7238  {
7239  var_result.Bind(StringFromSingleCharCode(codepoint));
7240  Goto(&return_result);
7241  }
7242 
7243  BIND(&if_isword32);
7244  {
7245  switch (encoding) {
7246  case UnicodeEncoding::UTF16:
7247  break;
7248  case UnicodeEncoding::UTF32: {
7249  // Convert UTF32 to UTF16 code units and store as a 32-bit word.
7250  Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
7251 
7252  // lead = (codepoint >> 10) + LEAD_OFFSET
7253  Node* lead =
7254  Int32Add(Word32Shr(codepoint, Int32Constant(10)), lead_offset);
7255 
7256  // trail = (codepoint & 0x3FF) + 0xDC00;
7257  Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
7258  Int32Constant(0xDC00));
7259 
7260  // codepoint = (trail << 16) | lead;
7261  codepoint = Signed(Word32Or(Word32Shl(trail, Int32Constant(16)), lead));
7262  break;
7263  }
7264  }
7265 
7266  Node* value = AllocateSeqTwoByteString(2);
7267  StoreNoWriteBarrier(
7268  MachineRepresentation::kWord32, value,
7269  IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
7270  codepoint);
7271  var_result.Bind(value);
7272  Goto(&return_result);
7273  }
7274 
7275  BIND(&return_result);
7276  return CAST(var_result.value());
7277 }
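
// Worked example (illustrative) for the UTF32 branch above: codepoint
// U+1F600 yields
//   lead  = (0x1F600 >> 10) + (0xD800 - 0x40) = 0x7D + 0xD7C0 = 0xD83D
//   trail = (0x1F600 & 0x3FF) + 0xDC00        = 0x200 + 0xDC00 = 0xDE00
// and the combined word (trail << 16) | lead stores the surrogate pair
// 0xD83D, 0xDE00 into the two-character SeqTwoByteString.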
7278 
7279 TNode<Number> CodeStubAssembler::StringToNumber(TNode<String> input) {
7280  Label runtime(this, Label::kDeferred);
7281  Label end(this);
7282 
7283  TVARIABLE(Number, var_result);
7284 
7285  // Check if string has a cached array index.
7286  TNode<Uint32T> hash = LoadNameHashField(input);
7287  GotoIf(IsSetWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
7288  &runtime);
7289 
7290  var_result =
7291  SmiTag(Signed(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
7292  Goto(&end);
7293 
7294  BIND(&runtime);
7295  {
7296  var_result =
7297  CAST(CallRuntime(Runtime::kStringToNumber, NoContextConstant(), input));
7298  Goto(&end);
7299  }
7300 
7301  BIND(&end);
7302  return var_result.value();
7303 }
7304 
7305 TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
7306  TVARIABLE(String, result);
7307  TVARIABLE(Smi, smi_input);
7308  Label runtime(this, Label::kDeferred), if_smi(this), if_heap_number(this),
7309  done(this, &result);
7310 
7311  // Load the number string cache.
7312  Node* number_string_cache = LoadRoot(RootIndex::kNumberStringCache);
7313 
7314  // Make the hash mask from the length of the number string cache. It
7315  // contains two elements (number and string) for each cache entry.
7316  // TODO(ishell): cleanup mask handling.
7317  Node* mask =
7318  BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
7319  TNode<IntPtrT> one = IntPtrConstant(1);
7320  mask = IntPtrSub(mask, one);
7321 
7322  GotoIfNot(TaggedIsSmi(input), &if_heap_number);
7323  smi_input = CAST(input);
7324  Goto(&if_smi);
7325 
7326  BIND(&if_heap_number);
7327  {
7328  TNode<HeapNumber> heap_number_input = CAST(input);
7329  // Try normalizing the HeapNumber.
7330  TryHeapNumberToSmi(heap_number_input, smi_input, &if_smi);
7331 
7332  // Make a hash from the two 32-bit values of the double.
7333  TNode<Int32T> low =
7334  LoadObjectField<Int32T>(heap_number_input, HeapNumber::kValueOffset);
7335  TNode<Int32T> high = LoadObjectField<Int32T>(
7336  heap_number_input, HeapNumber::kValueOffset + kIntSize);
7337  TNode<Word32T> hash = Word32Xor(low, high);
7338  TNode<WordT> word_hash = WordShl(ChangeInt32ToIntPtr(hash), one);
7339  TNode<WordT> index =
7340  WordAnd(word_hash, WordSar(mask, SmiShiftBitsConstant()));
7341 
7342  // Cache entry's key must be a heap number.
7343  Node* number_key = LoadFixedArrayElement(CAST(number_string_cache), index);
7344  GotoIf(TaggedIsSmi(number_key), &runtime);
7345  GotoIfNot(IsHeapNumber(number_key), &runtime);
7346 
7347  // Cache entry's key must match the heap number value we're looking for.
7348  Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
7349  MachineType::Int32());
7350  Node* high_compare = LoadObjectField(
7351  number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
7352  GotoIfNot(Word32Equal(low, low_compare), &runtime);
7353  GotoIfNot(Word32Equal(high, high_compare), &runtime);
7354 
7355  // Heap number match, return value from cache entry.
7356  result = CAST(
7357  LoadFixedArrayElement(CAST(number_string_cache), index, kPointerSize));
7358  Goto(&done);
7359  }
7360 
7361  BIND(&if_smi);
7362  {
7363  // Load the smi key and make sure it matches the smi we're looking for.
7364  Node* smi_index = BitcastWordToTagged(
7365  WordAnd(WordShl(BitcastTaggedToWord(smi_input.value()), one), mask));
7366  Node* smi_key = LoadFixedArrayElement(CAST(number_string_cache), smi_index,
7367  0, SMI_PARAMETERS);
7368  GotoIf(WordNotEqual(smi_key, smi_input.value()), &runtime);
7369 
7370  // Smi match, return value from cache entry.
7371  result = CAST(LoadFixedArrayElement(CAST(number_string_cache), smi_index,
7372  kPointerSize, SMI_PARAMETERS));
7373  Goto(&done);
7374  }
7375 
7376  BIND(&runtime);
7377  {
7378  // No cache entry, go to the runtime.
7379  result =
7380  CAST(CallRuntime(Runtime::kNumberToString, NoContextConstant(), input));
7381  Goto(&done);
7382  }
7383  BIND(&done);
7384  return result.value();
7385 }
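
// The cache indexing above boils down to simple scalar math: the cache is a
// flat FixedArray of (number, string) pairs, so an entry's key slot sits at
// an even index derived from a hash of the number. A hedged sketch, assuming
// the cache length is a power of two (names are hypothetical):
inline uint32_t NumberStringCacheKeySlot(uint32_t low, uint32_t high,
                                         uint32_t cache_length) {
  uint32_t hash = low ^ high;        // the two 32-bit halves of the double
  uint32_t mask = cache_length - 1;  // length is a power of two
  return (hash << 1) & mask;         // key slot; the value slot is at +1
}
// For a Smi input the hash is simply the Smi value itself, which is why the
// if_smi path above can work directly on the tagged representation.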
7386 
7387 Node* CodeStubAssembler::NonNumberToNumberOrNumeric(
7388  Node* context, Node* input, Object::Conversion mode,
7389  BigIntHandling bigint_handling) {
7390  CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
7391  CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(input)));
7392 
7393  // We might need to loop once here due to ToPrimitive conversions.
7394  VARIABLE(var_input, MachineRepresentation::kTagged, input);
7395  VARIABLE(var_result, MachineRepresentation::kTagged);
7396  Label loop(this, &var_input);
7397  Label end(this);
7398  Goto(&loop);
7399  BIND(&loop);
7400  {
7401  // Load the current {input} value (known to be a HeapObject).
7402  Node* input = var_input.value();
7403 
7404  // Dispatch on the {input} instance type.
7405  Node* input_instance_type = LoadInstanceType(input);
7406  Label if_inputisstring(this), if_inputisoddball(this),
7407  if_inputisbigint(this), if_inputisreceiver(this, Label::kDeferred),
7408  if_inputisother(this, Label::kDeferred);
7409  GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
7410  GotoIf(IsBigIntInstanceType(input_instance_type), &if_inputisbigint);
7411  GotoIf(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE),
7412  &if_inputisoddball);
7413  Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
7414  &if_inputisother);
7415 
7416  BIND(&if_inputisstring);
7417  {
7418  // The {input} is a String, use the fast stub to convert it to a Number.
7419  TNode<String> string_input = CAST(input);
7420  var_result.Bind(StringToNumber(string_input));
7421  Goto(&end);
7422  }
7423 
7424  BIND(&if_inputisbigint);
7425  if (mode == Object::Conversion::kToNumeric) {
7426  var_result.Bind(input);
7427  Goto(&end);
7428  } else {
7429  DCHECK_EQ(mode, Object::Conversion::kToNumber);
7430  if (bigint_handling == BigIntHandling::kThrow) {
7431  Goto(&if_inputisother);
7432  } else {
7433  DCHECK_EQ(bigint_handling, BigIntHandling::kConvertToNumber);
7434  var_result.Bind(CallRuntime(Runtime::kBigIntToNumber, context, input));
7435  Goto(&end);
7436  }
7437  }
7438 
7439  BIND(&if_inputisoddball);
7440  {
7441  // The {input} is an Oddball; we just need to load its Number value.
7442  var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
7443  Goto(&end);
7444  }
7445 
7446  BIND(&if_inputisreceiver);
7447  {
7448  // The {input} is a JSReceiver, we need to convert it to a Primitive first
7449  // using the ToPrimitive type conversion, preferably yielding a Number.
7450  Callable callable = CodeFactory::NonPrimitiveToPrimitive(
7451  isolate(), ToPrimitiveHint::kNumber);
7452  Node* result = CallStub(callable, context, input);
7453 
7454  // Check if the {result} is already a Number/Numeric.
7455  Label if_done(this), if_notdone(this);
7456  Branch(mode == Object::Conversion::kToNumber ? IsNumber(result)
7457  : IsNumeric(result),
7458  &if_done, &if_notdone);
7459 
7460  BIND(&if_done);
7461  {
7462  // The ToPrimitive conversion already gave us a Number/Numeric, so we're
7463  // done.
7464  var_result.Bind(result);
7465  Goto(&end);
7466  }
7467 
7468  BIND(&if_notdone);
7469  {
7470  // We now have a Primitive {result}, but it's not yet a Number/Numeric.
7471  var_input.Bind(result);
7472  Goto(&loop);
7473  }
7474  }
7475 
7476  BIND(&if_inputisother);
7477  {
7478  // The {input} is something else (e.g. a Symbol); let the runtime figure
7479  // out the correct exception.
7480  // Note: We cannot tail call to the runtime here, as js-to-wasm
7481  // trampolines also use this code currently, and they declare all
7482  // outgoing parameters as untagged, while we would push a tagged
7483  // object here.
7484  auto function_id = mode == Object::Conversion::kToNumber
7485  ? Runtime::kToNumber
7486  : Runtime::kToNumeric;
7487  var_result.Bind(CallRuntime(function_id, context, input));
7488  Goto(&end);
7489  }
7490  }
7491 
7492  BIND(&end);
7493  if (mode == Object::Conversion::kToNumeric) {
7494  CSA_ASSERT(this, IsNumeric(var_result.value()));
7495  } else {
7496  DCHECK_EQ(mode, Object::Conversion::kToNumber);
7497  CSA_ASSERT(this, IsNumber(var_result.value()));
7498  }
7499  return var_result.value();
7500 }
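
// Example of the ToPrimitive loop above: for a receiver such as
//   { valueOf() { return "0x10"; } }
// NonPrimitiveToPrimitive yields the string "0x10" (a primitive, but not yet
// a Number), so control re-enters the loop once more and finishes through
// the String path, producing the number 16.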
7501 
7502 TNode<Number> CodeStubAssembler::NonNumberToNumber(
7503  SloppyTNode<Context> context, SloppyTNode<HeapObject> input,
7504  BigIntHandling bigint_handling) {
7505  return CAST(NonNumberToNumberOrNumeric(
7506  context, input, Object::Conversion::kToNumber, bigint_handling));
7507 }
7508 
7509 TNode<Numeric> CodeStubAssembler::NonNumberToNumeric(
7510  SloppyTNode<Context> context, SloppyTNode<HeapObject> input) {
7511  Node* result = NonNumberToNumberOrNumeric(context, input,
7512  Object::Conversion::kToNumeric);
7513  CSA_SLOW_ASSERT(this, IsNumeric(result));
7514  return UncheckedCast<Numeric>(result);
7515 }
7516 
7517 TNode<Number> CodeStubAssembler::ToNumber_Inline(SloppyTNode<Context> context,
7518  SloppyTNode<Object> input) {
7519  TVARIABLE(Number, var_result);
7520  Label end(this), not_smi(this, Label::kDeferred);
7521 
7522  GotoIfNot(TaggedIsSmi(input), &not_smi);
7523  var_result = CAST(input);
7524  Goto(&end);
7525 
7526  BIND(&not_smi);
7527  {
7528  var_result =
7529  Select<Number>(IsHeapNumber(CAST(input)), [=] { return CAST(input); },
7530  [=] {
7531  return CAST(CallBuiltin(Builtins::kNonNumberToNumber,
7532  context, input));
7533  });
7534  Goto(&end);
7535  }
7536 
7537  BIND(&end);
7538  return var_result.value();
7539 }
7540 
7541 TNode<Number> CodeStubAssembler::ToNumber(SloppyTNode<Context> context,
7542  SloppyTNode<Object> input,
7543  BigIntHandling bigint_handling) {
7544  TVARIABLE(Number, var_result);
7545  Label end(this);
7546 
7547  Label not_smi(this, Label::kDeferred);
7548  GotoIfNot(TaggedIsSmi(input), &not_smi);
7549  TNode<Smi> input_smi = CAST(input);
7550  var_result = input_smi;
7551  Goto(&end);
7552 
7553  BIND(&not_smi);
7554  {
7555  Label not_heap_number(this, Label::kDeferred);
7556  TNode<HeapObject> input_ho = CAST(input);
7557  GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
7558 
7559  TNode<HeapNumber> input_hn = CAST(input_ho);
7560  var_result = input_hn;
7561  Goto(&end);
7562 
7563  BIND(&not_heap_number);
7564  {
7565  var_result = NonNumberToNumber(context, input_ho, bigint_handling);
7566  Goto(&end);
7567  }
7568  }
7569 
7570  BIND(&end);
7571  return var_result.value();
7572 }
7573 
7574 TNode<BigInt> CodeStubAssembler::ToBigInt(SloppyTNode<Context> context,
7575  SloppyTNode<Object> input) {
7576  TVARIABLE(BigInt, var_result);
7577  Label if_bigint(this), done(this), if_throw(this);
7578 
7579  GotoIf(TaggedIsSmi(input), &if_throw);
7580  GotoIf(IsBigInt(CAST(input)), &if_bigint);
7581  var_result = CAST(CallRuntime(Runtime::kToBigInt, context, input));
7582  Goto(&done);
7583 
7584  BIND(&if_bigint);
7585  var_result = CAST(input);
7586  Goto(&done);
7587 
7588  BIND(&if_throw);
7589  ThrowTypeError(context, MessageTemplate::kBigIntFromObject, input);
7590 
7591  BIND(&done);
7592  return var_result.value();
7593 }
7594 
7595 void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
7596  Variable* var_numeric) {
7597  TaggedToNumeric(context, value, done, var_numeric, nullptr);
7598 }
7599 
7600 void CodeStubAssembler::TaggedToNumericWithFeedback(Node* context, Node* value,
7601  Label* done,
7602  Variable* var_numeric,
7603  Variable* var_feedback) {
7604  DCHECK_NOT_NULL(var_feedback);
7605  TaggedToNumeric(context, value, done, var_numeric, var_feedback);
7606 }
7607 
7608 void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
7609  Variable* var_numeric,
7610  Variable* var_feedback) {
7611  var_numeric->Bind(value);
7612  Label if_smi(this), if_heapnumber(this), if_bigint(this), if_oddball(this);
7613  GotoIf(TaggedIsSmi(value), &if_smi);
7614  Node* map = LoadMap(value);
7615  GotoIf(IsHeapNumberMap(map), &if_heapnumber);
7616  Node* instance_type = LoadMapInstanceType(map);
7617  GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
7618 
7619  // {value} is not a Numeric yet.
7620  GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
7621  var_numeric->Bind(CallBuiltin(Builtins::kNonNumberToNumeric, context, value));
7622  OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
7623  Goto(done);
7624 
7625  BIND(&if_smi);
7626  OverwriteFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
7627  Goto(done);
7628 
7629  BIND(&if_heapnumber);
7630  OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumber);
7631  Goto(done);
7632 
7633  BIND(&if_bigint);
7634  OverwriteFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
7635  Goto(done);
7636 
7637  BIND(&if_oddball);
7638  OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumberOrOddball);
7639  var_numeric->Bind(LoadObjectField(value, Oddball::kToNumberOffset));
7640  Goto(done);
7641 }
7642 
7643 // ES#sec-touint32
7644 TNode<Number> CodeStubAssembler::ToUint32(SloppyTNode<Context> context,
7645  SloppyTNode<Object> input) {
7646  Node* const float_zero = Float64Constant(0.0);
7647  Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
7648 
7649  Label out(this);
7650 
7651  VARIABLE(var_result, MachineRepresentation::kTagged, input);
7652 
7653  // Early exit for positive smis.
7654  {
7655  // TODO(jgruber): This branch and the recheck below can be removed once we
7656  // have a ToNumber with multiple exits.
7657  Label next(this, Label::kDeferred);
7658  Branch(TaggedIsPositiveSmi(input), &out, &next);
7659  BIND(&next);
7660  }
7661 
7662  Node* const number = ToNumber(context, input);
7663  var_result.Bind(number);
7664 
7665  // Perhaps we have a positive smi now.
7666  {
7667  Label next(this, Label::kDeferred);
7668  Branch(TaggedIsPositiveSmi(number), &out, &next);
7669  BIND(&next);
7670  }
7671 
7672  Label if_isnegativesmi(this), if_isheapnumber(this);
7673  Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
7674 
7675  BIND(&if_isnegativesmi);
7676  {
7677  Node* const uint32_value = SmiToInt32(number);
7678  Node* float64_value = ChangeUint32ToFloat64(uint32_value);
7679  var_result.Bind(AllocateHeapNumberWithValue(float64_value));
7680  Goto(&out);
7681  }
7682 
7683  BIND(&if_isheapnumber);
7684  {
7685  Label return_zero(this);
7686  Node* const value = LoadHeapNumberValue(number);
7687 
7688  {
7689  // +-0.
7690  Label next(this);
7691  Branch(Float64Equal(value, float_zero), &return_zero, &next);
7692  BIND(&next);
7693  }
7694 
7695  {
7696  // NaN.
7697  Label next(this);
7698  Branch(Float64Equal(value, value), &next, &return_zero);
7699  BIND(&next);
7700  }
7701 
7702  {
7703  // +Infinity.
7704  Label next(this);
7705  Node* const positive_infinity =
7706  Float64Constant(std::numeric_limits<double>::infinity());
7707  Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
7708  BIND(&next);
7709  }
7710 
7711  {
7712  // -Infinity.
7713  Label next(this);
7714  Node* const negative_infinity =
7715  Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
7716  Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
7717  BIND(&next);
7718  }
7719 
7720  // * Let int be the mathematical value that is the same sign as number and
7721  // whose magnitude is floor(abs(number)).
7722  // * Let int32bit be int modulo 2^32.
7723  // * Return int32bit.
7724  {
7725  Node* x = Float64Trunc(value);
7726  x = Float64Mod(x, float_two_32);
7727  x = Float64Add(x, float_two_32);
7728  x = Float64Mod(x, float_two_32);
7729 
7730  Node* const result = ChangeFloat64ToTagged(x);
7731  var_result.Bind(result);
7732  Goto(&out);
7733  }
7734 
7735  BIND(&return_zero);
7736  {
7737  var_result.Bind(SmiConstant(0));
7738  Goto(&out);
7739  }
7740  }
7741 
7742  BIND(&out);
7743  return CAST(var_result.value());
7744 }
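
// The heap-number path above implements the ToUint32 spec text directly. A
// minimal scalar sketch of the same math for a finite, non-NaN double (the
// zero/NaN/Infinity cases are handled by the early outs); illustrative only,
// using std::trunc/std::fmod from <cmath>:
inline uint32_t ToUint32Double(double value) {
  const double two_32 = 4294967296.0;  // 2^32
  double x = std::trunc(value);        // round toward zero
  x = std::fmod(x, two_32);            // may still be negative
  x = std::fmod(x + two_32, two_32);   // normalize into [0, 2^32)
  return static_cast<uint32_t>(x);
}
// E.g. ToUint32Double(-1.5) truncates to -1.0 and normalizes to 4294967295.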
7745 
7746 TNode<String> CodeStubAssembler::ToString(SloppyTNode<Context> context,
7747  SloppyTNode<Object> input) {
7748  Label is_number(this);
7749  Label runtime(this, Label::kDeferred), done(this);
7750  VARIABLE(result, MachineRepresentation::kTagged);
7751  GotoIf(TaggedIsSmi(input), &is_number);
7752 
7753  TNode<Map> input_map = LoadMap(CAST(input));
7754  TNode<Int32T> input_instance_type = LoadMapInstanceType(input_map);
7755 
7756  result.Bind(input);
7757  GotoIf(IsStringInstanceType(input_instance_type), &done);
7758 
7759  Label not_heap_number(this);
7760  Branch(IsHeapNumberMap(input_map), &is_number, &not_heap_number);
7761 
7762  BIND(&is_number);
7763  TNode<Number> number_input = CAST(input);
7764  result.Bind(NumberToString(number_input));
7765  Goto(&done);
7766 
7767  BIND(&not_heap_number);
7768  {
7769  GotoIfNot(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE), &runtime);
7770  result.Bind(LoadObjectField(CAST(input), Oddball::kToStringOffset));
7771  Goto(&done);
7772  }
7773 
7774  BIND(&runtime);
7775  {
7776  result.Bind(CallRuntime(Runtime::kToString, context, input));
7777  Goto(&done);
7778  }
7779 
7780  BIND(&done);
7781  return CAST(result.value());
7782 }
7783 
7784 TNode<String> CodeStubAssembler::ToString_Inline(SloppyTNode<Context> context,
7785  SloppyTNode<Object> input) {
7786  VARIABLE(var_result, MachineRepresentation::kTagged, input);
7787  Label stub_call(this, Label::kDeferred), out(this);
7788 
7789  GotoIf(TaggedIsSmi(input), &stub_call);
7790  Branch(IsString(CAST(input)), &out, &stub_call);
7791 
7792  BIND(&stub_call);
7793  var_result.Bind(CallBuiltin(Builtins::kToString, context, input));
7794  Goto(&out);
7795 
7796  BIND(&out);
7797  return CAST(var_result.value());
7798 }
7799 
7800 Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
7801  Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
7802  VARIABLE(result, MachineRepresentation::kTagged);
7803  Label done(this, &result);
7804 
7805  BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7806 
7807  BIND(&if_isreceiver);
7808  {
7809  // Convert {input} to a primitive first, passing the Number hint.
7810  Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
7811  result.Bind(CallStub(callable, context, input));
7812  Goto(&done);
7813  }
7814 
7815  BIND(&if_isnotreceiver);
7816  {
7817  result.Bind(input);
7818  Goto(&done);
7819  }
7820 
7821  BIND(&done);
7822  return result.value();
7823 }
7824 
7825 TNode<JSReceiver> CodeStubAssembler::ToObject(SloppyTNode<Context> context,
7826  SloppyTNode<Object> input) {
7827  return CAST(CallBuiltin(Builtins::kToObject, context, input));
7828 }
7829 
7830 TNode<JSReceiver> CodeStubAssembler::ToObject_Inline(TNode<Context> context,
7831  TNode<Object> input) {
7832  TVARIABLE(JSReceiver, result);
7833  Label if_isreceiver(this), if_isnotreceiver(this, Label::kDeferred);
7834  Label done(this);
7835 
7836  BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7837 
7838  BIND(&if_isreceiver);
7839  {
7840  result = CAST(input);
7841  Goto(&done);
7842  }
7843 
7844  BIND(&if_isnotreceiver);
7845  {
7846  result = ToObject(context, input);
7847  Goto(&done);
7848  }
7849 
7850  BIND(&done);
7851  return result.value();
7852 }
7853 
7854 TNode<Smi> CodeStubAssembler::ToSmiIndex(TNode<Object> input,
7855  TNode<Context> context,
7856  Label* range_error) {
7857  TVARIABLE(Smi, result);
7858  Label check_undefined(this), return_zero(this), defined(this),
7859  negative_check(this), done(this);
7860 
7861  GotoIfNot(TaggedIsSmi(input), &check_undefined);
7862  result = CAST(input);
7863  Goto(&negative_check);
7864 
7865  BIND(&check_undefined);
7866  Branch(IsUndefined(input), &return_zero, &defined);
7867 
7868  BIND(&defined);
7869  TNode<Number> integer_input =
7870  CAST(CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
7871  GotoIfNot(TaggedIsSmi(integer_input), range_error);
7872  result = CAST(integer_input);
7873  Goto(&negative_check);
7874 
7875  BIND(&negative_check);
7876  Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);
7877 
7878  BIND(&return_zero);
7879  result = SmiConstant(0);
7880  Goto(&done);
7881 
7882  BIND(&done);
7883  return result.value();
7884 }
7885 
7886 TNode<Smi> CodeStubAssembler::ToSmiLength(TNode<Object> input,
7887  TNode<Context> context,
7888  Label* range_error) {
7889  TVARIABLE(Smi, result);
7890  Label to_integer(this), negative_check(this),
7891  heap_number_negative_check(this), return_zero(this), done(this);
7892 
7893  GotoIfNot(TaggedIsSmi(input), &to_integer);
7894  result = CAST(input);
7895  Goto(&negative_check);
7896 
7897  BIND(&to_integer);
7898  {
7899  TNode<Number> integer_input = CAST(
7900  CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
7901  GotoIfNot(TaggedIsSmi(integer_input), &heap_number_negative_check);
7902  result = CAST(integer_input);
7903  Goto(&negative_check);
7904 
7905  // integer_input can still be a negative HeapNumber here.
7906  BIND(&heap_number_negative_check);
7907  TNode<HeapNumber> heap_number_input = CAST(integer_input);
7908  Branch(IsTrue(CallBuiltin(Builtins::kLessThan, context, heap_number_input,
7909  SmiConstant(0))),
7910  &return_zero, range_error);
7911  }
7912 
7913  BIND(&negative_check);
7914  Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);
7915 
7916  BIND(&return_zero);
7917  result = SmiConstant(0);
7918  Goto(&done);
7919 
7920  BIND(&done);
7921  return result.value();
7922 }
7923 
7924 TNode<Number> CodeStubAssembler::ToLength_Inline(SloppyTNode<Context> context,
7925  SloppyTNode<Object> input) {
7926  TNode<Smi> smi_zero = SmiConstant(0);
7927  return Select<Number>(
7928  TaggedIsSmi(input), [=] { return SmiMax(CAST(input), smi_zero); },
7929  [=] { return CAST(CallBuiltin(Builtins::kToLength, context, input)); });
7930 }
7931 
7932 TNode<Number> CodeStubAssembler::ToInteger_Inline(
7933  SloppyTNode<Context> context, SloppyTNode<Object> input,
7934  ToIntegerTruncationMode mode) {
7935  Builtins::Name builtin = (mode == kNoTruncation)
7936  ? Builtins::kToInteger
7937  : Builtins::kToInteger_TruncateMinusZero;
7938  return Select<Number>(
7939  TaggedIsSmi(input), [=] { return CAST(input); },
7940  [=] { return CAST(CallBuiltin(builtin, context, input)); });
7941 }
7942 
7943 TNode<Number> CodeStubAssembler::ToInteger(SloppyTNode<Context> context,
7944  SloppyTNode<Object> input,
7945  ToIntegerTruncationMode mode) {
7946  // We might need to loop once for ToNumber conversion.
7947  TVARIABLE(Object, var_arg, input);
7948  Label loop(this, &var_arg), out(this);
7949  Goto(&loop);
7950  BIND(&loop);
7951  {
7952  // Shared entry points.
7953  Label return_zero(this, Label::kDeferred);
7954 
7955  // Load the current {arg} value.
7956  TNode<Object> arg = var_arg.value();
7957 
7958  // Check if {arg} is a Smi.
7959  GotoIf(TaggedIsSmi(arg), &out);
7960 
7961  // Check if {arg} is a HeapNumber.
7962  Label if_argisheapnumber(this),
7963  if_argisnotheapnumber(this, Label::kDeferred);
7964  Branch(IsHeapNumber(CAST(arg)), &if_argisheapnumber,
7965  &if_argisnotheapnumber);
7966 
7967  BIND(&if_argisheapnumber);
7968  {
7969  TNode<HeapNumber> arg_hn = CAST(arg);
7970  // Load the floating-point value of {arg}.
7971  Node* arg_value = LoadHeapNumberValue(arg_hn);
7972 
7973  // Check if {arg} is NaN.
7974  GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);
7975 
7976  // Truncate {arg} towards zero.
7977  TNode<Float64T> value = Float64Trunc(arg_value);
7978 
7979  if (mode == kTruncateMinusZero) {
7980  // Truncate -0.0 to 0.
7981  GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
7982  }
7983 
7984  var_arg = ChangeFloat64ToTagged(value);
7985  Goto(&out);
7986  }
7987 
7988  BIND(&if_argisnotheapnumber);
7989  {
7990  // Need to convert {arg} to a Number first.
7991  var_arg = UncheckedCast<Object>(
7992  CallBuiltin(Builtins::kNonNumberToNumber, context, arg));
7993  Goto(&loop);
7994  }
7995 
7996  BIND(&return_zero);
7997  var_arg = SmiConstant(0);
7998  Goto(&out);
7999  }
8000 
8001  BIND(&out);
8002  if (mode == kTruncateMinusZero) {
8003  CSA_ASSERT(this, IsNumberNormalized(CAST(var_arg.value())));
8004  }
8005  return CAST(var_arg.value());
8006 }
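
// A scalar sketch of the per-HeapNumber step above (illustrative only;
// assumes the input has already passed through ToNumber; std::trunc is from
// <cmath>):
inline double ToIntegerDouble(double value, bool truncate_minus_zero) {
  if (value != value) return 0.0;     // NaN -> +0
  double result = std::trunc(value);  // round toward zero
  if (truncate_minus_zero && result == 0.0) return 0.0;  // -0.0 -> +0
  return result;                      // +/-Infinity pass through unchanged
}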
8007 
8008 TNode<Uint32T> CodeStubAssembler::DecodeWord32(SloppyTNode<Word32T> word32,
8009  uint32_t shift, uint32_t mask) {
8010  return UncheckedCast<Uint32T>(Word32Shr(
8011  Word32And(word32, Int32Constant(mask)), static_cast<int>(shift)));
8012 }
8013 
8014 TNode<UintPtrT> CodeStubAssembler::DecodeWord(SloppyTNode<WordT> word,
8015  uint32_t shift, uint32_t mask) {
8016  return Unsigned(
8017  WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift)));
8018 }
8019 
8020 TNode<WordT> CodeStubAssembler::UpdateWord(TNode<WordT> word,
8021  TNode<WordT> value, uint32_t shift,
8022  uint32_t mask) {
8023  TNode<WordT> encoded_value = WordShl(value, static_cast<int>(shift));
8024  TNode<IntPtrT> inverted_mask = IntPtrConstant(~static_cast<intptr_t>(mask));
8025  // Ensure the {value} fits fully in the mask.
8026  CSA_ASSERT(this, WordEqual(WordAnd(encoded_value, inverted_mask),
8027  IntPtrConstant(0)));
8028  return WordOr(WordAnd(word, inverted_mask), encoded_value);
8029 }
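
// The three helpers above are the generic bit-field pattern: the mask is
// pre-shifted into field position (as in v8's BitField::kMask). A scalar
// sketch (illustrative only):
inline uint32_t DecodeField(uint32_t word, uint32_t shift, uint32_t mask) {
  return (word & mask) >> shift;
}
inline uint32_t UpdateField(uint32_t word, uint32_t value, uint32_t shift,
                            uint32_t mask) {
  return (word & ~mask) | (value << shift);  // value must fit the field
}
// E.g. for a two-bit field with shift == 3 and mask == 0x18,
// DecodeField(0x08, 3, 0x18) == 1 and UpdateField(0x1F, 0, 3, 0x18) == 0x07.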
8030 
8031 void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
8032  if (FLAG_native_code_counters && counter->Enabled()) {
8033  Node* counter_address =
8034  ExternalConstant(ExternalReference::Create(counter));
8035  StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
8036  Int32Constant(value));
8037  }
8038 }
8039 
8040 void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
8041  DCHECK_GT(delta, 0);
8042  if (FLAG_native_code_counters && counter->Enabled()) {
8043  Node* counter_address =
8044  ExternalConstant(ExternalReference::Create(counter));
8045  Node* value = Load(MachineType::Int32(), counter_address);
8046  value = Int32Add(value, Int32Constant(delta));
8047  StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
8048  }
8049 }
8050 
8051 void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
8052  DCHECK_GT(delta, 0);
8053  if (FLAG_native_code_counters && counter->Enabled()) {
8054  Node* counter_address =
8055  ExternalConstant(ExternalReference::Create(counter));
8056  Node* value = Load(MachineType::Int32(), counter_address);
8057  value = Int32Sub(value, Int32Constant(delta));
8058  StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
8059  }
8060 }
8061 
8062 void CodeStubAssembler::Increment(Variable* variable, int value,
8063  ParameterMode mode) {
8064  DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
8065  variable->rep() == MachineType::PointerRepresentation());
8066  DCHECK_IMPLIES(mode == SMI_PARAMETERS,
8067  variable->rep() == MachineRepresentation::kTagged ||
8068  variable->rep() == MachineRepresentation::kTaggedSigned);
8069  variable->Bind(IntPtrOrSmiAdd(variable->value(),
8070  IntPtrOrSmiConstant(value, mode), mode));
8071 }
8072 
8073 void CodeStubAssembler::Use(Label* label) {
8074  GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
8075 }
8076 
8077 void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
8078  Variable* var_index, Label* if_keyisunique,
8079  Variable* var_unique, Label* if_bailout,
8080  Label* if_notinternalized) {
8081  DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
8082  DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
8083  Comment("TryToName");
8084 
8085  Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this),
8086  if_keyisother(this, Label::kDeferred);
8087  // Handle Smi and HeapNumber keys.
8088  var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
8089  Goto(if_keyisindex);
8090 
8091  BIND(&if_keyisnotindex);
8092  Node* key_map = LoadMap(key);
8093  var_unique->Bind(key);
8094  // Symbols are unique.
8095  GotoIf(IsSymbolMap(key_map), if_keyisunique);
8096  Node* key_instance_type = LoadMapInstanceType(key_map);
8097  // Miss if |key| is not a String.
8098  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
8099  GotoIfNot(IsStringInstanceType(key_instance_type), &if_keyisother);
8100 
8101  // |key| is a String. Check if it has a cached array index.
8102  Node* hash = LoadNameHashField(key);
8103  GotoIf(IsClearWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
8104  &if_hascachedindex);
8105  // No cached array index. If the string knows that it contains an index,
8106  // then it must be an uncacheable index. Handle this case in the runtime.
8107  GotoIf(IsClearWord32(hash, Name::kIsNotArrayIndexMask), if_bailout);
8108  // Check if we have a ThinString.
8109  GotoIf(InstanceTypeEqual(key_instance_type, THIN_STRING_TYPE),
8110  &if_thinstring);
8111  GotoIf(InstanceTypeEqual(key_instance_type, THIN_ONE_BYTE_STRING_TYPE),
8112  &if_thinstring);
8113  // Finally, check if |key| is internalized.
8114  STATIC_ASSERT(kNotInternalizedTag != 0);
8115  GotoIf(IsSetWord32(key_instance_type, kIsNotInternalizedMask),
8116  if_notinternalized != nullptr ? if_notinternalized : if_bailout);
8117  Goto(if_keyisunique);
8118 
8119  BIND(&if_thinstring);
8120  var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
8121  Goto(if_keyisunique);
8122 
8123  BIND(&if_hascachedindex);
8124  var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
8125  Goto(if_keyisindex);
8126 
8127  BIND(&if_keyisother);
8128  GotoIfNot(InstanceTypeEqual(key_instance_type, ODDBALL_TYPE), if_bailout);
8129  var_unique->Bind(LoadObjectField(key, Oddball::kToStringOffset));
8130  Goto(if_keyisunique);
8131 }
8132 
8133 void CodeStubAssembler::TryInternalizeString(
8134  Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
8135  Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
8136  DCHECK(var_index->rep() == MachineType::PointerRepresentation());
8137  DCHECK_EQ(var_internalized->rep(), MachineRepresentation::kTagged);
8138  CSA_SLOW_ASSERT(this, IsString(string));
8139  Node* function =
8140  ExternalConstant(ExternalReference::try_internalize_string_function());
8141  Node* const isolate_ptr =
8142  ExternalConstant(ExternalReference::isolate_address(isolate()));
8143  Node* result =
8144  CallCFunction2(MachineType::AnyTagged(), MachineType::Pointer(),
8145  MachineType::AnyTagged(), function, isolate_ptr, string);
8146  Label internalized(this);
8147  GotoIf(TaggedIsNotSmi(result), &internalized);
8148  Node* word_result = SmiUntag(result);
8149  GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
8150  if_not_internalized);
8151  GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
8152  if_bailout);
8153  var_index->Bind(word_result);
8154  Goto(if_index);
8155 
8156  BIND(&internalized);
8157  var_internalized->Bind(result);
8158  Goto(if_internalized);
8159 }
8160 
8161 template <typename Dictionary>
8162 TNode<IntPtrT> CodeStubAssembler::EntryToIndex(TNode<IntPtrT> entry,
8163  int field_index) {
8164  TNode<IntPtrT> entry_index =
8165  IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
8166  return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
8167  field_index));
8168 }
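
// Worked example, using the constants of the concrete Dictionary type: for a
// dictionary with kEntrySize == 3 (key, value, details), entry 2 and
// field_index 1 map to the flat backing-store index
//   Dictionary::kElementsStartIndex + 2 * 3 + 1
// i.e. the value slot of the third entry when field 0 holds the key.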
8169 
8170 TNode<MaybeObject> CodeStubAssembler::LoadDescriptorArrayElement(
8171  TNode<DescriptorArray> object, Node* index, int additional_offset) {
8172  return LoadArrayElement(object, DescriptorArray::kHeaderSize, index,
8173  additional_offset);
8174 }
8175 
8176 TNode<Name> CodeStubAssembler::LoadKeyByKeyIndex(
8177  TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8178  return CAST(LoadDescriptorArrayElement(container, key_index, 0));
8179 }
8180 
8181 TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
8182  TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8183  const int kKeyToDetails =
8184  DescriptorArray::ToDetailsIndex(0) - DescriptorArray::ToKeyIndex(0);
8185  return Unsigned(LoadAndUntagToWord32ArrayElement(
8186  container, DescriptorArray::kHeaderSize, key_index,
8187  kKeyToDetails * kPointerSize));
8188 }
8189 
8190 TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
8191  TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8192  const int kKeyToValue =
8193  DescriptorArray::ToValueIndex(0) - DescriptorArray::ToKeyIndex(0);
8194  return CAST(LoadDescriptorArrayElement(container, key_index,
8195  kKeyToValue * kPointerSize));
8196 }
8197 
8198 TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByKeyIndex(
8199  TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8200  const int kKeyToValue =
8201  DescriptorArray::ToValueIndex(0) - DescriptorArray::ToKeyIndex(0);
8202  return LoadDescriptorArrayElement(container, key_index,
8203  kKeyToValue * kPointerSize);
8204 }
8205 
8206 TNode<IntPtrT> CodeStubAssembler::DescriptorEntryToIndex(
8207  TNode<IntPtrT> descriptor_entry) {
8208  return IntPtrMul(descriptor_entry,
8209  IntPtrConstant(DescriptorArray::kEntrySize));
8210 }
8211 
8212 TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
8213  TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8214  return CAST(LoadDescriptorArrayElement(
8215  container, DescriptorEntryToIndex(descriptor_entry),
8216  DescriptorArray::ToKeyIndex(0) * kPointerSize));
8217 }
8218 
8219 TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
8220  TNode<DescriptorArray> container, int descriptor_entry) {
8221  return CAST(LoadDescriptorArrayElement(
8222  container, IntPtrConstant(0),
8223  DescriptorArray::ToKeyIndex(descriptor_entry) * kPointerSize));
8224 }
8225 
8226 TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
8227  TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8228  return Unsigned(LoadAndUntagToWord32ArrayElement(
8229  container, DescriptorArray::kHeaderSize,
8230  DescriptorEntryToIndex(descriptor_entry),
8231  DescriptorArray::ToDetailsIndex(0) * kPointerSize));
8232 }
8233 
8234 TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
8235  TNode<DescriptorArray> container, int descriptor_entry) {
8236  return Unsigned(LoadAndUntagToWord32ArrayElement(
8237  container, DescriptorArray::kHeaderSize, IntPtrConstant(0),
8238  DescriptorArray::ToDetailsIndex(descriptor_entry) * kPointerSize));
8239 }
8240 
8241 TNode<Object> CodeStubAssembler::LoadValueByDescriptorEntry(
8242  TNode<DescriptorArray> container, int descriptor_entry) {
8243  return CAST(LoadDescriptorArrayElement(
8244  container, IntPtrConstant(0),
8245  DescriptorArray::ToValueIndex(descriptor_entry) * kPointerSize));
8246 }
8247 
8248 TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByDescriptorEntry(
8249  TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8250  return LoadDescriptorArrayElement(
8251  container, DescriptorEntryToIndex(descriptor_entry),
8252  DescriptorArray::ToValueIndex(0) * kPointerSize);
8253 }
8254 
8255 template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NameDictionary>(
8256  TNode<IntPtrT>, int);
8257 template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<GlobalDictionary>(
8258  TNode<IntPtrT>, int);
8259 template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NumberDictionary>(
8260  TNode<IntPtrT>, int);
8261 
8262 // This must be kept in sync with HashTableBase::ComputeCapacity().
8263 TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
8264  TNode<IntPtrT> at_least_space_for) {
8265  TNode<IntPtrT> capacity = IntPtrRoundUpToPowerOfTwo32(
8266  IntPtrAdd(at_least_space_for, WordShr(at_least_space_for, 1)));
8267  return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
8268 }
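
// A scalar sketch of the same computation (illustrative only): grow the
// request by 50% head-room, round up to a power of two, and clamp to the
// minimum capacity.
inline int ComputeCapacityScalar(int at_least_space_for) {
  int raw = at_least_space_for + (at_least_space_for >> 1);
  int capacity = 1;
  while (capacity < raw) capacity <<= 1;  // IntPtrRoundUpToPowerOfTwo32
  int min_capacity = HashTableBase::kMinCapacity;
  return capacity < min_capacity ? min_capacity : capacity;
}
// E.g. ComputeCapacityScalar(11) == 16, since 11 + 5 = 16 is already a
// power of two.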
8269 
8270 TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
8271  SloppyTNode<IntPtrT> right) {
8272  intptr_t left_constant;
8273  intptr_t right_constant;
8274  if (ToIntPtrConstant(left, left_constant) &&
8275  ToIntPtrConstant(right, right_constant)) {
8276  return IntPtrConstant(std::max(left_constant, right_constant));
8277  }
8278  return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
8279  right);
8280 }
8281 
8282 TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
8283  SloppyTNode<IntPtrT> right) {
8284  intptr_t left_constant;
8285  intptr_t right_constant;
8286  if (ToIntPtrConstant(left, left_constant) &&
8287  ToIntPtrConstant(right, right_constant)) {
8288  return IntPtrConstant(std::min(left_constant, right_constant));
8289  }
8290  return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
8291  right);
8292 }
8293 
8294 template <>
8295 TNode<HeapObject> CodeStubAssembler::LoadName<NameDictionary>(
8296  TNode<HeapObject> key) {
8297  CSA_ASSERT(this, Word32Or(IsTheHole(key), IsName(key)));
8298  return key;
8299 }
8300 
8301 template <>
8302 TNode<HeapObject> CodeStubAssembler::LoadName<GlobalDictionary>(
8303  TNode<HeapObject> key) {
8304  TNode<PropertyCell> property_cell = CAST(key);
8305  return CAST(LoadObjectField(property_cell, PropertyCell::kNameOffset));
8306 }
8307 
8308 template <typename Dictionary>
8309 void CodeStubAssembler::NameDictionaryLookup(
8310  TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
8311  TVariable<IntPtrT>* var_name_index, Label* if_not_found, int inlined_probes,
8312  LookupMode mode) {
8313  static_assert(std::is_same<Dictionary, NameDictionary>::value ||
8314  std::is_same<Dictionary, GlobalDictionary>::value,
8315  "Unexpected NameDictionary");
8316  DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
8317  DCHECK_IMPLIES(mode == kFindInsertionIndex,
8318  inlined_probes == 0 && if_found == nullptr);
8319  Comment("NameDictionaryLookup");
8320 
8321  TNode<IntPtrT> capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
8322  TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));
8323  TNode<WordT> hash = ChangeUint32ToWord(LoadNameHash(unique_name));
8324 
8325  // See Dictionary::FirstProbe().
8326  TNode<IntPtrT> count = IntPtrConstant(0);
8327  TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
8328  Node* undefined = UndefinedConstant();
8329 
8330  for (int i = 0; i < inlined_probes; i++) {
8331  TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
8332  *var_name_index = index;
8333 
8334  TNode<HeapObject> current = CAST(LoadFixedArrayElement(dictionary, index));
8335  GotoIf(WordEqual(current, undefined), if_not_found);
8336  current = LoadName<Dictionary>(current);
8337  GotoIf(WordEqual(current, unique_name), if_found);
8338 
8339  // See Dictionary::NextProbe().
8340  count = IntPtrConstant(i + 1);
8341  entry = Signed(WordAnd(IntPtrAdd(entry, count), mask));
8342  }
8343  if (mode == kFindInsertionIndex) {
8344  // Appease the variable merging algorithm for "Goto(&loop)" below.
8345  *var_name_index = IntPtrConstant(0);
8346  }
8347 
8348  TVARIABLE(IntPtrT, var_count, count);
8349  TVARIABLE(IntPtrT, var_entry, entry);
8350  Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
8351  Label loop(this, 3, loop_vars);
8352  Goto(&loop);
8353  BIND(&loop);
8354  {
8355  TNode<IntPtrT> entry = var_entry.value();
8356 
8357  TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
8358  *var_name_index = index;
8359 
8360  TNode<HeapObject> current = CAST(LoadFixedArrayElement(dictionary, index));
8361  GotoIf(WordEqual(current, undefined), if_not_found);
8362  if (mode == kFindExisting) {
8363  current = LoadName<Dictionary>(current);
8364  GotoIf(WordEqual(current, unique_name), if_found);
8365  } else {
8366  DCHECK_EQ(kFindInsertionIndex, mode);
8367  GotoIf(WordEqual(current, TheHoleConstant()), if_not_found);
8368  }
8369 
8370  // See Dictionary::NextProbe().
8371  Increment(&var_count);
8372  entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
8373 
8374  var_entry = entry;
8375  Goto(&loop);
8376  }
8377 }
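
// The probe sequence used above, in scalar form: quadratic probing with
// triangular-number increments, matching Dictionary::FirstProbe() and
// Dictionary::NextProbe(). A sketch (illustrative only):
inline uint32_t FirstProbeScalar(uint32_t hash, uint32_t mask) {
  return hash & mask;
}
inline uint32_t NextProbeScalar(uint32_t last, uint32_t count, uint32_t mask) {
  return (last + count) & mask;  // count is 1, 2, 3, ... per iteration
}
// Because the capacity is a power of two and the increments accumulate to
// triangular numbers, every slot is visited exactly once before any repeat.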
8378 
8379 // Instantiate template methods to work around a GCC compilation issue.
8380 template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
8381  TNode<NameDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, Label*,
8382  int, LookupMode);
8383 template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
8384  TNode<GlobalDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, Label*,
8385  int, LookupMode);
8386 
8387 Node* CodeStubAssembler::ComputeUnseededHash(Node* key) {
8388  // See v8::internal::ComputeUnseededHash()
8389  Node* hash = TruncateIntPtrToInt32(key);
8390  hash = Int32Add(Word32Xor(hash, Int32Constant(0xFFFFFFFF)),
8391  Word32Shl(hash, Int32Constant(15)));
8392  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
8393  hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
8394  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
8395  hash = Int32Mul(hash, Int32Constant(2057));
8396  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
8397  return Word32And(hash, Int32Constant(0x3FFFFFFF));
8398 }
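
// A scalar equivalent of the graph built above (illustrative only; mirrors
// v8::internal::ComputeUnseededHash, clamped to 30 bits so the result always
// fits in a Smi):
inline uint32_t ComputeUnseededHashScalar(uint32_t key) {
  uint32_t hash = key;
  hash = (hash ^ 0xFFFFFFFF) + (hash << 15);
  hash = hash ^ (hash >> 12);
  hash = hash + (hash << 2);
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;
  hash = hash ^ (hash >> 16);
  return hash & 0x3FFFFFFF;
}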
8399 
8400 Node* CodeStubAssembler::ComputeSeededHash(Node* key) {
8401  Node* const function_addr =
8402  ExternalConstant(ExternalReference::compute_integer_hash());
8403  Node* const isolate_ptr =
8404  ExternalConstant(ExternalReference::isolate_address(isolate()));
8405 
8406  MachineType type_ptr = MachineType::Pointer();
8407  MachineType type_uint32 = MachineType::Uint32();
8408 
8409  Node* const result =
8410  CallCFunction2(type_uint32, type_ptr, type_uint32, function_addr,
8411  isolate_ptr, TruncateIntPtrToInt32(key));
8412  return result;
8413 }
8414 
8415 void CodeStubAssembler::NumberDictionaryLookup(
8416  TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
8417  Label* if_found, TVariable<IntPtrT>* var_entry, Label* if_not_found) {
8418  CSA_ASSERT(this, IsNumberDictionary(dictionary));
8419  DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
8420  Comment("NumberDictionaryLookup");
8421 
8422  TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NumberDictionary>(dictionary));
8423  TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));
8424 
8425  TNode<WordT> hash = ChangeUint32ToWord(ComputeSeededHash(intptr_index));
8426  Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);
8427 
8428  // See Dictionary::FirstProbe().
8429  TNode<IntPtrT> count = IntPtrConstant(0);
8430  TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
8431 
8432  Node* undefined = UndefinedConstant();
8433  Node* the_hole = TheHoleConstant();
8434 
8435  TVARIABLE(IntPtrT, var_count, count);
8436  Variable* loop_vars[] = {&var_count, var_entry};
8437  Label loop(this, 2, loop_vars);
8438  *var_entry = entry;
8439  Goto(&loop);
8440  BIND(&loop);
8441  {
8442  TNode<IntPtrT> entry = var_entry->value();
8443 
8444  TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(entry);
8445  Node* current = LoadFixedArrayElement(dictionary, index);
8446  GotoIf(WordEqual(current, undefined), if_not_found);
8447  Label next_probe(this);
8448  {
8449  Label if_currentissmi(this), if_currentisnotsmi(this);
8450  Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
8451  BIND(&if_currentissmi);
8452  {
8453  Node* current_value = SmiUntag(current);
8454  Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
8455  }
8456  BIND(&if_currentisnotsmi);
8457  {
8458  GotoIf(WordEqual(current, the_hole), &next_probe);
8459  // {current} must be a HeapNumber.
8460  Node* current_value = LoadHeapNumberValue(current);
8461  Branch(Float64Equal(current_value, key_as_float64), if_found,
8462  &next_probe);
8463  }
8464  }
8465 
8466  BIND(&next_probe);
8467  // See Dictionary::NextProbe().
8468  Increment(&var_count);
8469  entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
8470 
8471  *var_entry = entry;
8472  Goto(&loop);
8473  }
8474 }
8475 
8476 TNode<Object> CodeStubAssembler::BasicLoadNumberDictionaryElement(
8477  TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
8478  Label* not_data, Label* if_hole) {
8479  TVARIABLE(IntPtrT, var_entry);
8480  Label if_found(this);
8481  NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
8482  if_hole);
8483  BIND(&if_found);
8484 
8485  // Check that the value is a data property.
8486  TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
8487  TNode<Uint32T> details =
8488  LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
8489  TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
8490  // TODO(jkummerow): Support accessors without missing?
8491  GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);
8492  // Finally, load the value.
8493  return LoadValueByKeyIndex<NumberDictionary>(dictionary, index);
8494 }
8495 
8496 void CodeStubAssembler::BasicStoreNumberDictionaryElement(
8497  TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
8498  TNode<Object> value, Label* not_data, Label* if_hole, Label* read_only) {
8499  TVARIABLE(IntPtrT, var_entry);
8500  Label if_found(this);
8501  NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
8502  if_hole);
8503  BIND(&if_found);
8504 
8505  // Check that the value is a data property.
8506  TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
8507  TNode<Uint32T> details =
8508  LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
8509  TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
8510  // TODO(jkummerow): Support accessors without missing?
8511  GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);
8512 
8513  // Check that the property is writable.
8514  GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
8515  read_only);
8516 
8517  // Finally, store the value.
8518  StoreValueByKeyIndex<NumberDictionary>(dictionary, index, value);
8519 }
8520 
8521 template <class Dictionary>
8522 void CodeStubAssembler::FindInsertionEntry(TNode<Dictionary> dictionary,
8523  TNode<Name> key,
8524  TVariable<IntPtrT>* var_key_index) {
8525  UNREACHABLE();
8526 }
8527 
8528 template <>
8529 void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
8530  TNode<NameDictionary> dictionary, TNode<Name> key,
8531  TVariable<IntPtrT>* var_key_index) {
8532  Label done(this);
8533  NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
8534  &done, 0, kFindInsertionIndex);
8535  BIND(&done);
8536 }
8537 
8538 template <class Dictionary>
8539 void CodeStubAssembler::InsertEntry(TNode<Dictionary> dictionary,
8540  TNode<Name> key, TNode<Object> value,
8541  TNode<IntPtrT> index,
8542  TNode<Smi> enum_index) {
8543  UNREACHABLE(); // Use specializations instead.
8544 }
8545 
8546 template <>
8547 void CodeStubAssembler::InsertEntry<NameDictionary>(
8548  TNode<NameDictionary> dictionary, TNode<Name> name, TNode<Object> value,
8549  TNode<IntPtrT> index, TNode<Smi> enum_index) {
8550  // Store name and value.
8551  StoreFixedArrayElement(dictionary, index, name);
8552  StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
8553 
8554  // Prepare details of the new property.
8555  PropertyDetails d(kData, NONE, PropertyCellType::kNoCell);
8556  enum_index =
8557  SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
8558  // We OR over the actual index below, so we expect the initial value to be 0.
8559  DCHECK_EQ(0, d.dictionary_index());
8560  TVARIABLE(Smi, var_details, SmiOr(SmiConstant(d.AsSmi()), enum_index));
8561 
8562  // Private names must be marked non-enumerable.
8563  Label not_private(this, &var_details);
8564  GotoIfNot(IsPrivateSymbol(name), &not_private);
8565  TNode<Smi> dont_enum =
8566  SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
8567  var_details = SmiOr(var_details.value(), dont_enum);
8568  Goto(&not_private);
8569  BIND(&not_private);
8570 
8571  // Finally, store the details.
8572  StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
8573  var_details.value());
8574 }
8575 
8576 template <>
8577 void CodeStubAssembler::InsertEntry<GlobalDictionary>(
8578  TNode<GlobalDictionary> dictionary, TNode<Name> key, TNode<Object> value,
8579  TNode<IntPtrT> index, TNode<Smi> enum_index) {
8580  UNIMPLEMENTED();
8581 }
8582 
8583 template <class Dictionary>
8584 void CodeStubAssembler::Add(TNode<Dictionary> dictionary, TNode<Name> key,
8585  TNode<Object> value, Label* bailout) {
8586  CSA_ASSERT(this, Word32BinaryNot(IsEmptyPropertyDictionary(dictionary)));
8587  TNode<Smi> capacity = GetCapacity<Dictionary>(dictionary);
8588  TNode<Smi> nof = GetNumberOfElements<Dictionary>(dictionary);
8589  TNode<Smi> new_nof = SmiAdd(nof, SmiConstant(1));
8590  // Require 33% of the capacity to still be free after adding the new element.
8591  // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
8592  // But that's OK here because it's only used for a comparison.
8593  TNode<Smi> required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
8594  GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
8595  // Require rehashing if more than 50% of free elements are deleted elements.
8596  TNode<Smi> deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
8597  CSA_ASSERT(this, SmiAbove(capacity, new_nof));
8598  TNode<Smi> half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
8599  GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
8600 
8601  TNode<Smi> enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
8602  TNode<Smi> new_enum_index = SmiAdd(enum_index, SmiConstant(1));
8603  TNode<Smi> max_enum_index =
8604  SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
8605  GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);
8606 
8607  // No more bailouts after this point.
8608  // Operations from here on can have side effects.
8609 
8610  SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
8611  SetNumberOfElements<Dictionary>(dictionary, new_nof);
8612 
8613  TVARIABLE(IntPtrT, var_key_index);
8614  FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
8615  InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
8616  enum_index);
8617 }
8618 
8619 template void CodeStubAssembler::Add<NameDictionary>(TNode<NameDictionary>,
8620  TNode<Name>, TNode<Object>,
8621  Label*);
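
// The two bailout thresholds in Add() above, as scalar predicates
// (illustrative only):
inline bool DictionaryNeedsGrowth(int capacity, int new_nof) {
  return capacity < new_nof + (new_nof >> 1);  // keep >= 33% free
}
inline bool DictionaryNeedsRehash(int capacity, int new_nof, int deleted) {
  return deleted > ((capacity - new_nof) >> 1);  // > 50% of free slots dead
}
// E.g. with capacity 16, adding the 11th element is still fine
// (16 < 11 + 5 is false), but adding the 12th bails out (16 < 12 + 6).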
8622 
8623 template <typename Array>
8624 void CodeStubAssembler::LookupLinear(TNode<Name> unique_name,
8625  TNode<Array> array,
8626  TNode<Uint32T> number_of_valid_entries,
8627  Label* if_found,
8628  TVariable<IntPtrT>* var_name_index,
8629  Label* if_not_found) {
8630  static_assert(std::is_base_of<FixedArray, Array>::value ||
8631  std::is_base_of<WeakFixedArray, Array>::value ||
8632  std::is_base_of<DescriptorArray, Array>::value,
8633  "T must be a descendant of FixedArray, WeakFixedArray or DescriptorArray");
8634  Comment("LookupLinear");
8635  TNode<IntPtrT> first_inclusive = IntPtrConstant(Array::ToKeyIndex(0));
8636  TNode<IntPtrT> factor = IntPtrConstant(Array::kEntrySize);
8637  TNode<IntPtrT> last_exclusive = IntPtrAdd(
8638  first_inclusive,
8639  IntPtrMul(ChangeInt32ToIntPtr(number_of_valid_entries), factor));
8640 
8641  BuildFastLoop(last_exclusive, first_inclusive,
8642  [=](SloppyTNode<IntPtrT> name_index) {
8643  TNode<MaybeObject> element =
8644  LoadArrayElement(array, Array::kHeaderSize, name_index);
8645  TNode<Name> candidate_name = CAST(element);
8646  *var_name_index = name_index;
8647  GotoIf(WordEqual(candidate_name, unique_name), if_found);
8648  },
8649  -Array::kEntrySize, INTPTR_PARAMETERS, IndexAdvanceMode::kPre);
8650  Goto(if_not_found);
8651 }
8652 
8653 template <>
8654 TNode<Uint32T> CodeStubAssembler::NumberOfEntries<DescriptorArray>(
8655  TNode<DescriptorArray> descriptors) {
8656  return Unsigned(LoadNumberOfDescriptors(descriptors));
8657 }
8658 
8659 template <>
8660 TNode<Uint32T> CodeStubAssembler::NumberOfEntries<TransitionArray>(
8661  TNode<TransitionArray> transitions) {
8662  TNode<IntPtrT> length = LoadAndUntagWeakFixedArrayLength(transitions);
8663  return Select<Uint32T>(
8664  UintPtrLessThan(length, IntPtrConstant(TransitionArray::kFirstIndex)),
8665  [=] { return Unsigned(Int32Constant(0)); },
8666  [=] {
8667  return Unsigned(LoadAndUntagToWord32ArrayElement(
8668  transitions, WeakFixedArray::kHeaderSize,
8669  IntPtrConstant(TransitionArray::kTransitionLengthIndex)));
8670  });
8671 }
8672 
8673 template <typename Array>
8674 TNode<IntPtrT> CodeStubAssembler::EntryIndexToIndex(
8675  TNode<Uint32T> entry_index) {
8676  TNode<Int32T> entry_size = Int32Constant(Array::kEntrySize);
8677  TNode<Word32T> index = Int32Mul(entry_index, entry_size);
8678  return ChangeInt32ToIntPtr(index);
8679 }
8680 
8681 template <typename Array>
8682 TNode<IntPtrT> CodeStubAssembler::ToKeyIndex(TNode<Uint32T> entry_index) {
8683  return IntPtrAdd(IntPtrConstant(Array::ToKeyIndex(0)),
8684  EntryIndexToIndex<Array>(entry_index));
8685 }
8686 
8687 template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<DescriptorArray>(
8688  TNode<Uint32T>);
8689 template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<TransitionArray>(
8690  TNode<Uint32T>);
8691 
8692 template <>
8693 TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<DescriptorArray>(
8694  TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8695  TNode<Uint32T> details =
8696  DescriptorArrayGetDetails(descriptors, descriptor_number);
8697  return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
8698 }
8699 
8700 template <>
8701 TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<TransitionArray>(
8702  TNode<TransitionArray> transitions, TNode<Uint32T> transition_number) {
8703  return transition_number;
8704 }
8705 
8706 template <typename Array>
8707 TNode<Name> CodeStubAssembler::GetKey(TNode<Array> array,
8708  TNode<Uint32T> entry_index) {
8709  static_assert(std::is_base_of<TransitionArray, Array>::value ||
8710  std::is_base_of<DescriptorArray, Array>::value,
8711  "T must be a descendant of DescriptorArray or TransitionArray");
8712  const int key_offset = Array::ToKeyIndex(0) * kPointerSize;
8713  TNode<MaybeObject> element =
8714  LoadArrayElement(array, Array::kHeaderSize,
8715  EntryIndexToIndex<Array>(entry_index), key_offset);
8716  return CAST(element);
8717 }
8718 
8719 template TNode<Name> CodeStubAssembler::GetKey<DescriptorArray>(
8720  TNode<DescriptorArray>, TNode<Uint32T>);
8721 template TNode<Name> CodeStubAssembler::GetKey<TransitionArray>(
8722  TNode<TransitionArray>, TNode<Uint32T>);
8723 
8724 TNode<Uint32T> CodeStubAssembler::DescriptorArrayGetDetails(
8725  TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8726  const int details_offset = DescriptorArray::ToDetailsIndex(0) * kPointerSize;
8727  return Unsigned(LoadAndUntagToWord32ArrayElement(
8728  descriptors, DescriptorArray::kHeaderSize,
8729  EntryIndexToIndex<DescriptorArray>(descriptor_number), details_offset));
8730 }
8731 
8732 template <typename Array>
8733 void CodeStubAssembler::LookupBinary(TNode<Name> unique_name,
8734  TNode<Array> array,
8735  TNode<Uint32T> number_of_valid_entries,
8736  Label* if_found,
8737  TVariable<IntPtrT>* var_name_index,
8738  Label* if_not_found) {
8739  Comment("LookupBinary");
8740  TVARIABLE(Uint32T, var_low, Unsigned(Int32Constant(0)));
8741  TNode<Uint32T> limit =
8742  Unsigned(Int32Sub(NumberOfEntries<Array>(array), Int32Constant(1)));
8743  TVARIABLE(Uint32T, var_high, limit);
8744  TNode<Uint32T> hash = LoadNameHashField(unique_name);
8745  CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));
8746 
8747  // Assume non-empty array.
8748  CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));
8749 
8750  Label binary_loop(this, {&var_high, &var_low});
8751  Goto(&binary_loop);
8752  BIND(&binary_loop);
8753  {
8754  // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
8755  TNode<Uint32T> mid = Unsigned(
8756  Int32Add(var_low.value(),
8757  Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1)));
8758  // mid_name = array->GetSortedKey(mid).
8759  TNode<Uint32T> sorted_key_index = GetSortedKeyIndex<Array>(array, mid);
8760  TNode<Name> mid_name = GetKey<Array>(array, sorted_key_index);
8761 
8762  TNode<Uint32T> mid_hash = LoadNameHashField(mid_name);
8763 
8764  Label mid_greater(this), mid_less(this), merge(this);
8765  Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
8766  BIND(&mid_greater);
8767  {
8768  var_high = mid;
8769  Goto(&merge);
8770  }
8771  BIND(&mid_less);
8772  {
8773  var_low = Unsigned(Int32Add(mid, Int32Constant(1)));
8774  Goto(&merge);
8775  }
8776  BIND(&merge);
8777  GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
8778  }
8779 
8780  Label scan_loop(this, &var_low);
8781  Goto(&scan_loop);
8782  BIND(&scan_loop);
8783  {
8784  GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);
8785 
8786  TNode<Uint32T> sort_index =
8787  GetSortedKeyIndex<Array>(array, var_low.value());
8788  TNode<Name> current_name = GetKey<Array>(array, sort_index);
8789  TNode<Uint32T> current_hash = LoadNameHashField(current_name);
8790  GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
8791  Label next(this);
8792  GotoIf(WordNotEqual(current_name, unique_name), &next);
8793  GotoIf(Uint32GreaterThanOrEqual(sort_index, number_of_valid_entries),
8794  if_not_found);
8795  *var_name_index = ToKeyIndex<Array>(sort_index);
8796  Goto(if_found);
8797 
8798  BIND(&next);
8799  var_low = Unsigned(Int32Add(var_low.value(), Int32Constant(1)));
8800  Goto(&scan_loop);
8801  }
8802 }
8803 
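// Illustrative trace of LookupBinary above (hash values are made up for
// the example, not taken from V8). Given entries sorted by hash:
//   sorted index: 0    1    2    3
//   name hash:    11   42   42   90
// a query with hash 42 bisects [low, high] down to the *first* entry
// whose hash is >= 42 (sorted index 1); the scan loop then walks the
// collision run (indexes 1..2) comparing names directly. Duplicate
// hashes are therefore resolved by the linear tail, not the bisection.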
8804 void CodeStubAssembler::DescriptorArrayForEach(
8805  VariableList& variable_list, TNode<Uint32T> start_descriptor,
8806  TNode<Uint32T> end_descriptor, const ForEachDescriptorBodyFunction& body) {
8807  TNode<IntPtrT> start_index = ToKeyIndex<DescriptorArray>(start_descriptor);
8808  TNode<IntPtrT> end_index = ToKeyIndex<DescriptorArray>(end_descriptor);
8809 
8810  BuildFastLoop(variable_list, start_index, end_index,
8811  [=](Node* index) {
8812  TNode<IntPtrT> descriptor_key_index =
8813  TNode<IntPtrT>::UncheckedCast(index);
8814  body(descriptor_key_index);
8815  },
8816  DescriptorArray::kEntrySize, INTPTR_PARAMETERS,
8817  IndexAdvanceMode::kPost);
8818 }
8819 
8820 void CodeStubAssembler::ForEachEnumerableOwnProperty(
8821  TNode<Context> context, TNode<Map> map, TNode<JSObject> object,
8822  const ForEachKeyValueFunction& body, Label* bailout) {
8823  TNode<Int32T> type = LoadMapInstanceType(map);
8824  TNode<Uint32T> bit_field3 = EnsureOnlyHasSimpleProperties(map, type, bailout);
8825 
8826  TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
8827  TNode<Uint32T> nof_descriptors =
8828  DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bit_field3);
8829 
8830  TVARIABLE(BoolT, var_stable, Int32TrueConstant());
8831  VariableList list({&var_stable}, zone());
8832 
8833  DescriptorArrayForEach(
8834  list, Unsigned(Int32Constant(0)), nof_descriptors,
8835  [=, &var_stable](TNode<IntPtrT> descriptor_key_index) {
8836  TNode<Name> next_key =
8837  LoadKeyByKeyIndex(descriptors, descriptor_key_index);
8838 
8839  TVARIABLE(Object, var_value, SmiConstant(0));
8840  Label callback(this), next_iteration(this);
8841 
8842  {
8843  TVARIABLE(Map, var_map);
8844  TVARIABLE(HeapObject, var_meta_storage);
8845  TVARIABLE(IntPtrT, var_entry);
8846  TVARIABLE(Uint32T, var_details);
8847  Label if_found(this);
8848 
8849  Label if_found_fast(this), if_found_dict(this);
8850 
8851  Label if_stable(this), if_not_stable(this);
8852  Branch(var_stable.value(), &if_stable, &if_not_stable);
8853  BIND(&if_stable);
8854  {
8855  // Directly decode from the descriptor array if |object| did not
8856  // change shape.
8857  var_map = map;
8858  var_meta_storage = descriptors;
8859  var_entry = Signed(descriptor_key_index);
8860  Goto(&if_found_fast);
8861  }
8862  BIND(&if_not_stable);
8863  {
8864  // If the map did change, do a slower lookup. We are still
8865  // guaranteed that the object has a simple shape, and that the key
8866  // is a name.
8867  var_map = LoadMap(object);
8868  TryLookupPropertyInSimpleObject(
8869  object, var_map.value(), next_key, &if_found_fast,
8870  &if_found_dict, &var_meta_storage, &var_entry, &next_iteration);
8871  }
8872 
8873  BIND(&if_found_fast);
8874  {
8875  TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
8876  TNode<IntPtrT> name_index = var_entry.value();
8877 
8878  // Skip non-enumerable properties.
8879  var_details = LoadDetailsByKeyIndex(descriptors, name_index);
8880  GotoIf(IsSetWord32(var_details.value(),
8881  PropertyDetails::kAttributesDontEnumMask),
8882  &next_iteration);
8883 
8884  LoadPropertyFromFastObject(object, var_map.value(), descriptors,
8885  name_index, var_details.value(),
8886  &var_value);
8887  Goto(&if_found);
8888  }
8889  BIND(&if_found_dict);
8890  {
8891  TNode<NameDictionary> dictionary = CAST(var_meta_storage.value());
8892  TNode<IntPtrT> entry = var_entry.value();
8893 
8894  TNode<Uint32T> details =
8895  LoadDetailsByKeyIndex<NameDictionary>(dictionary, entry);
8896  // Skip non-enumerable properties.
8897  GotoIf(
8898  IsSetWord32(details, PropertyDetails::kAttributesDontEnumMask),
8899  &next_iteration);
8900 
8901  var_details = details;
8902  var_value = LoadValueByKeyIndex<NameDictionary>(dictionary, entry);
8903  Goto(&if_found);
8904  }
8905 
8906  // Here we have the details and value, which could be an accessor.
8907  BIND(&if_found);
8908  {
8909  Label slow_load(this, Label::kDeferred);
8910 
8911  var_value = CallGetterIfAccessor(var_value.value(),
8912  var_details.value(), context,
8913  object, &slow_load, kCallJSGetter);
8914  Goto(&callback);
8915 
8916  BIND(&slow_load);
8917  var_value =
8918  CallRuntime(Runtime::kGetProperty, context, object, next_key);
8919  Goto(&callback);
8920 
8921  BIND(&callback);
8922  body(next_key, var_value.value());
8923 
8924  // Check whether |object| is still stable, i.e. whether we can keep
8925  // using property details from the preloaded |descriptors|.
8926  var_stable =
8927  Select<BoolT>(var_stable.value(),
8928  [=] { return WordEqual(LoadMap(object), map); },
8929  [=] { return Int32FalseConstant(); });
8930 
8931  Goto(&next_iteration);
8932  }
8933  }
8934 
8935  BIND(&next_iteration);
8936  });
8937 }
8938 
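// Rough JS-level sketch of what ForEachEnumerableOwnProperty implements
// (hypothetical helper names, not V8 code):
//
//   for (const key of ownEnumerableKeys(object)) {
//     const value = object[key];   // may invoke a JS getter
//     body(key, value);
//     // If the getter or callback changed the object's map, the
//     // remaining keys are looked up through the slow path instead of
//     // the preloaded descriptor array.
//   }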
8939 void CodeStubAssembler::DescriptorLookup(
8940  SloppyTNode<Name> unique_name, SloppyTNode<DescriptorArray> descriptors,
8941  SloppyTNode<Uint32T> bitfield3, Label* if_found,
8942  TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
8943  Comment("DescriptorArrayLookup");
8944  TNode<Uint32T> nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
8945  Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
8946  var_name_index, if_not_found);
8947 }
8948 
8949 void CodeStubAssembler::TransitionLookup(
8950  SloppyTNode<Name> unique_name, SloppyTNode<TransitionArray> transitions,
8951  Label* if_found, TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
8952  Comment("TransitionArrayLookup");
8953  TNode<Uint32T> number_of_valid_transitions =
8954  NumberOfEntries<TransitionArray>(transitions);
8955  Lookup<TransitionArray>(unique_name, transitions, number_of_valid_transitions,
8956  if_found, var_name_index, if_not_found);
8957 }
8958 
8959 template <typename Array>
8960 void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
8961  TNode<Uint32T> number_of_valid_entries,
8962  Label* if_found,
8963  TVariable<IntPtrT>* var_name_index,
8964  Label* if_not_found) {
8965  Comment("ArrayLookup");
8966  if (!number_of_valid_entries) {
8967  number_of_valid_entries = NumberOfEntries(array);
8968  }
8969  GotoIf(Word32Equal(number_of_valid_entries, Int32Constant(0)), if_not_found);
8970  Label linear_search(this), binary_search(this);
8971  const int kMaxElementsForLinearSearch = 32;
8972  Branch(Uint32LessThanOrEqual(number_of_valid_entries,
8973  Int32Constant(kMaxElementsForLinearSearch)),
8974  &linear_search, &binary_search);
8975  BIND(&linear_search);
8976  {
8977  LookupLinear<Array>(unique_name, array, number_of_valid_entries, if_found,
8978  var_name_index, if_not_found);
8979  }
8980  BIND(&binary_search);
8981  {
8982  LookupBinary<Array>(unique_name, array, number_of_valid_entries, if_found,
8983  var_name_index, if_not_found);
8984  }
8985 }
8986 
8987 TNode<BoolT> CodeStubAssembler::IsSimpleObjectMap(TNode<Map> map) {
8988  uint32_t mask =
8989  Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
8990  // !IsSpecialReceiverType && !IsNamedInterceptor && !IsAccessCheckNeeded
8991  return Select<BoolT>(
8992  IsSpecialReceiverInstanceType(LoadMapInstanceType(map)),
8993  [=] { return Int32FalseConstant(); },
8994  [=] { return IsClearWord32(LoadMapBitField(map), mask); });
8995 }
8996 
8997 void CodeStubAssembler::TryLookupPropertyInSimpleObject(
8998  TNode<JSObject> object, TNode<Map> map, TNode<Name> unique_name,
8999  Label* if_found_fast, Label* if_found_dict,
9000  TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
9001  Label* if_not_found) {
9002  CSA_ASSERT(this, IsSimpleObjectMap(map));
9003 
9004  TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
9005  Label if_isfastmap(this), if_isslowmap(this);
9006  Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
9007  &if_isfastmap);
9008  BIND(&if_isfastmap);
9009  {
9010  TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
9011  *var_meta_storage = descriptors;
9012 
9013  DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
9014  var_name_index, if_not_found);
9015  }
9016  BIND(&if_isslowmap);
9017  {
9018  TNode<NameDictionary> dictionary = CAST(LoadSlowProperties(object));
9019  *var_meta_storage = dictionary;
9020 
9021  NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
9022  var_name_index, if_not_found);
9023  }
9024 }
9025 
9026 void CodeStubAssembler::TryLookupProperty(
9027  SloppyTNode<JSObject> object, SloppyTNode<Map> map,
9028  SloppyTNode<Int32T> instance_type, SloppyTNode<Name> unique_name,
9029  Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
9030  TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
9031  Label* if_not_found, Label* if_bailout) {
9032  Label if_objectisspecial(this);
9033  GotoIf(IsSpecialReceiverInstanceType(instance_type), &if_objectisspecial);
9034 
9035  TryLookupPropertyInSimpleObject(object, map, unique_name, if_found_fast,
9036  if_found_dict, var_meta_storage,
9037  var_name_index, if_not_found);
9038 
9039  BIND(&if_objectisspecial);
9040  {
9041  // Handle the global object here and bail out for other special objects.
9042  GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
9043  if_bailout);
9044 
9045  // Handle interceptors and access checks in runtime.
9046  TNode<Int32T> bit_field = LoadMapBitField(map);
9047  int mask =
9048  Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
9049  GotoIf(IsSetWord32(bit_field, mask), if_bailout);
9050 
9051  TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(object));
9052  *var_meta_storage = dictionary;
9053 
9054  NameDictionaryLookup<GlobalDictionary>(
9055  dictionary, unique_name, if_found_global, var_name_index, if_not_found);
9056  }
9057 }
9058 
9059 void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
9060  Node* instance_type,
9061  Node* unique_name, Label* if_found,
9062  Label* if_not_found,
9063  Label* if_bailout) {
9064  Comment("TryHasOwnProperty");
9065  TVARIABLE(HeapObject, var_meta_storage);
9066  TVARIABLE(IntPtrT, var_name_index);
9067 
9068  Label if_found_global(this);
9069  TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
9070  &if_found_global, &var_meta_storage, &var_name_index,
9071  if_not_found, if_bailout);
9072 
9073  BIND(&if_found_global);
9074  {
9075  VARIABLE(var_value, MachineRepresentation::kTagged);
9076  VARIABLE(var_details, MachineRepresentation::kWord32);
9077  // Check that the property cell has not been deleted.
9078  LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
9079  var_name_index.value(), &var_value,
9080  &var_details, if_not_found);
9081  Goto(if_found);
9082  }
9083 }
9084 
9085 Node* CodeStubAssembler::GetMethod(Node* context, Node* object,
9086  Handle<Name> name,
9087  Label* if_null_or_undefined) {
9088  Node* method = GetProperty(context, object, name);
9089 
9090  GotoIf(IsUndefined(method), if_null_or_undefined);
9091  GotoIf(IsNull(method), if_null_or_undefined);
9092 
9093  return method;
9094 }
9095 
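// GetMethod is roughly the spec operation of the same name, minus the
// IsCallable check, which is left to callers. For example, iteration
// protocols start with GetMethod(obj, Symbol.iterator) and take the
// |if_null_or_undefined| path when no iterator method is installed.
// (Illustration only.)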
9096 void CodeStubAssembler::LoadPropertyFromFastObject(
9097  Node* object, Node* map, TNode<DescriptorArray> descriptors,
9098  Node* name_index, Variable* var_details, Variable* var_value) {
9099  DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
9100  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
9101 
9102  Node* details =
9103  LoadDetailsByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index));
9104  var_details->Bind(details);
9105 
9106  LoadPropertyFromFastObject(object, map, descriptors, name_index, details,
9107  var_value);
9108 }
9109 
9110 void CodeStubAssembler::LoadPropertyFromFastObject(
9111  Node* object, Node* map, TNode<DescriptorArray> descriptors,
9112  Node* name_index, Node* details, Variable* var_value) {
9113  Comment("[ LoadPropertyFromFastObject");
9114 
9115  Node* location = DecodeWord32<PropertyDetails::LocationField>(details);
9116 
9117  Label if_in_field(this), if_in_descriptor(this), done(this);
9118  Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
9119  &if_in_descriptor);
9120  BIND(&if_in_field);
9121  {
9122  Node* field_index =
9123  DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
9124  Node* representation =
9125  DecodeWord32<PropertyDetails::RepresentationField>(details);
9126 
9127  field_index =
9128  IntPtrAdd(field_index, LoadMapInobjectPropertiesStartInWords(map));
9129  Node* instance_size_in_words = LoadMapInstanceSizeInWords(map);
9130 
9131  Label if_inobject(this), if_backing_store(this);
9132  VARIABLE(var_double_value, MachineRepresentation::kFloat64);
9133  Label rebox_double(this, &var_double_value);
9134  Branch(UintPtrLessThan(field_index, instance_size_in_words), &if_inobject,
9135  &if_backing_store);
9136  BIND(&if_inobject);
9137  {
9138  Comment("if_inobject");
9139  Node* field_offset = TimesPointerSize(field_index);
9140 
9141  Label if_double(this), if_tagged(this);
9142  Branch(Word32NotEqual(representation,
9143  Int32Constant(Representation::kDouble)),
9144  &if_tagged, &if_double);
9145  BIND(&if_tagged);
9146  {
9147  var_value->Bind(LoadObjectField(object, field_offset));
9148  Goto(&done);
9149  }
9150  BIND(&if_double);
9151  {
9152  if (FLAG_unbox_double_fields) {
9153  var_double_value.Bind(
9154  LoadObjectField(object, field_offset, MachineType::Float64()));
9155  } else {
9156  Node* mutable_heap_number = LoadObjectField(object, field_offset);
9157  var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
9158  }
9159  Goto(&rebox_double);
9160  }
9161  }
9162  BIND(&if_backing_store);
9163  {
9164  Comment("if_backing_store");
9165  TNode<HeapObject> properties = LoadFastProperties(object);
9166  field_index = IntPtrSub(field_index, instance_size_in_words);
9167  Node* value = LoadPropertyArrayElement(CAST(properties), field_index);
9168 
9169  Label if_double(this), if_tagged(this);
9170  Branch(Word32NotEqual(representation,
9171  Int32Constant(Representation::kDouble)),
9172  &if_tagged, &if_double);
9173  BIND(&if_tagged);
9174  {
9175  var_value->Bind(value);
9176  Goto(&done);
9177  }
9178  BIND(&if_double);
9179  {
9180  var_double_value.Bind(LoadHeapNumberValue(value));
9181  Goto(&rebox_double);
9182  }
9183  }
9184  BIND(&rebox_double);
9185  {
9186  Comment("rebox_double");
9187  Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());
9188  var_value->Bind(heap_number);
9189  Goto(&done);
9190  }
9191  }
9192  BIND(&if_in_descriptor);
9193  {
9194  var_value->Bind(
9195  LoadValueByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index)));
9196  Goto(&done);
9197  }
9198  BIND(&done);
9199 
9200  Comment("] LoadPropertyFromFastObject");
9201 }
9202 
9203 void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
9204  Node* name_index,
9205  Variable* var_details,
9206  Variable* var_value) {
9207  Comment("[ LoadPropertyFromNameDictionary");
9208  CSA_ASSERT(this, IsNameDictionary(dictionary));
9209 
9210  var_details->Bind(
9211  LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
9212  var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));
9213 
9214  Comment("] LoadPropertyFromNameDictionary");
9215 }
9216 
9217 void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
9218  Node* name_index,
9219  Variable* var_details,
9220  Variable* var_value,
9221  Label* if_deleted) {
9222  Comment("[ LoadPropertyFromGlobalDictionary");
9223  CSA_ASSERT(this, IsGlobalDictionary(dictionary));
9224 
9225  Node* property_cell = LoadFixedArrayElement(CAST(dictionary), name_index);
9226  CSA_ASSERT(this, IsPropertyCell(property_cell));
9227 
9228  Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
9229  GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);
9230 
9231  var_value->Bind(value);
9232 
9233  Node* details = LoadAndUntagToWord32ObjectField(property_cell,
9234  PropertyCell::kDetailsOffset);
9235  var_details->Bind(details);
9236 
9237  Comment("] LoadPropertyFromGlobalDictionary");
9238 }
9239 
9240 // |value| is the property backing store's contents, which is either a value
9241 // or an accessor pair, as specified by |details|.
9242 // Returns either the original value, or the result of the getter call.
9243 TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
9244  Node* value, Node* details, Node* context, Node* receiver,
9245  Label* if_bailout, GetOwnPropertyMode mode) {
9246  VARIABLE(var_value, MachineRepresentation::kTagged, value);
9247  Label done(this), if_accessor_info(this, Label::kDeferred);
9248 
9249  Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
9250  GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);
9251 
9252  // Accessor case.
9253  GotoIfNot(IsAccessorPair(value), &if_accessor_info);
9254 
9255  // AccessorPair case.
9256  {
9257  if (mode == kCallJSGetter) {
9258  Node* accessor_pair = value;
9259  Node* getter =
9260  LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
9261  Node* getter_map = LoadMap(getter);
9262  Node* instance_type = LoadMapInstanceType(getter_map);
9263  // FunctionTemplateInfo getters are not supported yet.
9264  GotoIf(InstanceTypeEqual(instance_type, FUNCTION_TEMPLATE_INFO_TYPE),
9265  if_bailout);
9266 
9267  // Return undefined if the {getter} is not callable.
9268  var_value.Bind(UndefinedConstant());
9269  GotoIfNot(IsCallableMap(getter_map), &done);
9270 
9271  // Call the accessor.
9272  Callable callable = CodeFactory::Call(isolate());
9273  Node* result = CallJS(callable, context, getter, receiver);
9274  var_value.Bind(result);
9275  }
9276  Goto(&done);
9277  }
9278 
9279  // AccessorInfo case.
9280  BIND(&if_accessor_info);
9281  {
9282  Node* accessor_info = value;
9283  CSA_ASSERT(this, IsAccessorInfo(value));
9284  CSA_ASSERT(this, TaggedIsNotSmi(receiver));
9285  Label if_array(this), if_function(this), if_value(this);
9286 
9287  // Dispatch based on {receiver} instance type.
9288  Node* receiver_map = LoadMap(receiver);
9289  Node* receiver_instance_type = LoadMapInstanceType(receiver_map);
9290  GotoIf(IsJSArrayInstanceType(receiver_instance_type), &if_array);
9291  GotoIf(IsJSFunctionInstanceType(receiver_instance_type), &if_function);
9292  Branch(IsJSValueInstanceType(receiver_instance_type), &if_value,
9293  if_bailout);
9294 
9295  // JSArray AccessorInfo case.
9296  BIND(&if_array);
9297  {
9298  // We only deal with the "length" accessor on JSArray.
9299  GotoIfNot(IsLengthString(
9300  LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
9301  if_bailout);
9302  var_value.Bind(LoadJSArrayLength(receiver));
9303  Goto(&done);
9304  }
9305 
9306  // JSFunction AccessorInfo case.
9307  BIND(&if_function);
9308  {
9309  // We only deal with the "prototype" accessor on JSFunction here.
9310  GotoIfNot(IsPrototypeString(
9311  LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
9312  if_bailout);
9313 
9314  GotoIfPrototypeRequiresRuntimeLookup(CAST(receiver), CAST(receiver_map),
9315  if_bailout);
9316  var_value.Bind(LoadJSFunctionPrototype(receiver, if_bailout));
9317  Goto(&done);
9318  }
9319 
9320  // JSValue AccessorInfo case.
9321  BIND(&if_value);
9322  {
9323  // We only deal with the "length" accessor on JSValue string wrappers.
9324  GotoIfNot(IsLengthString(
9325  LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
9326  if_bailout);
9327  Node* receiver_value = LoadJSValueValue(receiver);
9328  GotoIfNot(TaggedIsNotSmi(receiver_value), if_bailout);
9329  GotoIfNot(IsString(receiver_value), if_bailout);
9330  var_value.Bind(LoadStringLengthAsSmi(receiver_value));
9331  Goto(&done);
9332  }
9333  }
9334 
9335  BIND(&done);
9336  return UncheckedCast<Object>(var_value.value());
9337 }
9338 
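// Illustrative JS-level behavior of CallGetterIfAccessor (examples are
// made up):
//   ({ get x() { return 1; } }).x   // AccessorPair: the getter is called
//   [1, 2, 3].length                // JSArray AccessorInfo: loaded directly
//   (function f() {}).prototype     // JSFunction AccessorInfo: loaded
//   new String("abc").length        // JSValue AccessorInfo: string length
// Plain data properties (kind == kData) pass through unchanged, and any
// other accessor shape jumps to |if_bailout| for the runtime to handle.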
9339 void CodeStubAssembler::TryGetOwnProperty(
9340  Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
9341  Node* unique_name, Label* if_found_value, Variable* var_value,
9342  Label* if_not_found, Label* if_bailout) {
9343  TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
9344  if_found_value, var_value, nullptr, nullptr, if_not_found,
9345  if_bailout, kCallJSGetter);
9346 }
9347 
9348 void CodeStubAssembler::TryGetOwnProperty(
9349  Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
9350  Node* unique_name, Label* if_found_value, Variable* var_value,
9351  Variable* var_details, Variable* var_raw_value, Label* if_not_found,
9352  Label* if_bailout, GetOwnPropertyMode mode) {
9353  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
9354  Comment("TryGetOwnProperty");
9355 
9356  TVARIABLE(HeapObject, var_meta_storage);
9357  TVARIABLE(IntPtrT, var_entry);
9358 
9359  Label if_found_fast(this), if_found_dict(this), if_found_global(this);
9360 
9361  VARIABLE(local_var_details, MachineRepresentation::kWord32);
9362  if (!var_details) {
9363  var_details = &local_var_details;
9364  }
9365  Label if_found(this);
9366 
9367  TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
9368  &if_found_dict, &if_found_global, &var_meta_storage,
9369  &var_entry, if_not_found, if_bailout);
9370  BIND(&if_found_fast);
9371  {
9372  TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
9373  Node* name_index = var_entry.value();
9374 
9375  LoadPropertyFromFastObject(object, map, descriptors, name_index,
9376  var_details, var_value);
9377  Goto(&if_found);
9378  }
9379  BIND(&if_found_dict);
9380  {
9381  Node* dictionary = var_meta_storage.value();
9382  Node* entry = var_entry.value();
9383  LoadPropertyFromNameDictionary(dictionary, entry, var_details, var_value);
9384  Goto(&if_found);
9385  }
9386  BIND(&if_found_global);
9387  {
9388  Node* dictionary = var_meta_storage.value();
9389  Node* entry = var_entry.value();
9390 
9391  LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
9392  if_not_found);
9393  Goto(&if_found);
9394  }
9395  // Here we have the details and value, which could be an accessor.
9396  BIND(&if_found);
9397  {
9398  // TODO(ishell): Execute C++ accessor in case of accessor info
9399  if (var_raw_value) {
9400  var_raw_value->Bind(var_value->value());
9401  }
9402  Node* value = CallGetterIfAccessor(var_value->value(), var_details->value(),
9403  context, receiver, if_bailout, mode);
9404  var_value->Bind(value);
9405  Goto(if_found_value);
9406  }
9407 }
9408 
9409 void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
9410  SloppyTNode<Int32T> instance_type,
9411  SloppyTNode<IntPtrT> intptr_index,
9412  Label* if_found, Label* if_absent,
9413  Label* if_not_found,
9414  Label* if_bailout) {
9415  // Handle special objects in runtime.
9416  GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);
9417 
9418  Node* elements_kind = LoadMapElementsKind(map);
9419 
9420  // TODO(verwaest): Support other elements kinds as well.
9421  Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
9422  if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
9423  if_typedarray(this);
9424  // clang-format off
9425  int32_t values[] = {
9426  // Handled by {if_isobjectorsmi}.
9427  PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS, PACKED_ELEMENTS,
9428  HOLEY_ELEMENTS,
9429  // Handled by {if_isdouble}.
9430  PACKED_DOUBLE_ELEMENTS, HOLEY_DOUBLE_ELEMENTS,
9431  // Handled by {if_isdictionary}.
9432  DICTIONARY_ELEMENTS,
9433  // Handled by {if_isfaststringwrapper}.
9434  FAST_STRING_WRAPPER_ELEMENTS,
9435  // Handled by {if_isslowstringwrapper}.
9436  SLOW_STRING_WRAPPER_ELEMENTS,
9437  // Handled by {if_not_found}.
9438  NO_ELEMENTS,
9439  // Handled by {if_typed_array}.
9440  UINT8_ELEMENTS,
9441  INT8_ELEMENTS,
9442  UINT16_ELEMENTS,
9443  INT16_ELEMENTS,
9444  UINT32_ELEMENTS,
9445  INT32_ELEMENTS,
9446  FLOAT32_ELEMENTS,
9447  FLOAT64_ELEMENTS,
9448  UINT8_CLAMPED_ELEMENTS,
9449  BIGUINT64_ELEMENTS,
9450  BIGINT64_ELEMENTS,
9451  };
9452  Label* labels[] = {
9453  &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
9454  &if_isobjectorsmi,
9455  &if_isdouble, &if_isdouble,
9456  &if_isdictionary,
9457  &if_isfaststringwrapper,
9458  &if_isslowstringwrapper,
9459  if_not_found,
9460  &if_typedarray,
9461  &if_typedarray,
9462  &if_typedarray,
9463  &if_typedarray,
9464  &if_typedarray,
9465  &if_typedarray,
9466  &if_typedarray,
9467  &if_typedarray,
9468  &if_typedarray,
9469  &if_typedarray,
9470  &if_typedarray,
9471  };
9472  // clang-format on
9473  STATIC_ASSERT(arraysize(values) == arraysize(labels));
9474  Switch(elements_kind, if_bailout, values, labels, arraysize(values));
9475 
9476  BIND(&if_isobjectorsmi);
9477  {
9478  TNode<FixedArray> elements = CAST(LoadElements(object));
9479  TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
9480 
9481  GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
9482 
9483  TNode<Object> element = LoadFixedArrayElement(elements, intptr_index);
9484  TNode<Oddball> the_hole = TheHoleConstant();
9485  Branch(WordEqual(element, the_hole), if_not_found, if_found);
9486  }
9487  BIND(&if_isdouble);
9488  {
9489  TNode<FixedArrayBase> elements = LoadElements(object);
9490  TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
9491 
9492  GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
9493 
9494  // Check if the element is a double hole, but don't load it.
9495  LoadFixedDoubleArrayElement(CAST(elements), intptr_index,
9496  MachineType::None(), 0, INTPTR_PARAMETERS,
9497  if_not_found);
9498  Goto(if_found);
9499  }
9500  BIND(&if_isdictionary);
9501  {
9502  // Negative keys must be converted to property names.
9503  GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
9504 
9505  TVARIABLE(IntPtrT, var_entry);
9506  TNode<NumberDictionary> elements = CAST(LoadElements(object));
9507  NumberDictionaryLookup(elements, intptr_index, if_found, &var_entry,
9508  if_not_found);
9509  }
9510  BIND(&if_isfaststringwrapper);
9511  {
9512  CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
9513  Node* string = LoadJSValueValue(object);
9514  CSA_ASSERT(this, IsString(string));
9515  Node* length = LoadStringLengthAsWord(string);
9516  GotoIf(UintPtrLessThan(intptr_index, length), if_found);
9517  Goto(&if_isobjectorsmi);
9518  }
9519  BIND(&if_isslowstringwrapper);
9520  {
9521  CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
9522  Node* string = LoadJSValueValue(object);
9523  CSA_ASSERT(this, IsString(string));
9524  Node* length = LoadStringLengthAsWord(string);
9525  GotoIf(UintPtrLessThan(intptr_index, length), if_found);
9526  Goto(&if_isdictionary);
9527  }
9528  BIND(&if_typedarray);
9529  {
9530  Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
9531  GotoIf(IsDetachedBuffer(buffer), if_absent);
9532 
9533  Node* length = SmiUntag(LoadJSTypedArrayLength(CAST(object)));
9534  Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
9535  }
9536  BIND(&if_oob);
9537  {
9538  // Positive OOB indices mean "not found"; negative indices must be
9539  // converted to property names.
9540  GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
9541  Goto(if_not_found);
9542  }
9543 }
9544 
9545 void CodeStubAssembler::BranchIfMaybeSpecialIndex(TNode<String> name_string,
9546  Label* if_maybe_special_index,
9547  Label* if_not_special_index) {
9548  // TODO(cwhan.tunz): Implement fast cases more.
9549 
9550  // If a name is empty or too long, it's not a special index.
9551  // Max length of canonical double: -X.XXXXXXXXXXXXXXXXXe-XXX
9552  const int kBufferSize = 24;
9553  TNode<Smi> string_length = LoadStringLengthAsSmi(name_string);
9554  GotoIf(SmiEqual(string_length, SmiConstant(0)), if_not_special_index);
9555  GotoIf(SmiGreaterThan(string_length, SmiConstant(kBufferSize)),
9556  if_not_special_index);
9557 
9558  // If the first character of name is not a digit or '-', or we can't match it
9559  // to Infinity or NaN, then this is not a special index.
9560  TNode<Int32T> first_char = StringCharCodeAt(name_string, IntPtrConstant(0));
9561  // If the name starts with '-', it can be a negative index.
9562  GotoIf(Word32Equal(first_char, Int32Constant('-')), if_maybe_special_index);
9563  // If the name starts with 'I', it can be "Infinity".
9564  GotoIf(Word32Equal(first_char, Int32Constant('I')), if_maybe_special_index);
9565  // If the name starts with 'N', it can be "NaN".
9566  GotoIf(Word32Equal(first_char, Int32Constant('N')), if_maybe_special_index);
9567  // Finally, if the first character is not a digit either, then we are sure
9568  // that the name is not a special index.
9569  GotoIf(Uint32LessThan(first_char, Int32Constant('0')), if_not_special_index);
9570  GotoIf(Uint32LessThan(Int32Constant('9'), first_char), if_not_special_index);
9571  Goto(if_maybe_special_index);
9572 }
9573 
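// How the heuristic above classifies names (illustrative):
//   ""          -> not special (empty)
//   "-1", "-0"  -> maybe special (leading '-')
//   "Infinity"  -> maybe special (leading 'I')
//   "NaN"       -> maybe special (leading 'N')
//   "42"        -> maybe special (leading digit)
//   "foo"       -> not special (first char matches none of the above)
// "Maybe" only rules names *in* cheaply; the caller still has to parse
// the string to decide.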
9574 void CodeStubAssembler::TryPrototypeChainLookup(
9575  Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
9576  const LookupInHolder& lookup_element_in_holder, Label* if_end,
9577  Label* if_bailout, Label* if_proxy) {
9578  // Ensure receiver is JSReceiver, otherwise bailout.
9579  Label if_objectisnotsmi(this);
9580  Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
9581  BIND(&if_objectisnotsmi);
9582 
9583  Node* map = LoadMap(receiver);
9584  Node* instance_type = LoadMapInstanceType(map);
9585  {
9586  Label if_objectisreceiver(this);
9587  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
9588  STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
9589  Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
9590  if_bailout);
9591  BIND(&if_objectisreceiver);
9592 
9593  if (if_proxy) {
9594  GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
9595  }
9596  }
9597 
9598  VARIABLE(var_index, MachineType::PointerRepresentation());
9599  VARIABLE(var_unique, MachineRepresentation::kTagged);
9600 
9601  Label if_keyisindex(this), if_iskeyunique(this);
9602  TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
9603  if_bailout);
9604 
9605  BIND(&if_iskeyunique);
9606  {
9607  VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
9608  VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
9609  VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
9610  instance_type);
9611 
9612  Variable* merged_variables[] = {&var_holder, &var_holder_map,
9613  &var_holder_instance_type};
9614  Label loop(this, arraysize(merged_variables), merged_variables);
9615  Goto(&loop);
9616  BIND(&loop);
9617  {
9618  Node* holder_map = var_holder_map.value();
9619  Node* holder_instance_type = var_holder_instance_type.value();
9620 
9621  Label next_proto(this), check_integer_indexed_exotic(this);
9622  lookup_property_in_holder(receiver, var_holder.value(), holder_map,
9623  holder_instance_type, var_unique.value(),
9624  &check_integer_indexed_exotic, if_bailout);
9625 
9626  BIND(&check_integer_indexed_exotic);
9627  {
9628  // Bailout if it can be an integer indexed exotic case.
9629  GotoIfNot(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
9630  &next_proto);
9631  GotoIfNot(IsString(var_unique.value()), &next_proto);
9632  BranchIfMaybeSpecialIndex(CAST(var_unique.value()), if_bailout,
9633  &next_proto);
9634  }
9635 
9636  BIND(&next_proto);
9637 
9638  Node* proto = LoadMapPrototype(holder_map);
9639 
9640  GotoIf(IsNull(proto), if_end);
9641 
9642  Node* map = LoadMap(proto);
9643  Node* instance_type = LoadMapInstanceType(map);
9644 
9645  var_holder.Bind(proto);
9646  var_holder_map.Bind(map);
9647  var_holder_instance_type.Bind(instance_type);
9648  Goto(&loop);
9649  }
9650  }
9651  BIND(&if_keyisindex);
9652  {
9653  VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
9654  VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
9655  VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
9656  instance_type);
9657 
9658  Variable* merged_variables[] = {&var_holder, &var_holder_map,
9659  &var_holder_instance_type};
9660  Label loop(this, arraysize(merged_variables), merged_variables);
9661  Goto(&loop);
9662  BIND(&loop);
9663  {
9664  Label next_proto(this);
9665  lookup_element_in_holder(receiver, var_holder.value(),
9666  var_holder_map.value(),
9667  var_holder_instance_type.value(),
9668  var_index.value(), &next_proto, if_bailout);
9669  BIND(&next_proto);
9670 
9671  Node* proto = LoadMapPrototype(var_holder_map.value());
9672 
9673  GotoIf(IsNull(proto), if_end);
9674 
9675  Node* map = LoadMap(proto);
9676  Node* instance_type = LoadMapInstanceType(map);
9677 
9678  var_holder.Bind(proto);
9679  var_holder_map.Bind(map);
9680  var_holder_instance_type.Bind(instance_type);
9681  Goto(&loop);
9682  }
9683  }
9684 }
9685 
9686 Node* CodeStubAssembler::HasInPrototypeChain(Node* context, Node* object,
9687  Node* prototype) {
9688  CSA_ASSERT(this, TaggedIsNotSmi(object));
9689  VARIABLE(var_result, MachineRepresentation::kTagged);
9690  Label return_false(this), return_true(this),
9691  return_runtime(this, Label::kDeferred), return_result(this);
9692 
9693  // Loop through the prototype chain looking for the {prototype}.
9694  VARIABLE(var_object_map, MachineRepresentation::kTagged, LoadMap(object));
9695  Label loop(this, &var_object_map);
9696  Goto(&loop);
9697  BIND(&loop);
9698  {
9699  // Check if we can determine the prototype directly from the {object_map}.
9700  Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
9701  Node* object_map = var_object_map.value();
9702  TNode<Int32T> object_instance_type = LoadMapInstanceType(object_map);
9703  Branch(IsSpecialReceiverInstanceType(object_instance_type),
9704  &if_objectisspecial, &if_objectisdirect);
9705  BIND(&if_objectisspecial);
9706  {
9707  // The {object_map} is a special receiver map or a primitive map, check
9708  // if we need to use the if_objectisspecial path in the runtime.
9709  GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
9710  &return_runtime);
9711  Node* object_bitfield = LoadMapBitField(object_map);
9712  int mask = Map::HasNamedInterceptorBit::kMask |
9713  Map::IsAccessCheckNeededBit::kMask;
9714  Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
9715  &if_objectisdirect);
9716  }
9717  BIND(&if_objectisdirect);
9718 
9719  // Check the current {object} prototype.
9720  Node* object_prototype = LoadMapPrototype(object_map);
9721  GotoIf(IsNull(object_prototype), &return_false);
9722  GotoIf(WordEqual(object_prototype, prototype), &return_true);
9723 
9724  // Continue with the prototype.
9725  CSA_ASSERT(this, TaggedIsNotSmi(object_prototype));
9726  var_object_map.Bind(LoadMap(object_prototype));
9727  Goto(&loop);
9728  }
9729 
9730  BIND(&return_true);
9731  var_result.Bind(TrueConstant());
9732  Goto(&return_result);
9733 
9734  BIND(&return_false);
9735  var_result.Bind(FalseConstant());
9736  Goto(&return_result);
9737 
9738  BIND(&return_runtime);
9739  {
9740  // Fallback to the runtime implementation.
9741  var_result.Bind(
9742  CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype));
9743  }
9744  Goto(&return_result);
9745 
9746  BIND(&return_result);
9747  return var_result.value();
9748 }
9749 
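// JS-level equivalent of the fast path above, roughly
// Object.prototype.isPrototypeOf (sketch only):
//
//   function hasInPrototypeChain(object, prototype) {
//     for (let p = Object.getPrototypeOf(object); p !== null;
//          p = Object.getPrototypeOf(p)) {
//       if (p === prototype) return true;
//     }
//     return false;
//   }
//
// Proxies, named interceptors, and access-checked maps leave this loop
// and use Runtime::kHasInPrototypeChain instead.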
9750 Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
9751  Node* object) {
9752  VARIABLE(var_result, MachineRepresentation::kTagged);
9753  Label return_runtime(this, Label::kDeferred), return_result(this);
9754 
9755  // Goto runtime if {object} is a Smi.
9756  GotoIf(TaggedIsSmi(object), &return_runtime);
9757 
9758  // Goto runtime if {callable} is a Smi.
9759  GotoIf(TaggedIsSmi(callable), &return_runtime);
9760 
9761  // Load map of {callable}.
9762  Node* callable_map = LoadMap(callable);
9763 
9764  // Goto runtime if {callable} is not a JSFunction.
9765  Node* callable_instance_type = LoadMapInstanceType(callable_map);
9766  GotoIfNot(InstanceTypeEqual(callable_instance_type, JS_FUNCTION_TYPE),
9767  &return_runtime);
9768 
9769  GotoIfPrototypeRequiresRuntimeLookup(CAST(callable), CAST(callable_map),
9770  &return_runtime);
9771 
9772  // Get the "prototype" (or initial map) of the {callable}.
9773  Node* callable_prototype =
9774  LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
9775  {
9776  Label callable_prototype_valid(this);
9777  VARIABLE(var_callable_prototype, MachineRepresentation::kTagged,
9778  callable_prototype);
9779 
9780  // Resolve the "prototype" if the {callable} has an initial map. Afterwards
9781  // the {callable_prototype} will be either the JSReceiver prototype object
9782  // or the hole value, which means that no instances of the {callable} were
9783  // created so far and hence we should return false.
9784  Node* callable_prototype_instance_type =
9785  LoadInstanceType(callable_prototype);
9786  GotoIfNot(InstanceTypeEqual(callable_prototype_instance_type, MAP_TYPE),
9787  &callable_prototype_valid);
9788  var_callable_prototype.Bind(
9789  LoadObjectField(callable_prototype, Map::kPrototypeOffset));
9790  Goto(&callable_prototype_valid);
9791  BIND(&callable_prototype_valid);
9792  callable_prototype = var_callable_prototype.value();
9793  }
9794 
9795  // Loop through the prototype chain looking for the {callable} prototype.
9796  var_result.Bind(HasInPrototypeChain(context, object, callable_prototype));
9797  Goto(&return_result);
9798 
9799  BIND(&return_runtime);
9800  {
9801  // Fallback to the runtime implementation.
9802  var_result.Bind(
9803  CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
9804  }
9805  Goto(&return_result);
9806 
9807  BIND(&return_result);
9808  return var_result.value();
9809 }
9810 
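// This is the fast path of the spec's OrdinaryHasInstance, i.e. the
// default behavior of `instanceof` when Symbol.hasInstance is not
// overridden. Roughly (sketch only):
//
//   function ordinaryHasInstance(C, O) {
//     // Smis, non-JSFunction callables (e.g. bound functions, proxies)
//     // and functions needing a runtime prototype lookup bail out to
//     // Runtime::kOrdinaryHasInstance.
//     return hasInPrototypeChain(O, C.prototype);
//   }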
9811 TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
9812  ElementsKind kind,
9813  ParameterMode mode,
9814  int base_size) {
9815  CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, mode));
9816  int element_size_shift = ElementsKindToShiftSize(kind);
9817  int element_size = 1 << element_size_shift;
9818  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
9819  intptr_t index = 0;
9820  bool constant_index = false;
9821  if (mode == SMI_PARAMETERS) {
9822  element_size_shift -= kSmiShiftBits;
9823  Smi smi_index;
9824  constant_index = ToSmiConstant(index_node, &smi_index);
9825  if (constant_index) index = smi_index->value();
9826  index_node = BitcastTaggedToWord(index_node);
9827  } else {
9828  DCHECK(mode == INTPTR_PARAMETERS);
9829  constant_index = ToIntPtrConstant(index_node, index);
9830  }
9831  if (constant_index) {
9832  return IntPtrConstant(base_size + element_size * index);
9833  }
9834 
9835  TNode<WordT> shifted_index =
9836  (element_size_shift == 0)
9837  ? UncheckedCast<WordT>(index_node)
9838  : ((element_size_shift > 0)
9839  ? WordShl(index_node, IntPtrConstant(element_size_shift))
9840  : WordSar(index_node, IntPtrConstant(-element_size_shift)));
9841  return IntPtrAdd(IntPtrConstant(base_size), Signed(shifted_index));
9842 }
9843 
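// The result is base_size + index * element_size, where element_size is
// 1 << ElementsKindToShiftSize(kind). For a Smi index the Smi tag shift
// is folded into element_size_shift, so the index is bitcast and shifted
// once instead of being untagged first. Illustrative arithmetic (64-bit,
// tagged elements, shift 3): index 5 with base_size 16 yields
// 16 + (5 << 3) = 56. The constants here are for the example only.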
9844 TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(SloppyTNode<IntPtrT> offset,
9845  SloppyTNode<IntPtrT> length,
9846  int header_size,
9847  ElementsKind kind) {
9848  // Make sure we point to the last field.
9849  int element_size = 1 << ElementsKindToShiftSize(kind);
9850  int correction = header_size - kHeapObjectTag - element_size;
9851  TNode<IntPtrT> last_offset =
9852  ElementOffsetFromIndex(length, kind, INTPTR_PARAMETERS, correction);
9853  return IntPtrLessThanOrEqual(offset, last_offset);
9854 }
9855 
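// Restating the bound: the largest valid offset is that of the last
// element,
//   header_size - kHeapObjectTag + (length - 1) * element_size,
// which is computed above as ElementOffsetFromIndex(length, ...) with a
// base correction of header_size - kHeapObjectTag - element_size, so no
// explicit subtraction on |length| is needed.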
9856 TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVector(
9857  SloppyTNode<JSFunction> closure, Label* if_undefined) {
9858  TNode<Object> maybe_vector = LoadFeedbackVectorUnchecked(closure);
9859  if (if_undefined) {
9860  GotoIf(IsUndefined(maybe_vector), if_undefined);
9861  }
9862  return CAST(maybe_vector);
9863 }
9864 
9865 TNode<Object> CodeStubAssembler::LoadFeedbackVectorUnchecked(
9866  SloppyTNode<JSFunction> closure) {
9867  TNode<FeedbackCell> feedback_cell =
9868  CAST(LoadObjectField(closure, JSFunction::kFeedbackCellOffset));
9869  TNode<Object> maybe_vector =
9870  LoadObjectField(feedback_cell, FeedbackCell::kValueOffset);
9871  return maybe_vector;
9872 }
9873 
9874 TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorForStub() {
9875  TNode<JSFunction> function =
9876  CAST(LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset));
9877  return LoadFeedbackVector(function);
9878 }
9879 
9880 void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* maybe_vector,
9881  Node* slot_id) {
9882  Label end(this);
9883  // If feedback_vector is not valid, then nothing to do.
9884  // If the feedback vector is not valid, there is nothing to do.
9885 
9886  // This method is used for binary op and compare feedback. These
9887  // feedback slots are initialized with Smi zero, so we can simply OR
9888  // our new feedback in place.
9889  TNode<FeedbackVector> feedback_vector = CAST(maybe_vector);
9890  TNode<MaybeObject> feedback_element =
9891  LoadFeedbackVectorSlot(feedback_vector, slot_id);
9892  TNode<Smi> previous_feedback = CAST(feedback_element);
9893  TNode<Smi> combined_feedback = SmiOr(previous_feedback, CAST(feedback));
9894 
9895  GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
9896  {
9897  StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
9898  SKIP_WRITE_BARRIER);
9899  ReportFeedbackUpdate(feedback_vector, slot_id, "UpdateFeedback");
9900  Goto(&end);
9901  }
9902 
9903  BIND(&end);
9904 }
9905 
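// Feedback is a Smi-encoded bit set, so combining old and new feedback
// is a single SmiOr. Illustrative (bit patterns made up): with previous
// feedback 0b01 and new feedback 0b10, the combined value 0b11 is
// written back and reported; if previous == combined, the store and the
// ReportFeedbackUpdate call are skipped entirely.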
9906 void CodeStubAssembler::ReportFeedbackUpdate(
9907  SloppyTNode<FeedbackVector> feedback_vector, SloppyTNode<IntPtrT> slot_id,
9908  const char* reason) {
9909  // Reset profiler ticks.
9910  StoreObjectFieldNoWriteBarrier(
9911  feedback_vector, FeedbackVector::kProfilerTicksOffset, Int32Constant(0),
9912  MachineRepresentation::kWord32);
9913 
9914 #ifdef V8_TRACE_FEEDBACK_UPDATES
9915  // Trace the update.
9916  CallRuntime(Runtime::kInterpreterTraceUpdateFeedback, NoContextConstant(),
9917  LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset),
9918  SmiTag(slot_id), StringConstant(reason));
9919 #endif // V8_TRACE_FEEDBACK_UPDATES
9920 }
9921 
9922 void CodeStubAssembler::OverwriteFeedback(Variable* existing_feedback,
9923  int new_feedback) {
9924  if (existing_feedback == nullptr) return;
9925  existing_feedback->Bind(SmiConstant(new_feedback));
9926 }
9927 
9928 void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
9929  int feedback) {
9930  if (existing_feedback == nullptr) return;
9931  existing_feedback->Bind(
9932  SmiOr(CAST(existing_feedback->value()), SmiConstant(feedback)));
9933 }
9934 
9935 void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
9936  Node* feedback) {
9937  if (existing_feedback == nullptr) return;
9938  existing_feedback->Bind(
9939  SmiOr(CAST(existing_feedback->value()), CAST(feedback)));
9940 }
9941 
9942 void CodeStubAssembler::CheckForAssociatedProtector(Node* name,
9943  Label* if_protector) {
9944  // This list must be kept in sync with LookupIterator::UpdateProtector!
9945  // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
9946  GotoIf(WordEqual(name, LoadRoot(RootIndex::kconstructor_string)),
9947  if_protector);
9948  GotoIf(WordEqual(name, LoadRoot(RootIndex::kiterator_symbol)), if_protector);
9949  GotoIf(WordEqual(name, LoadRoot(RootIndex::knext_string)), if_protector);
9950  GotoIf(WordEqual(name, LoadRoot(RootIndex::kspecies_symbol)), if_protector);
9951  GotoIf(WordEqual(name, LoadRoot(RootIndex::kis_concat_spreadable_symbol)),
9952  if_protector);
9953  GotoIf(WordEqual(name, LoadRoot(RootIndex::kresolve_string)), if_protector);
9954  GotoIf(WordEqual(name, LoadRoot(RootIndex::kthen_string)), if_protector);
9955  // Fall through if no case matched.
9956 }
9957 
9958 TNode<Map> CodeStubAssembler::LoadReceiverMap(SloppyTNode<Object> receiver) {
9959  return Select<Map>(
9960  TaggedIsSmi(receiver),
9961  [=] { return CAST(LoadRoot(RootIndex::kHeapNumberMap)); },
9962  [=] { return LoadMap(UncheckedCast<HeapObject>(receiver)); });
9963 }
9964 
9965 TNode<IntPtrT> CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
9966  TVARIABLE(IntPtrT, var_intptr_key);
9967  Label done(this, &var_intptr_key), key_is_smi(this);
9968  GotoIf(TaggedIsSmi(key), &key_is_smi);
9969  // Try to convert the heap number key to an integer index.
9970  GotoIfNot(IsHeapNumber(key), miss);
9971  {
9972  TNode<Float64T> value = LoadHeapNumberValue(key);
9973  TNode<Int32T> int_value = RoundFloat64ToInt32(value);
9974  GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
9975  var_intptr_key = ChangeInt32ToIntPtr(int_value);
9976  Goto(&done);
9977  }
9978 
9979  BIND(&key_is_smi);
9980  {
9981  var_intptr_key = SmiUntag(key);
9982  Goto(&done);
9983  }
9984 
9985  BIND(&done);
9986  return var_intptr_key.value();
9987 }
9988 
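// Illustrative outcomes: a Smi key 7 is simply untagged; a HeapNumber
// 7.0 survives the round-trip through RoundFloat64ToInt32 and yields 7;
// a HeapNumber 7.5 fails the Float64Equal re-check and jumps to |miss|,
// as does any key that is neither a Smi nor a HeapNumber.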
9989 Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
9990  Node* value, Label* bailout) {
9991  // Mapped arguments are actual arguments. Unmapped arguments are values added
9992  // to the arguments object after it was created for the call. Mapped arguments
9993  // are stored in the context at indexes given by elements[key + 2]. Unmapped
9994  // arguments are stored as regular indexed properties in the arguments array,
9995  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
9996  // look at argument object construction.
9997  //
9998  // The sloppy arguments elements array has a special format:
9999  //
10000  // 0: context
10001  // 1: unmapped arguments array
10002  // 2: mapped_index0,
10003  // 3: mapped_index1,
10004  // ...
10005  //
10006  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
10007  // If key + 2 >= elements.length then attempt to look in the unmapped
10008  // arguments array (given by elements[1]) and return the value at key,
10009  // bailing out to the runtime if the unmapped arguments array is not a
10010  // fixed array or if key >= unmapped_arguments_array.length.
10011  //
10012  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
10013  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
10014  // index into the context array given at elements[0]. Return the value at
10015  // context[t].
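  //
  // Illustrative layout (values made up): for
  //   function f(a, b) { return arguments[0]; }  called as  f(1, 2)
  // with `a` mapped to context slot 4 and `b` to slot 5, elements could
  // look like
  //   [ <context>, <unmapped FixedArray>, 4, 5 ]
  // so arguments[0] reads elements[0 + 2] == 4 and returns context[4].
  // After `delete arguments[0]`, the slot holds the hole and the lookup
  // falls through to the unmapped array.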
10016 
10017  bool is_load = value == nullptr;
10018 
10019  GotoIfNot(TaggedIsSmi(key), bailout);
10020  key = SmiUntag(key);
10021  GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
10022 
10023  TNode<FixedArray> elements = CAST(LoadElements(receiver));
10024  TNode<IntPtrT> elements_length = LoadAndUntagFixedArrayBaseLength(elements);
10025 
10026  VARIABLE(var_result, MachineRepresentation::kTagged);
10027  if (!is_load) {
10028  var_result.Bind(value);
10029  }
10030  Label if_mapped(this), if_unmapped(this), end(this, &var_result);
10031  Node* intptr_two = IntPtrConstant(2);
10032  Node* adjusted_length = IntPtrSub(elements_length, intptr_two);
10033 
10034  GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);
10035 
10036  TNode<Object> mapped_index =
10037  LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
10038  Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);
10039 
10040  BIND(&if_mapped);
10041  {
10042  TNode<IntPtrT> mapped_index_intptr = SmiUntag(CAST(mapped_index));
10043  TNode<Context> the_context = CAST(LoadFixedArrayElement(elements, 0));
10044  if (is_load) {
10045  Node* result = LoadContextElement(the_context, mapped_index_intptr);
10046  CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
10047  var_result.Bind(result);
10048  } else {
10049  StoreContextElement(the_context, mapped_index_intptr, value);
10050  }
10051  Goto(&end);
10052  }
10053 
10054  BIND(&if_unmapped);
10055  {
10056  TNode<HeapObject> backing_store_ho =
10057  CAST(LoadFixedArrayElement(elements, 1));
10058  GotoIf(WordNotEqual(LoadMap(backing_store_ho), FixedArrayMapConstant()),
10059  bailout);
10060  TNode<FixedArray> backing_store = CAST(backing_store_ho);
10061 
10062  TNode<IntPtrT> backing_store_length =
10063  LoadAndUntagFixedArrayBaseLength(backing_store);
10064  GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);
10065 
10066  // The key falls into the unmapped range.
10067  if (is_load) {
10068  Node* result = LoadFixedArrayElement(backing_store, key);
10069  GotoIf(WordEqual(result, TheHoleConstant()), bailout);
10070  var_result.Bind(result);
10071  } else {
10072  StoreFixedArrayElement(backing_store, key, value);
10073  }
10074  Goto(&end);
10075  }
10076 
10077  BIND(&end);
10078  return var_result.value();
10079 }
10080 
10081 TNode<Context> CodeStubAssembler::LoadScriptContext(
10082  TNode<Context> context, TNode<IntPtrT> context_index) {
10083  TNode<Context> native_context = LoadNativeContext(context);
10084  TNode<ScriptContextTable> script_context_table = CAST(
10085  LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX));
10086 
10087  TNode<Context> script_context = CAST(LoadFixedArrayElement(
10088  script_context_table, context_index,
10089  ScriptContextTable::kFirstContextSlotIndex * kPointerSize));
10090  return script_context;
10091 }
10092 
10093 namespace {
10094 
10095 // Converts a typed array elements kind to its machine representation.
10096 MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
10097  switch (kind) {
10098  case UINT8_CLAMPED_ELEMENTS:
10099  case UINT8_ELEMENTS:
10100  case INT8_ELEMENTS:
10101  return MachineRepresentation::kWord8;
10102  case UINT16_ELEMENTS:
10103  case INT16_ELEMENTS:
10104  return MachineRepresentation::kWord16;
10105  case UINT32_ELEMENTS:
10106  case INT32_ELEMENTS:
10107  return MachineRepresentation::kWord32;
10108  case FLOAT32_ELEMENTS:
10109  return MachineRepresentation::kFloat32;
10110  case FLOAT64_ELEMENTS:
10111  return MachineRepresentation::kFloat64;
10112  default:
10113  UNREACHABLE();
10114  }
10115 }
10116 
10117 } // namespace
10118 
10119 void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
10120  Node* index, Node* value,
10121  ParameterMode mode) {
10122  if (IsFixedTypedArrayElementsKind(kind)) {
10123  if (kind == UINT8_CLAMPED_ELEMENTS) {
10124  CSA_ASSERT(this,
10125  Word32Equal(value, Word32And(Int32Constant(0xFF), value)));
10126  }
10127  Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
10128  // TODO(cbruni): Add OOB check once typed.
10129  MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
10130  StoreNoWriteBarrier(rep, elements, offset, value);
10131  return;
10132  } else if (IsDoubleElementsKind(kind)) {
10133  // Make sure we do not store signalling NaNs into double arrays.
10134  TNode<Float64T> value_silenced = Float64SilenceNaN(value);
10135  StoreFixedDoubleArrayElement(CAST(elements), index, value_silenced, mode);
10136  } else {
10137  WriteBarrierMode barrier_mode =
10138  IsSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
10139  StoreFixedArrayElement(CAST(elements), index, value, barrier_mode, 0, mode);
10140  }
10141 }
10142 
10143 Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
10144  Label done(this);
10145  Node* int32_zero = Int32Constant(0);
10146  Node* int32_255 = Int32Constant(255);
10147  VARIABLE(var_value, MachineRepresentation::kWord32, int32_value);
10148  GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
10149  var_value.Bind(int32_zero);
10150  GotoIf(Int32LessThan(int32_value, int32_zero), &done);
10151  var_value.Bind(int32_255);
10152  Goto(&done);
10153  BIND(&done);
10154  return var_value.value();
10155 }
10156 
10157 Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
10158  Label done(this);
10159  VARIABLE(var_value, MachineRepresentation::kWord32, Int32Constant(0));
10160  GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
10161  var_value.Bind(Int32Constant(255));
10162  GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
10163  {
10164  Node* rounded_value = Float64RoundToEven(float64_value);
10165  var_value.Bind(TruncateFloat64ToWord32(rounded_value));
10166  Goto(&done);
10167  }
10168  BIND(&done);
10169  return var_value.value();
10170 }
10171 
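// Clamping examples for the two helpers above (illustrative):
//   Int32ToUint8Clamped:   -5 -> 0,    42 -> 42,   300 -> 255
//   Float64ToUint8Clamped: -0.5 -> 0,  2.5 -> 2,   3.5 -> 4,  301.0 -> 255
// The float path rounds halfway cases to even (2.5 -> 2, 3.5 -> 4),
// matching the Uint8ClampedArray conversion in the ECMAScript spec.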
10172 Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
10173  TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
10174  DCHECK(IsFixedTypedArrayElementsKind(elements_kind));
10175 
10176  MachineRepresentation rep;
10177  switch (elements_kind) {
10178  case UINT8_ELEMENTS:
10179  case INT8_ELEMENTS:
10180  case UINT16_ELEMENTS:
10181  case INT16_ELEMENTS:
10182  case UINT32_ELEMENTS:
10183  case INT32_ELEMENTS:
10184  case UINT8_CLAMPED_ELEMENTS:
10185  rep = MachineRepresentation::kWord32;
10186  break;
10187  case FLOAT32_ELEMENTS:
10188  rep = MachineRepresentation::kFloat32;
10189  break;
10190  case FLOAT64_ELEMENTS:
10191  rep = MachineRepresentation::kFloat64;
10192  break;
10193  case BIGINT64_ELEMENTS:
10194  case BIGUINT64_ELEMENTS:
10195  return ToBigInt(context, input);
10196  default:
10197  UNREACHABLE();
10198  }
10199 
10200  VARIABLE(var_result, rep);
10201  VARIABLE(var_input, MachineRepresentation::kTagged, input);
10202  Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
10203  convert(this), loop(this, &var_input);
10204  Goto(&loop);
10205  BIND(&loop);
10206  GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
10207  // We can handle both HeapNumber and Oddball here, since Oddball has the
10208  // same layout as the HeapNumber for the HeapNumber::value field. This
10209  // way we can also properly optimize stores of oddballs to typed arrays.
10210  GotoIf(IsHeapNumber(var_input.value()), &if_heapnumber_or_oddball);
10211  STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
10212  Branch(HasInstanceType(var_input.value(), ODDBALL_TYPE),
10213  &if_heapnumber_or_oddball, &convert);
10214 
10215  BIND(&if_heapnumber_or_oddball);
10216  {
10217  Node* value = UncheckedCast<Float64T>(LoadObjectField(
10218  var_input.value(), HeapNumber::kValueOffset, MachineType::Float64()));
10219  if (rep == MachineRepresentation::kWord32) {
10220  if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
10221  value = Float64ToUint8Clamped(value);
10222  } else {
10223  value = TruncateFloat64ToWord32(value);
10224  }
10225  } else if (rep == MachineRepresentation::kFloat32) {
10226  value = TruncateFloat64ToFloat32(value);
10227  } else {
10228  DCHECK_EQ(MachineRepresentation::kFloat64, rep);
10229  }
10230  var_result.Bind(value);
10231  Goto(&done);
10232  }
10233 
10234  BIND(&if_smi);
10235  {
10236  Node* value = SmiToInt32(var_input.value());
10237  if (rep == MachineRepresentation::kFloat32) {
10238  value = RoundInt32ToFloat32(value);
10239  } else if (rep == MachineRepresentation::kFloat64) {
10240  value = ChangeInt32ToFloat64(value);
10241  } else {
10242  DCHECK_EQ(MachineRepresentation::kWord32, rep);
10243  if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
10244  value = Int32ToUint8Clamped(value);
10245  }
10246  }
10247  var_result.Bind(value);
10248  Goto(&done);
10249  }
10250 
10251  BIND(&convert);
10252  {
10253  var_input.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, input));
10254  Goto(&loop);
10255  }
10256 
10257  BIND(&done);
10258  return var_result.value();
10259 }
10260 
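// JS-level effects of the conversion above (illustrative):
//   u8c[0] = 2.5;   // UINT8_CLAMPED: Float64ToUint8Clamped -> 2
//   i32[0] = 2.5;   // INT32: TruncateFloat64ToWord32 -> 2
//   i8[0]  = 300;   // INT8: word32 300 here; low byte (44) stored later
//   f32[0] = 0.1;   // FLOAT32: rounded to the nearest float32
//   u8[0]  = "7";   // non-number: NonNumberToNumber, then loop again
//   b64[0] = 1;     // BIGINT64: ToBigInt throws for a plain Number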
10261 void CodeStubAssembler::EmitBigTypedArrayElementStore(
10262  TNode<JSTypedArray> object, TNode<FixedTypedArrayBase> elements,
10263  TNode<IntPtrT> intptr_key, TNode<Object> value, TNode<Context> context,
10264  Label* opt_if_neutered) {
10265  TNode<BigInt> bigint_value = ToBigInt(context, value);
10266 
10267  if (opt_if_neutered != nullptr) {
10268  // Check if the buffer has been neutered. Must happen after {ToBigInt}!
10269  Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
10270  GotoIf(IsDetachedBuffer(buffer), opt_if_neutered);
10271  }
10272 
10273  TNode<RawPtrT> backing_store = LoadFixedTypedArrayBackingStore(elements);
10274  TNode<IntPtrT> offset = ElementOffsetFromIndex(intptr_key, BIGINT64_ELEMENTS,
10275  INTPTR_PARAMETERS, 0);
10276  EmitBigTypedArrayElementStore(elements, backing_store, offset, bigint_value);
10277 }
10278 
10279 void CodeStubAssembler::BigIntToRawBytes(TNode<BigInt> bigint,
10280  TVariable<UintPtrT>* var_low,
10281  TVariable<UintPtrT>* var_high) {
10282  Label done(this);
10283  *var_low = Unsigned(IntPtrConstant(0));
10284  *var_high = Unsigned(IntPtrConstant(0));
10285  TNode<WordT> bitfield = LoadBigIntBitfield(bigint);
10286  TNode<UintPtrT> length = DecodeWord<BigIntBase::LengthBits>(bitfield);
10287  TNode<UintPtrT> sign = DecodeWord<BigIntBase::SignBits>(bitfield);
10288  GotoIf(WordEqual(length, IntPtrConstant(0)), &done);
10289  *var_low = LoadBigIntDigit(bigint, 0);
10290  if (!Is64()) {
10291  Label load_done(this);
10292  GotoIf(WordEqual(length, IntPtrConstant(1)), &load_done);
10293  *var_high = LoadBigIntDigit(bigint, 1);
10294  Goto(&load_done);
10295  BIND(&load_done);
10296  }
10297  GotoIf(WordEqual(sign, IntPtrConstant(0)), &done);
10298  // Negative value. Simulate two's complement.
10299  if (!Is64()) {
10300  *var_high = Unsigned(IntPtrSub(IntPtrConstant(0), var_high->value()));
10301  Label no_carry(this);
10302  GotoIf(WordEqual(var_low->value(), IntPtrConstant(0)), &no_carry);
10303  *var_high = Unsigned(IntPtrSub(var_high->value(), IntPtrConstant(1)));
10304  Goto(&no_carry);
10305  BIND(&no_carry);
10306  }
10307  *var_low = Unsigned(IntPtrSub(IntPtrConstant(0), var_low->value()));
10308  Goto(&done);
10309  BIND(&done);
10310 }
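// Worked example (illustrative): on a 32-bit target, a BigInt with digits
// {low = 1, high = 2} and the sign bit set encodes -(2 * 2^32 + 1). The
// code above produces the two's complement pair
//   var_high = 0 - 2 = 0xFFFFFFFE, then low != 0, so -= 1 -> 0xFFFFFFFD
//   var_low  = 0 - 1 = 0xFFFFFFFF
// which matches the scalar computation
//   uint64_t raw = (uint64_t{high} << 32) | low;  // magnitude
//   if (sign) raw = ~raw + 1;                     // two's complement
// i.e. raw == 0xFFFFFFFDFFFFFFFF.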
10311 
10312 void CodeStubAssembler::EmitBigTypedArrayElementStore(
10313  TNode<FixedTypedArrayBase> elements, TNode<RawPtrT> backing_store,
10314  TNode<IntPtrT> offset, TNode<BigInt> bigint_value) {
10315  TVARIABLE(UintPtrT, var_low);
10316  // Only used on 32-bit platforms.
10317  TVARIABLE(UintPtrT, var_high);
10318  BigIntToRawBytes(bigint_value, &var_low, &var_high);
10319 
10320  // Assert that offset < elements.length. Given that it's an offset for a raw
10321  // pointer, we correct it by the usual kHeapObjectTag offset.
10322  CSA_ASSERT(
10323  this, IsOffsetInBounds(offset, LoadAndUntagFixedArrayBaseLength(elements),
10324  kHeapObjectTag, BIGINT64_ELEMENTS));
10325 
10326  MachineRepresentation rep = WordT::kMachineRepresentation;
10327 #if defined(V8_TARGET_BIG_ENDIAN)
10328  if (!Is64()) {
10329  StoreNoWriteBarrier(rep, backing_store, offset, var_high.value());
10330  StoreNoWriteBarrier(rep, backing_store,
10331  IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
10332  var_low.value());
10333  } else {
10334  StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
10335  }
10336 #else
10337  StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
10338  if (!Is64()) {
10339  StoreNoWriteBarrier(rep, backing_store,
10340  IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
10341  var_high.value());
10342  }
10343 #endif
10344 }
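// On 64-bit targets one word store covers the whole element. On 32-bit
// targets the two digits are written so that the 64-bit element has the
// same numeric value regardless of byte order: little-endian stores
// {low, high} at {offset, offset + kPointerSize}, big-endian stores
// {high, low}. Illustrative layout for the element 0x0000000200000001:
//
//   little-endian 32-bit: 01 00 00 00 | 02 00 00 00
//   big-endian 32-bit:    00 00 00 02 | 00 00 00 01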
10345 
10346 void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
10347  ElementsKind elements_kind,
10348  KeyedAccessStoreMode store_mode,
10349  Label* bailout, Node* context) {
10350  CSA_ASSERT(this, Word32BinaryNot(IsJSProxy(object)));
10351 
10352  Node* elements = LoadElements(object);
10353  if (!IsSmiOrObjectElementsKind(elements_kind)) {
10354  CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10355  } else if (!IsCOWHandlingStoreMode(store_mode)) {
10356  GotoIf(IsFixedCOWArrayMap(LoadMap(elements)), bailout);
10357  }
10358 
10359  // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
10360  ParameterMode parameter_mode = INTPTR_PARAMETERS;
10361  TNode<IntPtrT> intptr_key = TryToIntptr(key, bailout);
10362 
10363  if (IsFixedTypedArrayElementsKind(elements_kind)) {
10364  Label done(this);
10365 
10366  // IntegerIndexedElementSet converts value to a Number/BigInt prior to the
10367  // bounds check.
10368  value = PrepareValueForWriteToTypedArray(CAST(value), elements_kind,
10369  CAST(context));
10370 
10371  // There must be no allocations between the buffer load and the actual
10372  // store to the backing store, because the GC may decide that the buffer
10373  // is not alive, or may move the elements.
10374  // TODO(ishell): introduce DisallowHeapAllocationCode scope here.
10375 
10376  // Check if buffer has been neutered.
10377  Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
10378  GotoIf(IsDetachedBuffer(buffer), bailout);
10379 
10380  // Bounds check.
10381  Node* length =
10382  TaggedToParameter(LoadJSTypedArrayLength(CAST(object)), parameter_mode);
10383 
10384  if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
10385  // Skip the store if we write beyond the length or
10386  // to a property with a negative integer index.
10387  GotoIfNot(UintPtrLessThan(intptr_key, length), &done);
10388  } else if (store_mode == STANDARD_STORE) {
10389  GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
10390  } else {
10391  // This case is produced due to the dispatched call in
10392  // ElementsTransitionAndStore and StoreFastElement.
10393  // TODO(jgruber): Avoid generating unsupported combinations to save code
10394  // size.
10395  DebugBreak();
10396  }
10397 
10398  if (elements_kind == BIGINT64_ELEMENTS ||
10399  elements_kind == BIGUINT64_ELEMENTS) {
10400  TNode<BigInt> bigint_value = UncheckedCast<BigInt>(value);
10401 
10402  TNode<RawPtrT> backing_store =
10403  LoadFixedTypedArrayBackingStore(CAST(elements));
10404  TNode<IntPtrT> offset = ElementOffsetFromIndex(
10405  intptr_key, BIGINT64_ELEMENTS, INTPTR_PARAMETERS, 0);
10406  EmitBigTypedArrayElementStore(CAST(elements), backing_store, offset,
10407  bigint_value);
10408  } else {
10409  Node* backing_store = LoadFixedTypedArrayBackingStore(CAST(elements));
10410  StoreElement(backing_store, elements_kind, intptr_key, value,
10411  parameter_mode);
10412  }
10413  Goto(&done);
10414 
10415  BIND(&done);
10416  return;
10417  }
10418  DCHECK(IsFastElementsKind(elements_kind));
10419 
10420  Node* length =
10421  SelectImpl(IsJSArray(object), [=]() { return LoadJSArrayLength(object); },
10422  [=]() { return LoadFixedArrayBaseLength(elements); },
10423  MachineRepresentation::kTagged);
10424  length = TaggedToParameter(length, parameter_mode);
10425 
10426  // If the value is stored into a fast smi array, ensure that the value is
10427  // a smi before manipulating the backing store. Otherwise the backing store
10428  // may be left in an invalid state.
10429  if (IsSmiElementsKind(elements_kind)) {
10430  GotoIfNot(TaggedIsSmi(value), bailout);
10431  } else if (IsDoubleElementsKind(elements_kind)) {
10432  value = TryTaggedToFloat64(value, bailout);
10433  }
10434 
10435  if (IsGrowStoreMode(store_mode)) {
10436  elements = CheckForCapacityGrow(object, elements, elements_kind, length,
10437  intptr_key, parameter_mode, bailout);
10438  } else {
10439  GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
10440  }
10441 
10442  // If we didn't grow {elements}, it might still be COW, in which case we
10443  // copy it now.
10444  if (!IsSmiOrObjectElementsKind(elements_kind)) {
10445  CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10446  } else if (IsCOWHandlingStoreMode(store_mode)) {
10447  elements = CopyElementsOnWrite(object, elements, elements_kind, length,
10448  parameter_mode, bailout);
10449  }
10450 
10451  CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10452  StoreElement(elements, elements_kind, intptr_key, value, parameter_mode);
10453 }
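// Bounds-check sketch for the typed array path above (illustrative):
//
//   if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
//     if (!(uintptr_t(key) < length)) return;        // silently dropped
//   } else {  // STANDARD_STORE
//     if (!(uintptr_t(key) < length)) goto bailout;  // runtime handles it
//   }
//
// The unsigned comparison doubles as a negative-index filter: a negative
// intptr key wraps to a huge unsigned value and always fails the check.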
10454 
10455 Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
10456  ElementsKind kind, Node* length,
10457  Node* key, ParameterMode mode,
10458  Label* bailout) {
10459  DCHECK(IsFastElementsKind(kind));
10460  VARIABLE(checked_elements, MachineRepresentation::kTagged);
10461  Label grow_case(this), no_grow_case(this), done(this),
10462  grow_bailout(this, Label::kDeferred);
10463 
10464  Node* condition;
10465  if (IsHoleyElementsKind(kind)) {
10466  condition = UintPtrGreaterThanOrEqual(key, length);
10467  } else {
10468  // We don't support growing here unless the value is being appended.
10469  condition = WordEqual(key, length);
10470  }
10471  Branch(condition, &grow_case, &no_grow_case);
10472 
10473  BIND(&grow_case);
10474  {
10475  Node* current_capacity =
10476  TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
10477  checked_elements.Bind(elements);
10478  Label fits_capacity(this);
10479  // If key is negative, we will notice in Runtime::kGrowArrayElements.
10480  GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
10481 
10482  {
10483  Node* new_elements = TryGrowElementsCapacity(
10484  object, elements, kind, key, current_capacity, mode, &grow_bailout);
10485  checked_elements.Bind(new_elements);
10486  Goto(&fits_capacity);
10487  }
10488 
10489  BIND(&grow_bailout);
10490  {
10491  Node* tagged_key = mode == SMI_PARAMETERS
10492  ? key
10493  : ChangeInt32ToTagged(TruncateIntPtrToInt32(key));
10494  Node* maybe_elements = CallRuntime(
10495  Runtime::kGrowArrayElements, NoContextConstant(), object, tagged_key);
10496  GotoIf(TaggedIsSmi(maybe_elements), bailout);
10497  CSA_ASSERT(this, IsFixedArrayWithKind(maybe_elements, kind));
10498  checked_elements.Bind(maybe_elements);
10499  Goto(&fits_capacity);
10500  }
10501 
10502  BIND(&fits_capacity);
10503  GotoIfNot(IsJSArray(object), &done);
10504 
10505  Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
10506  StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
10507  ParameterToTagged(new_length, mode));
10508  Goto(&done);
10509  }
10510 
10511  BIND(&no_grow_case);
10512  {
10513  GotoIfNot(UintPtrLessThan(key, length), bailout);
10514  checked_elements.Bind(elements);
10515  Goto(&done);
10516  }
10517 
10518  BIND(&done);
10519  return checked_elements.value();
10520 }
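// Growth-condition example (illustrative): with length == 3, a packed
// kind only grows on an exact append,
//   may_grow = holey ? (key >= length) : (key == length);
// so a store to index 3 takes the grow path, while a store to index 5
// falls through to the no-grow case and bails out; a holey kind may grow
// for both.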
10521 
10522 Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
10523  ElementsKind kind, Node* length,
10524  ParameterMode mode,
10525  Label* bailout) {
10526  VARIABLE(new_elements_var, MachineRepresentation::kTagged, elements);
10527  Label done(this);
10528 
10529  GotoIfNot(IsFixedCOWArrayMap(LoadMap(elements)), &done);
10530  {
10531  Node* capacity =
10532  TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
10533  Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
10534  length, capacity, mode, bailout);
10535  new_elements_var.Bind(new_elements);
10536  Goto(&done);
10537  }
10538 
10539  BIND(&done);
10540  return new_elements_var.value();
10541 }
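// Copy-on-write sketch (illustrative): a COW backing store may be shared
// by several arrays, so the first in-place write replaces it with a
// private copy of the same capacity before mutating anything:
//
//   if (map(elements) == fixed_cow_array_map)        // pseudocode
//     elements = AllocateAndCopy(elements, capacity);
//
// AllocateAndCopy is a hypothetical stand-in for the GrowElementsCapacity
// call above, which "grows" from capacity to capacity, i.e. just copies.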
10542 
10543 void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
10544  ElementsKind from_kind,
10545  ElementsKind to_kind,
10546  Label* bailout) {
10547  DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind));
10548  if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
10549  TrapAllocationMemento(object, bailout);
10550  }
10551 
10552  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
10553  Comment("Non-simple map transition");
10554  Node* elements = LoadElements(object);
10555 
10556  Label done(this);
10557  GotoIf(WordEqual(elements, EmptyFixedArrayConstant()), &done);
10558 
10559  // TODO(ishell): Use OptimalParameterMode().
10560  ParameterMode mode = INTPTR_PARAMETERS;
10561  Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
10562  Node* array_length = SelectImpl(
10563  IsJSArray(object),
10564  [=]() {
10565  CSA_ASSERT(this, IsFastElementsKind(LoadElementsKind(object)));
10566  return SmiUntag(LoadFastJSArrayLength(object));
10567  },
10568  [=]() { return elements_length; },
10569  MachineType::PointerRepresentation());
10570 
10571  CSA_ASSERT(this, WordNotEqual(elements_length, IntPtrConstant(0)));
10572 
10573  GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
10574  elements_length, mode, bailout);
10575  Goto(&done);
10576  BIND(&done);
10577  }
10578 
10579  StoreMap(object, map);
10580 }
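// Illustrative examples of the two paths above: PACKED_SMI_ELEMENTS ->
// PACKED_ELEMENTS keeps the tagged backing store, so installing the new
// map is enough (a simple map change). PACKED_SMI_ELEMENTS ->
// PACKED_DOUBLE_ELEMENTS switches the backing store from tagged words to
// raw float64s, so the elements are reallocated and converted via
// GrowElementsCapacity before the new map is stored.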
10581 
10582 void CodeStubAssembler::TrapAllocationMemento(Node* object,
10583  Label* memento_found) {
10584  Comment("[ TrapAllocationMemento");
10585  Label no_memento_found(this);
10586  Label top_check(this), map_check(this);
10587 
10588  TNode<ExternalReference> new_space_top_address = ExternalConstant(
10589  ExternalReference::new_space_allocation_top_address(isolate()));
10590  const int kMementoMapOffset = JSArray::kSize;
10591  const int kMementoLastWordOffset =
10592  kMementoMapOffset + AllocationMemento::kSize - kPointerSize;
10593 
10594  // Bail out if the object is not in new space.
10595  TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
10596  TNode<IntPtrT> object_page = PageFromAddress(object_word);
10597  {
10598  TNode<IntPtrT> page_flags =
10599  UncheckedCast<IntPtrT>(Load(MachineType::IntPtr(), object_page,
10600  IntPtrConstant(Page::kFlagsOffset)));
10601  GotoIf(WordEqual(WordAnd(page_flags,
10602  IntPtrConstant(MemoryChunk::kIsInNewSpaceMask)),
10603  IntPtrConstant(0)),
10604  &no_memento_found);
10605  }
10606 
10607  TNode<IntPtrT> memento_last_word = IntPtrAdd(
10608  object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
10609  TNode<IntPtrT> memento_last_word_page = PageFromAddress(memento_last_word);
10610 
10611  TNode<IntPtrT> new_space_top = UncheckedCast<IntPtrT>(
10612  Load(MachineType::Pointer(), new_space_top_address));
10613  TNode<IntPtrT> new_space_top_page = PageFromAddress(new_space_top);
10614 
10615  // If the object is in new space, we need to check whether the potential
10616  // memento object is on the same page as the current top.
10617  GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);
10618 
10619  // The object is on a different page than the allocation top. Bail out if
10620  // the object sits on the page boundary, as no memento can follow and we
10621  // cannot touch the memory following it.
10622  Branch(WordEqual(object_page, memento_last_word_page), &map_check,
10623  &no_memento_found);
10624 
10625  // If top is on the same page as the current object, we need to check whether
10626  // we are below top.
10627  BIND(&top_check);
10628  {
10629  Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
10630  &no_memento_found, &map_check);
10631  }
10632 
10633  // Memento map check.
10634  BIND(&map_check);
10635  {
10636  TNode<Object> memento_map = LoadObjectField(object, kMementoMapOffset);
10637  Branch(WordEqual(memento_map, LoadRoot(RootIndex::kAllocationMementoMap)),
10638  memento_found, &no_memento_found);
10639  }
10640  BIND(&no_memento_found);
10641  Comment("] TrapAllocationMemento");
10642 }
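// Memory layout assumed above (illustrative): a memento, if present,
// immediately follows the JSArray:
//
//   | JSArray (JSArray::kSize) | AllocationMemento |
//   ^ object                   ^ kMementoMapOffset
//
// The page and top checks guarantee that the word at
// kMementoLastWordOffset may be read at all: it must lie on the object's
// own page and, when that page holds the allocation top, below top.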
10643 
10644 TNode<IntPtrT> CodeStubAssembler::PageFromAddress(TNode<IntPtrT> address) {
10645  return WordAnd(address, IntPtrConstant(~kPageAlignmentMask));
10646 }
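// Illustrative, assuming 512 KiB pages (kPageAlignmentMask == 0x7FFFF):
// PageFromAddress(0x2A7F3C8) == 0x2A7F3C8 & ~0x7FFFF == 0x2A00000, the
// base address of the page containing the given address.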
10647 
10648 TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
10649  SloppyTNode<FeedbackVector> feedback_vector, TNode<Smi> slot) {
10650  TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSizeWithWeakNext);
10651  Node* site = Allocate(size, CodeStubAssembler::kPretenured);
10652  StoreMapNoWriteBarrier(site, RootIndex::kAllocationSiteWithWeakNextMap);
10653  // Should match AllocationSite::Initialize.
10654  TNode<WordT> field = UpdateWord<AllocationSite::ElementsKindBits>(
10655  IntPtrConstant(0), IntPtrConstant(GetInitialFastElementsKind()));
10656  StoreObjectFieldNoWriteBarrier(
10657  site, AllocationSite::kTransitionInfoOrBoilerplateOffset,
10658  SmiTag(Signed(field)));
10659 
10660  // Unlike literals, constructed arrays don't have nested sites.
10661  TNode<Smi> zero = SmiConstant(0);
10662  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
10663 
10664  // Pretenuring calculation field.
10665  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
10666  Int32Constant(0),
10667  MachineRepresentation::kWord32);
10668 
10669  // Pretenuring memento creation count field.
10670  StoreObjectFieldNoWriteBarrier(
10671  site, AllocationSite::kPretenureCreateCountOffset, Int32Constant(0),
10672  MachineRepresentation::kWord32);
10673 
10674  // Store an empty fixed array for the code dependency.
10675  StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
10676  RootIndex::kEmptyWeakFixedArray);
10677 
10678  // Link the object to the allocation site list.
10679  TNode<ExternalReference> site_list = ExternalConstant(
10680  ExternalReference::allocation_sites_list_address(isolate()));
10681  TNode<Object> next_site = CAST(LoadBufferObject(site_list, 0));
10682 
10683  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
10684  // mark as such in order to skip the write barrier, once we have a unified
10685  // system for weakness. For now we decided to keep it like this because having
10686  // an initial write barrier backed store makes this pointer strong until the
10687  // next GC, and allocation sites are designed to survive several GCs anyway.
10688  StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
10689  StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site);
10690 
10691  StoreFeedbackVectorSlot(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
10692  SMI_PARAMETERS);
10693  return CAST(site);
10694 }
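// The two stores above splice the new site onto the head of the
// isolate's singly-linked allocation site list (illustrative sketch):
//
//   site->weak_next = *allocation_sites_list_address;  // old head
//   *allocation_sites_list_address = site;             // new head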
10695 
10696 TNode<MaybeObject> CodeStubAssembler::StoreWeakReferenceInFeedbackVector(
10697  SloppyTNode<FeedbackVector> feedback_vector, Node* slot,
10698  SloppyTNode<HeapObject> value, int additional_offset,
10699  ParameterMode parameter_mode) {
10700  TNode<MaybeObject> weak_value = MakeWeak(value);
10701  StoreFeedbackVectorSlot(feedback_vector, slot, weak_value,
10702  UPDATE_WRITE_BARRIER, additional_offset,
10703  parameter_mode);
10704  return weak_value;
10705 }
10706 
10707 TNode<BoolT> CodeStubAssembler::NotHasBoilerplate(
10708  TNode<Object> maybe_literal_site) {
10709  return TaggedIsSmi(maybe_literal_site);
10710 }
10711 
10712 TNode<Smi> CodeStubAssembler::LoadTransitionInfo(
10713  TNode<AllocationSite> allocation_site) {
10714  TNode<Smi> transition_info = CAST(LoadObjectField(
10715  allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10716  return transition_info;
10717 }
10718 
10719 TNode<JSObject> CodeStubAssembler::LoadBoilerplate(
10720  TNode<AllocationSite> allocation_site) {
10721  TNode<JSObject> boilerplate = CAST(LoadObjectField(
10722  allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10723  return boilerplate;
10724 }
10725 
10726 TNode<Int32T> CodeStubAssembler::LoadElementsKind(
10727  TNode<AllocationSite> allocation_site) {
10728  TNode<Smi> transition_info = LoadTransitionInfo(allocation_site);
10729  TNode<Int32T> elements_kind =
10730  Signed(DecodeWord32<AllocationSite::ElementsKindBits>(
10731  SmiToInt32(transition_info)));
10732  CSA_ASSERT(this, IsFastElementsKind(elements_kind));
10733  return elements_kind;
10734 }
10735 
10736 Node* CodeStubAssembler::BuildFastLoop(
10737  const CodeStubAssembler::VariableList& vars, Node* start_index,
10738  Node* end_index, const FastLoopBody& body, int increment,
10739  ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
10740  CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
10741  CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
10742  MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
10743  ? MachineType::PointerRepresentation()
10744  : MachineRepresentation::kTaggedSigned;
10745  VARIABLE(var, index_rep, start_index);
10746  VariableList vars_copy(vars.begin(), vars.end(), zone());
10747  vars_copy.push_back(&var);
10748  Label loop(this, vars_copy);
10749  Label after_loop(this);
10750  // Introduce an explicit second check of the termination condition before the
10751  // loop that helps TurboFan generate better code. If there's only a single
10752  // check, then the CodeStubAssembler forces it to be at the beginning of the
10753  // loop requiring a backwards branch at the end of the loop (it's not possible
10754  // to force the loop header check at the end of the loop and branch forward to
10755  // it from the pre-header). The extra branch is slower in the case that the
10756  // loop actually iterates.
10757  Node* first_check = WordEqual(var.value(), end_index);
10758  int32_t first_check_val;
10759  if (ToInt32Constant(first_check, first_check_val)) {
10760  if (first_check_val) return var.value();
10761  Goto(&loop);
10762  } else {
10763  Branch(first_check, &after_loop, &loop);
10764  }
10765 
10766  BIND(&loop);
10767  {
10768  if (advance_mode == IndexAdvanceMode::kPre) {
10769  Increment(&var, increment, parameter_mode);
10770  }
10771  body(var.value());
10772  if (advance_mode == IndexAdvanceMode::kPost) {
10773  Increment(&var, increment, parameter_mode);
10774  }
10775  Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
10776  }
10777  BIND(&after_loop);
10778  return var.value();
10779 }
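// Hypothetical usage sketch (illustrative only, not taken from the
// codebase): accumulate the indices 0..9 into a variable, advancing by 1
// after each body invocation:
//
//   TVARIABLE(IntPtrT, var_sum, IntPtrConstant(0));
//   BuildFastLoop(VariableList({&var_sum}, zone()), IntPtrConstant(0),
//                 IntPtrConstant(10),
//                 [&](Node* index) {
//                   var_sum = IntPtrAdd(var_sum.value(),
//                                       UncheckedCast<IntPtrT>(index));
//                 },
//                 1, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);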
10780 
10781 void CodeStubAssembler::BuildFastFixedArrayForEach(
10782  const CodeStubAssembler::VariableList& vars, Node* fixed_array,
10783  ElementsKind kind, Node* first_element_inclusive,
10784  Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
10785  ParameterMode mode, ForEachDirection direction) {
10786  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
10787  CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
10788  CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
10789  CSA_SLOW_ASSERT(this, Word32Or(IsFixedArrayWithKind(fixed_array, kind),
10790  IsPropertyArray(fixed_array)));
10791  int32_t first_val;
10792  bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
10793  int32_t last_val;
10794  bool constant_last = ToInt32Constant(last_element_exclusive, last_val);
10795  if (constant_first && constant_last) {
10796  int delta = last_val - first_val;
10797  DCHECK_GE(delta, 0);
10798  if (delta <= kElementLoopUnrollThreshold) {
10799  if (direction == ForEachDirection::kForward) {
10800  for (int i = first_val; i < last_val; ++i) {
10801  Node* index = IntPtrConstant(i);
10802  Node* offset =
10803  ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
10804  FixedArray::kHeaderSize - kHeapObjectTag);
10805  body(fixed_array, offset);
10806  }
10807  } else {
10808  for (int i = last_val - 1; i >= first_val; --i) {
10809  Node* index = IntPtrConstant(i);
10810  Node* offset =
10811  ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
10812  FixedArray::kHeaderSize - kHeapObjectTag);
10813  body(fixed_array, offset);
10814  }
10815  }
10816  return;
10817  }
10818  }
10819 
10820  Node* start =
10821  ElementOffsetFromIndex(first_element_inclusive, kind, mode,
10822  FixedArray::kHeaderSize - kHeapObjectTag);
10823  Node* limit =
10824  ElementOffsetFromIndex(last_element_exclusive, kind, mode,
10825  FixedArray::kHeaderSize - kHeapObjectTag);
10826  if (direction == ForEachDirection::kReverse) std::swap(start, limit);
10827 
10828  int increment = IsDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
10829  BuildFastLoop(
10830  vars, start, limit,
10831  [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
10832  direction == ForEachDirection::kReverse ? -increment : increment,
10833  INTPTR_PARAMETERS,
10834  direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
10835  : IndexAdvanceMode::kPost);
10836 }
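// Illustrative: when both bounds are compile-time constants and the trip
// count is at most kElementLoopUnrollThreshold, the loop is fully
// unrolled; e.g. for the range [0, 3) with a tagged kind on a 64-bit
// target the body runs three times with offsets
// FixedArray::kHeaderSize - kHeapObjectTag + {0, 8, 16}. Otherwise a
// single BuildFastLoop over byte offsets is emitted.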
10837 
10838 void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
10839  Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
10840  GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size, mode),
10841  doesnt_fit);
10842 }
10843 
10844 void CodeStubAssembler::InitializeFieldsWithRoot(Node* object,
10845  Node* start_offset,
10846  Node* end_offset,
10847  RootIndex root_index) {
10848  CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
10849  start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
10850  end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
10851  Node* root_value = LoadRoot(root_index);
10852  BuildFastLoop(end_offset, start_offset,
10853  [this, object, root_value](Node* current) {
10854  StoreNoWriteBarrier(MachineRepresentation::kTagged, object,
10855  current, root_value);
10856  },
10857  -kPointerSize, INTPTR_PARAMETERS,
10858  CodeStubAssembler::IndexAdvanceMode::kPre);
10859 }
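// Illustrative: the loop fills [start_offset, end_offset) backwards.
// E.g. with untagged offsets start == 8 and end == 24 on a 64-bit
// target, the pre-decremented index visits 16 and then 8, storing the
// root value into two tagged fields.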
10860 
10861 void CodeStubAssembler::BranchIfNumberRelationalComparison(
10862  Operation op, Node* left, Node* right, Label* if_true, Label* if_false) {
10863  CSA_SLOW_ASSERT(this, IsNumber(left));
10864  CSA_SLOW_ASSERT(this, IsNumber(right));
10865 
10866  Label do_float_comparison(this);
10867  TVARIABLE(Float64T, var_left_float);
10868  TVARIABLE(Float64T, var_right_float);
10869 
10870  Branch(TaggedIsSmi(left),
10871  [&] {
10872  TNode<Smi> smi_left = CAST(left);
10873 
10874  Branch(TaggedIsSmi(right),
10875  [&] {
10876  TNode<Smi> smi_right = CAST(right);
10877 
10878  // Both {left} and {right} are Smi, so just perform a fast
10879  // Smi comparison.
10880  switch (op) {
10881  case Operation::kEqual:
10882  BranchIfSmiEqual(smi_left, smi_right, if_true,
10883  if_false);
10884  break;
10885  case Operation::kLessThan:
10886  BranchIfSmiLessThan(smi_left, smi_right, if_true,
10887  if_false);
10888  break;
10889  case Operation::kLessThanOrEqual:
10890  BranchIfSmiLessThanOrEqual(smi_left, smi_right, if_true,
10891  if_false);
10892  break;
10893  case Operation::kGreaterThan:
10894  BranchIfSmiLessThan(smi_right, smi_left, if_true,
10895  if_false);
10896  break;
10897  case Operation::kGreaterThanOrEqual:
10898  BranchIfSmiLessThanOrEqual(smi_right, smi_left, if_true,
10899  if_false);
10900  break;
10901  default:
10902  UNREACHABLE();
10903  }
10904  },
10905  [&] {
10906  CSA_ASSERT(this, IsHeapNumber(right));
10907  var_left_float = SmiToFloat64(smi_left);
10908  var_right_float = LoadHeapNumberValue(right);
10909  Goto(&do_float_comparison);
10910  });
10911  },
10912  [&] {
10913  CSA_ASSERT(this, IsHeapNumber(left));
10914  var_left_float = LoadHeapNumberValue(left);
10915 
10916  Branch(TaggedIsSmi(right),
10917  [&] {
10918  var_right_float = SmiToFloat64(right);
10919  Goto(&do_float_comparison);
10920  },
10921  [&] {
10922  CSA_ASSERT(this, IsHeapNumber(right));
10923  var_right_float = LoadHeapNumberValue(right);
10924  Goto(&do_float_comparison);
10925  });
10926  });
10927 
10928  BIND(&do_float_comparison);
10929  {
10930  switch (op) {
10931  case Operation::kEqual:
10932  Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
10933  if_true, if_false);
10934  break;
10935  case Operation::kLessThan:
10936  Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
10937  if_true, if_false);
10938  break;
10939  case Operation::kLessThanOrEqual:
10940  Branch(Float64LessThanOrEqual(var_left_float.value(),
10941  var_right_float.value()),
10942  if_true, if_false);
10943  break;
10944  case Operation::kGreaterThan:
10945  Branch(
10946  Float64GreaterThan(var_left_float.value(), var_right_float.value()),
10947  if_true, if_false);
10948  break;
10949  case Operation::kGreaterThanOrEqual:
10950  Branch(Float64GreaterThanOrEqual(var_left_float.value(),
10951  var_right_float.value()),
10952  if_true, if_false);
10953  break;
10954  default:
10955  UNREACHABLE();
10956  }
10957  }
10958 }
10959 
10960 void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(Node* left, Node* right,
10961  Label* if_true) {
10962  Label if_false(this);
10963  BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, left,
10964  right, if_true, &if_false);
10965  BIND(&if_false);
10966 }
10967 
10968 namespace {
10969 Operation Reverse(Operation op) {
10970  switch (op) {
10971  case Operation::kLessThan:
10972  return Operation::kGreaterThan;
10973  case Operation::kLessThanOrEqual:
10974  return Operation::kGreaterThanOrEqual;
10975  case Operation::kGreaterThan:
10976  return Operation::kLessThan;
10977  case Operation::kGreaterThanOrEqual:
10978  return Operation::kLessThanOrEqual;
10979  default:
10980  break;
10981  }
10982  UNREACHABLE();
10983 }
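// Example: the BigInt runtime helpers take the BigInt as their first
// argument, so comparing smi < bigint is rewritten as bigint > smi;
// Reverse(Operation::kLessThan) == Operation::kGreaterThan supplies the
// operator for the swapped operands.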
10984 } // anonymous namespace
10985 
10986 Node* CodeStubAssembler::RelationalComparison(Operation op, Node* left,
10987  Node* right, Node* context,
10988  Variable* var_type_feedback) {
10989  Label return_true(this), return_false(this), do_float_comparison(this),
10990  end(this);
10991  TVARIABLE(Oddball, var_result); // Actually only "true" or "false".
10992  TVARIABLE(Float64T, var_left_float);
10993  TVARIABLE(Float64T, var_right_float);
10994 
10995  // We might need to loop several times due to ToPrimitive and/or ToNumeric
10996  // conversions.
10997  VARIABLE(var_left, MachineRepresentation::kTagged, left);
10998  VARIABLE(var_right, MachineRepresentation::kTagged, right);
10999  VariableList loop_variable_list({&var_left, &var_right}, zone());
11000  if (var_type_feedback != nullptr) {
11001  // Initialize the type feedback to None. The current feedback is combined
11002  // with the previous feedback.
11003  var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
11004  loop_variable_list.push_back(var_type_feedback);
11005  }
11006  Label loop(this, loop_variable_list);
11007  Goto(&loop);
11008  BIND(&loop);
11009  {
11010  left = var_left.value();
11011  right = var_right.value();
11012 
11013  Label if_left_smi(this), if_left_not_smi(this);
11014  Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
11015 
11016  BIND(&if_left_smi);
11017  {
11018  TNode<Smi> smi_left = CAST(left);
11019  Label if_right_smi(this), if_right_heapnumber(this),
11020  if_right_bigint(this, Label::kDeferred),
11021  if_right_not_numeric(this, Label::kDeferred);
11022  GotoIf(TaggedIsSmi(right), &if_right_smi);
11023  Node* right_map = LoadMap(right);
11024  GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11025  Node* right_instance_type = LoadMapInstanceType(right_map);
11026  Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
11027  &if_right_not_numeric);
11028 
11029  BIND(&if_right_smi);
11030  {
11031  TNode<Smi> smi_right = CAST(right);
11032  CombineFeedback(var_type_feedback,
11033  CompareOperationFeedback::kSignedSmall);
11034  switch (op) {
11035  case Operation::kLessThan:
11036  BranchIfSmiLessThan(smi_left, smi_right, &return_true,
11037  &return_false);
11038  break;
11039  case Operation::kLessThanOrEqual:
11040  BranchIfSmiLessThanOrEqual(smi_left, smi_right, &return_true,
11041  &return_false);
11042  break;
11043  case Operation::kGreaterThan:
11044  BranchIfSmiLessThan(smi_right, smi_left, &return_true,
11045  &return_false);
11046  break;
11047  case Operation::kGreaterThanOrEqual:
11048  BranchIfSmiLessThanOrEqual(smi_right, smi_left, &return_true,
11049  &return_false);
11050  break;
11051  default:
11052  UNREACHABLE();
11053  }
11054  }
11055 
11056  BIND(&if_right_heapnumber);
11057  {
11058  CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11059  var_left_float = SmiToFloat64(smi_left);
11060  var_right_float = LoadHeapNumberValue(right);
11061  Goto(&do_float_comparison);
11062  }
11063 
11064  BIND(&if_right_bigint);
11065  {
11066  OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11067  var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11068  NoContextConstant(),
11069  SmiConstant(Reverse(op)), right, left));
11070  Goto(&end);
11071  }
11072 
11073  BIND(&if_right_not_numeric);
11074  {
11075  OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11076  // Convert {right} to a Numeric; we don't need to perform the
11077  // dedicated ToPrimitive(right, hint Number) operation, as the
11078  // ToNumeric(right) will by itself already invoke ToPrimitive with
11079  // a Number hint.
11080  var_right.Bind(
11081  CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11082  Goto(&loop);
11083  }
11084  }
11085 
11086  BIND(&if_left_not_smi);
11087  {
11088  Node* left_map = LoadMap(left);
11089 
11090  Label if_right_smi(this), if_right_not_smi(this);
11091  Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
11092 
11093  BIND(&if_right_smi);
11094  {
11095  Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
11096  if_left_not_numeric(this, Label::kDeferred);
11097  GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
11098  Node* left_instance_type = LoadMapInstanceType(left_map);
11099  Branch(IsBigIntInstanceType(left_instance_type), &if_left_bigint,
11100  &if_left_not_numeric);
11101 
11102  BIND(&if_left_heapnumber);
11103  {
11104  CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11105  var_left_float = LoadHeapNumberValue(left);
11106  var_right_float = SmiToFloat64(right);
11107  Goto(&do_float_comparison);
11108  }
11109 
11110  BIND(&if_left_bigint);
11111  {
11112  OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11113  var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11114  NoContextConstant(), SmiConstant(op),
11115  left, right));
11116  Goto(&end);
11117  }
11118 
11119  BIND(&if_left_not_numeric);
11120  {
11121  OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11122  // Convert {left} to a Numeric; we don't need to perform the
11123  // dedicated ToPrimitive(left, hint Number) operation, as the
11124  // ToNumeric(left) will by itself already invoke ToPrimitive with
11125  // a Number hint.
11126  var_left.Bind(
11127  CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11128  Goto(&loop);
11129  }
11130  }
11131 
11132  BIND(&if_right_not_smi);
11133  {
11134  Node* right_map = LoadMap(right);
11135 
11136  Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
11137  if_left_string(this), if_left_other(this, Label::kDeferred);
11138  GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
11139  Node* left_instance_type = LoadMapInstanceType(left_map);
11140  GotoIf(IsBigIntInstanceType(left_instance_type), &if_left_bigint);
11141  Branch(IsStringInstanceType(left_instance_type), &if_left_string,
11142  &if_left_other);
11143 
11144  BIND(&if_left_heapnumber);
11145  {
11146  Label if_right_heapnumber(this),
11147  if_right_bigint(this, Label::kDeferred),
11148  if_right_not_numeric(this, Label::kDeferred);
11149  GotoIf(WordEqual(right_map, left_map), &if_right_heapnumber);
11150  Node* right_instance_type = LoadMapInstanceType(right_map);
11151  Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
11152  &if_right_not_numeric);
11153 
11154  BIND(&if_right_heapnumber);
11155  {
11156  CombineFeedback(var_type_feedback,
11157  CompareOperationFeedback::kNumber);
11158  var_left_float = LoadHeapNumberValue(left);
11159  var_right_float = LoadHeapNumberValue(right);
11160  Goto(&do_float_comparison);
11161  }
11162 
11163  BIND(&if_right_bigint);
11164  {
11165  OverwriteFeedback(var_type_feedback,
11166  CompareOperationFeedback::kAny);
11167  var_result = CAST(CallRuntime(
11168  Runtime::kBigIntCompareToNumber, NoContextConstant(),
11169  SmiConstant(Reverse(op)), right, left));
11170  Goto(&end);
11171  }
11172 
11173  BIND(&if_right_not_numeric);
11174  {
11175  OverwriteFeedback(var_type_feedback,
11176  CompareOperationFeedback::kAny);
11177  // Convert {right} to a Numeric; we don't need to perform the
11178  // dedicated ToPrimitive(right, hint Number) operation, as the
11179  // ToNumeric(right) will by itself already invoke ToPrimitive with
11180  // a Number hint.
11181  var_right.Bind(
11182  CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11183  Goto(&loop);
11184  }
11185  }
11186 
11187  BIND(&if_left_bigint);
11188  {
11189  Label if_right_heapnumber(this), if_right_bigint(this),
11190  if_right_string(this), if_right_other(this);
11191  GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11192  Node* right_instance_type = LoadMapInstanceType(right_map);
11193  GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
11194  Branch(IsStringInstanceType(right_instance_type), &if_right_string,
11195  &if_right_other);
11196 
11197  BIND(&if_right_heapnumber);
11198  {
11199  OverwriteFeedback(var_type_feedback,
11200  CompareOperationFeedback::kAny);
11201  var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
11202  NoContextConstant(), SmiConstant(op),
11203  left, right));
11204  Goto(&end);
11205  }
11206 
11207  BIND(&if_right_bigint);
11208  {
11209  CombineFeedback(var_type_feedback,
11210  CompareOperationFeedback::kBigInt);
11211  var_result = CAST(CallRuntime(Runtime::kBigIntCompareToBigInt,
11212  NoContextConstant(), SmiConstant(op),
11213  left, right));
11214  Goto(&end);
11215  }
11216 
11217  BIND(&if_right_string);
11218  {
11219  OverwriteFeedback(var_type_feedback,
11220  CompareOperationFeedback::kAny);
11221  var_result = CAST(CallRuntime(Runtime::kBigIntCompareToString,
11222  NoContextConstant(), SmiConstant(op),
11223  left, right));
11224  Goto(&end);
11225  }
11226 
11227  // {right} is not a Number, BigInt, or String.
11228  BIND(&if_right_other);
11229  {
11230  OverwriteFeedback(var_type_feedback,
11231  CompareOperationFeedback::kAny);
11232  // Convert {right} to a Numeric; we don't need to perform the
11233  // dedicated ToPrimitive(right, hint Number) operation, as the
11234  // ToNumeric(right) will by itself already invoke ToPrimitive with
11235  // a Number hint.
11236  var_right.Bind(
11237  CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
11238  Goto(&loop);
11239  }
11240  }
11241 
11242  BIND(&if_left_string);
11243  {
11244  Node* right_instance_type = LoadMapInstanceType(right_map);
11245 
11246  Label if_right_not_string(this, Label::kDeferred);
11247  GotoIfNot(IsStringInstanceType(right_instance_type),
11248  &if_right_not_string);
11249 
11250  // Both {left} and {right} are strings.
11251  CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
11252  Builtins::Name builtin;
11253  switch (op) {
11254  case Operation::kLessThan:
11255  builtin = Builtins::kStringLessThan;
11256  break;
11257  case Operation::kLessThanOrEqual:
11258  builtin = Builtins::kStringLessThanOrEqual;
11259  break;
11260  case Operation::kGreaterThan:
11261  builtin = Builtins::kStringGreaterThan;
11262  break;
11263  case Operation::kGreaterThanOrEqual:
11264  builtin = Builtins::kStringGreaterThanOrEqual;
11265  break;
11266  default:
11267  UNREACHABLE();
11268  }
11269  var_result = CAST(CallBuiltin(builtin, context, left, right));
11270  Goto(&end);
11271 
11272  BIND(&if_right_not_string);
11273  {
11274  OverwriteFeedback(var_type_feedback,
11275  CompareOperationFeedback::kAny);
11276  // {left} is a String, while {right} isn't. Check if {right} is
11277  // a BigInt, otherwise call ToPrimitive(right, hint Number) if
11278  // {right} is a receiver, or ToNumeric(left) and then
11279  // ToNumeric(right) in the other cases.
11280  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
11281  Label if_right_bigint(this),
11282  if_right_receiver(this, Label::kDeferred);
11283  GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
11284  GotoIf(IsJSReceiverInstanceType(right_instance_type),
11285  &if_right_receiver);
11286 
11287  var_left.Bind(
11288  CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11289  var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
11290  Goto(&loop);
11291 
11292  BIND(&if_right_bigint);
11293  {
11294  var_result = CAST(CallRuntime(
11295  Runtime::kBigIntCompareToString, NoContextConstant(),
11296  SmiConstant(Reverse(op)), right, left));
11297  Goto(&end);
11298  }
11299 
11300  BIND(&if_right_receiver);
11301  {
11302  Callable callable = CodeFactory::NonPrimitiveToPrimitive(
11303  isolate(), ToPrimitiveHint::kNumber);
11304  var_right.Bind(CallStub(callable, context, right));
11305  Goto(&loop);
11306  }
11307  }
11308  }
11309 
11310  BIND(&if_left_other);
11311  {
11312  // {left} is neither a Numeric nor a String, and {right} is not a Smi.
11313  if (var_type_feedback != nullptr) {
11314  // Collect NumberOrOddball feedback if {left} is an Oddball
11315  // and {right} is either a HeapNumber or Oddball. Otherwise collect
11316  // Any feedback.
11317  Label collect_any_feedback(this), collect_oddball_feedback(this),
11318  collect_feedback_done(this);
11319  GotoIfNot(InstanceTypeEqual(left_instance_type, ODDBALL_TYPE),
11320  &collect_any_feedback);
11321 
11322  GotoIf(IsHeapNumberMap(right_map), &collect_oddball_feedback);
11323  Node* right_instance_type = LoadMapInstanceType(right_map);
11324  Branch(InstanceTypeEqual(right_instance_type, ODDBALL_TYPE),
11325  &collect_oddball_feedback, &collect_any_feedback);
11326 
11327  BIND(&collect_oddball_feedback);
11328  {
11329  CombineFeedback(var_type_feedback,
11330  CompareOperationFeedback::kNumberOrOddball);
11331  Goto(&collect_feedback_done);
11332  }
11333 
11334  BIND(&collect_any_feedback);
11335  {
11336  OverwriteFeedback(var_type_feedback,
11337  CompareOperationFeedback::kAny);
11338  Goto(&collect_feedback_done);
11339  }
11340 
11341  BIND(&collect_feedback_done);
11342  }
11343 
11344  // If {left} is a receiver, call ToPrimitive(left, hint Number).
11345  // Otherwise call ToNumeric(right) and then ToNumeric(left), the
11346  // order here is important as it's observable by user code.
11347  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
11348  Label if_left_receiver(this, Label::kDeferred);
11349  GotoIf(IsJSReceiverInstanceType(left_instance_type),
11350  &if_left_receiver);
11351 
11352  var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
11353  var_left.Bind(
11354  CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
11355  Goto(&loop);
11356 
11357  BIND(&if_left_receiver);
11358  {
11359  Callable callable = CodeFactory::NonPrimitiveToPrimitive(
11360  isolate(), ToPrimitiveHint::kNumber);
11361  var_left.Bind(CallStub(callable, context, left));
11362  Goto(&loop);
11363  }
11364  }
11365  }
11366  }
11367  }
11368 
11369  BIND(&do_float_comparison);
11370  {
11371  switch (op) {
11372  case Operation::kLessThan:
11373  Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
11374  &return_true, &return_false);
11375  break;
11376  case Operation::kLessThanOrEqual:
11377  Branch(Float64LessThanOrEqual(var_left_float.value(),
11378  var_right_float.value()),
11379  &return_true, &return_false);
11380  break;
11381  case Operation::kGreaterThan:
11382  Branch(
11383  Float64GreaterThan(var_left_float.value(), var_right_float.value()),
11384  &return_true, &return_false);
11385  break;
11386  case Operation::kGreaterThanOrEqual:
11387  Branch(Float64GreaterThanOrEqual(var_left_float.value(),
11388  var_right_float.value()),
11389  &return_true, &return_false);
11390  break;
11391  default:
11392  UNREACHABLE();
11393  }
11394  }
11395 
11396  BIND(&return_true);
11397  {
11398  var_result = TrueConstant();
11399  Goto(&end);
11400  }
11401 
11402  BIND(&return_false);
11403  {
11404  var_result = FalseConstant();
11405  Goto(&end);
11406  }
11407 
11408  BIND(&end);
11409  return var_result.value();
11410 }
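// Observable-order example for the conversion loop above (illustrative):
// for oddball < object, the spec runs ToPrimitive on the left operand
// first (a no-op for primitives), then ToPrimitive(right, hint Number),
// which may call a user-defined valueOf, and only then ToNumeric on the
// results. Hence the slow path converts {right} before {left}: even when
// ToNumeric(left) ends up throwing, {right}'s side effects must already
// have happened.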
11411 
11412 TNode<Smi> CodeStubAssembler::CollectFeedbackForString(
11413  SloppyTNode<Int32T> instance_type) {
11414  TNode<Smi> feedback = SelectSmiConstant(
11415  Word32Equal(
11416  Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
11417  Int32Constant(kInternalizedTag)),
11418  CompareOperationFeedback::kInternalizedString,
11419  CompareOperationFeedback::kString);
11420  return feedback;
11421 }
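// Illustrative: an instance type denotes an internalized string iff
// (instance_type & kIsNotInternalizedMask) == kInternalizedTag, e.g.
// INTERNALIZED_STRING_TYPE selects the kInternalizedString feedback,
// while ONE_BYTE_STRING_TYPE carries kNotInternalizedTag and falls back
// to the generic kString feedback.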
11422 
11423 void CodeStubAssembler::GenerateEqual_Same(Node* value, Label* if_equal,
11424  Label* if_notequal,
11425  Variable* var_type_feedback) {
11426  // For abstract and strict equality checks, we need additional checks for
11427  // NaN values, because NaNs are never considered equal, even if both the
11428  // left- and the right-hand side reference exactly the same value.
11429 
11430  Label if_smi(this), if_heapnumber(this);
11431  GotoIf(TaggedIsSmi(value), &if_smi);
11432 
11433  Node* value_map = LoadMap(value);
11434  GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
11435 
11436  // For non-HeapNumbers, all we do is collect type feedback.
11437  if (var_type_feedback != nullptr) {
11438  Node* instance_type = LoadMapInstanceType(value_map);
11439 
11440  Label if_string(this), if_receiver(this), if_oddball(this), if_symbol(this),
11441  if_bigint(this);
11442  GotoIf(IsStringInstanceType(instance_type), &if_string);
11443  GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
11444  GotoIf(IsOddballInstanceType(instance_type), &if_oddball);
11445  Branch(IsBigIntInstanceType(instance_type), &if_bigint, &if_symbol);
11446 
11447  BIND(&if_string);
11448  {
11449  CSA_ASSERT(this, IsString(value));
11450  CombineFeedback(var_type_feedback,
11451  CollectFeedbackForString(instance_type));
11452  Goto(if_equal);
11453  }
11454 
11455  BIND(&if_symbol);
11456  {
11457  CSA_ASSERT(this, IsSymbol(value));
11458  CombineFeedback(var_type_feedback, CompareOperationFeedback::kSymbol);
11459  Goto(if_equal);
11460  }
11461 
11462  BIND(&if_receiver);
11463  {
11464  CSA_ASSERT(this, IsJSReceiver(value));
11465  CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
11466  Goto(if_equal);
11467  }
11468 
11469  BIND(&if_bigint);
11470  {
11471  CSA_ASSERT(this, IsBigInt(value));
11472  CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
11473  Goto(if_equal);
11474  }
11475 
11476  BIND(&if_oddball);
11477  {
11478  CSA_ASSERT(this, IsOddball(value));
11479  Label if_boolean(this), if_not_boolean(this);
11480  Branch(IsBooleanMap(value_map), &if_boolean, &if_not_boolean);
11481 
11482  BIND(&if_boolean);
11483  {
11484  CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11485  Goto(if_equal);
11486  }
11487 
11488  BIND(&if_not_boolean);
11489  {
11490  CSA_ASSERT(this, IsNullOrUndefined(value));
11491  CombineFeedback(var_type_feedback,
11492  CompareOperationFeedback::kReceiverOrNullOrUndefined);
11493  Goto(if_equal);
11494  }
11495  }
11496  } else {
11497  Goto(if_equal);
11498  }
11499 
11500  BIND(&if_heapnumber);
11501  {
11502  CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11503  Node* number_value = LoadHeapNumberValue(value);
11504  BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
11505  }
11506 
11507  BIND(&if_smi);
11508  {
11509  CombineFeedback(var_type_feedback, CompareOperationFeedback::kSignedSmall);
11510  Goto(if_equal);
11511  }
11512 }
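// Illustrative: the HeapNumber branch above is the one case where a
// value can compare unequal to itself, matching IEEE 754:
//
//   double nan = std::numeric_limits<double>::quiet_NaN();
//   assert(!(nan == nan));  // scalar equivalent of the NaN check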
11513 
11514 // ES6 section 7.2.12 Abstract Equality Comparison
11515 Node* CodeStubAssembler::Equal(Node* left, Node* right, Node* context,
11516  Variable* var_type_feedback) {
11517  // This is a slightly optimized version of Object::Equals. Whenever you
11518  // change something functionality-wise in here, remember to update the
11519  // Object::Equals method as well.
11520 
11521  Label if_equal(this), if_notequal(this), do_float_comparison(this),
11522  do_right_stringtonumber(this, Label::kDeferred), end(this);
11523  VARIABLE(result, MachineRepresentation::kTagged);
11524  TVARIABLE(Float64T, var_left_float);
11525  TVARIABLE(Float64T, var_right_float);
11526 
11527  // We can avoid code duplication by exploiting the fact that abstract equality
11528  // is symmetric.
11529  Label use_symmetry(this);
11530 
11531  // We might need to loop several times due to ToPrimitive and/or ToNumber
11532  // conversions.
11533  VARIABLE(var_left, MachineRepresentation::kTagged, left);
11534  VARIABLE(var_right, MachineRepresentation::kTagged, right);
11535  VariableList loop_variable_list({&var_left, &var_right}, zone());
11536  if (var_type_feedback != nullptr) {
11537  // Initialize the type feedback to None. The current feedback will be
11538  // combined with the previous feedback.
11539  OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
11540  loop_variable_list.push_back(var_type_feedback);
11541  }
11542  Label loop(this, loop_variable_list);
11543  Goto(&loop);
11544  BIND(&loop);
11545  {
11546  left = var_left.value();
11547  right = var_right.value();
11548 
11549  Label if_notsame(this);
11550  GotoIf(WordNotEqual(left, right), &if_notsame);
11551  {
11552  // {left} and {right} reference the exact same value, yet we need special
11553  // treatment for HeapNumber, as NaN is not equal to NaN.
11554  GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
11555  }
11556 
11557  BIND(&if_notsame);
11558  Label if_left_smi(this), if_left_not_smi(this);
11559  Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
11560 
11561  BIND(&if_left_smi);
11562  {
11563  Label if_right_smi(this), if_right_not_smi(this);
11564  Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
11565 
11566  BIND(&if_right_smi);
11567  {
11568  // We have already checked for {left} and {right} being the same value,
11569  // so when we get here they must be different Smis.
11570  CombineFeedback(var_type_feedback,
11571  CompareOperationFeedback::kSignedSmall);
11572  Goto(&if_notequal);
11573  }
11574 
11575  BIND(&if_right_not_smi);
11576  Node* right_map = LoadMap(right);
11577  Label if_right_heapnumber(this), if_right_boolean(this),
11578  if_right_bigint(this, Label::kDeferred),
11579  if_right_receiver(this, Label::kDeferred);
11580  GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11581  // {left} is a Smi and {right} is neither a HeapNumber nor a Smi.
11582  if (var_type_feedback != nullptr) {
11583  var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
11584  }
11585  GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11586  Node* right_type = LoadMapInstanceType(right_map);
11587  GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
11588  GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
11589  Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
11590  &if_notequal);
11591 
11592  BIND(&if_right_heapnumber);
11593  {
11594  var_left_float = SmiToFloat64(left);
11595  var_right_float = LoadHeapNumberValue(right);
11596  CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11597  Goto(&do_float_comparison);
11598  }
11599 
11600  BIND(&if_right_boolean);
11601  {
11602  var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11603  Goto(&loop);
11604  }
11605 
11606  BIND(&if_right_bigint);
11607  {
11608  result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
11609  NoContextConstant(), right, left));
11610  Goto(&end);
11611  }
11612 
11613  BIND(&if_right_receiver);
11614  {
11615  Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
11616  var_right.Bind(CallStub(callable, context, right));
11617  Goto(&loop);
11618  }
11619  }
11620 
11621  BIND(&if_left_not_smi);
11622  {
11623  GotoIf(TaggedIsSmi(right), &use_symmetry);
11624 
11625  Label if_left_symbol(this), if_left_number(this), if_left_string(this),
11626  if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
11627  if_left_receiver(this);
11628 
11629  Node* left_map = LoadMap(left);
11630  Node* right_map = LoadMap(right);
11631  Node* left_type = LoadMapInstanceType(left_map);
11632  Node* right_type = LoadMapInstanceType(right_map);
11633 
11634  GotoIf(IsStringInstanceType(left_type), &if_left_string);
11635  GotoIf(IsSymbolInstanceType(left_type), &if_left_symbol);
11636  GotoIf(IsHeapNumberInstanceType(left_type), &if_left_number);
11637  GotoIf(IsOddballInstanceType(left_type), &if_left_oddball);
11638  Branch(IsBigIntInstanceType(left_type), &if_left_bigint,
11639  &if_left_receiver);
11640 
11641  BIND(&if_left_string);
11642  {
11643  GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
11644  result.Bind(CallBuiltin(Builtins::kStringEqual, context, left, right));
11645  CombineFeedback(var_type_feedback,
11646  SmiOr(CollectFeedbackForString(left_type),
11647  CollectFeedbackForString(right_type)));
11648  Goto(&end);
11649  }
11650 
11651  BIND(&if_left_number);
11652  {
11653  Label if_right_not_number(this);
11654  GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);
11655 
11656  var_left_float = LoadHeapNumberValue(left);
11657  var_right_float = LoadHeapNumberValue(right);
11658  CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11659  Goto(&do_float_comparison);
11660 
11661  BIND(&if_right_not_number);
11662  {
11663  Label if_right_boolean(this);
11664  if (var_type_feedback != nullptr) {
11665  var_type_feedback->Bind(
11666  SmiConstant(CompareOperationFeedback::kAny));
11667  }
11668  GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
11669  GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11670  GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
11671  Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
11672  &if_notequal);
11673 
11674  BIND(&if_right_boolean);
11675  {
11676  var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11677  Goto(&loop);
11678  }
11679  }
11680  }
11681 
11682  BIND(&if_left_bigint);
11683  {
11684  Label if_right_heapnumber(this), if_right_bigint(this),
11685  if_right_string(this), if_right_boolean(this);
11686  GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11687  GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
11688  GotoIf(IsStringInstanceType(right_type), &if_right_string);
11689  GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11690  Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
11691  &if_notequal);
11692 
11693  BIND(&if_right_heapnumber);
11694  {
11695  if (var_type_feedback != nullptr) {
11696  var_type_feedback->Bind(
11697  SmiConstant(CompareOperationFeedback::kAny));
11698  }
11699  result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
11700  NoContextConstant(), left, right));
11701  Goto(&end);
11702  }
11703 
11704  BIND(&if_right_bigint);
11705  {
11706  CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
11707  result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
11708  NoContextConstant(), left, right));
11709  Goto(&end);
11710  }
11711 
11712  BIND(&if_right_string);
11713  {
11714  if (var_type_feedback != nullptr) {
11715  var_type_feedback->Bind(
11716  SmiConstant(CompareOperationFeedback::kAny));
11717  }
11718  result.Bind(CallRuntime(Runtime::kBigIntEqualToString,
11719  NoContextConstant(), left, right));
11720  Goto(&end);
11721  }
11722 
11723  BIND(&if_right_boolean);
11724  {
11725  if (var_type_feedback != nullptr) {
11726  var_type_feedback->Bind(
11727  SmiConstant(CompareOperationFeedback::kAny));
11728  }
11729  var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
11730  Goto(&loop);
11731  }
11732  }
11733 
11734  BIND(&if_left_oddball);
11735  {
11736  Label if_left_boolean(this), if_left_not_boolean(this);
11737  Branch(IsBooleanMap(left_map), &if_left_boolean, &if_left_not_boolean);
11738 
11739  BIND(&if_left_not_boolean);
11740  {
11741  // {left} is either Null or Undefined. Check if {right} is
11742  // undetectable (which includes Null and Undefined).
11743  Label if_right_undetectable(this), if_right_not_undetectable(this);
11744  Branch(IsUndetectableMap(right_map), &if_right_undetectable,
11745  &if_right_not_undetectable);
11746 
11747  BIND(&if_right_undetectable);
11748  {
11749  if (var_type_feedback != nullptr) {
11750  // If {right} is undetectable, it must also be either
11751  // Null or Undefined, or a Receiver (aka document.all).
11752  var_type_feedback->Bind(SmiConstant(
11753  CompareOperationFeedback::kReceiverOrNullOrUndefined));
11754  }
11755  Goto(&if_equal);
11756  }
11757 
11758  BIND(&if_right_not_undetectable);
11759  {
11760  if (var_type_feedback != nullptr) {
11761  // Track whether {right} is Null, Undefined or Receiver.
11762  var_type_feedback->Bind(SmiConstant(
11763  CompareOperationFeedback::kReceiverOrNullOrUndefined));
11764  GotoIf(IsJSReceiverInstanceType(right_type), &if_notequal);
11765  GotoIfNot(IsBooleanMap(right_map), &if_notequal);
11766  var_type_feedback->Bind(
11767  SmiConstant(CompareOperationFeedback::kAny));
11768  }
11769  Goto(&if_notequal);
11770  }
11771  }
11772 
11773  BIND(&if_left_boolean);
11774  {
11775  if (var_type_feedback != nullptr) {
11776  var_type_feedback->Bind(
11777  SmiConstant(CompareOperationFeedback::kAny));
11778  }
11779 
11780  // If {right} is a Boolean too, it must be a different Boolean.
11781  GotoIf(WordEqual(right_map, left_map), &if_notequal);
11782 
11783  // Otherwise, convert {left} to number and try again.
11784  var_left.Bind(LoadObjectField(left, Oddball::kToNumberOffset));
11785  Goto(&loop);
11786  }
11787  }
11788 
11789  BIND(&if_left_symbol);
11790  {
11791  Label if_right_receiver(this);
11792  GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
11793  // {right} is not a JSReceiver and also not the same Symbol as {left},
11794  // so the result is "not equal".
11795  if (var_type_feedback != nullptr) {
11796  Label if_right_symbol(this);
11797  GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
11798  var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
11799  Goto(&if_notequal);
11800 
11801  BIND(&if_right_symbol);
11802  {
11803  CombineFeedback(var_type_feedback,
11804  CompareOperationFeedback::kSymbol);
11805  Goto(&if_notequal);
11806  }
11807  } else {
11808  Goto(&if_notequal);
11809  }
11810 
11811  BIND(&if_right_receiver);
11812  {
11813  // {left} is a Primitive and {right} is a JSReceiver, so swapping
11814  // the order is not observable.
11815  if (var_type_feedback != nullptr) {
11816  var_type_feedback->Bind(
11817  SmiConstant(CompareOperationFeedback::kAny));
11818  }
11819  Goto(&use_symmetry);
11820  }
11821  }
11822 
11823  BIND(&if_left_receiver);
11824  {
11825  CSA_ASSERT(this, IsJSReceiverInstanceType(left_type));
11826  Label if_right_receiver(this), if_right_not_receiver(this);
11827  Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
11828  &if_right_not_receiver);
11829 
11830  BIND(&if_right_receiver);
11831  {
11832  // {left} and {right} are different JSReceiver references.
11833  CombineFeedback(var_type_feedback,
11834  CompareOperationFeedback::kReceiver);
11835  Goto(&if_notequal);
11836  }
11837 
11838  BIND(&if_right_not_receiver);
11839  {
11840  // Check if {right} is undetectable, which means it must be Null
11841  // or Undefined, since we already ruled out Receiver for {right}.
11842  Label if_right_undetectable(this),
11843  if_right_not_undetectable(this, Label::kDeferred);
11844  Branch(IsUndetectableMap(right_map), &if_right_undetectable,
11845  &if_right_not_undetectable);
11846 
11847  BIND(&if_right_undetectable);
11848  {
11849  // When we get here, {right} must be either Null or Undefined.
11850  CSA_ASSERT(this, IsNullOrUndefined(right));
11851  if (var_type_feedback != nullptr) {
11852  var_type_feedback->Bind(SmiConstant(
11853  CompareOperationFeedback::kReceiverOrNullOrUndefined));
11854  }
11855  Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
11856  }
11857 
11858  BIND(&if_right_not_undetectable);
11859  {
11860  // {right} is a Primitive, and neither Null nor Undefined;
11861  // convert {left} to Primitive too.
11862  if (var_type_feedback != nullptr) {
11863  var_type_feedback->Bind(
11864  SmiConstant(CompareOperationFeedback::kAny));
11865  }
11866  Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
11867  var_left.Bind(CallStub(callable, context, left));
11868  Goto(&loop);
11869  }
11870  }
11871  }
11872  }
11873 
11874  BIND(&do_right_stringtonumber);
11875  {
11876  var_right.Bind(CallBuiltin(Builtins::kStringToNumber, context, right));
11877  Goto(&loop);
11878  }
11879 
11880  BIND(&use_symmetry);
11881  {
11882  var_left.Bind(right);
11883  var_right.Bind(left);
11884  Goto(&loop);
11885  }
11886  }
11887 
11888  BIND(&do_float_comparison);
11889  {
11890  Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
11891  &if_equal, &if_notequal);
11892  }
11893 
11894  BIND(&if_equal);
11895  {
11896  result.Bind(TrueConstant());
11897  Goto(&end);
11898  }
11899 
11900  BIND(&if_notequal);
11901  {
11902  result.Bind(FalseConstant());
11903  Goto(&end);
11904  }
11905 
11906  BIND(&end);
11907  return result.value();
11908 }
11909 
11910 Node* CodeStubAssembler::StrictEqual(Node* lhs, Node* rhs,
11911  Variable* var_type_feedback) {
11912  // Pseudo-code for the algorithm below:
11913  //
11914  // if (lhs == rhs) {
11915  // if (lhs->IsHeapNumber()) return !std::isnan(HeapNumber::cast(lhs)->value());
11916  // return true;
11917  // }
11918  // if (!lhs->IsSmi()) {
11919  // if (lhs->IsHeapNumber()) {
11920  // if (rhs->IsSmi()) {
11921  // return Smi::ToInt(rhs) == HeapNumber::cast(lhs)->value();
11922  // } else if (rhs->IsHeapNumber()) {
11923  // return HeapNumber::cast(rhs)->value() ==
11924  // HeapNumber::cast(lhs)->value();
11925  // } else {
11926  // return false;
11927  // }
11928  // } else {
11929  // if (rhs->IsSmi()) {
11930  // return false;
11931  // } else {
11932  // if (lhs->IsString()) {
11933  // if (rhs->IsString()) {
11934  // return %StringEqual(lhs, rhs);
11935  // } else {
11936  // return false;
11937  // }
11938  // } else if (lhs->IsBigInt()) {
11939  // if (rhs->IsBigInt()) {
11940  // return %BigIntEqualToBigInt(lhs, rhs);
11941  // } else {
11942  // return false;
11943  // }
11944  // } else {
11945  // return false;
11946  // }
11947  // }
11948  // }
11949  // } else {
11950  // if (rhs->IsSmi()) {
11951  // return false;
11952  // } else {
11953  // if (rhs->IsHeapNumber()) {
11954  // return Smi::ToInt(lhs) == HeapNumber::cast(rhs)->value();
11955  // } else {
11956  // return false;
11957  // }
11958  // }
11959  // }
11960 
11961  Label if_equal(this), if_notequal(this), end(this);
11962  VARIABLE(result, MachineRepresentation::kTagged);
11963 
11964  // Check if {lhs} and {rhs} refer to the same object.
11965  Label if_same(this), if_notsame(this);
11966  Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
11967 
11968  BIND(&if_same);
11969  {
11970  // The {lhs} and {rhs} reference the exact same value, yet we need special
11971  // treatment for HeapNumber, as NaN is not equal to NaN.
11972  if (var_type_feedback != nullptr) {
11973  var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
11974  }
11975  GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
11976  }
11977 
11978  BIND(&if_notsame);
11979  {
11980  // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
11981  // BigInt and String they can still be considered equal.
11982 
11983  if (var_type_feedback != nullptr) {
11984  var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
11985  }
11986 
11987  // Check if {lhs} is a Smi or a HeapObject.
11988  Label if_lhsissmi(this), if_lhsisnotsmi(this);
11989  Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
11990 
11991  BIND(&if_lhsisnotsmi);
11992  {
11993  // Load the map of {lhs}.
11994  Node* lhs_map = LoadMap(lhs);
11995 
11996  // Check if {lhs} is a HeapNumber.
11997  Label if_lhsisnumber(this), if_lhsisnotnumber(this);
11998  Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
11999 
12000  BIND(&if_lhsisnumber);
12001  {
12002  // Check if {rhs} is a Smi or a HeapObject.
12003  Label if_rhsissmi(this), if_rhsisnotsmi(this);
12004  Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
12005 
12006  BIND(&if_rhsissmi);
12007  {
12008  // Convert {lhs} and {rhs} to floating point values.
12009  Node* lhs_value = LoadHeapNumberValue(lhs);
12010  Node* rhs_value = SmiToFloat64(rhs);
12011 
12012  if (var_type_feedback != nullptr) {
12013  var_type_feedback->Bind(
12014  SmiConstant(CompareOperationFeedback::kNumber));
12015  }
12016 
12017  // Perform a floating point comparison of {lhs} and {rhs}.
12018  Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12019  }
12020 
12021  BIND(&if_rhsisnotsmi);
12022  {
12023  // Load the map of {rhs}.
12024  Node* rhs_map = LoadMap(rhs);
12025 
12026  // Check if {rhs} is also a HeapNumber.
12027  Label if_rhsisnumber(this), if_rhsisnotnumber(this);
12028  Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
12029 
12030  BIND(&if_rhsisnumber);
12031  {
12032  // Convert {lhs} and {rhs} to floating point values.
12033  Node* lhs_value = LoadHeapNumberValue(lhs);
12034  Node* rhs_value = LoadHeapNumberValue(rhs);
12035 
12036  if (var_type_feedback != nullptr) {
12037  var_type_feedback->Bind(
12038  SmiConstant(CompareOperationFeedback::kNumber));
12039  }
12040 
12041  // Perform a floating point comparison of {lhs} and {rhs}.
12042  Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12043  }
12044 
12045  BIND(&if_rhsisnotnumber);
12046  Goto(&if_notequal);
12047  }
12048  }
12049 
12050  BIND(&if_lhsisnotnumber);
12051  {
12052  // Check if {rhs} is a Smi or a HeapObject.
12053  Label if_rhsissmi(this), if_rhsisnotsmi(this);
12054  Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
12055 
12056  BIND(&if_rhsissmi);
12057  Goto(&if_notequal);
12058 
12059  BIND(&if_rhsisnotsmi);
12060  {
12061  // Load the instance type of {lhs}.
12062  Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
12063 
12064  // Check if {lhs} is a String.
12065  Label if_lhsisstring(this), if_lhsisnotstring(this);
12066  Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
12067  &if_lhsisnotstring);
12068 
12069  BIND(&if_lhsisstring);
12070  {
12071  // Load the instance type of {rhs}.
12072  Node* rhs_instance_type = LoadInstanceType(rhs);
12073 
12074  // Check if {rhs} is also a String.
12075  Label if_rhsisstring(this, Label::kDeferred),
12076  if_rhsisnotstring(this);
12077  Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
12078  &if_rhsisnotstring);
12079 
12080  BIND(&if_rhsisstring);
12081  {
12082  if (var_type_feedback != nullptr) {
12083  TNode<Smi> lhs_feedback =
12084  CollectFeedbackForString(lhs_instance_type);
12085  TNode<Smi> rhs_feedback =
12086  CollectFeedbackForString(rhs_instance_type);
12087  var_type_feedback->Bind(SmiOr(lhs_feedback, rhs_feedback));
12088  }
12089  result.Bind(CallBuiltin(Builtins::kStringEqual,
12090  NoContextConstant(), lhs, rhs));
12091  Goto(&end);
12092  }
12093 
12094  BIND(&if_rhsisnotstring);
12095  Goto(&if_notequal);
12096  }
12097 
12098  BIND(&if_lhsisnotstring);
12099 
12100  // Check if {lhs} is a BigInt.
12101  Label if_lhsisbigint(this), if_lhsisnotbigint(this);
12102  Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
12103  &if_lhsisnotbigint);
12104 
12105  BIND(&if_lhsisbigint);
12106  {
12107  // Load the instance type of {rhs}.
12108  Node* rhs_instance_type = LoadInstanceType(rhs);
12109 
12110  // Check if {rhs} is also a BigInt.
12111  Label if_rhsisbigint(this, Label::kDeferred),
12112  if_rhsisnotbigint(this);
12113  Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
12114  &if_rhsisnotbigint);
12115 
12116  BIND(&if_rhsisbigint);
12117  {
12118  if (var_type_feedback != nullptr) {
12119  var_type_feedback->Bind(
12120  SmiConstant(CompareOperationFeedback::kBigInt));
12121  }
12122  result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
12123  NoContextConstant(), lhs, rhs));
12124  Goto(&end);
12125  }
12126 
12127  BIND(&if_rhsisnotbigint);
12128  Goto(&if_notequal);
12129  }
12130 
12131  BIND(&if_lhsisnotbigint);
12132  if (var_type_feedback != nullptr) {
12133  // Load the map and instance type of {rhs}.
12134  Node* rhs_map = LoadMap(rhs);
12135  Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
12136 
12137  Label if_lhsissymbol(this), if_lhsisreceiver(this),
12138  if_lhsisoddball(this);
12139  GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
12140  &if_lhsisreceiver);
12141  GotoIf(IsBooleanMap(lhs_map), &if_notequal);
12142  GotoIf(IsOddballInstanceType(lhs_instance_type), &if_lhsisoddball);
12143  Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
12144  &if_notequal);
12145 
12146  BIND(&if_lhsisreceiver);
12147  {
12148  GotoIf(IsBooleanMap(rhs_map), &if_notequal);
12149  var_type_feedback->Bind(
12150  SmiConstant(CompareOperationFeedback::kReceiver));
12151  GotoIf(IsJSReceiverInstanceType(rhs_instance_type), &if_notequal);
12152  var_type_feedback->Bind(SmiConstant(
12153  CompareOperationFeedback::kReceiverOrNullOrUndefined));
12154  GotoIf(IsOddballInstanceType(rhs_instance_type), &if_notequal);
12155  var_type_feedback->Bind(
12156  SmiConstant(CompareOperationFeedback::kAny));
12157  Goto(&if_notequal);
12158  }
12159 
12160  BIND(&if_lhsisoddball);
12161  {
12162  STATIC_ASSERT(LAST_PRIMITIVE_TYPE == ODDBALL_TYPE);
12163  GotoIf(IsBooleanMap(rhs_map), &if_notequal);
12164  GotoIf(
12165  Int32LessThan(rhs_instance_type, Int32Constant(ODDBALL_TYPE)),
12166  &if_notequal);
12167  var_type_feedback->Bind(SmiConstant(
12168  CompareOperationFeedback::kReceiverOrNullOrUndefined));
12169  Goto(&if_notequal);
12170  }
12171 
12172  BIND(&if_lhsissymbol);
12173  {
12174  GotoIfNot(IsSymbolInstanceType(rhs_instance_type), &if_notequal);
12175  var_type_feedback->Bind(
12176  SmiConstant(CompareOperationFeedback::kSymbol));
12177  Goto(&if_notequal);
12178  }
12179  } else {
12180  Goto(&if_notequal);
12181  }
12182  }
12183  }
12184  }
12185 
12186  BIND(&if_lhsissmi);
12187  {
12188  // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
12189  // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
12190  // HeapNumber with an equal floating point value.
12191 
12192  // Check if {rhs} is a Smi or a HeapObject.
12193  Label if_rhsissmi(this), if_rhsisnotsmi(this);
12194  Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
12195 
12196  BIND(&if_rhsissmi);
12197  if (var_type_feedback != nullptr) {
12198  var_type_feedback->Bind(
12199  SmiConstant(CompareOperationFeedback::kSignedSmall));
12200  }
12201  Goto(&if_notequal);
12202 
12203  BIND(&if_rhsisnotsmi);
12204  {
12205  // Load the map of the {rhs}.
12206  Node* rhs_map = LoadMap(rhs);
12207 
12208  // The {rhs} could be a HeapNumber with the same value as {lhs}.
12209  Label if_rhsisnumber(this), if_rhsisnotnumber(this);
12210  Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
12211 
12212  BIND(&if_rhsisnumber);
12213  {
12214  // Convert {lhs} and {rhs} to floating point values.
12215  Node* lhs_value = SmiToFloat64(lhs);
12216  Node* rhs_value = LoadHeapNumberValue(rhs);
12217 
12218  if (var_type_feedback != nullptr) {
12219  var_type_feedback->Bind(
12220  SmiConstant(CompareOperationFeedback::kNumber));
12221  }
12222 
12223  // Perform a floating point comparison of {lhs} and {rhs}.
12224  Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12225  }
12226 
12227  BIND(&if_rhsisnotnumber);
12228  Goto(&if_notequal);
12229  }
12230  }
12231  }
12232 
12233  BIND(&if_equal);
12234  {
12235  result.Bind(TrueConstant());
12236  Goto(&end);
12237  }
12238 
12239  BIND(&if_notequal);
12240  {
12241  result.Bind(FalseConstant());
12242  Goto(&end);
12243  }
12244 
12245  BIND(&end);
12246  return result.value();
12247 }
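
// A minimal standalone sketch (plain C++, not CSA) of the same-reference
// corner case handled via GenerateEqual_Same above: even when {lhs} and
// {rhs} are the very same HeapNumber, strict equality must re-check for
// NaN, because x === x is false exactly when x is NaN.
//
//   bool StrictEqualSameReference(double value) {
//     return value == value;  // false only for NaN
//   }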
12248 
12249 // ES #sec-samevalue
12250 // This algorithm differs from the Strict Equality Comparison Algorithm in its
12251 // treatment of signed zeroes and NaNs.
12252 void CodeStubAssembler::BranchIfSameValue(Node* lhs, Node* rhs, Label* if_true,
12253  Label* if_false) {
12254  VARIABLE(var_lhs_value, MachineRepresentation::kFloat64);
12255  VARIABLE(var_rhs_value, MachineRepresentation::kFloat64);
12256  Label do_fcmp(this);
12257 
12258  // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
12259  // StrictEqual - SameValue considers two NaNs to be equal.
12260  GotoIf(WordEqual(lhs, rhs), if_true);
12261 
12262  // Check if the {lhs} is a Smi.
12263  Label if_lhsissmi(this), if_lhsisheapobject(this);
12264  Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);
12265 
12266  BIND(&if_lhsissmi);
12267  {
12268  // Since {lhs} is a Smi, the comparison can only yield true
12269  // if the {rhs} is a HeapNumber with the same float64 value.
12270  Branch(TaggedIsSmi(rhs), if_false, [&] {
12271  GotoIfNot(IsHeapNumber(rhs), if_false);
12272  var_lhs_value.Bind(SmiToFloat64(lhs));
12273  var_rhs_value.Bind(LoadHeapNumberValue(rhs));
12274  Goto(&do_fcmp);
12275  });
12276  }
12277 
12278  BIND(&if_lhsisheapobject);
12279  {
12280  // Check if the {rhs} is a Smi.
12281  Branch(TaggedIsSmi(rhs),
12282  [&] {
12283  // Since {rhs} is a Smi, the comparison can only yield true
12284  // if the {lhs} is a HeapNumber with the same float64 value.
12285  GotoIfNot(IsHeapNumber(lhs), if_false);
12286  var_lhs_value.Bind(LoadHeapNumberValue(lhs));
12287  var_rhs_value.Bind(SmiToFloat64(rhs));
12288  Goto(&do_fcmp);
12289  },
12290  [&] {
12291  // Now this can only yield true if either both {lhs} and {rhs} are
12292  // HeapNumbers with the same value, or both are Strings with the
12293  // same character sequence, or both are BigInts with the same
12294  // value.
12295  Label if_lhsisheapnumber(this), if_lhsisstring(this),
12296  if_lhsisbigint(this);
12297  Node* const lhs_map = LoadMap(lhs);
12298  GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
12299  Node* const lhs_instance_type = LoadMapInstanceType(lhs_map);
12300  GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
12301  Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
12302  if_false);
12303 
12304  BIND(&if_lhsisheapnumber);
12305  {
12306  GotoIfNot(IsHeapNumber(rhs), if_false);
12307  var_lhs_value.Bind(LoadHeapNumberValue(lhs));
12308  var_rhs_value.Bind(LoadHeapNumberValue(rhs));
12309  Goto(&do_fcmp);
12310  }
12311 
12312  BIND(&if_lhsisstring);
12313  {
12314  // Now we can only yield true if {rhs} is also a String
12315  // with the same sequence of characters.
12316  GotoIfNot(IsString(rhs), if_false);
12317  Node* const result = CallBuiltin(Builtins::kStringEqual,
12318  NoContextConstant(), lhs, rhs);
12319  Branch(IsTrue(result), if_true, if_false);
12320  }
12321 
12322  BIND(&if_lhsisbigint);
12323  {
12324  GotoIfNot(IsBigInt(rhs), if_false);
12325  Node* const result = CallRuntime(Runtime::kBigIntEqualToBigInt,
12326  NoContextConstant(), lhs, rhs);
12327  Branch(IsTrue(result), if_true, if_false);
12328  }
12329  });
12330  }
12331 
12332  BIND(&do_fcmp);
12333  {
12334  Node* const lhs_value = var_lhs_value.value();
12335  Node* const rhs_value = var_rhs_value.value();
12336 
12337  Label if_equal(this), if_notequal(this);
12338  Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12339 
12340  BIND(&if_equal);
12341  {
12342  // We still need to handle the case where {lhs} is -0.0 and {rhs}
12343  // is 0.0 (or vice versa). Compare the high words to distinguish
12344  // between the two.
12345  Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_value);
12346  Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_value);
12347 
12348  // If x is +0 and y is -0, return false.
12349  // If x is -0 and y is +0, return false.
12350  Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
12351  }
12352 
12353  BIND(&if_notequal);
12354  {
12355  // Return true iff both {rhs} and {lhs} are NaN.
12356  GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
12357  Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
12358  }
12359  }
12360 }
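
// A standalone sketch (plain C++, not CSA) of the high-word trick used in
// do_fcmp above: the IEEE-754 sign bit lives in the upper 32 bits of the
// double's bit pattern, so comparing high words tells +0.0 and -0.0 apart
// even though they compare equal as doubles.
//
//   #include <cstdint>
//   #include <cstring>
//   uint32_t HighWord(double value) {
//     uint64_t bits;
//     std::memcpy(&bits, &value, sizeof bits);
//     return static_cast<uint32_t>(bits >> 32);
//   }
//   // HighWord(0.0) == 0x00000000, HighWord(-0.0) == 0x80000000.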
12361 
12362 TNode<Oddball> CodeStubAssembler::HasProperty(SloppyTNode<Context> context,
12363  SloppyTNode<Object> object,
12364  SloppyTNode<Object> key,
12365  HasPropertyLookupMode mode) {
12366  Label call_runtime(this, Label::kDeferred), return_true(this),
12367  return_false(this), end(this), if_proxy(this, Label::kDeferred);
12368 
12369  CodeStubAssembler::LookupInHolder lookup_property_in_holder =
12370  [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
12371  Node* holder_instance_type, Node* unique_name,
12372  Label* next_holder, Label* if_bailout) {
12373  TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
12374  &return_true, next_holder, if_bailout);
12375  };
12376 
12377  CodeStubAssembler::LookupInHolder lookup_element_in_holder =
12378  [this, &return_true, &return_false](
12379  Node* receiver, Node* holder, Node* holder_map,
12380  Node* holder_instance_type, Node* index, Label* next_holder,
12381  Label* if_bailout) {
12382  TryLookupElement(holder, holder_map, holder_instance_type, index,
12383  &return_true, &return_false, next_holder, if_bailout);
12384  };
12385 
12386  TryPrototypeChainLookup(object, key, lookup_property_in_holder,
12387  lookup_element_in_holder, &return_false,
12388  &call_runtime, &if_proxy);
12389 
12390  TVARIABLE(Oddball, result);
12391 
12392  BIND(&if_proxy);
12393  {
12394  TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
12395  switch (mode) {
12396  case kHasProperty:
12397  GotoIf(IsPrivateSymbol(name), &return_false);
12398 
12399  result = CAST(
12400  CallBuiltin(Builtins::kProxyHasProperty, context, object, name));
12401  Goto(&end);
12402  break;
12403  case kForInHasProperty:
12404  Goto(&call_runtime);
12405  break;
12406  }
12407  }
12408 
12409  BIND(&return_true);
12410  {
12411  result = TrueConstant();
12412  Goto(&end);
12413  }
12414 
12415  BIND(&return_false);
12416  {
12417  result = FalseConstant();
12418  Goto(&end);
12419  }
12420 
12421  BIND(&call_runtime);
12422  {
12423  Runtime::FunctionId fallback_runtime_function_id;
12424  switch (mode) {
12425  case kHasProperty:
12426  fallback_runtime_function_id = Runtime::kHasProperty;
12427  break;
12428  case kForInHasProperty:
12429  fallback_runtime_function_id = Runtime::kForInHasProperty;
12430  break;
12431  }
12432 
12433  result =
12434  CAST(CallRuntime(fallback_runtime_function_id, context, object, key));
12435  Goto(&end);
12436  }
12437 
12438  BIND(&end);
12439  CSA_ASSERT(this, IsBoolean(result.value()));
12440  return result.value();
12441 }
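
// A usage sketch: a hypothetical builtin (assuming the usual TF_BUILTIN and
// Descriptor conventions from builtins-*-gen.cc; the names here are
// illustrative, not from this file) wiring "key in object" to HasProperty:
//
//   TF_BUILTIN(HasPropertyExample, CodeStubAssembler) {
//     TNode<Context> context = CAST(Parameter(Descriptor::kContext));
//     TNode<Object> object = CAST(Parameter(Descriptor::kObject));
//     TNode<Object> key = CAST(Parameter(Descriptor::kKey));
//     Return(HasProperty(context, object, key, kHasProperty));
//   }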
12442 
12443 Node* CodeStubAssembler::Typeof(Node* value) {
12444  VARIABLE(result_var, MachineRepresentation::kTagged);
12445 
12446  Label return_number(this, Label::kDeferred), if_oddball(this),
12447  return_function(this), return_undefined(this), return_object(this),
12448  return_string(this), return_bigint(this), return_result(this);
12449 
12450  GotoIf(TaggedIsSmi(value), &return_number);
12451 
12452  Node* map = LoadMap(value);
12453 
12454  GotoIf(IsHeapNumberMap(map), &return_number);
12455 
12456  Node* instance_type = LoadMapInstanceType(map);
12457 
12458  GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);
12459 
12460  Node* callable_or_undetectable_mask = Word32And(
12461  LoadMapBitField(map),
12462  Int32Constant(Map::IsCallableBit::kMask | Map::IsUndetectableBit::kMask));
12463 
12464  GotoIf(Word32Equal(callable_or_undetectable_mask,
12465  Int32Constant(Map::IsCallableBit::kMask)),
12466  &return_function);
12467 
12468  GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
12469  &return_undefined);
12470 
12471  GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);
12472 
12473  GotoIf(IsStringInstanceType(instance_type), &return_string);
12474 
12475  GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);
12476 
12477  CSA_ASSERT(this, InstanceTypeEqual(instance_type, SYMBOL_TYPE));
12478  result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
12479  Goto(&return_result);
12480 
12481  BIND(&return_number);
12482  {
12483  result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
12484  Goto(&return_result);
12485  }
12486 
12487  BIND(&if_oddball);
12488  {
12489  Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
12490  result_var.Bind(type);
12491  Goto(&return_result);
12492  }
12493 
12494  BIND(&return_function);
12495  {
12496  result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
12497  Goto(&return_result);
12498  }
12499 
12500  BIND(&return_undefined);
12501  {
12502  result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
12503  Goto(&return_result);
12504  }
12505 
12506  BIND(&return_object);
12507  {
12508  result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
12509  Goto(&return_result);
12510  }
12511 
12512  BIND(&return_string);
12513  {
12514  result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
12515  Goto(&return_result);
12516  }
12517 
12518  BIND(&return_bigint);
12519  {
12520  result_var.Bind(HeapConstant(isolate()->factory()->bigint_string()));
12521  Goto(&return_result);
12522  }
12523 
12524  BIND(&return_result);
12525  return result_var.value();
12526 }
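
// A standalone sketch (plain C++, not CSA) of the callable/undetectable
// mask logic above, with hypothetical bit positions standing in for
// Map::IsCallableBit and Map::IsUndetectableBit: only "callable and not
// undetectable" reports "function", while any undetectable map
// (document.all) reports "undefined".
//
//   const char* TypeofFromBitField(uint32_t bit_field) {
//     constexpr uint32_t kIsCallable = 1u << 0;      // assumed bit position
//     constexpr uint32_t kIsUndetectable = 1u << 1;  // assumed bit position
//     uint32_t mask = bit_field & (kIsCallable | kIsUndetectable);
//     if (mask == kIsCallable) return "function";
//     if (mask != 0) return "undefined";
//     return nullptr;  // fall through to object/string/symbol/bigint checks
//   }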
12527 
12528 TNode<Object> CodeStubAssembler::GetSuperConstructor(
12529  SloppyTNode<Context> context, SloppyTNode<JSFunction> active_function) {
12530  Label is_not_constructor(this, Label::kDeferred), out(this);
12531  TVARIABLE(Object, result);
12532 
12533  TNode<Map> map = LoadMap(active_function);
12534  TNode<Object> prototype = LoadMapPrototype(map);
12535  TNode<Map> prototype_map = LoadMap(CAST(prototype));
12536  GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);
12537 
12538  result = prototype;
12539  Goto(&out);
12540 
12541  BIND(&is_not_constructor);
12542  {
12543  CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
12544  active_function);
12545  Unreachable();
12546  }
12547 
12548  BIND(&out);
12549  return result.value();
12550 }
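
// In JS terms: for "class B extends A {}", the parent constructor A is the
// prototype of B itself, so resolving super() above reduces to a map
// prototype load plus the IsConstructor check.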
12551 
12552 TNode<JSReceiver> CodeStubAssembler::SpeciesConstructor(
12553  SloppyTNode<Context> context, SloppyTNode<Object> object,
12554  SloppyTNode<JSReceiver> default_constructor) {
12555  Isolate* isolate = this->isolate();
12556  TVARIABLE(JSReceiver, var_result, default_constructor);
12557 
12558  // 2. Let C be ? Get(O, "constructor").
12559  TNode<Object> constructor =
12560  GetProperty(context, object, isolate->factory()->constructor_string());
12561 
12562  // 3. If C is undefined, return defaultConstructor.
12563  Label out(this);
12564  GotoIf(IsUndefined(constructor), &out);
12565 
12566  // 4. If Type(C) is not Object, throw a TypeError exception.
12567  ThrowIfNotJSReceiver(context, constructor,
12568  MessageTemplate::kConstructorNotReceiver);
12569 
12570  // 5. Let S be ? Get(C, @@species).
12571  TNode<Object> species =
12572  GetProperty(context, constructor, isolate->factory()->species_symbol());
12573 
12574  // 6. If S is either undefined or null, return defaultConstructor.
12575  GotoIf(IsNullOrUndefined(species), &out);
12576 
12577  // 7. If IsConstructor(S) is true, return S.
12578  Label throw_error(this);
12579  GotoIf(TaggedIsSmi(species), &throw_error);
12580  GotoIfNot(IsConstructorMap(LoadMap(CAST(species))), &throw_error);
12581  var_result = CAST(species);
12582  Goto(&out);
12583 
12584  // 8. Throw a TypeError exception.
12585  BIND(&throw_error);
12586  ThrowTypeError(context, MessageTemplate::kSpeciesNotConstructor);
12587 
12588  BIND(&out);
12589  return var_result.value();
12590 }
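
// An illustrative JS example of the lookup above:
//
//   class MyArray extends Array {
//     static get [Symbol.species]() { return Array; }
//   }
//
// Here SpeciesConstructor(instanceOfMyArray, defaultConstructor) returns
// Array via step 7; without the getter it would return MyArray (which
// inherits Array's default @@species), and a null or undefined species
// falls back to defaultConstructor via step 6.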
12591 
12592 Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
12593  Node* context) {
12594  VARIABLE(var_result, MachineRepresentation::kTagged);
12595  Label if_notcallable(this, Label::kDeferred),
12596  if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
12597  if_nohandler(this, Label::kDeferred), return_true(this),
12598  return_false(this), return_result(this, &var_result);
12599 
12600  // Ensure that the {callable} is actually a JSReceiver.
12601  GotoIf(TaggedIsSmi(callable), &if_notreceiver);
12602  GotoIfNot(IsJSReceiver(callable), &if_notreceiver);
12603 
12604  // Load the @@hasInstance property from {callable}.
12605  Node* inst_of_handler =
12606  GetProperty(context, callable, HasInstanceSymbolConstant());
12607 
12608  // Optimize for the likely case where {inst_of_handler} is the builtin
12609  // Function.prototype[@@hasInstance] method, and emit a direct call in
12610  // that case without any additional checking.
12611  Node* native_context = LoadNativeContext(context);
12612  Node* function_has_instance =
12613  LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
12614  GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
12615  &if_otherhandler);
12616  {
12617  // Call Function.prototype[@@hasInstance] directly.
12618  Callable builtin(BUILTIN_CODE(isolate(), FunctionPrototypeHasInstance),
12619  CallTrampolineDescriptor{});
12620  Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
12621  var_result.Bind(result);
12622  Goto(&return_result);
12623  }
12624 
12625  BIND(&if_otherhandler);
12626  {
12627  // Check if there's actually an {inst_of_handler}.
12628  GotoIf(IsNull(inst_of_handler), &if_nohandler);
12629  GotoIf(IsUndefined(inst_of_handler), &if_nohandler);
12630 
12631  // Call the {inst_of_handler} for {callable} and {object}.
12632  Node* result = CallJS(
12633  CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
12634  context, inst_of_handler, callable, object);
12635 
12636  // Convert the {result} to a Boolean.
12637  BranchIfToBooleanIsTrue(result, &return_true, &return_false);
12638  }
12639 
12640  BIND(&if_nohandler);
12641  {
12642  // Ensure that the {callable} is actually Callable.
12643  GotoIfNot(IsCallable(callable), &if_notcallable);
12644 
12645  // Use the OrdinaryHasInstance algorithm.
12646  Node* result =
12647  CallBuiltin(Builtins::kOrdinaryHasInstance, context, callable, object);
12648  var_result.Bind(result);
12649  Goto(&return_result);
12650  }
12651 
12652  BIND(&if_notcallable);
12653  { ThrowTypeError(context, MessageTemplate::kNonCallableInInstanceOfCheck); }
12654 
12655  BIND(&if_notreceiver);
12656  { ThrowTypeError(context, MessageTemplate::kNonObjectInInstanceOfCheck); }
12657 
12658  BIND(&return_true);
12659  var_result.Bind(TrueConstant());
12660  Goto(&return_result);
12661 
12662  BIND(&return_false);
12663  var_result.Bind(FalseConstant());
12664  Goto(&return_result);
12665 
12666  BIND(&return_result);
12667  return var_result.value();
12668 }
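
// In JS terms, "o instanceof C" above dispatches on C[Symbol.hasInstance]:
//
//   o instanceof C
//   // 1. handler is Function.prototype[Symbol.hasInstance]: direct call
//   // 2. any other callable handler: call it, then ToBoolean(result)
//   // 3. handler null/undefined: OrdinaryHasInstance(C, o)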
12669 
12670 TNode<Number> CodeStubAssembler::NumberInc(SloppyTNode<Number> value) {
12671  TVARIABLE(Number, var_result);
12672  TVARIABLE(Float64T, var_finc_value);
12673  Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
12674  Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
12675 
12676  BIND(&if_issmi);
12677  {
12678  Label if_overflow(this);
12679  TNode<Smi> smi_value = CAST(value);
12680  TNode<Smi> one = SmiConstant(1);
12681  var_result = TrySmiAdd(smi_value, one, &if_overflow);
12682  Goto(&end);
12683 
12684  BIND(&if_overflow);
12685  {
12686  var_finc_value = SmiToFloat64(smi_value);
12687  Goto(&do_finc);
12688  }
12689  }
12690 
12691  BIND(&if_isnotsmi);
12692  {
12693  TNode<HeapNumber> heap_number_value = CAST(value);
12694 
12695  // Load the HeapNumber value.
12696  var_finc_value = LoadHeapNumberValue(heap_number_value);
12697  Goto(&do_finc);
12698  }
12699 
12700  BIND(&do_finc);
12701  {
12702  TNode<Float64T> finc_value = var_finc_value.value();
12703  TNode<Float64T> one = Float64Constant(1.0);
12704  TNode<Float64T> finc_result = Float64Add(finc_value, one);
12705  var_result = AllocateHeapNumberWithValue(finc_result);
12706  Goto(&end);
12707  }
12708 
12709  BIND(&end);
12710  return var_result.value();
12711 }
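
// A standalone sketch (plain C++, not CSA) of the Smi fast path above: try
// small-integer arithmetic first and take the float64 path only on overflow
// (the GCC/Clang overflow builtin stands in for TrySmiAdd here):
//
//   bool TrySmallIntegerInc(int32_t value, int32_t* result) {
//     return !__builtin_add_overflow(value, 1, result);  // false => float path
//   }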
12712 
12713 TNode<Number> CodeStubAssembler::NumberDec(SloppyTNode<Number> value) {
12714  TVARIABLE(Number, var_result);
12715  TVARIABLE(Float64T, var_fdec_value);
12716  Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
12717  Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
12718 
12719  BIND(&if_issmi);
12720  {
12721  TNode<Smi> smi_value = CAST(value);
12722  TNode<Smi> one = SmiConstant(1);
12723  Label if_overflow(this);
12724  var_result = TrySmiSub(smi_value, one, &if_overflow);
12725  Goto(&end);
12726 
12727  BIND(&if_overflow);
12728  {
12729  var_fdec_value = SmiToFloat64(smi_value);
12730  Goto(&do_fdec);
12731  }
12732  }
12733 
12734  BIND(&if_isnotsmi);
12735  {
12736  TNode<HeapNumber> heap_number_value = CAST(value);
12737 
12738  // Load the HeapNumber value.
12739  var_fdec_value = LoadHeapNumberValue(heap_number_value);
12740  Goto(&do_fdec);
12741  }
12742 
12743  BIND(&do_fdec);
12744  {
12745  TNode<Float64T> fdec_value = var_fdec_value.value();
12746  TNode<Float64T> minus_one = Float64Constant(-1.0);
12747  TNode<Float64T> fdec_result = Float64Add(fdec_value, minus_one);
12748  var_result = AllocateHeapNumberWithValue(fdec_result);
12749  Goto(&end);
12750  }
12751 
12752  BIND(&end);
12753  return var_result.value();
12754 }
12755 
12756 TNode<Number> CodeStubAssembler::NumberAdd(SloppyTNode<Number> a,
12757  SloppyTNode<Number> b) {
12758  TVARIABLE(Number, var_result);
12759  Label float_add(this, Label::kDeferred), end(this);
12760  GotoIf(TaggedIsNotSmi(a), &float_add);
12761  GotoIf(TaggedIsNotSmi(b), &float_add);
12762 
12763  // Try fast Smi addition first.
12764  var_result = TrySmiAdd(CAST(a), CAST(b), &float_add);
12765  Goto(&end);
12766 
12767  BIND(&float_add);
12768  {
12769  var_result = ChangeFloat64ToTagged(
12770  Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
12771  Goto(&end);
12772  }
12773 
12774  BIND(&end);
12775  return var_result.value();
12776 }
12777 
12778 TNode<Number> CodeStubAssembler::NumberSub(SloppyTNode<Number> a,
12779  SloppyTNode<Number> b) {
12780  TVARIABLE(Number, var_result);
12781  Label float_sub(this, Label::kDeferred), end(this);
12782  GotoIf(TaggedIsNotSmi(a), &float_sub);
12783  GotoIf(TaggedIsNotSmi(b), &float_sub);
12784 
12785  // Try fast Smi subtraction first.
12786  var_result = TrySmiSub(CAST(a), CAST(b), &float_sub);
12787  Goto(&end);
12788 
12789  BIND(&float_sub);
12790  {
12791  var_result = ChangeFloat64ToTagged(
12792  Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
12793  Goto(&end);
12794  }
12795 
12796  BIND(&end);
12797  return var_result.value();
12798 }
12799 
12800 void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
12801  Label is_number(this);
12802  GotoIf(TaggedIsSmi(input), &is_number);
12803  Branch(IsHeapNumber(input), &is_number, is_not_number);
12804  BIND(&is_number);
12805 }
12806 
12807 void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
12808  GotoIf(TaggedIsSmi(input), is_number);
12809  GotoIf(IsHeapNumber(input), is_number);
12810 }
12811 
12812 TNode<Number> CodeStubAssembler::BitwiseOp(Node* left32, Node* right32,
12813  Operation bitwise_op) {
12814  switch (bitwise_op) {
12815  case Operation::kBitwiseAnd:
12816  return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
12817  case Operation::kBitwiseOr:
12818  return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
12819  case Operation::kBitwiseXor:
12820  return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
12821  case Operation::kShiftLeft:
12822  if (!Word32ShiftIsSafe()) {
12823  right32 = Word32And(right32, Int32Constant(0x1F));
12824  }
12825  return ChangeInt32ToTagged(Signed(Word32Shl(left32, right32)));
12826  case Operation::kShiftRight:
12827  if (!Word32ShiftIsSafe()) {
12828  right32 = Word32And(right32, Int32Constant(0x1F));
12829  }
12830  return ChangeInt32ToTagged(Signed(Word32Sar(left32, right32)));
12831  case Operation::kShiftRightLogical:
12832  if (!Word32ShiftIsSafe()) {
12833  right32 = Word32And(right32, Int32Constant(0x1F));
12834  }
12835  return ChangeUint32ToTagged(Unsigned(Word32Shr(left32, right32)));
12836  default:
12837  break;
12838  }
12839  UNREACHABLE();
12840 }
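
// A standalone sketch (plain C++, not CSA) of the shift masking above: JS
// takes shift counts modulo 32, which the Word32And with 0x1F implements on
// targets whose native shift instructions are not already safe.
//
//   int32_t JsShiftLeft(int32_t left, int32_t right) {
//     return static_cast<int32_t>(static_cast<uint32_t>(left)
//                                 << (right & 0x1F));
//   }
//   // e.g. JsShiftLeft(1, 33) == 2, matching (1 << 33) in JS.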
12841 
12842 // ES #sec-createarrayiterator
12843 TNode<JSArrayIterator> CodeStubAssembler::CreateArrayIterator(
12844  TNode<Context> context, TNode<Object> object, IterationKind kind) {
12845  TNode<Context> native_context = LoadNativeContext(context);
12846  TNode<Map> iterator_map = CAST(LoadContextElement(
12847  native_context, Context::INITIAL_ARRAY_ITERATOR_MAP_INDEX));
12848  Node* iterator = Allocate(JSArrayIterator::kSize);
12849  StoreMapNoWriteBarrier(iterator, iterator_map);
12850  StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOrHashOffset,
12851  RootIndex::kEmptyFixedArray);
12852  StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
12853  RootIndex::kEmptyFixedArray);
12854  StoreObjectFieldNoWriteBarrier(
12855  iterator, JSArrayIterator::kIteratedObjectOffset, object);
12856  StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
12857  SmiConstant(0));
12858  StoreObjectFieldNoWriteBarrier(
12859  iterator, JSArrayIterator::kKindOffset,
12860  SmiConstant(Smi::FromInt(static_cast<int>(kind))));
12861  return CAST(iterator);
12862 }
12863 
12864 Node* CodeStubAssembler::AllocateJSIteratorResult(Node* context, Node* value,
12865  Node* done) {
12866  CSA_ASSERT(this, IsBoolean(done));
12867  Node* native_context = LoadNativeContext(context);
12868  Node* map =
12869  LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
12870  Node* result = Allocate(JSIteratorResult::kSize);
12871  StoreMapNoWriteBarrier(result, map);
12872  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
12873  RootIndex::kEmptyFixedArray);
12874  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
12875  RootIndex::kEmptyFixedArray);
12876  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
12877  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
12878  return result;
12879 }
12880 
12881 Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
12882  Node* key,
12883  Node* value) {
12884  Node* native_context = LoadNativeContext(context);
12885  Node* length = SmiConstant(2);
12886  int const elements_size = FixedArray::SizeFor(2);
12887  TNode<FixedArray> elements = UncheckedCast<FixedArray>(
12888  Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize));
12889  StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
12890  RootIndex::kFixedArrayMap);
12891  StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
12892  StoreFixedArrayElement(elements, 0, key);
12893  StoreFixedArrayElement(elements, 1, value);
12894  Node* array_map = LoadContextElement(
12895  native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX);
12896  TNode<HeapObject> array = InnerAllocate(elements, elements_size);
12897  StoreMapNoWriteBarrier(array, array_map);
12898  StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
12899  RootIndex::kEmptyFixedArray);
12900  StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
12901  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
12902  Node* iterator_map =
12903  LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
12904  TNode<HeapObject> result = InnerAllocate(array, JSArray::kSize);
12905  StoreMapNoWriteBarrier(result, iterator_map);
12906  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
12907  RootIndex::kEmptyFixedArray);
12908  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
12909  RootIndex::kEmptyFixedArray);
12910  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
12911  StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
12912  RootIndex::kFalseValue);
12913  return result;
12914 }
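
// Layout note: the code above folds three objects into a single allocation,
// laid out back to back as
//
//   FixedArray{key, value} | JSArray over that FixedArray | JSIteratorResult
//
// with InnerAllocate only deriving interior pointers into the block already
// reserved by the initial Allocate call.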
12915 
12916 TNode<JSReceiver> CodeStubAssembler::ArraySpeciesCreate(TNode<Context> context,
12917  TNode<Object> o,
12918  TNode<Number> len) {
12919  TNode<JSReceiver> constructor =
12920  CAST(CallRuntime(Runtime::kArraySpeciesConstructor, context, o));
12921  return Construct(context, constructor, len);
12922 }
12923 
12924 TNode<JSReceiver> CodeStubAssembler::InternalArrayCreate(TNode<Context> context,
12925  TNode<Number> len) {
12926  TNode<Context> native_context = LoadNativeContext(context);
12927  TNode<JSReceiver> constructor = CAST(LoadContextElement(
12928  native_context, Context::INTERNAL_ARRAY_FUNCTION_INDEX));
12929  return Construct(context, constructor, len);
12930 }
12931 
12932 Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
12933  CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
12934  TNode<Uint32T> buffer_bit_field = LoadJSArrayBufferBitField(CAST(buffer));
12935  return IsSetWord32<JSArrayBuffer::WasNeuteredBit>(buffer_bit_field);
12936 }
12937 
12938 void CodeStubAssembler::ThrowIfArrayBufferIsDetached(
12939  SloppyTNode<Context> context, TNode<JSArrayBuffer> array_buffer,
12940  const char* method_name) {
12941  Label if_detached(this, Label::kDeferred), if_not_detached(this);
12942  Branch(IsDetachedBuffer(array_buffer), &if_detached, &if_not_detached);
12943  BIND(&if_detached);
12944  ThrowTypeError(context, MessageTemplate::kDetachedOperation, method_name);
12945  BIND(&if_not_detached);
12946 }
12947 
12948 void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
12949  SloppyTNode<Context> context, TNode<JSArrayBufferView> array_buffer_view,
12950  const char* method_name) {
12951  TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array_buffer_view);
12952  ThrowIfArrayBufferIsDetached(context, buffer, method_name);
12953 }
12954 
12955 TNode<Uint32T> CodeStubAssembler::LoadJSArrayBufferBitField(
12956  TNode<JSArrayBuffer> array_buffer) {
12957  return LoadObjectField<Uint32T>(array_buffer, JSArrayBuffer::kBitFieldOffset);
12958 }
12959 
12960 TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStore(
12961  TNode<JSArrayBuffer> array_buffer) {
12962  return LoadObjectField<RawPtrT>(array_buffer,
12963  JSArrayBuffer::kBackingStoreOffset);
12964 }
12965 
12966 TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer(
12967  TNode<JSArrayBufferView> array_buffer_view) {
12968  return LoadObjectField<JSArrayBuffer>(array_buffer_view,
12969  JSArrayBufferView::kBufferOffset);
12970 }
12971 
12972 TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteLength(
12973  TNode<JSArrayBufferView> array_buffer_view) {
12974  return LoadObjectField<UintPtrT>(array_buffer_view,
12975  JSArrayBufferView::kByteLengthOffset);
12976 }
12977 
12978 TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteOffset(
12979  TNode<JSArrayBufferView> array_buffer_view) {
12980  return LoadObjectField<UintPtrT>(array_buffer_view,
12981  JSArrayBufferView::kByteOffsetOffset);
12982 }
12983 
12984 TNode<Smi> CodeStubAssembler::LoadJSTypedArrayLength(
12985  TNode<JSTypedArray> typed_array) {
12986  return LoadObjectField<Smi>(typed_array, JSTypedArray::kLengthOffset);
12987 }
12988 
12989 CodeStubArguments::CodeStubArguments(
12990  CodeStubAssembler* assembler, Node* argc, Node* fp,
12991  CodeStubAssembler::ParameterMode param_mode, ReceiverMode receiver_mode)
12992  : assembler_(assembler),
12993  argc_mode_(param_mode),
12994  receiver_mode_(receiver_mode),
12995  argc_(argc),
12996  arguments_(),
12997  fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
12998  Node* offset = assembler_->ElementOffsetFromIndex(
12999  argc_, PACKED_ELEMENTS, param_mode,
13000  (StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize);
13001  arguments_ = assembler_->UncheckedCast<RawPtr<Object>>(
13002  assembler_->IntPtrAdd(fp_, offset));
13003 }
13004 
13005 TNode<Object> CodeStubArguments::GetReceiver() const {
13006  DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
13007  return assembler_->UncheckedCast<Object>(
13008  assembler_->Load(MachineType::AnyTagged(), arguments_,
13009  assembler_->IntPtrConstant(kPointerSize)));
13010 }
13011 
13012 void CodeStubArguments::SetReceiver(TNode<Object> object) const {
13013  DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
13014  assembler_->StoreNoWriteBarrier(MachineRepresentation::kTagged, arguments_,
13015  assembler_->IntPtrConstant(kPointerSize),
13016  object);
13017 }
13018 
13019 TNode<RawPtr<Object>> CodeStubArguments::AtIndexPtr(
13020  Node* index, CodeStubAssembler::ParameterMode mode) const {
13021  typedef compiler::Node Node;
13022  Node* negated_index = assembler_->IntPtrOrSmiSub(
13023  assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
13024  Node* offset = assembler_->ElementOffsetFromIndex(negated_index,
13025  PACKED_ELEMENTS, mode, 0);
13026  return assembler_->UncheckedCast<RawPtr<Object>>(assembler_->IntPtrAdd(
13027  assembler_->UncheckedCast<IntPtrT>(arguments_), offset));
13028 }
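
// Note: arguments are laid out in memory in reverse order (argument 0 at
// the highest address, just below the receiver slot), hence the negated
// index above and the -kPointerSize stride in ForEach below.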
13029 
13030 TNode<Object> CodeStubArguments::AtIndex(
13031  Node* index, CodeStubAssembler::ParameterMode mode) const {
13032  DCHECK_EQ(argc_mode_, mode);
13033  CSA_ASSERT(assembler_,
13034  assembler_->UintPtrOrSmiLessThan(index, GetLength(mode), mode));
13035  return assembler_->UncheckedCast<Object>(
13036  assembler_->Load(MachineType::AnyTagged(), AtIndexPtr(index, mode)));
13037 }
13038 
13039 TNode<Object> CodeStubArguments::AtIndex(int index) const {
13040  return AtIndex(assembler_->IntPtrConstant(index));
13041 }
13042 
13043 TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
13044  int index, TNode<Object> default_value) {
13045  CodeStubAssembler::TVariable<Object> result(assembler_);
13046  CodeStubAssembler::Label argument_missing(assembler_),
13047  argument_done(assembler_, &result);
13048 
13049  assembler_->GotoIf(assembler_->UintPtrOrSmiGreaterThanOrEqual(
13050  assembler_->IntPtrOrSmiConstant(index, argc_mode_),
13051  argc_, argc_mode_),
13052  &argument_missing);
13053  result = AtIndex(index);
13054  assembler_->Goto(&argument_done);
13055 
13056  assembler_->BIND(&argument_missing);
13057  result = default_value;
13058  assembler_->Goto(&argument_done);
13059 
13060  assembler_->BIND(&argument_done);
13061  return result.value();
13062 }
13063 
13064 TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
13065  TNode<IntPtrT> index, TNode<Object> default_value) {
13066  CodeStubAssembler::TVariable<Object> result(assembler_);
13067  CodeStubAssembler::Label argument_missing(assembler_),
13068  argument_done(assembler_, &result);
13069 
13070  assembler_->GotoIf(
13071  assembler_->UintPtrOrSmiGreaterThanOrEqual(
13072  assembler_->IntPtrToParameter(index, argc_mode_), argc_, argc_mode_),
13073  &argument_missing);
13074  result = AtIndex(index);
13075  assembler_->Goto(&argument_done);
13076 
13077  assembler_->BIND(&argument_missing);
13078  result = default_value;
13079  assembler_->Goto(&argument_done);
13080 
13081  assembler_->BIND(&argument_done);
13082  return result.value();
13083 }
13084 
13085 void CodeStubArguments::ForEach(
13086  const CodeStubAssembler::VariableList& vars,
13087  const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
13088  CodeStubAssembler::ParameterMode mode) {
13089  assembler_->Comment("CodeStubArguments::ForEach");
13090  if (first == nullptr) {
13091  first = assembler_->IntPtrOrSmiConstant(0, mode);
13092  }
13093  if (last == nullptr) {
13094  DCHECK_EQ(mode, argc_mode_);
13095  last = argc_;
13096  }
13097  Node* start = assembler_->IntPtrSub(
13098  assembler_->UncheckedCast<IntPtrT>(arguments_),
13099  assembler_->ElementOffsetFromIndex(first, PACKED_ELEMENTS, mode));
13100  Node* end = assembler_->IntPtrSub(
13101  assembler_->UncheckedCast<IntPtrT>(arguments_),
13102  assembler_->ElementOffsetFromIndex(last, PACKED_ELEMENTS, mode));
13103  assembler_->BuildFastLoop(vars, start, end,
13104  [this, &body](Node* current) {
13105  Node* arg = assembler_->Load(
13106  MachineType::AnyTagged(), current);
13107  body(arg);
13108  },
13109  -kPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
13110  CodeStubAssembler::IndexAdvanceMode::kPost);
13111 }
13112 
13113 void CodeStubArguments::PopAndReturn(Node* value) {
13114  Node* pop_count;
13115  if (receiver_mode_ == ReceiverMode::kHasReceiver) {
13116  pop_count = assembler_->IntPtrOrSmiAdd(
13117  argc_, assembler_->IntPtrOrSmiConstant(1, argc_mode_), argc_mode_);
13118  } else {
13119  pop_count = argc_;
13120  }
13121 
13122  assembler_->PopAndReturn(assembler_->ParameterToIntPtr(pop_count, argc_mode_),
13123  value);
13124 }
13125 
13126 Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
13127  STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
13128  return Uint32LessThanOrEqual(elements_kind,
13129  Int32Constant(LAST_FAST_ELEMENTS_KIND));
13130 }
13131 
13132 TNode<BoolT> CodeStubAssembler::IsDoubleElementsKind(
13133  TNode<Int32T> elements_kind) {
13134  STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
13135  STATIC_ASSERT((PACKED_DOUBLE_ELEMENTS & 1) == 0);
13136  STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS);
13137  return Word32Equal(Word32Shr(elements_kind, Int32Constant(1)),
13138  Int32Constant(PACKED_DOUBLE_ELEMENTS / 2));
13139 }
13140 
13141 Node* CodeStubAssembler::IsFastSmiOrTaggedElementsKind(Node* elements_kind) {
13142  STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
13143  STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
13144  STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
13145  return Uint32LessThanOrEqual(elements_kind,
13146  Int32Constant(TERMINAL_FAST_ELEMENTS_KIND));
13147 }
13148 
13149 Node* CodeStubAssembler::IsFastSmiElementsKind(Node* elements_kind) {
13150  return Uint32LessThanOrEqual(elements_kind,
13151  Int32Constant(HOLEY_SMI_ELEMENTS));
13152 }
13153 
13154 Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
13155  CSA_ASSERT(this, IsFastElementsKind(elements_kind));
13156 
13157  STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
13158  STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
13159  STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
13160  return IsSetWord32(elements_kind, 1);
13161 }
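
// A standalone sketch (plain C++, not CSA) of the two bit tricks above,
// with hypothetical kind values satisfying the STATIC_ASSERTs (each packed
// kind is even and its holey variant is the next odd value):
//
//   constexpr int kPackedDouble = 4;  // assumed value
//   constexpr int kHoleyDouble = kPackedDouble + 1;
//   constexpr bool IsDoubleKind(int kind) {
//     return (kind >> 1) == (kPackedDouble / 2);  // pairs collapse under >>1
//   }
//   constexpr bool IsHoleyKind(int kind) { return (kind & 1) != 0; }
//   static_assert(IsDoubleKind(kPackedDouble) && IsDoubleKind(kHoleyDouble), "");
//   static_assert(!IsHoleyKind(kPackedDouble) && IsHoleyKind(kHoleyDouble), "");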
13162 
13163 Node* CodeStubAssembler::IsElementsKindGreaterThan(
13164  Node* target_kind, ElementsKind reference_kind) {
13165  return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
13166 }
13167 
13168 TNode<BoolT> CodeStubAssembler::IsElementsKindLessThanOrEqual(
13169  TNode<Int32T> target_kind, ElementsKind reference_kind) {
13170  return Int32LessThanOrEqual(target_kind, Int32Constant(reference_kind));
13171 }
13172 
13173 Node* CodeStubAssembler::IsDebugActive() {
13174  Node* is_debug_active = Load(
13175  MachineType::Uint8(),
13176  ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
13177  return Word32NotEqual(is_debug_active, Int32Constant(0));
13178 }
13179 
13180 TNode<BoolT> CodeStubAssembler::IsRuntimeCallStatsEnabled() {
13181  TNode<Word32T> flag_value = UncheckedCast<Word32T>(Load(
13182  MachineType::Int32(),
13183  ExternalConstant(ExternalReference::address_of_runtime_stats_flag())));
13184  return Word32NotEqual(flag_value, Int32Constant(0));
13185 }
13186 
13187 Node* CodeStubAssembler::IsPromiseHookEnabled() {
13188  Node* const promise_hook = Load(
13189  MachineType::Pointer(),
13190  ExternalConstant(ExternalReference::promise_hook_address(isolate())));
13191  return WordNotEqual(promise_hook, IntPtrConstant(0));
13192 }
13193 
13194 Node* CodeStubAssembler::HasAsyncEventDelegate() {
13195  Node* const async_event_delegate =
13196  Load(MachineType::Pointer(),
13197  ExternalConstant(
13198  ExternalReference::async_event_delegate_address(isolate())));
13199  return WordNotEqual(async_event_delegate, IntPtrConstant(0));
13200 }
13201 
13202 Node* CodeStubAssembler::IsPromiseHookEnabledOrHasAsyncEventDelegate() {
13203  Node* const promise_hook_or_async_event_delegate =
13204  Load(MachineType::Uint8(),
13205  ExternalConstant(
13206  ExternalReference::promise_hook_or_async_event_delegate_address(
13207  isolate())));
13208  return Word32NotEqual(promise_hook_or_async_event_delegate, Int32Constant(0));
13209 }
13210 
13211 Node* CodeStubAssembler::
13212  IsPromiseHookEnabledOrDebugIsActiveOrHasAsyncEventDelegate() {
13213  Node* const promise_hook_or_debug_is_active_or_async_event_delegate = Load(
13214  MachineType::Uint8(),
13215  ExternalConstant(
13216  ExternalReference::
13217  promise_hook_or_debug_is_active_or_async_event_delegate_address(
13218  isolate())));
13219  return Word32NotEqual(promise_hook_or_debug_is_active_or_async_event_delegate,
13220  Int32Constant(0));
13221 }
13222 
13223 TNode<Code> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
13224  CSA_ASSERT(this, SmiGreaterThanOrEqual(builtin_id, SmiConstant(0)));
13225  CSA_ASSERT(this,
13226  SmiLessThan(builtin_id, SmiConstant(Builtins::builtin_count)));
13227 
13228  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
13229  int index_shift = kPointerSizeLog2 - kSmiShiftBits;
13230  TNode<WordT> table_index =
13231  index_shift >= 0 ? WordShl(BitcastTaggedToWord(builtin_id), index_shift)
13232  : WordSar(BitcastTaggedToWord(builtin_id), -index_shift);
13233 
13234  return CAST(
13235  Load(MachineType::TaggedPointer(),
13236  ExternalConstant(ExternalReference::builtins_address(isolate())),
13237  table_index));
13238 }
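
// A standalone sketch (plain C++, not CSA) of the index computation above
// for one hypothetical configuration (full 64-bit Smis with the value in
// the upper 32 bits, 8-byte pointers): the tagged id already equals
// builtin_index << 32, so scaling it to the byte offset builtin_index * 8
// is a single arithmetic right shift by the difference of the exponents.
//
//   intptr_t BuiltinTableByteOffset(intptr_t tagged_builtin_id) {
//     return tagged_builtin_id >> (32 - 3);  // kSmiShiftBits - kPointerSizeLog2
//   }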
13239 
13240 TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode(
13241  SloppyTNode<SharedFunctionInfo> shared_info, Label* if_compile_lazy) {
13242  TNode<Object> sfi_data =
13243  LoadObjectField(shared_info, SharedFunctionInfo::kFunctionDataOffset);
13244 
13245  TVARIABLE(Code, sfi_code);
13246 
13247  Label done(this);
13248  Label check_instance_type(this);
13249 
13250  // IsSmi: Is builtin
13251  GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
13252  if (if_compile_lazy) {
13253  GotoIf(SmiEqual(CAST(sfi_data), SmiConstant(Builtins::kCompileLazy)),
13254  if_compile_lazy);
13255  }
13256  sfi_code = LoadBuiltin(CAST(sfi_data));
13257  Goto(&done);
13258 
13259  // Switch on data's instance type.
13260  BIND(&check_instance_type);
13261  TNode<Int32T> data_type = LoadInstanceType(CAST(sfi_data));
13262 
13263  int32_t case_values[] = {BYTECODE_ARRAY_TYPE,
13264  WASM_EXPORTED_FUNCTION_DATA_TYPE,
13265  ASM_WASM_DATA_TYPE,
13266  UNCOMPILED_DATA_WITHOUT_PRE_PARSED_SCOPE_TYPE,
13267  UNCOMPILED_DATA_WITH_PRE_PARSED_SCOPE_TYPE,
13268  FUNCTION_TEMPLATE_INFO_TYPE};
13269  Label check_is_bytecode_array(this);
13270  Label check_is_exported_function_data(this);
13271  Label check_is_asm_wasm_data(this);
13272  Label check_is_uncompiled_data_without_pre_parsed_scope(this);
13273  Label check_is_uncompiled_data_with_pre_parsed_scope(this);
13274  Label check_is_function_template_info(this);
13275  Label check_is_interpreter_data(this);
13276  Label* case_labels[] = {&check_is_bytecode_array,
13277  &check_is_exported_function_data,
13278  &check_is_asm_wasm_data,
13279  &check_is_uncompiled_data_without_pre_parsed_scope,
13280  &check_is_uncompiled_data_with_pre_parsed_scope,
13281  &check_is_function_template_info};
13282  STATIC_ASSERT(arraysize(case_values) == arraysize(case_labels));
13283  Switch(data_type, &check_is_interpreter_data, case_values, case_labels,
13284  arraysize(case_labels));
13285 
13286  // IsBytecodeArray: Interpret bytecode
13287  BIND(&check_is_bytecode_array);
13288  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));
13289  Goto(&done);
13290 
13291  // IsWasmExportedFunctionData: Use the wrapper code
13292  BIND(&check_is_exported_function_data);
13293  sfi_code = CAST(LoadObjectField(
13294  CAST(sfi_data), WasmExportedFunctionData::kWrapperCodeOffset));
13295  Goto(&done);
13296 
13297  // IsAsmWasmData: Instantiate using AsmWasmData
13298  BIND(&check_is_asm_wasm_data);
13299  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InstantiateAsmJs));
13300  Goto(&done);
13301 
13302  // IsUncompiledDataWithPreParsedScope | IsUncompiledDataWithoutPreParsedScope:
13303  // Compile lazy
13304  BIND(&check_is_uncompiled_data_with_pre_parsed_scope);
13305  Goto(&check_is_uncompiled_data_without_pre_parsed_scope);
13306  BIND(&check_is_uncompiled_data_without_pre_parsed_scope);
13307  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
13308  Goto(if_compile_lazy ? if_compile_lazy : &done);
13309 
13310  // IsFunctionTemplateInfo: API call
13311  BIND(&check_is_function_template_info);
13312  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), HandleApiCall));
13313  Goto(&done);
13314 
13315  // IsInterpreterData: Interpret bytecode
13316  BIND(&check_is_interpreter_data);
13317  // This is the default branch, so assert that we have the expected data type.
13318  CSA_ASSERT(this,
13319  Word32Equal(data_type, Int32Constant(INTERPRETER_DATA_TYPE)));
13320  sfi_code = CAST(LoadObjectField(
13321  CAST(sfi_data), InterpreterData::kInterpreterTrampolineOffset));
13322  Goto(&done);
13323 
13324  BIND(&done);
13325  return sfi_code.value();
13326 }
13327 
13328 Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
13329  Node* shared_info,
13330  Node* context) {
13331  CSA_SLOW_ASSERT(this, IsMap(map));
13332 
13333  Node* const code = GetSharedFunctionInfoCode(shared_info);
13334 
13335  // TODO(ishell): All the callers of this function pass map loaded from
13336  // Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX. So we can remove
13337  // map parameter.
13338  CSA_ASSERT(this, Word32BinaryNot(IsConstructorMap(map)));
13339  CSA_ASSERT(this, Word32BinaryNot(IsFunctionWithPrototypeSlotMap(map)));
13340  Node* const fun = Allocate(JSFunction::kSizeWithoutPrototype);
13341  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kPointerSize);
13342  StoreMapNoWriteBarrier(fun, map);
13343  StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
13344  RootIndex::kEmptyFixedArray);
13345  StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
13346  RootIndex::kEmptyFixedArray);
13347  StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
13348  RootIndex::kManyClosuresCell);
13349  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
13350  shared_info);
13351  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
13352  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeOffset, code);
13353  return fun;
13354 }
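
// A sketch of the call pattern the TODO above anticipates, with the map
// taken from the native context (illustrative, assuming {context} and
// {shared_info} are in scope):
//
//   Node* const map = LoadContextElement(
//       LoadNativeContext(context),
//       Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX);
//   Node* const fun = AllocateFunctionWithMapAndContext(map, shared_info,
//                                                       context);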
13355 
13356 Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
13357  StackFrame::Type frame_type) {
13358  return WordEqual(marker_or_function,
13359  IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
13360 }
13361 
13362 Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
13363  StackFrame::Type frame_type) {
13364  return WordNotEqual(marker_or_function,
13365  IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
13366 }
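
// Both helpers compare a value loaded from a frame slot (either a frame-type
// marker or a JSFunction) against the Smi-encoded marker for {frame_type}.
// Illustrative use, assuming the frame type and label name:
//
//   GotoIf(MarkerIsFrameType(marker_or_function, StackFrame::STUB),
//          &if_stub_frame);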
13367 
13368 void CodeStubAssembler::CheckPrototypeEnumCache(Node* receiver,
13369  Node* receiver_map,
13370  Label* if_fast,
13371  Label* if_slow) {
13372  VARIABLE(var_object, MachineRepresentation::kTagged, receiver);
13373  VARIABLE(var_object_map, MachineRepresentation::kTagged, receiver_map);
13374 
13375  Label loop(this, {&var_object, &var_object_map}), done_loop(this);
13376  Goto(&loop);
13377  BIND(&loop);
13378  {
13379  // Check that there are no elements on the current {object}.
13380  Label if_no_elements(this);
13381  Node* object = var_object.value();
13382  Node* object_map = var_object_map.value();
13383 
13384  // The following relies on the elements only aliasing with JSProxy::target,
13385  // which is a JavaScript value and hence cannot be confused with an elements
13386  // backing store.
13387  STATIC_ASSERT(JSObject::kElementsOffset == JSProxy::kTargetOffset);
13388  Node* object_elements = LoadObjectField(object, JSObject::kElementsOffset);
13389  GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
13390  GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);
13391 
13392  // It might still be an empty JSArray.
13393  GotoIfNot(IsJSArrayMap(object_map), if_slow);
13394  Node* object_length = LoadJSArrayLength(object);
13395  Branch(WordEqual(object_length, SmiConstant(0)), &if_no_elements, if_slow);
13396 
13397  // Continue with the {object}'s prototype.
13398  BIND(&if_no_elements);
13399  object = LoadMapPrototype(object_map);
13400  GotoIf(IsNull(object), if_fast);
13401 
13402  // For all {object}s but the {receiver}, check that the cache is empty.
13403  var_object.Bind(object);
13404  object_map = LoadMap(object);
13405  var_object_map.Bind(object_map);
13406  Node* object_enum_length = LoadMapEnumLength(object_map);
13407  Branch(WordEqual(object_enum_length, IntPtrConstant(0)), &loop, if_slow);
13408  }
13409 }
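
// Summary of the walk above: every object on the chain, starting at
// {receiver}, must have no elements (the empty fixed array, the empty slow
// element dictionary, or a zero-length JSArray); objects after the
// {receiver} must additionally have an enum cache length of zero. Reaching
// the null prototype jumps to {if_fast}; any other state jumps to {if_slow}.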
13410 
13411 Node* CodeStubAssembler::CheckEnumCache(Node* receiver, Label* if_empty,
13412  Label* if_runtime) {
13413  Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
13414  Node* receiver_map = LoadMap(receiver);
13415 
13416  // Check if the enum length field of the {receiver} is properly initialized,
13417  // indicating that there is an enum cache.
13418  Node* receiver_enum_length = LoadMapEnumLength(receiver_map);
13419  Branch(WordEqual(receiver_enum_length,
13420  IntPtrConstant(kInvalidEnumCacheSentinel)),
13421  &if_no_cache, &if_cache);
13422 
13423  BIND(&if_no_cache);
13424  {
13425  // Avoid a runtime call for empty dictionary receivers.
13426  GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
13427  TNode<NameDictionary> properties = CAST(LoadSlowProperties(receiver));
13428  TNode<Smi> length = GetNumberOfElements(properties);
13429  GotoIfNot(WordEqual(length, SmiConstant(0)), if_runtime);
13430  // Check that there are no elements on the {receiver} and its prototype
13431  // chain. Given that we do not create an EnumCache for dict-mode objects,
13432  // directly jump to {if_empty} if there are no elements and no properties
13433  // on the {receiver}.
13434  CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
13435  }
13436 
13437  // Check that there are no elements on the fast {receiver} and its
13438  // prototype chain.
13439  BIND(&if_cache);
13440  CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);
13441 
13442  BIND(&if_fast);
13443  return receiver_map;
13444 }
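
// Hedged usage sketch for a for-in style caller (label names assumed):
//
//   Label if_empty(this), if_runtime(this, Label::kDeferred);
//   Node* receiver_map = CheckEnumCache(receiver, &if_empty, &if_runtime);
//   // Fall-through: {receiver_map} has a usable EnumCache. {if_empty} means
//   // there is provably nothing to enumerate; {if_runtime} bails out.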
13445 
13446 TNode<IntPtrT> CodeStubAssembler::GetArgumentsLength(CodeStubArguments* args) {
13447  return args->GetLength();
13448 }
13449 
13450 TNode<Object> CodeStubAssembler::GetArgumentValue(CodeStubArguments* args,
13451  TNode<IntPtrT> index) {
13452  return args->GetOptionalArgumentValue(index);
13453 }
13454 
13455 void CodeStubAssembler::Print(const char* s) {
13456  std::string formatted(s);
13457  formatted += "\n";
13458  CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
13459  StringConstant(formatted.c_str()));
13460 }
13461 
13462 void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
13463  if (prefix != nullptr) {
13464  std::string formatted(prefix);
13465  formatted += ": ";
13466  Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
13467  formatted.c_str(), TENURED);
13468  CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
13469  HeapConstant(string));
13470  }
13471  CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
13472 }
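
// Debugging sketch, assuming a tagged node {value} is in scope; the first
// overload prints a plain message, the second dumps a value through
// Runtime::kDebugPrint with an optional prefix:
//
//   Print("reached slow path");
//   Print("receiver_map", value);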
13473 
13474 void CodeStubAssembler::PerformStackCheck(TNode<Context> context) {
13475  Label ok(this), stack_check_interrupt(this, Label::kDeferred);
13476 
13477  // The instruction sequence below is carefully crafted to hit our pattern
13478  // matcher for stack checks within instruction selection.
13479  // See StackCheckMatcher::Matched and JSGenericLowering::LowerJSStackCheck.
13480 
13481  TNode<UintPtrT> sp = UncheckedCast<UintPtrT>(LoadStackPointer());
13482  TNode<UintPtrT> stack_limit = UncheckedCast<UintPtrT>(Load(
13483  MachineType::Pointer(),
13484  ExternalConstant(ExternalReference::address_of_stack_limit(isolate()))));
13485  TNode<BoolT> sp_within_limit = UintPtrLessThan(stack_limit, sp);
13486 
13487  Branch(sp_within_limit, &ok, &stack_check_interrupt);
13488 
13489  BIND(&stack_check_interrupt);
13490  CallRuntime(Runtime::kStackGuard, context);
13491  Goto(&ok);
13492 
13493  BIND(&ok);
13494 }
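
// Note on the comparison above: the stack grows downwards, so
// UintPtrLessThan(stack_limit, sp) being true means the stack pointer is
// still above the limit and no interrupt is pending; only the deferred
// branch pays for the Runtime::kStackGuard call.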
13495 
13496 void CodeStubAssembler::InitializeFunctionContext(Node* native_context,
13497  Node* context, int slots) {
13498  DCHECK_GE(slots, Context::MIN_CONTEXT_SLOTS);
13499  StoreMapNoWriteBarrier(context, RootIndex::kFunctionContextMap);
13500  StoreObjectFieldNoWriteBarrier(context, FixedArray::kLengthOffset,
13501  SmiConstant(slots));
13502 
13503  Node* const empty_scope_info =
13504  LoadContextElement(native_context, Context::SCOPE_INFO_INDEX);
13505  StoreContextElementNoWriteBarrier(context, Context::SCOPE_INFO_INDEX,
13506  empty_scope_info);
13507  StoreContextElementNoWriteBarrier(context, Context::PREVIOUS_INDEX,
13508  UndefinedConstant());
13509  StoreContextElementNoWriteBarrier(context, Context::EXTENSION_INDEX,
13510  TheHoleConstant());
13511  StoreContextElementNoWriteBarrier(context, Context::NATIVE_CONTEXT_INDEX,
13512  native_context);
13513 }
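
// A minimal sketch of the expected pairing, assuming the context object was
// just allocated and {slots} includes the Context::MIN_CONTEXT_SLOTS header
// (the allocation helper shown here is an assumption, not taken from this
// file):
//
//   Node* context_obj = AllocateInNewSpace(FixedArray::SizeFor(slots));
//   InitializeFunctionContext(native_context, context_obj, slots);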
13514 
13515 TNode<JSArray> CodeStubAssembler::ArrayCreate(TNode<Context> context,
13516  TNode<Number> length) {
13517  TVARIABLE(JSArray, array);
13518  Label allocate_js_array(this);
13519 
13520  Label done(this), next(this), runtime(this, Label::kDeferred);
13521  TNode<Smi> limit = SmiConstant(JSArray::kInitialMaxFastElementArray);
13522  CSA_ASSERT_BRANCH(this, [=](Label* ok, Label* not_ok) {
13523  BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
13524  SmiConstant(0), ok, not_ok);
13525  });
13526  // This check also transitively covers the case where length is too big
13527  // to be representable as a Smi and so is not usable with
13528  // AllocateJSArray.
13529  BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
13530  limit, &runtime, &next);
13531 
13532  BIND(&runtime);
13533  {
13534  TNode<Context> native_context = LoadNativeContext(context);
13535  TNode<JSFunction> array_function =
13536  CAST(LoadContextElement(native_context, Context::ARRAY_FUNCTION_INDEX));
13537  array = CAST(CallRuntime(Runtime::kNewArray, context, array_function,
13538  length, array_function, UndefinedConstant()));
13539  Goto(&done);
13540  }
13541 
13542  BIND(&next);
13543  CSA_ASSERT(this, TaggedIsSmi(length));
13544 
13545  TNode<Map> array_map = CAST(LoadContextElement(
13546  context, Context::JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX));
13547 
13548  // TODO(delphick): Consider using
13549  // AllocateUninitializedJSArrayWithElements to avoid initializing an
13550  // array and then writing over it.
13551  array =
13552  AllocateJSArray(PACKED_SMI_ELEMENTS, array_map, length, SmiConstant(0),
13553  nullptr, ParameterMode::SMI_PARAMETERS);
13554  Goto(&done);
13555 
13556  BIND(&done);
13557  return array.value();
13558 }
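
// Note on the split above: the assertion rules out negative lengths, and
// because any integral Number that is not a Smi exceeds {limit}, the single
// comparison sends both overly large and non-Smi lengths to
// Runtime::kNewArray; the fast path may therefore assume a Smi length.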
13559 
13560 void CodeStubAssembler::SetPropertyLength(TNode<Context> context,
13561  TNode<Object> array,
13562  TNode<Number> length) {
13563  Label fast(this), runtime(this), done(this);
13564  // There's no need to set the length if
13565  // 1) the array is a fast JS array and
13566  // 2) the new length is equal to the old length,
13567  // as the set is not observable. Otherwise fall back to the runtime.
13568 
13569  // 1) Check that the array has fast elements.
13570  // TODO(delphick): Consider changing this since it does an unnecessary
13571  // check for Smis.
13572  // TODO(delphick): Also we could hoist this to after the array construction
13573  // and copy the args into the array in the same way as the Array constructor.
13574  BranchIfFastJSArray(array, context, &fast, &runtime);
13575 
13576  BIND(&fast);
13577  {
13578  TNode<JSArray> fast_array = CAST(array);
13579 
13580  TNode<Smi> length_smi = CAST(length);
13581  TNode<Smi> old_length = LoadFastJSArrayLength(fast_array);
13582  CSA_ASSERT(this, TaggedIsPositiveSmi(old_length));
13583 
13584  // 2) If the created array's length matches the required length, then
13585  // there's nothing else to do. Otherwise use the runtime to set the
13586  // property as that will insert holes into excess elements or shrink
13587  // the backing store as appropriate.
13588  Branch(SmiNotEqual(length_smi, old_length), &runtime, &done);
13589  }
13590 
13591  BIND(&runtime);
13592  {
13593  SetPropertyStrict(context, array, CodeStubAssembler::LengthStringConstant(),
13594  length);
13595  Goto(&done);
13596  }
13597 
13598  BIND(&done);
13599 }
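
// Hedged usage sketch (names assumed): after filling an array created with
// a known capacity, trim it to the number of elements actually written.
//
//   TNode<JSArray> a = ArrayCreate(context, capacity);
//   // ... store elements ...
//   SetPropertyLength(context, a, written);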
13600 
13601 void CodeStubAssembler::GotoIfInitialPrototypePropertyModified(
13602  TNode<Map> object_map, TNode<Map> initial_prototype_map, int descriptor,
13603  RootIndex field_name_root_index, Label* if_modified) {
13604  DescriptorIndexAndName index_name{descriptor, field_name_root_index};
13605  GotoIfInitialPrototypePropertiesModified(
13606  object_map, initial_prototype_map,
13607  Vector<DescriptorIndexAndName>(&index_name, 1), if_modified);
13608 }
13609 
13610 void CodeStubAssembler::GotoIfInitialPrototypePropertiesModified(
13611  TNode<Map> object_map, TNode<Map> initial_prototype_map,
13612  Vector<DescriptorIndexAndName> properties, Label* if_modified) {
13613  TNode<Map> prototype_map = LoadMap(LoadMapPrototype(object_map));
13614  GotoIfNot(WordEqual(prototype_map, initial_prototype_map), if_modified);
13615 
13616  if (FLAG_track_constant_fields) {
13617  // With constant field tracking, we need to make sure that important
13618  // properties in the prototype have not been tampered with. We do this by
13619  // checking that their slots in the prototype's descriptor array are still
13620  // marked as const.
13621  TNode<DescriptorArray> descriptors = LoadMapDescriptors(prototype_map);
13622 
13623  TNode<Uint32T> combined_details;
13624  for (int i = 0; i < properties.length(); i++) {
13625  // Assert the descriptor index is in-bounds.
13626  int descriptor = properties[i].descriptor_index;
13627  CSA_ASSERT(this, Int32LessThan(Int32Constant(descriptor),
13628  LoadNumberOfDescriptors(descriptors)));
13629  // Assert that the name is correct. This essentially checks that
13630  // the descriptor index corresponds to the insertion order in
13631  // the bootstrapper.
13632  CSA_ASSERT(this,
13633  WordEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),
13634  LoadRoot(properties[i].name_root_index)));
13635 
13636  TNode<Uint32T> details =
13637  DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));
13638  if (i == 0) {
13639  combined_details = details;
13640  } else {
13641  combined_details = Unsigned(Word32And(combined_details, details));
13642  }
13643  }
13644 
13645  TNode<Uint32T> constness =
13646  DecodeWord32<PropertyDetails::ConstnessField>(combined_details);
13647 
13648  GotoIfNot(
13649  Word32Equal(constness,
13650  Int32Constant(static_cast<int>(PropertyConstness::kConst))),
13651  if_modified);
13652  }
13653 }
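
// Hedged usage sketch: a builtin that depends on an untouched
// RegExp.prototype.exec could guard itself as below; {exec_descriptor} is a
// placeholder for the bootstrapper's insertion index of "exec", and the
// context index and root name are assumptions, not taken from this file.
//
//   TNode<Map> initial_map = CAST(LoadContextElement(
//       native_context, Context::REGEXP_PROTOTYPE_MAP_INDEX));
//   GotoIfInitialPrototypePropertyModified(LoadMap(regexp), initial_map,
//                                          exec_descriptor,
//                                          RootIndex::kexec_string,
//                                          &if_modified);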
13654 
13655 } // namespace internal
13656 } // namespace v8