V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
builtins-internal-gen.cc
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/api.h"
#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/code-stub-assembler.h"
#include "src/heap/heap-inl.h"  // crbug.com/v8/8499
#include "src/ic/accessor-assembler.h"
#include "src/ic/keyed-store-generic.h"
#include "src/macro-assembler.h"
#include "src/objects/debug-objects.h"
#include "src/objects/shared-function-info.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

template <typename T>
using TNode = compiler::TNode<T>;

// -----------------------------------------------------------------------------
// Interrupt and stack checks.

void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kInterrupt);
}

void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard);
}

// -----------------------------------------------------------------------------
// TurboFan support builtins.

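// Editorial note: the next three builtins are slow-path helpers for fast
// element stores. CopyFastSmiOrObjectElements clones the receiver's backing
// store before a write (e.g. when the elements are copy-on-write), and the
// Grow* builtins try to grow the backing store inline, falling back to the
// runtime when growing fails.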
TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);

  // Load the {object}'s elements.
  Node* source = LoadObjectField(object, JSObject::kElementsOffset);
  Node* target = CloneFixedArray(source, ExtractFixedArrayFlag::kFixedArrays);
  StoreObjectField(object, JSObject::kElementsOffset, target);
  Return(target);
}

TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);

  Label runtime(this, Label::kDeferred);
  Node* elements = LoadElements(object);
  elements = TryGrowElementsCapacity(object, elements, PACKED_DOUBLE_ELEMENTS,
                                     key, &runtime);
  Return(elements);

  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, context, object, key);
}

TF_BUILTIN(GrowFastSmiOrObjectElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);

  Label runtime(this, Label::kDeferred);
  Node* elements = LoadElements(object);
  elements =
      TryGrowElementsCapacity(object, elements, PACKED_ELEMENTS, key, &runtime);
  Return(elements);

  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, context, object, key);
}

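// Editorial note: this builtin allocates the FixedArray backing store for an
// arguments object. The first {mapped_count} slots are filled with the_hole
// (those entries are backed by the context for mapped arguments), and the
// rest are copied from the stack frame: parameter {index} is loaded from
// {frame} at word offset {length} + 1 - {index}, i.e. the parameters are
// walked from the highest address downwards.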
TF_BUILTIN(NewArgumentsElements, CodeStubAssembler) {
  Node* frame = Parameter(Descriptor::kFrame);
  TNode<IntPtrT> length = SmiToIntPtr(Parameter(Descriptor::kLength));
  TNode<IntPtrT> mapped_count =
      SmiToIntPtr(Parameter(Descriptor::kMappedCount));

  // Check if we can allocate in new space.
  ElementsKind kind = PACKED_ELEMENTS;
  int max_elements = FixedArray::GetMaxLengthForNewSpaceAllocation(kind);
  Label if_newspace(this), if_oldspace(this, Label::kDeferred);
  Branch(IntPtrLessThan(length, IntPtrConstant(max_elements)), &if_newspace,
         &if_oldspace);

  BIND(&if_newspace);
  {
    // Prefer EmptyFixedArray in case of non-positive {length} (the {length}
    // can be negative here for rest parameters).
    Label if_empty(this), if_notempty(this);
    Branch(IntPtrLessThanOrEqual(length, IntPtrConstant(0)), &if_empty,
           &if_notempty);

    BIND(&if_empty);
    Return(EmptyFixedArrayConstant());

    BIND(&if_notempty);
    {
      // Allocate a FixedArray in new space.
      TNode<FixedArray> result = CAST(AllocateFixedArray(kind, length));

      // The elements might be used to back mapped arguments. In that case fill
      // the mapped elements (i.e. the first {mapped_count}) with the hole, but
      // make sure not to overshoot the {length} if some arguments are missing.
      TNode<IntPtrT> number_of_holes = IntPtrMin(mapped_count, length);
      Node* the_hole = TheHoleConstant();

      // Fill the first elements up to {number_of_holes} with the hole.
      TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
      Label loop1(this, &var_index), done_loop1(this);
      Goto(&loop1);
      BIND(&loop1);
      {
        // Load the current {index}.
        TNode<IntPtrT> index = var_index.value();

        // Check if we are done.
        GotoIf(WordEqual(index, number_of_holes), &done_loop1);

        // Store the hole into the {result}.
        StoreFixedArrayElement(result, index, the_hole, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index = IntPtrAdd(index, IntPtrConstant(1));
        Goto(&loop1);
      }
      BIND(&done_loop1);

      // Compute the effective {offset} into the {frame}.
      TNode<IntPtrT> offset = IntPtrAdd(length, IntPtrConstant(1));

      // Copy the parameters from {frame} (starting at {offset}) to {result}.
      Label loop2(this, &var_index), done_loop2(this);
      Goto(&loop2);
      BIND(&loop2);
      {
        // Load the current {index}.
        TNode<IntPtrT> index = var_index.value();

        // Check if we are done.
        GotoIf(WordEqual(index, length), &done_loop2);

        // Load the parameter at the given {index}.
        TNode<Object> value =
            CAST(Load(MachineType::AnyTagged(), frame,
                      TimesPointerSize(IntPtrSub(offset, index))));

        // Store the {value} into the {result}.
        StoreFixedArrayElement(result, index, value, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index = IntPtrAdd(index, IntPtrConstant(1));
        Goto(&loop2);
      }
      BIND(&done_loop2);

      Return(result);
    }
  }

  BIND(&if_oldspace);
  {
    // Allocate in old space (or large object space).
    TailCallRuntime(Runtime::kNewArgumentsElements, NoContextConstant(),
                    BitcastWordToTagged(frame), SmiFromIntPtr(length),
                    SmiFromIntPtr(mapped_count));
  }
}

TF_BUILTIN(ReturnReceiver, CodeStubAssembler) {
  Return(Parameter(Descriptor::kReceiver));
}

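// Editorial note: trampoline used for functions marked for "break at entry"
// debugging. If the SharedFunctionInfo's DebugInfo has the kBreakAtEntry flag
// set, this calls Runtime::kDebugBreakAtEntry before tail-calling the
// function's actual code.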
TF_BUILTIN(DebugBreakTrampoline, CodeStubAssembler) {
  Label tailcall_to_shared(this);
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kJSNewTarget));
  TNode<Int32T> arg_count =
      UncheckedCast<Int32T>(Parameter(Descriptor::kJSActualArgumentsCount));
  TNode<JSFunction> function = CAST(Parameter(Descriptor::kJSTarget));

  // Check break-at-entry flag on the debug info.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  TNode<Object> maybe_heap_object_or_smi =
      LoadObjectField(shared, SharedFunctionInfo::kScriptOrDebugInfoOffset);
  TNode<HeapObject> maybe_debug_info =
      TaggedToHeapObject(maybe_heap_object_or_smi, &tailcall_to_shared);
  GotoIfNot(HasInstanceType(maybe_debug_info, InstanceType::DEBUG_INFO_TYPE),
            &tailcall_to_shared);

  {
    TNode<DebugInfo> debug_info = CAST(maybe_debug_info);
    TNode<Smi> flags =
        CAST(LoadObjectField(debug_info, DebugInfo::kFlagsOffset));
    GotoIfNot(SmiToInt32(SmiAnd(flags, SmiConstant(DebugInfo::kBreakAtEntry))),
              &tailcall_to_shared);

    CallRuntime(Runtime::kDebugBreakAtEntry, context, function);
    Goto(&tailcall_to_shared);
  }

  BIND(&tailcall_to_shared);
  // Tail call into code object on the SharedFunctionInfo.
  TNode<Code> code = GetSharedFunctionInfoCode(shared);
  TailCallJSCode(code, context, function, new_target, arg_count);
}

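// Editorial note: helper assembler for the RecordWrite (write barrier)
// builtin below. It provides primitives for reading page flags, querying the
// incremental-marking bitmap, and pushing slot addresses into the store
// buffer.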
class RecordWriteCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit RecordWriteCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  Node* IsMarking() {
    Node* is_marking_addr = ExternalConstant(
        ExternalReference::heap_is_marking_flag_address(this->isolate()));
    return Load(MachineType::Uint8(), is_marking_addr);
  }

  Node* IsPageFlagSet(Node* object, int mask) {
    Node* page = WordAnd(object, IntPtrConstant(~kPageAlignmentMask));
    Node* flags = Load(MachineType::Pointer(), page,
                       IntPtrConstant(MemoryChunk::kFlagsOffset));
    return WordNotEqual(WordAnd(flags, IntPtrConstant(mask)),
                        IntPtrConstant(0));
  }

  Node* IsWhite(Node* object) {
    DCHECK_EQ(strcmp(Marking::kWhiteBitPattern, "00"), 0);
    Node* cell;
    Node* mask;
    GetMarkBit(object, &cell, &mask);
    mask = TruncateIntPtrToInt32(mask);
    // Non-white has 1 for the first bit, so we only need to check the first
    // bit.
    return Word32Equal(Word32And(Load(MachineType::Int32(), cell), mask),
                       Int32Constant(0));
  }

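  // Editorial note: this computes the mark-bit cell address and bit mask for
  // {object}. Each mark bit covers one pointer-sized word of the page, and
  // bits are grouped into cells of Bitmap::kBitsPerCell bits, so the byte
  // offset of the cell within the page's bitmap is the page offset shifted
  // right by kBitsPerCellLog2 + kPointerSizeLog2 - kBytesPerCellLog2.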
  void GetMarkBit(Node* object, Node** cell, Node** mask) {
    Node* page = WordAnd(object, IntPtrConstant(~kPageAlignmentMask));
    Node* bitmap = Load(MachineType::Pointer(), page,
                        IntPtrConstant(MemoryChunk::kMarkBitmapOffset));

    {
      // Temp variable to calculate cell offset in bitmap.
      Node* r0;
      int shift = Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 -
                  Bitmap::kBytesPerCellLog2;
      r0 = WordShr(object, IntPtrConstant(shift));
      r0 = WordAnd(r0, IntPtrConstant((kPageAlignmentMask >> shift) &
                                      ~(Bitmap::kBytesPerCell - 1)));
      *cell = IntPtrAdd(bitmap, r0);
    }
    {
      // Temp variable to calculate bit offset in cell.
      Node* r1;
      r1 = WordShr(object, IntPtrConstant(kPointerSizeLog2));
      r1 = WordAnd(r1, IntPtrConstant((1 << Bitmap::kBitsPerCellLog2) - 1));
      // It seems that the LSB (e.g. cl) is used automatically, so no manual
      // masking is needed. Uncomment the following line otherwise.
      // WordAnd(r1, IntPtrConstant((1 << kBitsPerByte) - 1)));
      *mask = WordShl(IntPtrConstant(1), r1);
    }
  }

  Node* ShouldSkipFPRegs(Node* mode) {
    return WordEqual(mode, SmiConstant(kDontSaveFPRegs));
  }

  Node* ShouldEmitRememberSet(Node* remembered_set) {
    return WordEqual(remembered_set, SmiConstant(EMIT_REMEMBERED_SET));
  }

  void CallCFunction1WithCallerSavedRegistersMode(MachineType return_type,
                                                  MachineType arg0_type,
                                                  Node* function, Node* arg0,
                                                  Node* mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunction1WithCallerSavedRegisters(return_type, arg0_type, function,
                                             arg0, kDontSaveFPRegs);
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunction1WithCallerSavedRegisters(return_type, arg0_type, function,
                                             arg0, kSaveFPRegs);
      Goto(next);
    }
  }

  void CallCFunction3WithCallerSavedRegistersMode(
      MachineType return_type, MachineType arg0_type, MachineType arg1_type,
      MachineType arg2_type, Node* function, Node* arg0, Node* arg1, Node* arg2,
      Node* mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunction3WithCallerSavedRegisters(return_type, arg0_type, arg1_type,
                                             arg2_type, function, arg0, arg1,
                                             arg2, kDontSaveFPRegs);
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunction3WithCallerSavedRegisters(return_type, arg0_type, arg1_type,
                                             arg2_type, function, arg0, arg1,
                                             arg2, kSaveFPRegs);
      Goto(next);
    }
  }

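  // Editorial note: this appends {slot} to the store buffer and bumps the
  // top pointer. When the new top reaches the end of the buffer (the masked
  // value becomes zero), the store buffer overflow function is called to
  // process the full buffer before continuing at {next}.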
  void InsertToStoreBufferAndGoto(Node* isolate, Node* slot, Node* mode,
                                  Label* next) {
    Node* store_buffer_top_addr =
        ExternalConstant(ExternalReference::store_buffer_top(this->isolate()));
    Node* store_buffer_top =
        Load(MachineType::Pointer(), store_buffer_top_addr);
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), store_buffer_top,
                        slot);
    Node* new_store_buffer_top =
        IntPtrAdd(store_buffer_top, IntPtrConstant(kPointerSize));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(),
                        store_buffer_top_addr, new_store_buffer_top);

    Node* test = WordAnd(new_store_buffer_top,
                         IntPtrConstant(Heap::store_buffer_mask_constant()));

    Label overflow(this);
    Branch(WordEqual(test, IntPtrConstant(0)), &overflow, next);

    BIND(&overflow);
    {
      Node* function =
          ExternalConstant(ExternalReference::store_buffer_overflow_function());
      CallCFunction1WithCallerSavedRegistersMode(MachineType::Int32(),
                                                 MachineType::Pointer(),
                                                 function, isolate, mode, next);
    }
  }
};

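// Editorial note: out-of-line write barrier. {remembered_set} selects the
// generational barrier, which records old-to-new pointers in the store
// buffer; in addition, while incremental marking is on, a white {value} (or
// one on an evacuation candidate page whose host {object} does not skip
// slot recording) is reported to the incremental marker via
// incremental_marking_record_write_function.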
TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
  Label generational_wb(this);
  Label incremental_wb(this);
  Label exit(this);

  Node* remembered_set = Parameter(Descriptor::kRememberedSet);
  Branch(ShouldEmitRememberSet(remembered_set), &generational_wb,
         &incremental_wb);

  BIND(&generational_wb);
  {
    Label test_old_to_new_flags(this);
    Label store_buffer_exit(this), store_buffer_incremental_wb(this);

    // When incremental marking is not on, we skip cross-generation pointer
    // checking here, because there are checks for
    // `kPointersFromHereAreInterestingMask` and
    // `kPointersToHereAreInterestingMask` in
    // `src/compiler/<arch>/code-generator-<arch>.cc` before calling this
    // stub, which serve as the cross-generation check.
    Node* slot = Parameter(Descriptor::kSlot);
    Branch(IsMarking(), &test_old_to_new_flags, &store_buffer_exit);

    BIND(&test_old_to_new_flags);
    {
      Node* value = Load(MachineType::Pointer(), slot);

      // TODO(albertnetymk): Try to cache the page flag for value and object,
      // instead of calling IsPageFlagSet each time.
      Node* value_in_new_space =
          IsPageFlagSet(value, MemoryChunk::kIsInNewSpaceMask);
      GotoIfNot(value_in_new_space, &incremental_wb);

      Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
      Node* object_in_new_space =
          IsPageFlagSet(object, MemoryChunk::kIsInNewSpaceMask);
      Branch(object_in_new_space, &incremental_wb,
             &store_buffer_incremental_wb);
    }

    BIND(&store_buffer_exit);
    {
      Node* isolate_constant =
          ExternalConstant(ExternalReference::isolate_address(isolate()));
      Node* fp_mode = Parameter(Descriptor::kFPMode);
      InsertToStoreBufferAndGoto(isolate_constant, slot, fp_mode, &exit);
    }

    BIND(&store_buffer_incremental_wb);
    {
      Node* isolate_constant =
          ExternalConstant(ExternalReference::isolate_address(isolate()));
      Node* fp_mode = Parameter(Descriptor::kFPMode);
      InsertToStoreBufferAndGoto(isolate_constant, slot, fp_mode,
                                 &incremental_wb);
    }
  }

  BIND(&incremental_wb);
  {
    Label call_incremental_wb(this);

    Node* slot = Parameter(Descriptor::kSlot);
    Node* value = Load(MachineType::Pointer(), slot);

    // There are two cases in which we need to call the incremental write
    // barrier.
    // 1) value_is_white
    GotoIf(IsWhite(value), &call_incremental_wb);

    // 2) is_compacting && value_in_EC && obj_isnt_skip
    // is_compacting = true when is_marking = true
    GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),
              &exit);

    Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
    Branch(
        IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
        &exit, &call_incremental_wb);

    BIND(&call_incremental_wb);
    {
      Node* function = ExternalConstant(
          ExternalReference::incremental_marking_record_write_function());
      Node* isolate_constant =
          ExternalConstant(ExternalReference::isolate_address(isolate()));
      Node* fp_mode = Parameter(Descriptor::kFPMode);
      Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
      CallCFunction3WithCallerSavedRegistersMode(
          MachineType::Int32(), MachineType::Pointer(), MachineType::Pointer(),
          MachineType::Pointer(), function, object, slot, isolate_constant,
          fp_mode, &exit);
    }
  }

  BIND(&exit);
  Return(TrueConstant());
}

class DeletePropertyBaseAssembler : public AccessorAssembler {
 public:
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
      : AccessorAssembler(state) {}

  void DeleteDictionaryProperty(TNode<Object> receiver,
                                TNode<NameDictionary> properties,
                                TNode<Name> name, TNode<Context> context,
                                Label* dont_delete, Label* notfound) {
    TVARIABLE(IntPtrT, var_name_index);
    Label dictionary_found(this, &var_name_index);
    NameDictionaryLookup<NameDictionary>(properties, name, &dictionary_found,
                                         &var_name_index, notfound);

    BIND(&dictionary_found);
    TNode<IntPtrT> key_index = var_name_index.value();
    TNode<Uint32T> details =
        LoadDetailsByKeyIndex<NameDictionary>(properties, key_index);
    GotoIf(IsSetWord32(details, PropertyDetails::kAttributesDontDeleteMask),
           dont_delete);
    // Overwrite the entry itself (see NameDictionary::SetEntry).
    TNode<HeapObject> filler = TheHoleConstant();
    DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kTheHoleValue));
    StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
    StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
                                         SKIP_WRITE_BARRIER);
    StoreDetailsByKeyIndex<NameDictionary>(properties, key_index,
                                           SmiConstant(0));

    // Update bookkeeping information (see NameDictionary::ElementRemoved).
    TNode<Smi> nof = GetNumberOfElements<NameDictionary>(properties);
    TNode<Smi> new_nof = SmiSub(nof, SmiConstant(1));
    SetNumberOfElements<NameDictionary>(properties, new_nof);
    TNode<Smi> num_deleted =
        GetNumberOfDeletedElements<NameDictionary>(properties);
    TNode<Smi> new_deleted = SmiAdd(num_deleted, SmiConstant(1));
    SetNumberOfDeletedElements<NameDictionary>(properties, new_deleted);

    // Shrink the dictionary if necessary (see NameDictionary::Shrink).
    Label shrinking_done(this);
    TNode<Smi> capacity = GetCapacity<NameDictionary>(properties);
    GotoIf(SmiGreaterThan(new_nof, SmiShr(capacity, 2)), &shrinking_done);
    GotoIf(SmiLessThan(new_nof, SmiConstant(16)), &shrinking_done);
    CallRuntime(Runtime::kShrinkPropertyDictionary, context, receiver);
    Goto(&shrinking_done);
    BIND(&shrinking_done);

    Return(TrueConstant());
  }
};

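// Editorial note: this implements the `delete` operator for the cases that
// can be handled in generated code: unique-name keys on dictionary-mode
// receivers are removed in place via DeleteDictionaryProperty above;
// integer indices, fast-mode properties and special receivers fall back to
// Runtime::kDeleteProperty. It returns true when the property was deleted
// or not found, and false for a non-configurable property in sloppy mode.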
TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
  TNode<Object> receiver = CAST(Parameter(Descriptor::kObject));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Smi> language_mode = CAST(Parameter(Descriptor::kLanguageMode));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));

  VARIABLE(var_index, MachineType::PointerRepresentation());
  VARIABLE(var_unique, MachineRepresentation::kTagged, key);
  Label if_index(this), if_unique_name(this), if_notunique(this),
      if_notfound(this), slow(this);

  GotoIf(TaggedIsSmi(receiver), &slow);
  TNode<Map> receiver_map = LoadMap(CAST(receiver));
  TNode<Int32T> instance_type = LoadMapInstanceType(receiver_map);
  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), &slow);
  TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
            &if_notunique);

  BIND(&if_index);
  {
    Comment("integer index");
    Goto(&slow);  // TODO(jkummerow): Implement more smarts here.
  }

  BIND(&if_unique_name);
  {
    Comment("key is unique name");
    TNode<Name> unique = CAST(var_unique.value());
    CheckForAssociatedProtector(unique, &slow);

    Label dictionary(this), dont_delete(this);
    GotoIf(IsDictionaryMap(receiver_map), &dictionary);

    // Fast properties need to clear recorded slots, which can only be done
    // in C++.
    Goto(&slow);

    BIND(&dictionary);
    {
      InvalidateValidityCellIfPrototype(receiver_map);

      TNode<NameDictionary> properties =
          CAST(LoadSlowProperties(CAST(receiver)));
      DeleteDictionaryProperty(receiver, properties, unique, context,
                               &dont_delete, &if_notfound);
    }

    BIND(&dont_delete);
    {
      STATIC_ASSERT(LanguageModeSize == 2);
      GotoIf(SmiNotEqual(language_mode, SmiConstant(LanguageMode::kSloppy)),
             &slow);
      Return(FalseConstant());
    }
  }

  BIND(&if_notunique);
  {
    // If the string was not found in the string table, then no object can
    // have a property with that name.
    TryInternalizeString(key, &if_index, &var_index, &if_unique_name,
                         &var_unique, &if_notfound, &slow);
  }

  BIND(&if_notfound);
  Return(TrueConstant());

  BIND(&slow);
  {
    TailCallRuntime(Runtime::kDeleteProperty, context, receiver, key,
                    language_mode);
  }
}

TF_BUILTIN(ForInEnumerate, CodeStubAssembler) {
  Node* receiver = Parameter(Descriptor::kReceiver);
  Node* context = Parameter(Descriptor::kContext);

  Label if_empty(this), if_runtime(this, Label::kDeferred);
  Node* receiver_map = CheckEnumCache(receiver, &if_empty, &if_runtime);
  Return(receiver_map);

  BIND(&if_empty);
  Return(EmptyFixedArrayConstant());

  BIND(&if_runtime);
  TailCallRuntime(Runtime::kForInEnumerate, context, receiver);
}

TF_BUILTIN(ForInFilter, CodeStubAssembler) {
  Node* key = Parameter(Descriptor::kKey);
  Node* object = Parameter(Descriptor::kObject);
  Node* context = Parameter(Descriptor::kContext);

  CSA_ASSERT(this, IsString(key));

  Label if_true(this), if_false(this);
  TNode<Oddball> result = HasProperty(context, object, key, kForInHasProperty);
  Branch(IsTrue(result), &if_true, &if_false);

  BIND(&if_true);
  Return(key);

  BIND(&if_false);
  Return(UndefinedConstant());
}

TF_BUILTIN(SameValue, CodeStubAssembler) {
  Node* lhs = Parameter(Descriptor::kLeft);
  Node* rhs = Parameter(Descriptor::kRight);

  Label if_true(this), if_false(this);
  BranchIfSameValue(lhs, rhs, &if_true, &if_false);

  BIND(&if_true);
  Return(TrueConstant());

  BIND(&if_false);
  Return(FalseConstant());
}

class InternalBuiltinsAssembler : public CodeStubAssembler {
 public:
  explicit InternalBuiltinsAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  template <typename Descriptor>
  void GenerateAdaptorWithExitFrameType(
      Builtins::ExitFrameType exit_frame_type);
};

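// Editorial note: this generates an adaptor that tail-calls into a C++
// builtin through CEntry. Besides the register arguments, it pushes argc,
// target and new target as extra stack arguments so that stack frame
// iterators can reconstruct the call when building stack traces;
// {exit_frame_type} controls whether CEntry marks the frame as a
// BUILTIN_EXIT frame.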
template <typename Descriptor>
void InternalBuiltinsAssembler::GenerateAdaptorWithExitFrameType(
    Builtins::ExitFrameType exit_frame_type) {
  TNode<JSFunction> target = CAST(Parameter(Descriptor::kTarget));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
  TNode<WordT> c_function =
      UncheckedCast<WordT>(Parameter(Descriptor::kCFunction));

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the
  // caller instead of the callee, due to the way that [[Construct]] is
  // defined for ordinary functions).
  TNode<Context> context =
      CAST(LoadObjectField(target, JSFunction::kContextOffset));

  // Update arguments count for CEntry to contain the number of arguments
  // including the receiver and the extra arguments.
  TNode<Int32T> argc =
      UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
  argc = Int32Add(
      argc,
      Int32Constant(BuiltinExitFrameConstants::kNumExtraArgsWithReceiver));

  TNode<Code> code = HeapConstant(
      CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
                          exit_frame_type == Builtins::BUILTIN_EXIT));

  // Unconditionally push argc, target and new target as extra stack
  // arguments. They will be used by stack frame iterators when constructing
  // the stack trace.
  TailCallStub(CEntry1ArgvOnStackDescriptor{},  // descriptor
               code, context,       // standard arguments for TailCallStub
               argc, c_function,    // register arguments
               TheHoleConstant(),   // additional stack argument 1 (padding)
               SmiFromInt32(argc),  // additional stack argument 2
               target,              // additional stack argument 3
               new_target);         // additional stack argument 4
}

TF_BUILTIN(AdaptorWithExitFrame, InternalBuiltinsAssembler) {
  GenerateAdaptorWithExitFrameType<Descriptor>(Builtins::EXIT);
}

TF_BUILTIN(AdaptorWithBuiltinExitFrame, InternalBuiltinsAssembler) {
  GenerateAdaptorWithExitFrameType<Descriptor>(Builtins::BUILTIN_EXIT);
}

TF_BUILTIN(AllocateInNewSpace, CodeStubAssembler) {
  TNode<IntPtrT> requested_size =
      UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));

  TailCallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
                  SmiFromIntPtr(requested_size));
}

TF_BUILTIN(AllocateInOldSpace, CodeStubAssembler) {
  TNode<IntPtrT> requested_size =
      UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));

  int flags = AllocateTargetSpace::encode(OLD_SPACE);
  TailCallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                  SmiFromIntPtr(requested_size), SmiConstant(flags));
}

TF_BUILTIN(Abort, CodeStubAssembler) {
  TNode<Smi> message_id = CAST(Parameter(Descriptor::kMessageOrMessageId));
  TailCallRuntime(Runtime::kAbort, NoContextConstant(), message_id);
}

TF_BUILTIN(AbortJS, CodeStubAssembler) {
  TNode<String> message = CAST(Parameter(Descriptor::kMessageOrMessageId));
  TailCallRuntime(Runtime::kAbortJS, NoContextConstant(), message);
}

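// Editorial note: the CEntry variants below simply instantiate
// Generate_CEntry with the configuration encoded in their names: the number
// of returned values (Return1/Return2), whether FP registers are saved
// across the call, how argv is passed (on the stack or in a register), and
// whether a builtin exit frame is used.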
void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, true);
}

void Builtins::
    Generate_CEntry_Return1_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvInRegister, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, true);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, true);
}

void Builtins::
    Generate_CEntry_Return2_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvInRegister, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, true);
}

void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  // CallApiGetterStub only exists as a stub to avoid duplicating code between
  // here and code-stubs-<arch>.cc. For example, see CallApiFunctionAndReturn.
  // Here we abuse the instantiated stub to generate code.
  CallApiGetterStub stub(masm->isolate());
  stub.Generate(masm);
}

void Builtins::Generate_CallApiCallback_Argc0(MacroAssembler* masm) {
  // The common variants of CallApiCallbackStub (i.e. all that are embedded
  // into the snapshot) are generated as builtins. The rest remain available
  // as code stubs. Here we abuse the instantiated stub to generate code and
  // avoid duplication.
  const int kArgc = 0;
  CallApiCallbackStub stub(masm->isolate(), kArgc);
  stub.Generate(masm);
}

void Builtins::Generate_CallApiCallback_Argc1(MacroAssembler* masm) {
  // The common variants of CallApiCallbackStub (i.e. all that are embedded
  // into the snapshot) are generated as builtins. The rest remain available
  // as code stubs. Here we abuse the instantiated stub to generate code and
  // avoid duplication.
  const int kArgc = 1;
  CallApiCallbackStub stub(masm->isolate(), kArgc);
  stub.Generate(masm);
}

// ES6 [[Get]] operation.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);
  Label if_notfound(this), if_proxy(this, Label::kDeferred),
      if_slow(this, Label::kDeferred);

  CodeStubAssembler::LookupInHolder lookup_property_in_holder =
      [=](Node* receiver, Node* holder, Node* holder_map,
          Node* holder_instance_type, Node* unique_name, Label* next_holder,
          Label* if_bailout) {
        VARIABLE(var_value, MachineRepresentation::kTagged);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, holder, holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        Return(var_value.value());
      };

  CodeStubAssembler::LookupInHolder lookup_element_in_holder =
      [=](Node* receiver, Node* holder, Node* holder_map,
          Node* holder_instance_type, Node* index, Label* next_holder,
          Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  TryPrototypeChainLookup(object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &if_notfound, &if_slow,
                          &if_proxy);

  BIND(&if_notfound);
  Return(UndefinedConstant());

  BIND(&if_slow);
  TailCallRuntime(Runtime::kGetProperty, context, object, key);

  BIND(&if_proxy);
  {
    // Convert the {key} to a Name first.
    Node* name = CallBuiltin(Builtins::kToName, context, key);

    // The {object} is a JSProxy instance, look up the {name} on it, passing
    // {object} both as receiver and holder. If {name} is absent we can safely
    // return undefined from here.
    TailCallBuiltin(Builtins::kProxyGetProperty, context, object, name, object,
                    SmiConstant(OnNonExistent::kReturnUndefined));
  }
}

// ES6 [[Set]] operation.
TF_BUILTIN(SetProperty, CodeStubAssembler) {
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> receiver = CAST(Parameter(Descriptor::kReceiver));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Object> value = CAST(Parameter(Descriptor::kValue));

  KeyedStoreGenericGenerator::SetProperty(state(), context, receiver, key,
                                          value, LanguageMode::kStrict);
}

// ES6 CreateDataProperty(), specialized for the case where objects are still
// being initialized, and have not yet been made accessible to the user. Thus,
// any operation here should be unobservable until after the object has been
// returned.
TF_BUILTIN(SetPropertyInLiteral, CodeStubAssembler) {
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<JSObject> receiver = CAST(Parameter(Descriptor::kReceiver));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Object> value = CAST(Parameter(Descriptor::kValue));

  KeyedStoreGenericGenerator::SetPropertyInLiteral(state(), context, receiver,
                                                   key, value);
}

}  // namespace internal
}  // namespace v8