// V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
// builtins-constructor-gen.cc
1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/builtins/builtins-constructor-gen.h"
6 
7 #include "src/ast/ast.h"
8 #include "src/builtins/builtins-call-gen.h"
9 #include "src/builtins/builtins-constructor.h"
10 #include "src/builtins/builtins-utils-gen.h"
11 #include "src/builtins/builtins.h"
12 #include "src/code-factory.h"
13 #include "src/code-stub-assembler.h"
14 #include "src/counters.h"
15 #include "src/interface-descriptors.h"
16 #include "src/macro-assembler.h"
17 #include "src/objects-inl.h"
18 
19 namespace v8 {
20 namespace internal {
21 
22 void Builtins::Generate_ConstructVarargs(MacroAssembler* masm) {
23  Generate_CallOrConstructVarargs(masm,
24  BUILTIN_CODE(masm->isolate(), Construct));
25 }
26 
27 void Builtins::Generate_ConstructForwardVarargs(MacroAssembler* masm) {
28  Generate_CallOrConstructForwardVarargs(
29  masm, CallOrConstructMode::kConstruct,
30  BUILTIN_CODE(masm->isolate(), Construct));
31 }
32 
33 void Builtins::Generate_ConstructFunctionForwardVarargs(MacroAssembler* masm) {
34  Generate_CallOrConstructForwardVarargs(
35  masm, CallOrConstructMode::kConstruct,
36  BUILTIN_CODE(masm->isolate(), ConstructFunction));
37 }
38 
39 TF_BUILTIN(ConstructWithArrayLike, CallOrConstructBuiltinsAssembler) {
40  TNode<Object> target = CAST(Parameter(Descriptor::kTarget));
41  SloppyTNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
42  TNode<Object> arguments_list = CAST(Parameter(Descriptor::kArgumentsList));
43  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
44  CallOrConstructWithArrayLike(target, new_target, arguments_list, context);
45 }
46 
47 TF_BUILTIN(ConstructWithSpread, CallOrConstructBuiltinsAssembler) {
48  TNode<Object> target = CAST(Parameter(Descriptor::kTarget));
49  SloppyTNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
50  TNode<Object> spread = CAST(Parameter(Descriptor::kSpread));
51  TNode<Int32T> args_count =
52  UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
53  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
54  CallOrConstructWithSpread(target, new_target, spread, args_count, context);
55 }
56 
57 typedef compiler::Node Node;
58 
TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
  // Allocates and initializes a JSFunction (closure) for the given
  // SharedFunctionInfo/FeedbackCell pair without calling into the runtime.
  Node* shared_function_info = Parameter(Descriptor::kSharedFunctionInfo);
  Node* feedback_cell = Parameter(Descriptor::kFeedbackCell);
  Node* context = Parameter(Descriptor::kContext);

  CSA_ASSERT(this, IsFeedbackCell(feedback_cell));
  CSA_ASSERT(this, IsSharedFunctionInfo(shared_function_info));

  IncrementCounter(isolate()->counters()->fast_new_closure_total(), 1);

  // Bump the closure counter encoded in the {feedback_cell}s map:
  // NoClosures -> OneClosure -> ManyClosures (saturating at ManyClosures).
  // A NoFeedbackCell map is left untouched.
  {
    Node* const feedback_cell_map = LoadMap(feedback_cell);
    Label no_closures(this), one_closure(this), cell_done(this);

    GotoIf(IsNoFeedbackCellMap(feedback_cell_map), &cell_done);
    GotoIf(IsNoClosuresCellMap(feedback_cell_map), &no_closures);
    GotoIf(IsOneClosureCellMap(feedback_cell_map), &one_closure);
    CSA_ASSERT(this, IsManyClosuresCellMap(feedback_cell_map),
               feedback_cell_map, feedback_cell);
    Goto(&cell_done);

    BIND(&no_closures);
    StoreMapNoWriteBarrier(feedback_cell, RootIndex::kOneClosureCellMap);
    Goto(&cell_done);

    BIND(&one_closure);
    StoreMapNoWriteBarrier(feedback_cell, RootIndex::kManyClosuresCellMap);
    Goto(&cell_done);

    BIND(&cell_done);
  }

  // The calculation of |function_map_index| must be in sync with
  // SharedFunctionInfo::function_map_index().
  Node* const flags =
      LoadObjectField(shared_function_info, SharedFunctionInfo::kFlagsOffset,
                      MachineType::Uint32());
  Node* const function_map_index = IntPtrAdd(
      DecodeWordFromWord32<SharedFunctionInfo::FunctionMapIndexBits>(flags),
      IntPtrConstant(Context::FIRST_FUNCTION_MAP_INDEX));
  CSA_ASSERT(this, UintPtrLessThanOrEqual(
                       function_map_index,
                       IntPtrConstant(Context::LAST_FUNCTION_MAP_INDEX)));

  // Get the function map in the current native context and set that
  // as the map of the allocated object.
  Node* const native_context = LoadNativeContext(context);
  Node* const function_map =
      LoadContextElement(native_context, function_map_index);

  // Create a new closure from the given function info in new space
  TNode<IntPtrT> instance_size_in_bytes =
      TimesPointerSize(LoadMapInstanceSizeInWords(function_map));
  TNode<Object> result = Allocate(instance_size_in_bytes);
  StoreMapNoWriteBarrier(result, function_map);
  InitializeJSObjectBodyNoSlackTracking(result, function_map,
                                        instance_size_in_bytes,
                                        JSFunction::kSizeWithoutPrototype);

  // Initialize the rest of the function.
  StoreObjectFieldRoot(result, JSObject::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSObject::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  {
    // Set function prototype if necessary: maps with a prototype slot get it
    // initialized to the hole.
    Label done(this), init_prototype(this);
    Branch(IsFunctionWithPrototypeSlotMap(function_map), &init_prototype,
           &done);

    BIND(&init_prototype);
    StoreObjectFieldRoot(result, JSFunction::kPrototypeOrInitialMapOffset,
                         RootIndex::kTheHoleValue);
    Goto(&done);
    BIND(&done);
  }

  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kPointerSize);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kFeedbackCellOffset,
                                 feedback_cell);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
                                 shared_function_info);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
  // New closures start out lazily compiled: install CompileLazy as the code
  // entry so actual compilation happens on first invocation.
  Handle<Code> lazy_builtin_handle(
      isolate()->builtins()->builtin(Builtins::kCompileLazy), isolate());
  Node* lazy_builtin = HeapConstant(lazy_builtin_handle);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeOffset, lazy_builtin);
  Return(result);
}
149 
150 TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
151  Node* context = Parameter(Descriptor::kContext);
152  Node* target = Parameter(Descriptor::kTarget);
153  Node* new_target = Parameter(Descriptor::kNewTarget);
154 
155  Label call_runtime(this);
156 
157  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
158  Return(result);
159 
160  BIND(&call_runtime);
161  TailCallRuntime(Runtime::kNewObject, context, target, new_target);
162 }
163 
164 Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
165  Node* target,
166  Node* new_target) {
167  VARIABLE(var_obj, MachineRepresentation::kTagged);
168  Label call_runtime(this), end(this);
169 
170  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
171  var_obj.Bind(result);
172  Goto(&end);
173 
174  BIND(&call_runtime);
175  var_obj.Bind(CallRuntime(Runtime::kNewObject, context, target, new_target));
176  Goto(&end);
177 
178  BIND(&end);
179  return var_obj.value();
180 }
181 
182 Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
183  Node* target,
184  Node* new_target,
185  Label* call_runtime) {
186  CSA_ASSERT(this, HasInstanceType(target, JS_FUNCTION_TYPE));
187  CSA_ASSERT(this, IsJSReceiver(new_target));
188 
189  // Verify that the new target is a JSFunction.
190  Label fast(this), end(this);
191  GotoIf(HasInstanceType(new_target, JS_FUNCTION_TYPE), &fast);
192  Goto(call_runtime);
193 
194  BIND(&fast);
195 
196  // Load the initial map and verify that it's in fact a map.
197  Node* initial_map =
198  LoadObjectField(new_target, JSFunction::kPrototypeOrInitialMapOffset);
199  GotoIf(TaggedIsSmi(initial_map), call_runtime);
200  GotoIf(DoesntHaveInstanceType(initial_map, MAP_TYPE), call_runtime);
201 
202  // Fall back to runtime if the target differs from the new target's
203  // initial map constructor.
204  Node* new_target_constructor =
205  LoadObjectField(initial_map, Map::kConstructorOrBackPointerOffset);
206  GotoIf(WordNotEqual(target, new_target_constructor), call_runtime);
207 
208  VARIABLE(properties, MachineRepresentation::kTagged);
209 
210  Label instantiate_map(this), allocate_properties(this);
211  GotoIf(IsDictionaryMap(initial_map), &allocate_properties);
212  {
213  properties.Bind(EmptyFixedArrayConstant());
214  Goto(&instantiate_map);
215  }
216  BIND(&allocate_properties);
217  {
218  properties.Bind(AllocateNameDictionary(NameDictionary::kInitialCapacity));
219  Goto(&instantiate_map);
220  }
221 
222  BIND(&instantiate_map);
223  return AllocateJSObjectFromMap(initial_map, properties.value(), nullptr,
224  kNone, kWithSlackTracking);
225 }
226 
Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
    Node* scope_info, Node* slots_uint32, Node* context, ScopeType scope_type) {
  // Allocates and initializes a function or eval Context in new space with
  // {slots_uint32} user slots, chained to the given outer {context}.
  TNode<IntPtrT> slots = Signed(ChangeUint32ToWord(slots_uint32));
  TNode<IntPtrT> size = ElementOffsetFromIndex(
      slots, PACKED_ELEMENTS, INTPTR_PARAMETERS, Context::kTodoHeaderSize);

  // Allocate the context object in new space.
  TNode<Context> function_context =
      UncheckedCast<Context>(AllocateInNewSpace(size));

  // Pick the context map for the requested scope type; only eval and
  // function scopes are supported by this fast path.
  RootIndex context_type;
  switch (scope_type) {
    case EVAL_SCOPE:
      context_type = RootIndex::kEvalContextMap;
      break;
    case FUNCTION_SCOPE:
      context_type = RootIndex::kFunctionContextMap;
      break;
    default:
      UNREACHABLE();
  }
  // Set up the header.
  StoreMapNoWriteBarrier(function_context, context_type);
  TNode<IntPtrT> min_context_slots = IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
  // TODO(ishell): for now, length also includes MIN_CONTEXT_SLOTS.
  TNode<IntPtrT> length = IntPtrAdd(slots, min_context_slots);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kLengthOffset,
                                 SmiTag(length));
  StoreObjectFieldNoWriteBarrier(function_context, Context::kScopeInfoOffset,
                                 scope_info);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kPreviousOffset,
                                 context);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kExtensionOffset,
                                 TheHoleConstant());
  TNode<Context> native_context = LoadNativeContext(context);
  StoreObjectFieldNoWriteBarrier(function_context,
                                 Context::kNativeContextOffset, native_context);

  // Initialize the rest of the slots to undefined.
  TNode<HeapObject> undefined = UndefinedConstant();
  TNode<IntPtrT> start_offset = IntPtrConstant(Context::kTodoHeaderSize);
  CodeStubAssembler::VariableList vars(0, zone());
  BuildFastLoop(
      vars, start_offset, size,
      [=](Node* offset) {
        StoreObjectFieldNoWriteBarrier(
            function_context, UncheckedCast<IntPtrT>(offset), undefined);
      },
      kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
  return function_context;
}
278 
279 TF_BUILTIN(FastNewFunctionContextEval, ConstructorBuiltinsAssembler) {
280  Node* scope_info = Parameter(Descriptor::kScopeInfo);
281  Node* slots = Parameter(Descriptor::kSlots);
282  Node* context = Parameter(Descriptor::kContext);
283  Return(EmitFastNewFunctionContext(scope_info, slots, context,
284  ScopeType::EVAL_SCOPE));
285 }
286 
287 TF_BUILTIN(FastNewFunctionContextFunction, ConstructorBuiltinsAssembler) {
288  Node* scope_info = Parameter(Descriptor::kScopeInfo);
289  Node* slots = Parameter(Descriptor::kSlots);
290  Node* context = Parameter(Descriptor::kContext);
291  Return(EmitFastNewFunctionContext(scope_info, slots, context,
292  ScopeType::FUNCTION_SCOPE));
293 }
294 
Node* ConstructorBuiltinsAssembler::EmitCreateRegExpLiteral(
    Node* feedback_vector, Node* slot, Node* pattern, Node* flags,
    Node* context) {
  // Creates a JSRegExp literal. If the feedback slot already holds a
  // boilerplate regexp, a shallow field-by-field copy is made; otherwise the
  // %CreateRegExpLiteral runtime function is invoked.
  Label call_runtime(this, Label::kDeferred), end(this);

  VARIABLE(result, MachineRepresentation::kTagged);
  TNode<Object> literal_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
  GotoIf(NotHasBoilerplate(literal_site), &call_runtime);
  {
    Node* boilerplate = literal_site;
    CSA_ASSERT(this, IsJSRegExp(boilerplate));
    // JSRegExp instances have a statically known size, so the copy loop can
    // be fully unrolled at builtin-generation time.
    int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    Node* copy = Allocate(size);
    for (int offset = 0; offset < size; offset += kPointerSize) {
      Node* value = LoadObjectField(boilerplate, offset);
      StoreObjectFieldNoWriteBarrier(copy, offset, value);
    }
    result.Bind(copy);
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    result.Bind(CallRuntime(Runtime::kCreateRegExpLiteral, context,
                            feedback_vector, SmiTag(slot), pattern, flags));
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}
327 
328 TF_BUILTIN(CreateRegExpLiteral, ConstructorBuiltinsAssembler) {
329  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
330  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
331  Node* pattern = Parameter(Descriptor::kPattern);
332  Node* flags = Parameter(Descriptor::kFlags);
333  Node* context = Parameter(Descriptor::kContext);
334  Node* result =
335  EmitCreateRegExpLiteral(feedback_vector, slot, pattern, flags, context);
336  Return(result);
337 }
338 
339 Node* ConstructorBuiltinsAssembler::EmitCreateShallowArrayLiteral(
340  Node* feedback_vector, Node* slot, Node* context, Label* call_runtime,
341  AllocationSiteMode allocation_site_mode) {
342  Label zero_capacity(this), cow_elements(this), fast_elements(this),
343  return_result(this);
344  VARIABLE(result, MachineRepresentation::kTagged);
345 
346  TNode<Object> maybe_allocation_site =
347  CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
348  GotoIf(NotHasBoilerplate(maybe_allocation_site), call_runtime);
349 
350  TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
351  TNode<JSArray> boilerplate = CAST(LoadBoilerplate(allocation_site));
352 
353  ParameterMode mode = OptimalParameterMode();
354  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
355  return CloneFastJSArray(context, boilerplate, mode, allocation_site);
356  } else {
357  return CloneFastJSArray(context, boilerplate, mode);
358  }
359 }
360 
361 TF_BUILTIN(CreateShallowArrayLiteral, ConstructorBuiltinsAssembler) {
362  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
363  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
364  Node* constant_elements = Parameter(Descriptor::kConstantElements);
365  Node* context = Parameter(Descriptor::kContext);
366  Label call_runtime(this, Label::kDeferred);
367  Return(EmitCreateShallowArrayLiteral(feedback_vector, slot, context,
368  &call_runtime,
369  DONT_TRACK_ALLOCATION_SITE));
370 
371  BIND(&call_runtime);
372  {
373  Comment("call runtime");
374  int const flags =
375  AggregateLiteral::kDisableMementos | AggregateLiteral::kIsShallow;
376  Return(CallRuntime(Runtime::kCreateArrayLiteral, context, feedback_vector,
377  SmiTag(slot), constant_elements, SmiConstant(flags)));
378  }
379 }
380 
Node* ConstructorBuiltinsAssembler::EmitCreateEmptyArrayLiteral(
    Node* feedback_vector, Node* slot, Node* context) {
  // Array literals always have a valid AllocationSite to properly track
  // elements transitions.
  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
  TVARIABLE(AllocationSite, allocation_site);

  Label create_empty_array(this),
      initialize_allocation_site(this, Label::kDeferred), done(this);
  // A Smi in the slot means no AllocationSite has been created yet.
  GotoIf(TaggedIsSmi(maybe_allocation_site), &initialize_allocation_site);
  {
    allocation_site = CAST(maybe_allocation_site);
    Goto(&create_empty_array);
  }
  // TODO(cbruni): create the AllocationSite in CSA.
  BIND(&initialize_allocation_site);
  {
    allocation_site =
        CreateAllocationSiteInFeedbackVector(feedback_vector, SmiTag(slot));
    Goto(&create_empty_array);
  }

  BIND(&create_empty_array);
  // Allocate a zero-length JSArray using the elements kind recorded on the
  // allocation site, with the site attached for transition tracking.
  TNode<Int32T> kind = LoadElementsKind(allocation_site.value());
  TNode<Context> native_context = LoadNativeContext(context);
  Comment("LoadJSArrayElementsMap");
  TNode<Map> array_map = LoadJSArrayElementsMap(kind, native_context);
  TNode<Smi> zero = SmiConstant(0);
  Comment("Allocate JSArray");
  TNode<JSArray> result =
      AllocateJSArray(GetInitialFastElementsKind(), array_map, zero, zero,
                      allocation_site.value(), ParameterMode::SMI_PARAMETERS);

  Goto(&done);
  BIND(&done);

  return result;
}
420 
421 TF_BUILTIN(CreateEmptyArrayLiteral, ConstructorBuiltinsAssembler) {
422  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
423  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
424  Node* context = Parameter(Descriptor::kContext);
425  Node* result = EmitCreateEmptyArrayLiteral(feedback_vector, slot, context);
426  Return(result);
427 }
428 
Node* ConstructorBuiltinsAssembler::EmitCreateShallowObjectLiteral(
    Node* feedback_vector, Node* slot, Label* call_runtime) {
  // Creates a shallow copy of the object boilerplate cached in the feedback
  // slot. Jumps to {call_runtime} whenever the fast copy is not possible
  // (no boilerplate yet, deprecated map, out-of-object fast properties, ...).
  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
  GotoIf(NotHasBoilerplate(maybe_allocation_site), call_runtime);

  TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
  TNode<JSObject> boilerplate = LoadBoilerplate(allocation_site);
  TNode<Map> boilerplate_map = LoadMap(boilerplate);
  CSA_ASSERT(this, IsJSObjectMap(boilerplate_map));

  // Determine the property backing store for the copy.
  VARIABLE(var_properties, MachineRepresentation::kTagged);
  {
    Node* bit_field_3 = LoadMapBitField3(boilerplate_map);
    GotoIf(IsSetWord32<Map::IsDeprecatedBit>(bit_field_3), call_runtime);
    // Directly copy over the property store for dict-mode boilerplates.
    Label if_dictionary(this), if_fast(this), done(this);
    Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field_3), &if_dictionary,
           &if_fast);
    BIND(&if_dictionary);
    {
      Comment("Copy dictionary properties");
      var_properties.Bind(CopyNameDictionary(
          CAST(LoadSlowProperties(boilerplate)), call_runtime));
      // Slow objects have no in-object properties.
      Goto(&done);
    }
    BIND(&if_fast);
    {
      // TODO(cbruni): support copying out-of-object properties.
      Node* boilerplate_properties = LoadFastProperties(boilerplate);
      GotoIfNot(IsEmptyFixedArray(boilerplate_properties), call_runtime);
      var_properties.Bind(EmptyFixedArrayConstant());
      Goto(&done);
    }
    BIND(&done);
  }

  VARIABLE(var_elements, MachineRepresentation::kTagged);
  {
    // Copy the elements backing store, assuming that it's flat.
    Label if_empty_fixed_array(this), if_copy_elements(this), done(this);
    Node* boilerplate_elements = LoadElements(boilerplate);
    Branch(IsEmptyFixedArray(boilerplate_elements), &if_empty_fixed_array,
           &if_copy_elements);

    BIND(&if_empty_fixed_array);
    var_elements.Bind(boilerplate_elements);
    Goto(&done);

    BIND(&if_copy_elements);
    CSA_ASSERT(this, Word32BinaryNot(
                         IsFixedCOWArrayMap(LoadMap(boilerplate_elements))));
    ExtractFixedArrayFlags flags;
    flags |= ExtractFixedArrayFlag::kAllFixedArrays;
    flags |= ExtractFixedArrayFlag::kNewSpaceAllocationOnly;
    flags |= ExtractFixedArrayFlag::kDontCopyCOW;
    var_elements.Bind(CloneFixedArray(boilerplate_elements, flags));
    Goto(&done);
    BIND(&done);
  }

  // Ensure new-space allocation for a fresh JSObject so we can skip write
  // barriers when copying all object fields.
  STATIC_ASSERT(JSObject::kMaxInstanceSize < kMaxRegularHeapObjectSize);
  TNode<IntPtrT> instance_size =
      TimesPointerSize(LoadMapInstanceSizeInWords(boilerplate_map));
  TNode<IntPtrT> allocation_size = instance_size;
  bool needs_allocation_memento = FLAG_allocation_site_pretenuring;
  if (needs_allocation_memento) {
    // Prepare for inner-allocating the AllocationMemento.
    allocation_size =
        IntPtrAdd(instance_size, IntPtrConstant(AllocationMemento::kSize));
  }

  TNode<HeapObject> copy =
      UncheckedCast<HeapObject>(AllocateInNewSpace(allocation_size));
  {
    Comment("Initialize Literal Copy");
    // Initialize Object fields.
    StoreMapNoWriteBarrier(copy, boilerplate_map);
    StoreObjectFieldNoWriteBarrier(copy, JSObject::kPropertiesOrHashOffset,
                                   var_properties.value());
    StoreObjectFieldNoWriteBarrier(copy, JSObject::kElementsOffset,
                                   var_elements.value());
  }

  // Initialize the AllocationMemento before potential GCs due to heap number
  // allocation when copying the in-object properties.
  if (needs_allocation_memento) {
    InitializeAllocationMemento(copy, instance_size, allocation_site);
  }

  {
    // Copy over in-object properties.
    Label continue_with_write_barrier(this), done_init(this);
    TVARIABLE(IntPtrT, offset, IntPtrConstant(JSObject::kHeaderSize));
    // Mutable heap numbers only occur on 32-bit platforms.
    bool may_use_mutable_heap_numbers = !FLAG_unbox_double_fields;
    {
      Comment("Copy in-object properties fast");
      Label continue_fast(this, &offset);
      Branch(WordEqual(offset.value(), instance_size), &done_init,
             &continue_fast);
      BIND(&continue_fast);
      if (may_use_mutable_heap_numbers) {
        // Bail to the write-barrier path the first time a MutableHeapNumber
        // field is seen — it must not be shared with the boilerplate.
        TNode<Object> field = LoadObjectField(boilerplate, offset.value());
        Label store_field(this);
        GotoIf(TaggedIsSmi(field), &store_field);
        GotoIf(IsMutableHeapNumber(CAST(field)), &continue_with_write_barrier);
        Goto(&store_field);
        BIND(&store_field);
        StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
      } else {
        // Copy fields as raw data.
        TNode<IntPtrT> field =
            LoadObjectField<IntPtrT>(boilerplate, offset.value());
        StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
      }
      offset = IntPtrAdd(offset.value(), IntPtrConstant(kPointerSize));
      Branch(WordNotEqual(offset.value(), instance_size), &continue_fast,
             &done_init);
    }

    if (!may_use_mutable_heap_numbers) {
      BIND(&done_init);
      return copy;
    }
    // Continue initializing the literal after seeing the first sub-object
    // potentially causing allocation. In this case we prepare the new literal
    // by copying all pending fields over from the boilerplate and emit full
    // write barriers from here on.
    BIND(&continue_with_write_barrier);
    {
      Comment("Copy in-object properties slow");
      BuildFastLoop(offset.value(), instance_size,
                    [=](Node* offset) {
                      Node* field = LoadObjectField(boilerplate, offset);
                      StoreObjectFieldNoWriteBarrier(copy, offset, field);
                    },
                    kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
      Comment("Copy mutable HeapNumber values");
      BuildFastLoop(offset.value(), instance_size,
                    [=](Node* offset) {
                      Node* field = LoadObjectField(copy, offset);
                      Label copy_mutable_heap_number(this, Label::kDeferred),
                          continue_loop(this);
                      // We only have to clone complex field values.
                      GotoIf(TaggedIsSmi(field), &continue_loop);
                      Branch(IsMutableHeapNumber(field),
                             &copy_mutable_heap_number, &continue_loop);
                      BIND(&copy_mutable_heap_number);
                      {
                        Node* double_value = LoadHeapNumberValue(field);
                        Node* mutable_heap_number =
                            AllocateMutableHeapNumberWithValue(double_value);
                        StoreObjectField(copy, offset, mutable_heap_number);
                        Goto(&continue_loop);
                      }
                      BIND(&continue_loop);
                    },
                    kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
      Goto(&done_init);
    }
    BIND(&done_init);
  }
  return copy;
}
597 
598 TF_BUILTIN(CreateShallowObjectLiteral, ConstructorBuiltinsAssembler) {
599  Label call_runtime(this);
600  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
601  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
602  Node* copy =
603  EmitCreateShallowObjectLiteral(feedback_vector, slot, &call_runtime);
604  Return(copy);
605 
606  BIND(&call_runtime);
607  Node* object_boilerplate_description =
608  Parameter(Descriptor::kObjectBoilerplateDescription);
609  Node* flags = Parameter(Descriptor::kFlags);
610  Node* context = Parameter(Descriptor::kContext);
611  TailCallRuntime(Runtime::kCreateObjectLiteral, context, feedback_vector,
612  SmiTag(slot), object_boilerplate_description, flags);
613 }
614 
615 // Used by the CreateEmptyObjectLiteral bytecode and the Object constructor.
616 Node* ConstructorBuiltinsAssembler::EmitCreateEmptyObjectLiteral(
617  Node* context) {
618  Node* native_context = LoadNativeContext(context);
619  Node* object_function =
620  LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX);
621  Node* map = LoadObjectField(object_function,
622  JSFunction::kPrototypeOrInitialMapOffset);
623  CSA_ASSERT(this, IsMap(map));
624  // Ensure that slack tracking is disabled for the map.
625  STATIC_ASSERT(Map::kNoSlackTracking == 0);
626  CSA_ASSERT(
627  this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
628  Node* empty_fixed_array = EmptyFixedArrayConstant();
629  Node* result =
630  AllocateJSObjectFromMap(map, empty_fixed_array, empty_fixed_array);
631  return result;
632 }
633 
// ES #sec-object-constructor
TF_BUILTIN(ObjectConstructor, ConstructorBuiltinsAssembler) {
  // Index of the optional |value| argument.
  int const kValueArg = 0;
  Node* argc =
      ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
  CodeStubArguments args(this, argc);
  Node* context = Parameter(Descriptor::kContext);
  Node* new_target = Parameter(Descriptor::kJSNewTarget);

  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label if_subclass(this, Label::kDeferred), if_notsubclass(this),
      return_result(this);
  // A [[Call]] invocation (new.target undefined) takes the non-subclass path.
  GotoIf(IsUndefined(new_target), &if_notsubclass);
  TNode<JSFunction> target = CAST(Parameter(Descriptor::kJSTarget));
  Branch(WordEqual(new_target, target), &if_notsubclass, &if_subclass);

  BIND(&if_subclass);
  {
    // new.target differs from Object itself: allocate via FastNewObject so
    // the instance gets new.target's prototype.
    Node* result =
        CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_notsubclass);
  {
    Label if_newobject(this, Label::kDeferred), if_toobject(this);

    // With no argument, or a null/undefined argument, create a fresh empty
    // object; otherwise return ToObject(value).
    Node* value_index = IntPtrConstant(kValueArg);
    GotoIf(UintPtrGreaterThanOrEqual(value_index, argc), &if_newobject);
    Node* value = args.AtIndex(value_index);
    GotoIf(IsNull(value), &if_newobject);
    Branch(IsUndefined(value), &if_newobject, &if_toobject);

    BIND(&if_newobject);
    {
      Node* result = EmitCreateEmptyObjectLiteral(context);
      var_result.Bind(result);
      Goto(&return_result);
    }

    BIND(&if_toobject);
    {
      Node* result = CallBuiltin(Builtins::kToObject, context, value);
      var_result.Bind(result);
      Goto(&return_result);
    }
  }

  BIND(&return_result);
  args.PopAndReturn(var_result.value());
}
686 
// ES #sec-number-constructor
// Implements the Number constructor for both call and construct invocations.
TF_BUILTIN(NumberConstructor, ConstructorBuiltinsAssembler) {
  Node* context = Parameter(Descriptor::kContext);
  Node* argc =
      ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
  CodeStubArguments args(this, argc);

  // 1. If no arguments were passed to this function invocation, let n be +0.
  VARIABLE(var_n, MachineRepresentation::kTagged, SmiConstant(0));
  Label if_nloaded(this, &var_n);
  GotoIf(WordEqual(argc, IntPtrConstant(0)), &if_nloaded);

  // 2. Else,
  //    a. Let prim be ? ToNumeric(value).
  //    b. If Type(prim) is BigInt, let n be the Number value for prim.
  //    c. Otherwise, let n be prim.
  Node* value = args.AtIndex(0);
  var_n.Bind(ToNumber(context, value, BigIntHandling::kConvertToNumber));
  Goto(&if_nloaded);

  BIND(&if_nloaded);
  {
    // 3. If NewTarget is undefined, return n.
    Node* n_value = var_n.value();
    Node* new_target = Parameter(Descriptor::kJSNewTarget);
    Label return_n(this), constructnumber(this, Label::kDeferred);
    Branch(IsUndefined(new_target), &return_n, &constructnumber);

    BIND(&return_n);
    { args.PopAndReturn(n_value); }

    BIND(&constructnumber);
    {
      // 4. Let O be ? OrdinaryCreateFromConstructor(NewTarget,
      //    "%NumberPrototype%", « [[NumberData]] »).
      // 5. Set O.[[NumberData]] to n.
      // 6. Return O.

      // We are not using Parameter(Descriptor::kJSTarget) and loading the
      // value from the current frame here in order to reduce register
      // pressure on the fast path.
      TNode<JSFunction> target = LoadTargetFromFrame();
      Node* result =
          CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
      StoreObjectField(result, JSValue::kValueOffset, n_value);
      args.PopAndReturn(result);
    }
  }
}
736 
737 TF_BUILTIN(GenericConstructorLazyDeoptContinuation,
738  ConstructorBuiltinsAssembler) {
739  Node* result = Parameter(Descriptor::kResult);
740  Return(result);
741 }
742 
// https://tc39.github.io/ecma262/#sec-string-constructor
// Implements the String constructor for both call and construct invocations.
TF_BUILTIN(StringConstructor, ConstructorBuiltinsAssembler) {
  Node* context = Parameter(Descriptor::kContext);
  Node* argc =
      ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
  CodeStubArguments args(this, argc);

  TNode<Object> new_target = CAST(Parameter(Descriptor::kJSNewTarget));

  // 1. If no arguments were passed to this function invocation, let s be "".
  VARIABLE(var_s, MachineRepresentation::kTagged, EmptyStringConstant());
  Label if_sloaded(this, &var_s);
  GotoIf(WordEqual(argc, IntPtrConstant(0)), &if_sloaded);

  // 2. Else,
  //    a. If NewTarget is undefined [...]
  Node* value = args.AtIndex(0);
  Label if_tostring(this, &var_s);
  GotoIfNot(IsUndefined(new_target), &if_tostring);

  // 2a. [...] and Type(value) is Symbol, return SymbolDescriptiveString(value).
  GotoIf(TaggedIsSmi(value), &if_tostring);
  GotoIfNot(IsSymbol(value), &if_tostring);
  {
    Node* result =
        CallRuntime(Runtime::kSymbolDescriptiveString, context, value);
    args.PopAndReturn(result);
  }

  // 2b. Let s be ? ToString(value).
  BIND(&if_tostring);
  {
    var_s.Bind(CallBuiltin(Builtins::kToString, context, value));
    Goto(&if_sloaded);
  }

  // 3. If NewTarget is undefined, return s.
  BIND(&if_sloaded);
  {
    Node* s_value = var_s.value();
    Label return_s(this), constructstring(this, Label::kDeferred);
    Branch(IsUndefined(new_target), &return_s, &constructstring);

    BIND(&return_s);
    { args.PopAndReturn(s_value); }

    BIND(&constructstring);
    {
      // Construct case: allocate a String wrapper object and store s into its
      // value slot.
      // We are not using Parameter(Descriptor::kJSTarget) and loading the
      // value from the current frame here in order to reduce register
      // pressure on the fast path.
      TNode<JSFunction> target = LoadTargetFromFrame();

      Node* result =
          CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
      StoreObjectField(result, JSValue::kValueOffset, s_value);
      args.PopAndReturn(result);
    }
  }
}
803 
804 } // namespace internal
805 } // namespace v8
// Definition: libplatform.h:13